From e9e17e5df34051bce60232890ea042582af31f8c Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Tue, 13 Oct 2020 00:27:51 -0500 Subject: [PATCH 001/174] Upgrade Earmark to v1.4.10 --- lib/pleroma/earmark_renderer.ex | 256 ------------------ lib/pleroma/formatter.ex | 8 + .../audio_video_validator.ex | 3 +- lib/pleroma/web/common_api/utils.ex | 3 +- mix.exs | 2 +- mix.lock | 2 +- test/pleroma/formatter_test.exs | 7 + test/pleroma/web/common_api/utils_test.exs | 75 +++++ 8 files changed, 95 insertions(+), 261 deletions(-) delete mode 100644 lib/pleroma/earmark_renderer.ex diff --git a/lib/pleroma/earmark_renderer.ex b/lib/pleroma/earmark_renderer.ex deleted file mode 100644 index 6211a3b4a..000000000 --- a/lib/pleroma/earmark_renderer.ex +++ /dev/null @@ -1,256 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2020 Pleroma Authors <https://pleroma.social/> -# SPDX-License-Identifier: AGPL-3.0-only -# -# This file is derived from Earmark, under the following copyright: -# Copyright © 2014 Dave Thomas, The Pragmatic Programmers -# SPDX-License-Identifier: Apache-2.0 -# Upstream: https://github.com/pragdave/earmark/blob/master/lib/earmark/html_renderer.ex -defmodule Pleroma.EarmarkRenderer do - @moduledoc false - - alias Earmark.Block - alias Earmark.Context - alias Earmark.HtmlRenderer - alias Earmark.Options - - import Earmark.Inline, only: [convert: 3] - import Earmark.Helpers.HtmlHelpers - import Earmark.Message, only: [add_messages_from: 2, get_messages: 1, set_messages: 2] - import Earmark.Context, only: [append: 2, set_value: 2] - import Earmark.Options, only: [get_mapper: 1] - - @doc false - def render(blocks, %Context{options: %Options{}} = context) do - messages = get_messages(context) - - {contexts, html} = - get_mapper(context.options).( - blocks, - &render_block(&1, put_in(context.options.messages, [])) - ) - |> Enum.unzip() - - all_messages = - contexts - |> Enum.reduce(messages, fn ctx, messages1 -> messages1 ++ get_messages(ctx) end) - - {put_in(context.options.messages, all_messages), html |> IO.iodata_to_binary()} - end - - ############# - # Paragraph # - ############# - defp render_block(%Block.Para{lnb: lnb, lines: lines, attrs: attrs}, context) do - lines = convert(lines, lnb, context) - add_attrs(lines, "<p>#{lines.value}</p>", attrs, [], lnb) - end - - ######## - # Html # - ######## - defp render_block(%Block.Html{html: html}, context) do - {context, html} - end - - defp render_block(%Block.HtmlComment{lines: lines}, context) do - {context, lines} - end - - defp render_block(%Block.HtmlOneline{html: html}, context) do - {context, html} - end - - ######### - # Ruler # - ######### - defp render_block(%Block.Ruler{lnb: lnb, attrs: attrs}, context) do - add_attrs(context, "<hr />", attrs, [], lnb) - end - - ########### - # Heading # - ########### - defp render_block( - %Block.Heading{lnb: lnb, level: level, content: content, attrs: attrs}, - context - ) do - converted = convert(content, lnb, context) - html = "<h#{level}>#{converted.value}</h#{level}>" - add_attrs(converted, html, attrs, [], lnb) - end - - ############## - # Blockquote # - ############## - - defp render_block(%Block.BlockQuote{lnb: lnb, blocks: blocks, attrs: attrs}, context) do - {context1, body} = render(blocks, context) - html = "<blockquote>#{body}</blockquote>" - add_attrs(context1, html, attrs, [], lnb) - end - - ######### - # Table # - ######### - - defp render_block( - %Block.Table{lnb: lnb, header: header, rows: rows, alignments: aligns, attrs: attrs}, - 
context - ) do - {context1, html} = add_attrs(context, "<table>", attrs, [], lnb) - context2 = set_value(context1, html) - - context3 = - if header do - append(add_trs(append(context2, "<thead>"), [header], "th", aligns, lnb), "</thead>") - else - # Maybe an error, needed append(context, html) - context2 - end - - context4 = append(add_trs(append(context3, "<tbody>"), rows, "td", aligns, lnb), "</tbody>") - - {context4, [context4.value, "</table>"]} - end - - ######## - # Code # - ######## - - defp render_block( - %Block.Code{lnb: lnb, language: language, attrs: attrs} = block, - %Context{options: options} = context - ) do - class = - if language, do: ~s{ class="#{code_classes(language, options.code_class_prefix)}"}, else: "" - - tag = ~s[<pre><code#{class}>] - lines = options.render_code.(block) - html = ~s[#{tag}#{lines}</code></pre>] - add_attrs(context, html, attrs, [], lnb) - end - - ######### - # Lists # - ######### - - defp render_block( - %Block.List{lnb: lnb, type: type, blocks: items, attrs: attrs, start: start}, - context - ) do - {context1, content} = render(items, context) - html = "<#{type}#{start}>#{content}</#{type}>" - add_attrs(context1, html, attrs, [], lnb) - end - - # format a single paragraph list item, and remove the para tags - defp render_block( - %Block.ListItem{lnb: lnb, blocks: blocks, spaced: false, attrs: attrs}, - context - ) - when length(blocks) == 1 do - {context1, content} = render(blocks, context) - content = Regex.replace(~r{</?p>}, content, "") - html = "<li>#{content}</li>" - add_attrs(context1, html, attrs, [], lnb) - end - - # format a spaced list item - defp render_block(%Block.ListItem{lnb: lnb, blocks: blocks, attrs: attrs}, context) do - {context1, content} = render(blocks, context) - html = "<li>#{content}</li>" - add_attrs(context1, html, attrs, [], lnb) - end - - ################## - # Footnote Block # - ################## - - defp render_block(%Block.FnList{blocks: footnotes}, context) do - items = - Enum.map(footnotes, fn note -> - blocks = append_footnote_link(note) - %Block.ListItem{attrs: "#fn:#{note.number}", type: :ol, blocks: blocks} - end) - - {context1, html} = render_block(%Block.List{type: :ol, blocks: items}, context) - {context1, Enum.join([~s[<div class="footnotes">], "<hr />", html, "</div>"])} - end - - ####################################### - # Isolated IALs are rendered as paras # - ####################################### - - defp render_block(%Block.Ial{verbatim: verbatim}, context) do - {context, "<p>{:#{verbatim}}</p>"} - end - - #################### - # IDDef is ignored # - #################### - - defp render_block(%Block.IdDef{}, context), do: {context, ""} - - ##################################### - # And here are the inline renderers # - ##################################### - - defdelegate br, to: HtmlRenderer - defdelegate codespan(text), to: HtmlRenderer - defdelegate em(text), to: HtmlRenderer - defdelegate strong(text), to: HtmlRenderer - defdelegate strikethrough(text), to: HtmlRenderer - - defdelegate link(url, text), to: HtmlRenderer - defdelegate link(url, text, title), to: HtmlRenderer - - defdelegate image(path, alt, title), to: HtmlRenderer - - defdelegate footnote_link(ref, backref, number), to: HtmlRenderer - - # Table rows - defp add_trs(context, rows, tag, aligns, lnb) do - numbered_rows = - rows - |> Enum.zip(Stream.iterate(lnb, &(&1 + 1))) - - numbered_rows - |> Enum.reduce(context, fn {row, lnb}, ctx -> - append(add_tds(append(ctx, "<tr>"), row, tag, aligns, lnb), "</tr>") - end) - end - - defp 
add_tds(context, row, tag, aligns, lnb) do - Enum.reduce(1..length(row), context, add_td_fn(row, tag, aligns, lnb)) - end - - defp add_td_fn(row, tag, aligns, lnb) do - fn n, ctx -> - style = - case Enum.at(aligns, n - 1, :default) do - :default -> "" - align -> " style=\"text-align: #{align}\"" - end - - col = Enum.at(row, n - 1) - converted = convert(col, lnb, set_messages(ctx, [])) - append(add_messages_from(ctx, converted), "<#{tag}#{style}>#{converted.value}</#{tag}>") - end - end - - ############################### - # Append Footnote Return Link # - ############################### - - defdelegate append_footnote_link(note), to: HtmlRenderer - defdelegate append_footnote_link(note, fnlink), to: HtmlRenderer - - defdelegate render_code(lines), to: HtmlRenderer - - defp code_classes(language, prefix) do - ["" | String.split(prefix || "")] - |> Enum.map(fn pfx -> "#{pfx}#{language}" end) - |> Enum.join(" ") - end -end diff --git a/lib/pleroma/formatter.ex b/lib/pleroma/formatter.ex index 0c450eae4..b0e4a84ae 100644 --- a/lib/pleroma/formatter.ex +++ b/lib/pleroma/formatter.ex @@ -138,6 +138,14 @@ def html_escape(text, "text/plain") do |> Enum.join("") end + def minify({text, mentions, hashtags}, type) do + {minify(text, type), mentions, hashtags} + end + + def minify(text, "text/html") do + String.replace(text, "\n", "") + end + def truncate(text, max_length \\ 200, omission \\ "...") do # Remove trailing whitespace text = Regex.replace(~r/([^ \t\r\n])([ \t]+$)/u, text, "\\g{1}") diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex index 16973e5db..eaf94797a 100644 --- a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex @@ -5,7 +5,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator do use Ecto.Schema - alias Pleroma.EarmarkRenderer alias Pleroma.EctoType.ActivityPub.ObjectValidators alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes @@ -96,7 +95,7 @@ defp fix_content(%{"mediaType" => "text/markdown", "content" => content} = data) when is_binary(content) do content = content - |> Earmark.as_html!(%Earmark.Options{renderer: EarmarkRenderer}) + |> Earmark.as_html!() |> Pleroma.HTML.filter_tags() Map.put(data, "content", content) diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex index 1c74ea787..b434a069e 100644 --- a/lib/pleroma/web/common_api/utils.ex +++ b/lib/pleroma/web/common_api/utils.ex @@ -294,8 +294,9 @@ def format_input(text, "text/html", options) do def format_input(text, "text/markdown", options) do text |> Formatter.mentions_escape(options) - |> Earmark.as_html!(%Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + |> Earmark.as_html!() |> Formatter.linkify(options) + |> Formatter.minify("text/html") |> Formatter.html_escape("text/html") end diff --git a/mix.exs b/mix.exs index 72a6346b5..feb7eefa3 100644 --- a/mix.exs +++ b/mix.exs @@ -144,7 +144,7 @@ defp deps do {:ex_aws, "~> 2.1.6"}, {:ex_aws_s3, "~> 2.0"}, {:sweet_xml, "~> 0.6.6"}, - {:earmark, "1.4.3"}, + {:earmark, "1.4.10"}, {:bbcode_pleroma, "~> 0.2.0"}, {:crypt, git: "https://github.com/msantos/crypt.git", diff --git a/mix.lock b/mix.lock index 6b551a012..29439a438 100644 --- a/mix.lock +++ b/mix.lock @@ -27,7 +27,7 @@ "db_connection": {:hex, :db_connection, "2.2.2", 
"3bbca41b199e1598245b716248964926303b5d4609ff065125ce98bcd368939e", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm", "642af240d8a8affb93b4ba5a6fcd2bbcbdc327e1a524b825d383711536f8070c"}, "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"}, "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, - "earmark": {:hex, :earmark, "1.4.3", "364ca2e9710f6bff494117dbbd53880d84bebb692dafc3a78eb50aa3183f2bfd", [:mix], [], "hexpm", "8cf8a291ebf1c7b9539e3cddb19e9cef066c2441b1640f13c34c1d3cfc825fec"}, + "earmark": {:hex, :earmark, "1.4.10", "bddce5e8ea37712a5bfb01541be8ba57d3b171d3fa4f80a0be9bcf1db417bcaf", [:mix], [{:earmark_parser, ">= 1.4.10", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "12dbfa80810478e521d3ffb941ad9fbfcbbd7debe94e1341b4c4a1b2411c1c27"}, "earmark_parser": {:hex, :earmark_parser, "1.4.10", "6603d7a603b9c18d3d20db69921527f82ef09990885ed7525003c7fe7dc86c56", [:mix], [], "hexpm", "8e2d5370b732385db2c9b22215c3f59c84ac7dda7ed7e544d7c459496ae519c0"}, "ecto": {:hex, :ecto, "3.4.6", "08f7afad3257d6eb8613309af31037e16c36808dfda5a3cd0cb4e9738db030e4", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6f13a9e2a62e75c2dcfc7207bfc65645ab387af8360db4c89fee8b5a4bf3f70b"}, "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"}, diff --git a/test/pleroma/formatter_test.exs b/test/pleroma/formatter_test.exs index 5781a3f01..ceedd1b6d 100644 --- a/test/pleroma/formatter_test.exs +++ b/test/pleroma/formatter_test.exs @@ -307,4 +307,11 @@ test "it escapes HTML in plain text" do assert Formatter.html_escape(text, "text/plain") == expected end + + test "it minifies html" do + text = "<p>\nhello</p>\n<p>\nworld</p>\n" + expected = "<p>hello</p><p>world</p>" + + assert Formatter.minify(text, "text/html") == expected + end end diff --git a/test/pleroma/web/common_api/utils_test.exs b/test/pleroma/web/common_api/utils_test.exs index 4d6c9ea26..39ea08ca8 100644 --- a/test/pleroma/web/common_api/utils_test.exs +++ b/test/pleroma/web/common_api/utils_test.exs @@ -168,6 +168,81 @@ test "works for text/markdown with mentions" do end end + describe "format_input/3 with markdown" do + test "Paragraph" do + code = ~s[Hello\n\nWorld!] 
+ {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "<p>Hello</p><p>World!</p>" + end + + test "raw HTML" do + code = ~s[<a href="http://example.org/">OwO</a><!-- what's this?-->] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "<p>#{code}</p>" + end + + test "rulers" do + code = ~s[before\n\n-----\n\nafter] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "<p>before</p><hr /><p>after</p>" + end + + test "headings" do + code = ~s[# h1\n## h2\n### h3\n] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<h1>h1</h1><h2>h2</h2><h3>h3</h3>] + end + + test "blockquote" do + code = ~s[> whoms't are you quoting?] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "<blockquote><p>whoms’t are you quoting?</p></blockquote>" + end + + test "code" do + code = ~s[`mix`] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><code class="inline">mix</code></p>] + + code = ~s[``mix``] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><code class="inline">mix</code></p>] + + code = ~s[```\nputs "Hello World"\n```] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<pre><code class="">puts "Hello World"</code></pre>] + end + + test "lists" do + code = ~s[- one\n- two\n- three\n- four] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "<ul><li>one</li><li>two</li><li>three</li><li>four</li></ul>" + + code = ~s[1. one\n2. two\n3. three\n4. four\n] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "<ol><li>one</li><li>two</li><li>three</li><li>four</li></ol>" + end + + test "delegated renderers" do + code = ~s[a<br/>b] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "<p>#{code}</p>" + + code = ~s[*aaaa~*] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><em>aaaa~</em></p>] + + code = ~s[**aaaa~**] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><strong>aaaa~</strong></p>] + + # strikethrought + code = ~s[<del>aaaa~</del>] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><del>aaaa~</del></p>] + end + end + describe "context_to_conversation_id" do test "creates a mapping object" do conversation_id = Utils.context_to_conversation_id("random context") From ba71bbf6101847292346ba3b1fbe78ce4c385919 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Tue, 13 Oct 2020 01:53:25 -0500 Subject: [PATCH 002/174] Improve Formatter.minify/2 --- lib/pleroma/formatter.ex | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/formatter.ex b/lib/pleroma/formatter.ex index b0e4a84ae..61906dda6 100644 --- a/lib/pleroma/formatter.ex +++ b/lib/pleroma/formatter.ex @@ -143,7 +143,10 @@ def minify({text, mentions, hashtags}, type) do end def minify(text, "text/html") do - String.replace(text, "\n", "") + text + |> String.replace(">\n", ">") + |> String.replace("> ", ">") + |> String.replace(" <", "<") end def truncate(text, max_length \\ 200, omission \\ "...") do From c4f4e48e574362d1ec86eaf11a382e81ca97cb35 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Tue, 13 Oct 2020 02:08:41 -0500 Subject: [PATCH 003/174] Remove some N/A tests --- test/pleroma/web/common_api/utils_test.exs | 12 
+----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/test/pleroma/web/common_api/utils_test.exs b/test/pleroma/web/common_api/utils_test.exs index 39ea08ca8..c6abbbe84 100644 --- a/test/pleroma/web/common_api/utils_test.exs +++ b/test/pleroma/web/common_api/utils_test.exs @@ -187,12 +187,6 @@ test "rulers" do assert result == "<p>before</p><hr /><p>after</p>" end - test "headings" do - code = ~s[# h1\n## h2\n### h3\n] - {result, [], []} = Utils.format_input(code, "text/markdown") - assert result == ~s[<h1>h1</h1><h2>h2</h2><h3>h3</h3>] - end - test "blockquote" do code = ~s[> whoms't are you quoting?] {result, [], []} = Utils.format_input(code, "text/markdown") @@ -224,10 +218,6 @@ test "lists" do end test "delegated renderers" do - code = ~s[a<br/>b] - {result, [], []} = Utils.format_input(code, "text/markdown") - assert result == "<p>#{code}</p>" - code = ~s[*aaaa~*] {result, [], []} = Utils.format_input(code, "text/markdown") assert result == ~s[<p><em>aaaa~</em></p>] @@ -236,7 +226,7 @@ test "delegated renderers" do {result, [], []} = Utils.format_input(code, "text/markdown") assert result == ~s[<p><strong>aaaa~</strong></p>] - # strikethrought + # strikethrough code = ~s[<del>aaaa~</del>] {result, [], []} = Utils.format_input(code, "text/markdown") assert result == ~s[<p><del>aaaa~</del></p>] From b2548cfcdabdcb90bfcc9f4022c0b1cff9157a4a Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Tue, 13 Oct 2020 13:54:53 -0500 Subject: [PATCH 004/174] Sanitizer: allow <hr> tags --- priv/scrubbers/default.ex | 1 + 1 file changed, 1 insertion(+) diff --git a/priv/scrubbers/default.ex b/priv/scrubbers/default.ex index 7b06994de..0893b17e5 100644 --- a/priv/scrubbers/default.ex +++ b/priv/scrubbers/default.ex @@ -39,6 +39,7 @@ defmodule Pleroma.HTML.Scrubber.Default do Meta.allow_tag_with_these_attributes(:code, []) Meta.allow_tag_with_these_attributes(:del, []) Meta.allow_tag_with_these_attributes(:em, []) + Meta.allow_tag_with_these_attributes(:hr, []) Meta.allow_tag_with_these_attributes(:i, []) Meta.allow_tag_with_these_attributes(:li, []) Meta.allow_tag_with_these_attributes(:ol, []) From f8c93246d69a193ead81248879ba260e98673b3d Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Tue, 13 Oct 2020 14:27:50 -0500 Subject: [PATCH 005/174] Refactor Earmark code, fix tests --- lib/pleroma/formatter.ex | 4 ++++ .../object_validators/audio_video_validator.ex | 2 +- lib/pleroma/web/common_api/utils.ex | 2 +- priv/scrubbers/default.ex | 2 ++ test/pleroma/web/common_api/utils_test.exs | 10 +++++----- test/pleroma/web/common_api_test.exs | 2 +- 6 files changed, 14 insertions(+), 8 deletions(-) diff --git a/lib/pleroma/formatter.ex b/lib/pleroma/formatter.ex index 61906dda6..1be12055f 100644 --- a/lib/pleroma/formatter.ex +++ b/lib/pleroma/formatter.ex @@ -121,6 +121,10 @@ def mentions_escape(text, options \\ []) do end end + def markdown_to_html(text) do + Earmark.as_html!(text) + end + def html_escape({text, mentions, hashtags}, type) do {html_escape(text, type), mentions, hashtags} end diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex index eaf94797a..9b38aa4c2 100644 --- a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex @@ -95,7 +95,7 @@ defp fix_content(%{"mediaType" => "text/markdown", "content" => content} = data) when 
is_binary(content) do content = content - |> Earmark.as_html!() + |> Pleroma.Formatter.markdown_to_html() |> Pleroma.HTML.filter_tags() Map.put(data, "content", content) diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex index b434a069e..be86009af 100644 --- a/lib/pleroma/web/common_api/utils.ex +++ b/lib/pleroma/web/common_api/utils.ex @@ -294,7 +294,7 @@ def format_input(text, "text/html", options) do def format_input(text, "text/markdown", options) do text |> Formatter.mentions_escape(options) - |> Earmark.as_html!() + |> Formatter.markdown_to_html() |> Formatter.linkify(options) |> Formatter.minify("text/html") |> Formatter.html_escape("text/html") diff --git a/priv/scrubbers/default.ex b/priv/scrubbers/default.ex index 0893b17e5..4694a92a5 100644 --- a/priv/scrubbers/default.ex +++ b/priv/scrubbers/default.ex @@ -59,6 +59,8 @@ defmodule Pleroma.HTML.Scrubber.Default do Meta.allow_tag_with_this_attribute_values(:span, "class", ["h-card"]) Meta.allow_tag_with_these_attributes(:span, []) + Meta.allow_tag_with_this_attribute_values(:code, "class", ["inline"]) + @allow_inline_images Pleroma.Config.get([:markup, :allow_inline_images]) if @allow_inline_images do diff --git a/test/pleroma/web/common_api/utils_test.exs b/test/pleroma/web/common_api/utils_test.exs index c6abbbe84..ab6392b1f 100644 --- a/test/pleroma/web/common_api/utils_test.exs +++ b/test/pleroma/web/common_api/utils_test.exs @@ -178,13 +178,13 @@ test "Paragraph" do test "raw HTML" do code = ~s[<a href="http://example.org/">OwO</a><!-- what's this?-->] {result, [], []} = Utils.format_input(code, "text/markdown") - assert result == "<p>#{code}</p>" + assert result == ~s[<a href="http://example.org/">OwO</a>] end test "rulers" do code = ~s[before\n\n-----\n\nafter] {result, [], []} = Utils.format_input(code, "text/markdown") - assert result == "<p>before</p><hr /><p>after</p>" + assert result == "<p>before</p><hr/><p>after</p>" end test "blockquote" do @@ -204,7 +204,7 @@ test "code" do code = ~s[```\nputs "Hello World"\n```] {result, [], []} = Utils.format_input(code, "text/markdown") - assert result == ~s[<pre><code class="">puts "Hello World"</code></pre>] + assert result == ~s[<pre><code>puts "Hello World"</code></pre>] end test "lists" do @@ -227,9 +227,9 @@ test "delegated renderers" do assert result == ~s[<p><strong>aaaa~</strong></p>] # strikethrough - code = ~s[<del>aaaa~</del>] + code = ~s[~~aaaa~~~] {result, [], []} = Utils.format_input(code, "text/markdown") - assert result == ~s[<p><del>aaaa~</del></p>] + assert result == ~s[<p><del>aaaa</del>~</p>] end end diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index 585b2c174..c1b1af073 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -558,7 +558,7 @@ test "it filters out obviously bad tags when accepting a post as Markdown" do object = Object.normalize(activity) - assert object.data["content"] == "<p><b>2hu</b></p>alert('xss')" + assert object.data["content"] == "<p><b>2hu</b></p>" assert object.data["source"] == post end From f1c67115d89ddcc7b10b963579dd621fca2094db Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Tue, 13 Oct 2020 18:09:49 -0500 Subject: [PATCH 006/174] Upgrade linkify, test URL issues, fixes #2026 #1942 --- test/pleroma/web/common_api/utils_test.exs | 52 ++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/test/pleroma/web/common_api/utils_test.exs 
b/test/pleroma/web/common_api/utils_test.exs index ab6392b1f..28b05ed91 100644 --- a/test/pleroma/web/common_api/utils_test.exs +++ b/test/pleroma/web/common_api/utils_test.exs @@ -175,6 +175,54 @@ test "Paragraph" do assert result == "<p>Hello</p><p>World!</p>" end + test "links" do + code = "https://en.wikipedia.org/wiki/Animal_Crossing_(video_game)" + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><a href="#{code}">#{code}</a></p>] + + code = "https://github.com/pragdave/earmark/" + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><a href="#{code}">#{code}</a></p>] + end + + test "link with local mention" do + insert(:user, %{nickname: "lain"}) + + code = "https://example.com/@lain" + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><a href="#{code}">#{code}</a></p>] + end + + test "local mentions" do + mario = insert(:user, %{nickname: "mario"}) + luigi = insert(:user, %{nickname: "luigi"}) + + code = "@mario @luigi yo what's up?" + {result, _, []} = Utils.format_input(code, "text/markdown") + + assert result == + ~s[<p><span class="h-card"><a class="u-url mention" data-user="#{mario.id}" href="#{ + mario.ap_id + }" rel="ugc">@<span>mario</span></a></span> <span class="h-card"><a class="u-url mention" data-user="#{ + luigi.id + }" href="#{luigi.ap_id}" rel="ugc">@<span>luigi</span></a></span> yo what’s up?</p>] + end + + test "remote mentions" do + mario = insert(:user, %{nickname: "mario@mushroom.kingdom", local: false}) + luigi = insert(:user, %{nickname: "luigi@mushroom.kingdom", local: false}) + + code = "@mario@mushroom.kingdom @luigi@mushroom.kingdom yo what's up?" + {result, _, []} = Utils.format_input(code, "text/markdown") + + assert result == + ~s[<p><span class="h-card"><a class="u-url mention" data-user="#{mario.id}" href="#{ + mario.ap_id + }" rel="ugc">@<span>mario</span></a></span> <span class="h-card"><a class="u-url mention" data-user="#{ + luigi.id + }" href="#{luigi.ap_id}" rel="ugc">@<span>luigi</span></a></span> yo what’s up?</p>] + end + test "raw HTML" do code = ~s[<a href="http://example.org/">OwO</a><!-- what's this?-->] {result, [], []} = Utils.format_input(code, "text/markdown") @@ -205,6 +253,10 @@ test "code" do code = ~s[```\nputs "Hello World"\n```] {result, [], []} = Utils.format_input(code, "text/markdown") assert result == ~s[<pre><code>puts "Hello World"</code></pre>] + + code = ~s[ <div>\n </div>] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<pre><code><div>\n</div></code></pre>] end test "lists" do From 642729b49fca41fb142c6121fedf35c96c03b018 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Tue, 13 Oct 2020 19:16:57 -0500 Subject: [PATCH 007/174] Fix AudioVideoValidator markdown --- .../web/activity_pub/object_validators/audio_video_validator.ex | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex index 9b38aa4c2..fa3e2c026 100644 --- a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex @@ -96,6 +96,7 @@ defp fix_content(%{"mediaType" => "text/markdown", "content" => content} = data) content = content |> Pleroma.Formatter.markdown_to_html() + |> Pleroma.Formatter.minify("text/html") |> Pleroma.HTML.filter_tags() Map.put(data, "content", 
content) From 6520599b7deac56780e1496c969cc45ff2e9f5da Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@FreeBSD.org> Date: Fri, 11 Dec 2020 13:43:40 -0600 Subject: [PATCH 008/174] Update Earmark to 1.4.13, use the new compact_output mode --- lib/pleroma/formatter.ex | 2 +- mix.exs | 2 +- mix.lock | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/pleroma/formatter.ex b/lib/pleroma/formatter.ex index 1be12055f..2aa236ca9 100644 --- a/lib/pleroma/formatter.ex +++ b/lib/pleroma/formatter.ex @@ -122,7 +122,7 @@ def mentions_escape(text, options \\ []) do end def markdown_to_html(text) do - Earmark.as_html!(text) + Earmark.as_html!(text, %Earmark.Options{compact_output: true}) end def html_escape({text, mentions, hashtags}, type) do diff --git a/mix.exs b/mix.exs index feb7eefa3..06d77edb7 100644 --- a/mix.exs +++ b/mix.exs @@ -144,7 +144,7 @@ defp deps do {:ex_aws, "~> 2.1.6"}, {:ex_aws_s3, "~> 2.0"}, {:sweet_xml, "~> 0.6.6"}, - {:earmark, "1.4.10"}, + {:earmark, "1.4.13"}, {:bbcode_pleroma, "~> 0.2.0"}, {:crypt, git: "https://github.com/msantos/crypt.git", diff --git a/mix.lock b/mix.lock index 29439a438..e4dd32c83 100644 --- a/mix.lock +++ b/mix.lock @@ -27,8 +27,8 @@ "db_connection": {:hex, :db_connection, "2.2.2", "3bbca41b199e1598245b716248964926303b5d4609ff065125ce98bcd368939e", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm", "642af240d8a8affb93b4ba5a6fcd2bbcbdc327e1a524b825d383711536f8070c"}, "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"}, "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, - "earmark": {:hex, :earmark, "1.4.10", "bddce5e8ea37712a5bfb01541be8ba57d3b171d3fa4f80a0be9bcf1db417bcaf", [:mix], [{:earmark_parser, ">= 1.4.10", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "12dbfa80810478e521d3ffb941ad9fbfcbbd7debe94e1341b4c4a1b2411c1c27"}, - "earmark_parser": {:hex, :earmark_parser, "1.4.10", "6603d7a603b9c18d3d20db69921527f82ef09990885ed7525003c7fe7dc86c56", [:mix], [], "hexpm", "8e2d5370b732385db2c9b22215c3f59c84ac7dda7ed7e544d7c459496ae519c0"}, + "earmark": {:hex, :earmark, "1.4.13", "2c6ce9768fc9fdbf4046f457e207df6360ee6c91ee1ecb8e9a139f96a4289d91", [:mix], [{:earmark_parser, ">= 1.4.12", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "a0cf3ed88ef2b1964df408889b5ecb886d1a048edde53497fc935ccd15af3403"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.12", "b245e875ec0a311a342320da0551da407d9d2b65d98f7a9597ae078615af3449", [:mix], [], "hexpm", "711e2cc4d64abb7d566d43f54b78f7dc129308a63bc103fbd88550d2174b3160"}, "ecto": {:hex, :ecto, "3.4.6", "08f7afad3257d6eb8613309af31037e16c36808dfda5a3cd0cb4e9738db030e4", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6f13a9e2a62e75c2dcfc7207bfc65645ab387af8360db4c89fee8b5a4bf3f70b"}, "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, 
{:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"}, "ecto_sql": {:hex, :ecto_sql, "3.4.5", "30161f81b167d561a9a2df4329c10ae05ff36eca7ccc84628f2c8b9fa1e43323", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "31990c6a3579b36a3c0841d34a94c275e727de8b84f58509da5f1b2032c98ac2"}, From f318d8e56df1e30f41c7ddf2e306b3552034921f Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@FreeBSD.org> Date: Fri, 11 Dec 2020 17:28:00 -0600 Subject: [PATCH 009/174] Use Pleroma.Formatter.markdown_to_html/1 in the tests --- test/pleroma/earmark_renderer_test.exs | 28 +++++++++++++------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/test/pleroma/earmark_renderer_test.exs b/test/pleroma/earmark_renderer_test.exs index 220d97d16..3adbefc1e 100644 --- a/test/pleroma/earmark_renderer_test.exs +++ b/test/pleroma/earmark_renderer_test.exs @@ -6,74 +6,74 @@ defmodule Pleroma.EarmarkRendererTest do test "Paragraph" do code = ~s[Hello\n\nWorld!] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == "<p>Hello</p><p>World!</p>" end test "raw HTML" do code = ~s[<a href="http://example.org/">OwO</a><!-- what's this?-->] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == "<p>#{code}</p>" end test "rulers" do code = ~s[before\n\n-----\n\nafter] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == "<p>before</p><hr /><p>after</p>" end test "headings" do code = ~s[# h1\n## h2\n### h3\n] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == ~s[<h1>h1</h1><h2>h2</h2><h3>h3</h3>] end test "blockquote" do code = ~s[> whoms't are you quoting?] 
- result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == "<blockquote><p>whoms’t are you quoting?</p></blockquote>" end test "code" do code = ~s[`mix`] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == ~s[<p><code class="inline">mix</code></p>] code = ~s[``mix``] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == ~s[<p><code class="inline">mix</code></p>] code = ~s[```\nputs "Hello World"\n```] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == ~s[<pre><code class="">puts "Hello World"</code></pre>] end test "lists" do code = ~s[- one\n- two\n- three\n- four] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == "<ul><li>one</li><li>two</li><li>three</li><li>four</li></ul>" code = ~s[1. one\n2. two\n3. three\n4. four\n] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == "<ol><li>one</li><li>two</li><li>three</li><li>four</li></ol>" end test "delegated renderers" do code = ~s[a<br/>b] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == "<p>#{code}</p>" code = ~s[*aaaa~*] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == ~s[<p><em>aaaa~</em></p>] code = ~s[**aaaa~**] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == ~s[<p><strong>aaaa~</strong></p>] # strikethrought code = ~s[<del>aaaa~</del>] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + result = Pleroma.Formatter.markdown_to_html(code) assert result == ~s[<p><del>aaaa~</del></p>] end end From ee221277b05d2f682c340c1e1b81fbce4931735a Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Mon, 21 Dec 2020 22:54:26 +0300 Subject: [PATCH 010/174] Encapsulation of tags / hashtags fetching from objects. 
--- lib/pleroma/activity/ir/topics.ex | 10 ++--- lib/pleroma/object.ex | 45 ++++++++++++++++--- .../web/activity_pub/mrf/simple_policy.ex | 8 ++-- .../web/activity_pub/transmogrifier.ex | 29 ++++++------ lib/pleroma/web/feed/feed_view.ex | 1 + .../web/mastodon_api/views/status_view.ex | 6 ++- .../templates/feed/feed/_activity.atom.eex | 2 +- .../web/templates/feed/feed/_activity.rss.eex | 2 +- .../feed/feed/_tag_activity.atom.eex | 2 +- .../transmogrifier/note_handling_test.exs | 6 ++- test/pleroma/web/common_api_test.exs | 2 +- .../mastodon_api/views/status_view_test.exs | 4 +- 12 files changed, 78 insertions(+), 39 deletions(-) diff --git a/lib/pleroma/activity/ir/topics.ex b/lib/pleroma/activity/ir/topics.ex index fe2e8cb5c..2c74ac2bf 100644 --- a/lib/pleroma/activity/ir/topics.ex +++ b/lib/pleroma/activity/ir/topics.ex @@ -48,14 +48,12 @@ defp item_creation_tags(tags, _, _) do tags end - defp hashtags_to_topics(%{data: %{"tag" => tags}}) do - tags - |> Enum.filter(&is_bitstring(&1)) - |> Enum.map(fn tag -> "hashtag:" <> tag end) + defp hashtags_to_topics(object) do + object + |> Object.hashtags() + |> Enum.map(fn hashtag -> "hashtag:" <> hashtag end) end - defp hashtags_to_topics(_), do: [] - defp remote_topics(%{local: true}), do: [] defp remote_topics(%{actor: actor}) when is_binary(actor), diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 052ad413b..2088c7656 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -47,17 +47,33 @@ def with_joined_activity(query, activity_type \\ "Create", join_type \\ :inner) end def create(data) do - Object.change(%Object{}, %{data: data}) + %Object{} + |> Object.change(%{data: data}) |> Repo.insert() end def change(struct, params \\ %{}) do - struct - |> cast(params, [:data]) - |> validate_required([:data]) - |> unique_constraint(:ap_id, name: :objects_unique_apid_index) + changeset = + struct + |> cast(params, [:data]) + |> validate_required([:data]) + |> unique_constraint(:ap_id, name: :objects_unique_apid_index) + + if hashtags_changed?(struct, get_change(changeset, :data)) do + # TODO: modify assoc once it's introduced + changeset + else + changeset + end end + defp hashtags_changed?(%Object{} = struct, %{"tag" => _} = data) do + Enum.sort(embedded_hashtags(struct)) != + Enum.sort(object_data_hashtags(data)) + end + + defp hashtags_changed?(_, _), do: false + def get_by_id(nil), do: nil def get_by_id(id), do: Repo.get(Object, id) @@ -344,4 +360,23 @@ def replies(object, opts \\ []) do def self_replies(object, opts \\ []), do: replies(object, Keyword.put(opts, :self_only, true)) + + def tags(%Object{data: %{"tag" => tags}}) when is_list(tags), do: tags + + def tags(_), do: [] + + def hashtags(object), do: embedded_hashtags(object) + + defp embedded_hashtags(%Object{data: data}) do + object_data_hashtags(data) + end + + defp embedded_hashtags(_), do: [] + + defp object_data_hashtags(%{"tag" => tags}) when is_list(tags) do + # Note: AS2 map-type elements are ignored + Enum.filter(tags, &is_bitstring(&1)) + end + + defp object_data_hashtags(_), do: [] end diff --git a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex index 6cd91826d..e92091d66 100644 --- a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex @@ -74,9 +74,11 @@ defp check_media_nsfw( object = if MRF.subdomain_match?(media_nsfw, actor_host) do - tags = (child_object["tag"] || []) ++ ["nsfw"] - child_object = Map.put(child_object, "tag", tags) - 
child_object = Map.put(child_object, "sensitive", true) + child_object = + child_object + |> Map.put("tag", (child_object["tag"] || []) ++ ["nsfw"]) + |> Map.put("sensitive", true) + Map.put(object, "object", child_object) else object diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 565d32433..fd17793d0 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -32,18 +32,18 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do """ def fix_object(object, options \\ []) do object - |> strip_internal_fields - |> fix_actor - |> fix_url - |> fix_attachments - |> fix_context + |> strip_internal_fields() + |> fix_actor() + |> fix_url() + |> fix_attachments() + |> fix_context() |> fix_in_reply_to(options) - |> fix_emoji - |> fix_tag - |> set_sensitive - |> fix_content_map - |> fix_addressing - |> fix_summary + |> fix_emoji() + |> fix_tag() + |> set_sensitive() + |> fix_content_map() + |> fix_addressing() + |> fix_summary() |> fix_type(options) end @@ -315,10 +315,9 @@ def fix_tag(%{"tag" => tag} = object) when is_list(tag) do tags = tag |> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end) - |> Enum.map(fn %{"name" => name} -> - name - |> String.slice(1..-1) - |> String.downcase() + |> Enum.map(fn + %{"name" => "#" <> hashtag} -> String.downcase(hashtag) + %{"name" => hashtag} -> String.downcase(hashtag) end) Map.put(object, "tag", tag ++ tags) diff --git a/lib/pleroma/web/feed/feed_view.ex b/lib/pleroma/web/feed/feed_view.ex index 30e0a2a55..1155c6a39 100644 --- a/lib/pleroma/web/feed/feed_view.ex +++ b/lib/pleroma/web/feed/feed_view.ex @@ -32,6 +32,7 @@ def prepare_activity(activity, opts \\ []) do %{ activity: activity, + object: object, data: Map.get(object, :data), actor: actor } diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 2301e21cf..bd08aa203 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -201,8 +201,10 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} like_count = object.data["like_count"] || 0 announcement_count = object.data["announcement_count"] || 0 - tags = object.data["tag"] || [] - sensitive = object.data["sensitive"] || Enum.member?(tags, "nsfw") + hashtags = Object.hashtags(object) + sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw") + + tags = Object.tags(object) tag_mentions = tags diff --git a/lib/pleroma/web/templates/feed/feed/_activity.atom.eex b/lib/pleroma/web/templates/feed/feed/_activity.atom.eex index 3fd150c4e..6688830ba 100644 --- a/lib/pleroma/web/templates/feed/feed/_activity.atom.eex +++ b/lib/pleroma/web/templates/feed/feed/_activity.atom.eex @@ -22,7 +22,7 @@ <link type="text/html" href='<%= @data["external_url"] %>' rel="alternate"/> <% end %> - <%= for tag <- @data["tag"] || [] do %> + <%= for tag <- Pleroma.Object.hashtags(@object) do %> <category term="<%= tag %>"></category> <% end %> diff --git a/lib/pleroma/web/templates/feed/feed/_activity.rss.eex b/lib/pleroma/web/templates/feed/feed/_activity.rss.eex index 42960de7d..fc6d74b42 100644 --- a/lib/pleroma/web/templates/feed/feed/_activity.rss.eex +++ b/lib/pleroma/web/templates/feed/feed/_activity.rss.eex @@ -21,7 +21,7 @@ <link><%= @data["external_url"] %></link> <% end %> - <%= for tag <- @data["tag"] || [] do %> + <%= for tag <- Pleroma.Object.hashtags(@object) 
do %> <category term="<%= tag %>"></category> <% end %> diff --git a/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex b/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex index cf5874a91..c2de28fe4 100644 --- a/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex +++ b/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex @@ -41,7 +41,7 @@ <% end %> <% end %> - <%= for tag <- @data["tag"] || [] do %> + <%= for tag <- Pleroma.Object.hashtags(@object) do %> <category term="<%= tag %>"></category> <% end %> diff --git a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs index b4a006aec..a33959d9f 100644 --- a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs @@ -39,7 +39,8 @@ test "it works for incoming notices with tag not being an array (kroeg)" do {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data) object = Object.normalize(data["object"]) - assert "test" in object.data["tag"] + assert "test" in Object.tags(object) + assert Object.hashtags(object) == ["test"] end test "it cleans up incoming notices which are not really DMs" do @@ -220,7 +221,8 @@ test "it works for incoming notices with hashtags" do {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data) object = Object.normalize(data["object"]) - assert Enum.at(object.data["tag"], 2) == "moo" + assert Enum.at(Object.tags(object), 2) == "moo" + assert Object.hashtags(object) == ["moo"] end test "it works for incoming notices with contentMap" do diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index 585b2c174..1e98208fb 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -493,7 +493,7 @@ test "it de-duplicates tags" do object = Object.normalize(activity) - assert object.data["tag"] == ["2hu"] + assert Object.tags(object) == ["2hu"] end test "it adds emoji in the object" do diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs b/test/pleroma/web/mastodon_api/views/status_view_test.exs index f2a7469ed..6b8afc960 100644 --- a/test/pleroma/web/mastodon_api/views/status_view_test.exs +++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs @@ -262,8 +262,8 @@ test "a note activity" do mentions: [], tags: [ %{ - name: "#{object_data["tag"]}", - url: "/tag/#{object_data["tag"]}" + name: "#{hd(object_data["tag"])}", + url: "/tag/#{hd(object_data["tag"])}" } ], application: %{ From e369b1306b2f8b9732c21333b9957f7e4e408f90 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Tue, 22 Dec 2020 22:04:33 +0300 Subject: [PATCH 011/174] Added Hashtag entity and objects-hashtags association with auto-sync with `data.tag` on Object update. 
--- lib/pleroma/hashtag.ex | 58 +++++++++++++++++++ lib/pleroma/object.ex | 35 ++++++++--- .../20201221202251_create_hashtags.exs | 14 +++++ ...20201221203824_create_hashtags_objects.exs | 13 +++++ test/pleroma/object_test.exs | 27 +++++++++ .../web/activity_pub/activity_pub_test.exs | 5 ++ 6 files changed, 143 insertions(+), 9 deletions(-) create mode 100644 lib/pleroma/hashtag.ex create mode 100644 priv/repo/migrations/20201221202251_create_hashtags.exs create mode 100644 priv/repo/migrations/20201221203824_create_hashtags_objects.exs diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex new file mode 100644 index 000000000..b05927563 --- /dev/null +++ b/lib/pleroma/hashtag.ex @@ -0,0 +1,58 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Hashtag do + use Ecto.Schema + + import Ecto.Changeset + + alias Pleroma.Hashtag + alias Pleroma.Repo + + @derive {Jason.Encoder, only: [:data]} + + schema "hashtags" do + field(:name, :string) + field(:data, :map, default: %{}) + + many_to_many(:objects, Pleroma.Object, join_through: "hashtags_objects", on_replace: :delete) + + timestamps() + end + + def get_by_name(name) do + Repo.get_by(Hashtag, name: name) + end + + def get_or_create_by_name(name) when is_bitstring(name) do + with %Hashtag{} = hashtag <- get_by_name(name) do + {:ok, hashtag} + else + _ -> + %Hashtag{} + |> changeset(%{name: name}) + |> Repo.insert() + end + end + + def get_or_create_by_names(names) when is_list(names) do + Enum.reduce_while(names, {:ok, []}, fn name, {:ok, list} -> + case get_or_create_by_name(name) do + {:ok, %Hashtag{} = hashtag} -> + {:cont, {:ok, list ++ [hashtag]}} + + error -> + {:halt, error} + end + end) + end + + def changeset(%Hashtag{} = struct, params) do + struct + |> cast(params, [:name, :data]) + |> update_change(:name, &String.downcase/1) + |> validate_required([:name]) + |> unique_constraint(:name) + end +end diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 2088c7656..357a3b504 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Object do alias Pleroma.Activity alias Pleroma.Config + alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Object.Fetcher alias Pleroma.ObjectTombstone @@ -26,6 +27,8 @@ defmodule Pleroma.Object do schema "objects" do field(:data, :map) + many_to_many(:hashtags, Hashtag, join_through: "hashtags_objects", on_replace: :delete) + timestamps() end @@ -53,17 +56,31 @@ def create(data) do end def change(struct, params \\ %{}) do - changeset = - struct - |> cast(params, [:data]) - |> validate_required([:data]) - |> unique_constraint(:ap_id, name: :objects_unique_apid_index) + struct + |> cast(params, [:data]) + |> validate_required([:data]) + |> unique_constraint(:ap_id, name: :objects_unique_apid_index) + |> maybe_handle_hashtags_change(struct) + end - if hashtags_changed?(struct, get_change(changeset, :data)) do - # TODO: modify assoc once it's introduced - changeset + defp maybe_handle_hashtags_change(changeset, struct) do + with data_hashtags_change = get_change(changeset, :data), + true <- hashtags_changed?(struct, data_hashtags_change), + {:ok, hashtag_records} <- + data_hashtags_change + |> object_data_hashtags() + |> Hashtag.get_or_create_by_names() do + put_assoc(changeset, :hashtags, hashtag_records) else - changeset + false -> + changeset + + {:error, hashtag_changeset} -> + failed_hashtag = 
get_field(hashtag_changeset, :name) + + validate_change(changeset, :data, fn _, _ -> + [data: "error referencing hashtag: #{failed_hashtag}"] + end) end end diff --git a/priv/repo/migrations/20201221202251_create_hashtags.exs b/priv/repo/migrations/20201221202251_create_hashtags.exs new file mode 100644 index 000000000..afc522002 --- /dev/null +++ b/priv/repo/migrations/20201221202251_create_hashtags.exs @@ -0,0 +1,14 @@ +defmodule Pleroma.Repo.Migrations.CreateHashtags do + use Ecto.Migration + + def change do + create_if_not_exists table(:hashtags) do + add(:name, :citext, null: false) + add(:data, :map, default: %{}) + + timestamps() + end + + create_if_not_exists(unique_index(:hashtags, [:name])) + end +end diff --git a/priv/repo/migrations/20201221203824_create_hashtags_objects.exs b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs new file mode 100644 index 000000000..b2649b4fb --- /dev/null +++ b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs @@ -0,0 +1,13 @@ +defmodule Pleroma.Repo.Migrations.CreateHashtagsObjects do + use Ecto.Migration + + def change do + create_if_not_exists table(:hashtags_objects) do + add(:hashtag_id, references(:hashtags), null: false) + add(:object_id, references(:objects), null: false) + end + + create_if_not_exists(unique_index(:hashtags_objects, [:hashtag_id, :object_id])) + create_if_not_exists(index(:hashtags_objects, [:object_id])) + end +end diff --git a/test/pleroma/object_test.exs b/test/pleroma/object_test.exs index 5d4e6fb84..819ecd210 100644 --- a/test/pleroma/object_test.exs +++ b/test/pleroma/object_test.exs @@ -5,10 +5,13 @@ defmodule Pleroma.ObjectTest do use Pleroma.DataCase use Oban.Testing, repo: Pleroma.Repo + import ExUnit.CaptureLog import Pleroma.Factory import Tesla.Mock + alias Pleroma.Activity + alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Repo alias Pleroma.Tests.ObanHelpers @@ -406,4 +409,28 @@ test "preserves internal fields on refetch", %{mock_modified: mock_modified} do assert updated_object.data["like_count"] == 1 end end + + describe ":hashtags association" do + test "Hashtag records are created with Object record and updated on its change" do + user = insert(:user) + + {:ok, %{object: object}} = + CommonAPI.post(user, %{status: "some text #hashtag1 #hashtag2 ..."}) + + assert [%Hashtag{name: "hashtag1"}, %Hashtag{name: "hashtag2"}] = + Enum.sort_by(object.hashtags, & &1.name) + + {:ok, object} = Object.update_data(object, %{"tag" => []}) + + assert [] = object.hashtags + + object = Object.get_by_id(object.id) |> Repo.preload(:hashtags) + assert [] = object.hashtags + + {:ok, object} = Object.update_data(object, %{"tag" => ["abc", "def"]}) + + assert [%Hashtag{name: "abc"}, %Hashtag{name: "def"}] = + Enum.sort_by(object.hashtags, & &1.name) + end + end end diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index 9eb7ae86b..bfec32042 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -217,6 +217,11 @@ test "it fetches the appropriate tag-restricted posts" do tag_all: ["test", "reject"] }) + [fetch_one, fetch_two, fetch_three, fetch_four] = + Enum.map([fetch_one, fetch_two, fetch_three, fetch_four], fn statuses -> + Enum.map(statuses, fn s -> Repo.preload(s, object: :hashtags) end) + end) + assert fetch_one == [status_one, status_three] assert fetch_two == [status_one, status_two, status_three] assert fetch_three == [status_one, status_two] 
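The hashtag association introduced in PATCH 011 can be exercised directly from an iex session. The sketch below is illustrative only and assumes a running Pleroma dev instance (`iex -S mix`) with at least one object in the database; the object id used is a placeholder. It relies only on functions visible in the patch above: Hashtag.get_or_create_by_names/1, Object.get_by_id/1, Object.update_data/2 and the new :hashtags assoc.

    alias Pleroma.{Hashtag, Object, Repo}

    # Idempotently create (or fetch) hashtag records by name.
    {:ok, tags} = Hashtag.get_or_create_by_names(["elixir", "pleroma"])
    Enum.map(tags, & &1.name)
    #=> ["elixir", "pleroma"]

    # Changing an object's "tag" data re-syncs the hashtags_objects join table,
    # as the object_test.exs changes above demonstrate.
    object = Object.get_by_id(1) |> Repo.preload(:hashtags)   # "1" is a placeholder id
    {:ok, object} = Object.update_data(object, %{"tag" => ["elixir"]})
    Enum.map(object.hashtags, & &1.name)
    #=> ["elixir"]
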
From cbb19d0e1882f5ce641f30b51d7156336f81aba9 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sat, 26 Dec 2020 22:20:55 +0300 Subject: [PATCH 012/174] [#3213] Hashtag-filtering functions in ActivityPub. Mix task for migrating hashtags to `hashtags` table. --- lib/mix/tasks/pleroma/database.ex | 64 +++++++ lib/pleroma/web/activity_pub/activity_pub.ex | 171 +++++++++++++----- .../web/activity_pub/activity_pub_test.exs | 48 ++--- 3 files changed, 218 insertions(+), 65 deletions(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 22151ce08..093c7dd30 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -4,14 +4,18 @@ defmodule Mix.Tasks.Pleroma.Database do alias Pleroma.Conversation + alias Pleroma.Hashtag alias Pleroma.Maintenance alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User + require Logger require Pleroma.Constants + import Ecto.Query import Mix.Pleroma + use Mix.Task @shortdoc "A collection of database related tasks" @@ -128,6 +132,66 @@ def run(["fix_likes_collections"]) do |> Stream.run() end + def run(["transfer_hashtags"]) do + import Ecto.Query + + start_pleroma() + + from( + object in Object, + left_join: hashtag in assoc(object, :hashtags), + where: is_nil(hashtag.id), + where: fragment("(?)->>'tag' != '[]'", object.data), + select: %{ + id: object.id, + inserted_at: object.inserted_at, + tag: fragment("(?)->>'tag'", object.data) + }, + order_by: [desc: object.id] + ) + |> Pleroma.Repo.chunk_stream(100, :batches) + |> Stream.each(fn objects -> + chunk_start = List.first(objects) + chunk_end = List.last(objects) + + Logger.info( + "transfer_hashtags: " <> + "#{chunk_start.id} (#{chunk_start.inserted_at}) -- " <> + "#{chunk_end.id} (#{chunk_end.inserted_at})" + ) + + Enum.map( + objects, + fn object -> + hashtags = + object.tag + |> Jason.decode!() + |> Enum.filter(&is_bitstring(&1)) + + with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do + Repo.transaction(fn -> + for hashtag_record <- hashtag_records do + with {:error, _} <- + Ecto.Adapters.SQL.query( + Repo, + "insert into hashtags_objects(hashtag_id, object_id) values " <> + "(#{hashtag_record.id}, #{object.id});" + ) do + Logger.warn( + "ERROR: could not link object #{object.id} and hashtag #{hashtag_record.id}" + ) + end + end + end) + else + e -> Logger.warn("ERROR: could not process object #{object.id}: #{inspect(e)}") + end + end + ) + end) + |> Stream.run() + end + def run(["vacuum", args]) do start_pleroma() diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 1c91bc074..2e25412c6 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -660,33 +660,41 @@ defp restrict_since(query, %{since_id: since_id}) do defp restrict_since(query, _), do: query defp restrict_tag_reject(_query, %{tag_reject: _tag_reject, skip_preload: true}) do - raise "Can't use the child object without preloading!" 
+ raise_on_missing_preload() end - defp restrict_tag_reject(query, %{tag_reject: [_ | _] = tag_reject}) do + defp restrict_tag_reject(query, %{tag_reject: tag_reject}) when is_list(tag_reject) do from( [_activity, object] in query, where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) ) end + defp restrict_tag_reject(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do + restrict_tag_reject(query, %{tag_reject: [tag_reject]}) + end + defp restrict_tag_reject(query, _), do: query defp restrict_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do - raise "Can't use the child object without preloading!" + raise_on_missing_preload() end - defp restrict_tag_all(query, %{tag_all: [_ | _] = tag_all}) do + defp restrict_tag_all(query, %{tag_all: tag_all}) when is_list(tag_all) do from( [_activity, object] in query, where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all) ) end + defp restrict_tag_all(query, %{tag_all: tag}) when is_binary(tag) do + restrict_tag(query, %{tag: tag}) + end + defp restrict_tag_all(query, _), do: query defp restrict_tag(_query, %{tag: _tag, skip_preload: true}) do - raise "Can't use the child object without preloading!" + raise_on_missing_preload() end defp restrict_tag(query, %{tag: tag}) when is_list(tag) do @@ -697,14 +705,80 @@ defp restrict_tag(query, %{tag: tag}) when is_list(tag) do end defp restrict_tag(query, %{tag: tag}) when is_binary(tag) do - from( - [_activity, object] in query, - where: fragment("(?)->'tag' \\? (?)", object.data, ^tag) - ) + restrict_tag(query, %{tag: [tag]}) end defp restrict_tag(query, _), do: query + defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: tags_reject}) when is_list(tags_reject) do + if has_named_binding?(query, :thread_mute) do + from( + [activity, object, thread_mute] in query, + group_by: [activity.id, object.id, thread_mute.id] + ) + else + from( + [activity, object] in query, + group_by: [activity.id, object.id] + ) + end + |> join(:left, [_activity, object], hashtag in assoc(object, :hashtags), as: :hashtag) + |> having( + [hashtag: hashtag], + fragment("not(array_agg(?) 
&& (?))", hashtag.name, ^tags_reject) + ) + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do + restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]}) + end + + defp restrict_hashtag_reject_any(query, _), do: query + + defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_all(query, %{tag_all: tags}) when is_list(tags) do + Enum.reduce( + tags, + query, + fn tag, acc -> restrict_hashtag_any(acc, %{tag: tag}) end + ) + end + + defp restrict_hashtag_all(query, %{tag_all: tag}) when is_binary(tag) do + restrict_hashtag_any(query, %{tag: tag}) + end + + defp restrict_hashtag_all(query, _), do: query + + defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_any(query, %{tag: tags}) when is_list(tags) do + from( + [_activity, object] in query, + join: hashtag in assoc(object, :hashtags), + where: hashtag.name in ^tags + ) + end + + defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do + restrict_hashtag_any(query, %{tag: [tag]}) + end + + defp restrict_hashtag_any(query, _), do: query + + defp raise_on_missing_preload do + raise "Can't use the child object without preloading!" + end + defp restrict_recipients(query, [], _user), do: query defp restrict_recipients(query, recipients, nil) do @@ -1088,40 +1162,51 @@ def fetch_activities_query(recipients, opts \\ %{}) do skip_thread_containment: Config.get([:instance, :skip_thread_containment]) } - Activity - |> maybe_preload_objects(opts) - |> maybe_preload_bookmarks(opts) - |> maybe_preload_report_notes(opts) - |> maybe_set_thread_muted_field(opts) - |> maybe_order(opts) - |> restrict_recipients(recipients, opts[:user]) - |> restrict_replies(opts) - |> restrict_tag(opts) - |> restrict_tag_reject(opts) - |> restrict_tag_all(opts) - |> restrict_since(opts) - |> restrict_local(opts) - |> restrict_actor(opts) - |> restrict_type(opts) - |> restrict_state(opts) - |> restrict_favorited_by(opts) - |> restrict_blocked(restrict_blocked_opts) - |> restrict_muted(restrict_muted_opts) - |> restrict_filtered(opts) - |> restrict_media(opts) - |> restrict_visibility(opts) - |> restrict_thread_visibility(opts, config) - |> restrict_reblogs(opts) - |> restrict_pinned(opts) - |> restrict_muted_reblogs(restrict_muted_reblogs_opts) - |> restrict_instance(opts) - |> restrict_announce_object_actor(opts) - |> restrict_filtered(opts) - |> Activity.restrict_deactivated_users() - |> exclude_poll_votes(opts) - |> exclude_chat_messages(opts) - |> exclude_invisible_actors(opts) - |> exclude_visibility(opts) + query = + Activity + |> distinct([a], true) + |> maybe_preload_objects(opts) + |> maybe_preload_bookmarks(opts) + |> maybe_preload_report_notes(opts) + |> maybe_set_thread_muted_field(opts) + |> maybe_order(opts) + |> restrict_recipients(recipients, opts[:user]) + |> restrict_replies(opts) + |> restrict_since(opts) + |> restrict_local(opts) + |> restrict_actor(opts) + |> restrict_type(opts) + |> restrict_state(opts) + |> restrict_favorited_by(opts) + |> restrict_blocked(restrict_blocked_opts) + |> restrict_muted(restrict_muted_opts) + |> restrict_filtered(opts) + |> restrict_media(opts) + |> restrict_visibility(opts) + |> restrict_thread_visibility(opts, config) + |> restrict_reblogs(opts) + |> restrict_pinned(opts) + |> restrict_muted_reblogs(restrict_muted_reblogs_opts) + |> restrict_instance(opts) + |> 
restrict_announce_object_actor(opts) + |> restrict_filtered(opts) + |> Activity.restrict_deactivated_users() + |> exclude_poll_votes(opts) + |> exclude_chat_messages(opts) + |> exclude_invisible_actors(opts) + |> exclude_visibility(opts) + + if Config.get([:instance, :improved_hashtag_timeline]) do + query + |> restrict_hashtag_any(opts) + |> restrict_hashtag_all(opts) + |> restrict_hashtag_reject_any(opts) + else + query + |> restrict_tag(opts) + |> restrict_tag_reject(opts) + |> restrict_tag_all(opts) + end end def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index bfec32042..573b26d66 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -199,33 +199,37 @@ test "it fetches the appropriate tag-restricted posts" do {:ok, status_two} = CommonAPI.post(user, %{status: ". #essais"}) {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #reject"}) - fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) + for new_timeline_enabled <- [true, false] do + clear_config([:instance, :improved_hashtag_timeline], new_timeline_enabled) - fetch_two = ActivityPub.fetch_activities([], %{type: "Create", tag: ["test", "essais"]}) + fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) - fetch_three = - ActivityPub.fetch_activities([], %{ - type: "Create", - tag: ["test", "essais"], - tag_reject: ["reject"] - }) + fetch_two = ActivityPub.fetch_activities([], %{type: "Create", tag: ["test", "essais"]}) - fetch_four = - ActivityPub.fetch_activities([], %{ - type: "Create", - tag: ["test"], - tag_all: ["test", "reject"] - }) + fetch_three = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["test", "essais"], + tag_reject: ["reject"] + }) - [fetch_one, fetch_two, fetch_three, fetch_four] = - Enum.map([fetch_one, fetch_two, fetch_three, fetch_four], fn statuses -> - Enum.map(statuses, fn s -> Repo.preload(s, object: :hashtags) end) - end) + fetch_four = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["test"], + tag_all: ["test", "reject"] + }) - assert fetch_one == [status_one, status_three] - assert fetch_two == [status_one, status_two, status_three] - assert fetch_three == [status_one, status_two] - assert fetch_four == [status_three] + [fetch_one, fetch_two, fetch_three, fetch_four] = + Enum.map([fetch_one, fetch_two, fetch_three, fetch_four], fn statuses -> + Enum.map(statuses, fn s -> Repo.preload(s, object: :hashtags) end) + end) + + assert fetch_one == [status_one, status_three] + assert fetch_two == [status_one, status_two, status_three] + assert fetch_three == [status_one, status_two] + assert fetch_four == [status_three] + end end describe "insertion" do From 14fae94c0e4b04123c7af148260d0a4a51042570 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Mon, 28 Dec 2020 00:08:09 +0300 Subject: [PATCH 013/174] [#3213] Made Object.hashtags/1 work with :hashtags assoc. Adjusted tests. 
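A rough usage sketch of the toggle wired up below (the config path and the function names come from this patch; the object URL and hashtag values are purely illustrative):

    # With improved_hashtag_timeline unset/false, Object.hashtags/1 keeps reading
    # the embedded "tag" field; once the flag is set, it loads the :hashtags assoc.
    Pleroma.Config.put([:instance, :improved_hashtag_timeline], true)

    object = Pleroma.Object.get_by_ap_id("https://example.com/objects/1")
    Pleroma.Object.hashtags(object)
    # => ["pleroma", "elixir"]  (hashtag names, without the leading "#")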
--- lib/pleroma/config.ex | 2 ++ lib/pleroma/object.ex | 14 +++++++++++++- lib/pleroma/web/activity_pub/activity_pub.ex | 12 ++++++------ test/pleroma/activity/ir/topics_test.exs | 15 ++++++++------- 4 files changed, 29 insertions(+), 14 deletions(-) diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex index 86d4f6b72..ee0167f4e 100644 --- a/lib/pleroma/config.ex +++ b/lib/pleroma/config.ex @@ -96,6 +96,8 @@ def restrict_unauthenticated_access?(resource, kind) do end end + def object_embedded_hashtags?, do: !get([:instance, :improved_hashtag_timeline]) + def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], []) def oauth_consumer_enabled?, do: oauth_consumer_strategies() != [] diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 1d756bcd1..08114d4f2 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -384,7 +384,19 @@ def tags(%Object{data: %{"tag" => tags}}) when is_list(tags), do: tags def tags(_), do: [] - def hashtags(object), do: embedded_hashtags(object) + def hashtags(%Object{} = object) do + cond do + Config.object_embedded_hashtags?() -> + embedded_hashtags(object) + + object.id == "pleroma:fake_object_id" -> + [] + + true -> + hashtag_records = Repo.preload(object, :hashtags).hashtags + Enum.map(hashtag_records, & &1.name) + end + end defp embedded_hashtags(%Object{data: data}) do object_data_hashtags(data) diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 54d1a2350..626cad336 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -1199,16 +1199,16 @@ def fetch_activities_query(recipients, opts \\ %{}) do |> exclude_invisible_actors(opts) |> exclude_visibility(opts) - if Config.get([:instance, :improved_hashtag_timeline]) do - query - |> restrict_hashtag_any(opts) - |> restrict_hashtag_all(opts) - |> restrict_hashtag_reject_any(opts) - else + if Config.object_embedded_hashtags?() do query |> restrict_tag(opts) |> restrict_tag_reject(opts) |> restrict_tag_all(opts) + else + query + |> restrict_hashtag_any(opts) + |> restrict_hashtag_all(opts) + |> restrict_hashtag_reject_any(opts) end end diff --git a/test/pleroma/activity/ir/topics_test.exs b/test/pleroma/activity/ir/topics_test.exs index b464822d9..984777bda 100644 --- a/test/pleroma/activity/ir/topics_test.exs +++ b/test/pleroma/activity/ir/topics_test.exs @@ -11,6 +11,8 @@ defmodule Pleroma.Activity.Ir.TopicsTest do require Pleroma.Constants + import Mock + describe "poll answer" do test "produce no topics" do activity = %Activity{object: %Object{data: %{"type" => "Answer"}}} @@ -77,14 +79,13 @@ test "with no attachments doesn't produce public:media topics", %{activity: acti refute Enum.member?(topics, "public:local:media") end - test "converts tags to hash tags", %{activity: %{object: %{data: data} = object} = activity} do - tagged_data = Map.put(data, "tag", ["foo", "bar"]) - activity = %{activity | object: %{object | data: tagged_data}} + test "converts tags to hash tags", %{activity: activity} do + with_mock(Object, [:passthrough], hashtags: fn _ -> ["foo", "bar"] end) do + topics = Topics.get_activity_topics(activity) - topics = Topics.get_activity_topics(activity) - - assert Enum.member?(topics, "hashtag:foo") - assert Enum.member?(topics, "hashtag:bar") + assert Enum.member?(topics, "hashtag:foo") + assert Enum.member?(topics, "hashtag:bar") + end end test "only converts strings to hash tags", %{ From a25c1e8ec0b6f4ef2e9f68c4ad5e48e18f5f01a7 Mon Sep 
17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Wed, 30 Dec 2020 14:35:19 +0300 Subject: [PATCH 014/174] [#3213] Improved `database.transfer_hashtags` mix task: proper rollback, speedup. --- lib/mix/tasks/pleroma/database.ex | 46 +++++++++++++++++-------------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 093c7dd30..d44bd3478 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -137,6 +137,8 @@ def run(["transfer_hashtags"]) do start_pleroma() + Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") + from( object in Object, left_join: hashtag in assoc(object, :hashtags), @@ -144,21 +146,12 @@ def run(["transfer_hashtags"]) do where: fragment("(?)->>'tag' != '[]'", object.data), select: %{ id: object.id, - inserted_at: object.inserted_at, tag: fragment("(?)->>'tag'", object.data) - }, - order_by: [desc: object.id] + } ) |> Pleroma.Repo.chunk_stream(100, :batches) |> Stream.each(fn objects -> - chunk_start = List.first(objects) - chunk_end = List.last(objects) - - Logger.info( - "transfer_hashtags: " <> - "#{chunk_start.id} (#{chunk_start.inserted_at}) -- " <> - "#{chunk_end.id} (#{chunk_end.inserted_at})" - ) + Logger.info("Processing #{length(objects)} objects...") Enum.map( objects, @@ -168,28 +161,39 @@ def run(["transfer_hashtags"]) do |> Jason.decode!() |> Enum.filter(&is_bitstring(&1)) - with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do - Repo.transaction(fn -> + Repo.transaction(fn -> + with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do for hashtag_record <- hashtag_records do - with {:error, _} <- + with {:ok, _} <- Ecto.Adapters.SQL.query( Repo, "insert into hashtags_objects(hashtag_id, object_id) values " <> "(#{hashtag_record.id}, #{object.id});" ) do - Logger.warn( - "ERROR: could not link object #{object.id} and hashtag #{hashtag_record.id}" - ) + :noop + else + {:error, e} -> + error = + "ERROR: could not link object #{object.id} and hashtag " <> + "#{hashtag_record.id}: #{inspect(e)}" + + Logger.error(error) + Repo.rollback(error) end end - end) - else - e -> Logger.warn("ERROR: could not process object #{object.id}: #{inspect(e)}") - end + else + e -> + error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}" + Logger.error(error) + Repo.rollback(error) + end + end) end ) end) |> Stream.run() + + Logger.info("Done transferring hashtags. Please check logs to ensure no errors.") end def run(["vacuum", args]) do From e0b5edb6d5a423bfd247e0774d2f5bc642b2fb80 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Wed, 30 Dec 2020 14:42:35 +0300 Subject: [PATCH 015/174] [#3213] Fixed Object.object_data_hashtags/1 to process only AS2 elements of `data.tag` (basing on #2984). 
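To make the intended mapping concrete, a behaviour-by-example sketch (the tag data is invented, and the helper is still private at this point, so this is illustrative rather than a callable snippet):

    tag = [
      %{"type" => "Mention", "href" => "https://example.com/users/alice"},
      %{"type" => "Hashtag", "name" => "#Pleroma"},
      "legacy-plain-string-tag"
    ]

    # object_data_hashtags(%{"tag" => tag})
    # => ["pleroma"]
    # Only AS2 Hashtag maps that carry a "name" are kept; the name is downcased
    # and the leading "#" stripped. Plain-string tags are ignored by this
    # revision (they are handled again in the following patch).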
--- lib/pleroma/object.ex | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 08114d4f2..dad572f2b 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -405,8 +405,16 @@ defp embedded_hashtags(%Object{data: data}) do defp embedded_hashtags(_), do: [] defp object_data_hashtags(%{"tag" => tags}) when is_list(tags) do - # Note: AS2 map-type elements are ignored - Enum.filter(tags, &is_bitstring(&1)) + # Note: Old format with copy of hashtags as strings is ignored, using AS2 + tags + |> Enum.filter(fn + %{"type" => "Hashtag"} = data -> Map.has_key?(data, "name") + _ -> false + end) + |> Enum.map(fn + %{"name" => "#" <> hashtag} -> String.downcase(hashtag) + %{"name" => hashtag} -> String.downcase(hashtag) + end) end defp object_data_hashtags(_), do: [] From 8d1a0c1afd46f8683e9022523cecffb9b60c9f8c Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Wed, 30 Dec 2020 15:22:49 +0300 Subject: [PATCH 016/174] [#3213] Made Object.object_data_hashtags/1 handle both AS2 and plain text hashtags. --- lib/pleroma/object.ex | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index dad572f2b..7e79e15ee 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -405,16 +405,18 @@ defp embedded_hashtags(%Object{data: data}) do defp embedded_hashtags(_), do: [] defp object_data_hashtags(%{"tag" => tags}) when is_list(tags) do - # Note: Old format with copy of hashtags as strings is ignored, using AS2 tags |> Enum.filter(fn %{"type" => "Hashtag"} = data -> Map.has_key?(data, "name") + plain_text when is_bitstring(plain_text) -> true _ -> false end) |> Enum.map(fn %{"name" => "#" <> hashtag} -> String.downcase(hashtag) %{"name" => hashtag} -> String.downcase(hashtag) + hashtag when is_bitstring(hashtag) -> String.downcase(hashtag) end) + |> Enum.uniq() end defp object_data_hashtags(_), do: [] From 367f0c31c3c15f75aed1d3ba66914e4197c19596 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Thu, 31 Dec 2020 09:36:26 +0300 Subject: [PATCH 017/174] [#3213] Added query options support for Repo.chunk_stream/4. Used infinite timeout in transfer_hashtags select query. 
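A sketch of how a caller could use the new options argument (the query comes from the existing chunk_stream doc example; process_batch/1 is a hypothetical consumer, and timeout: :infinity mirrors the transfer_hashtags usage in this patch):

    Pleroma.Activity.Queries.by_actor(ap_id)
    |> Pleroma.Repo.chunk_stream(500, :batches, timeout: :infinity)
    # process_batch/1 is a placeholder that handles each 500-record chunk
    |> Stream.each(&process_batch/1)
    |> Stream.run()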
--- lib/mix/tasks/pleroma/database.ex | 11 +++++------ lib/pleroma/repo.ex | 6 +++--- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index d44bd3478..f903cf75b 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -149,9 +149,9 @@ def run(["transfer_hashtags"]) do tag: fragment("(?)->>'tag'", object.data) } ) - |> Pleroma.Repo.chunk_stream(100, :batches) + |> Repo.chunk_stream(100, :batches, timeout: :infinity) |> Stream.each(fn objects -> - Logger.info("Processing #{length(objects)} objects...") + Logger.info("Processing #{length(objects)} objects starting from id #{hd(objects).id}...") Enum.map( objects, @@ -165,10 +165,9 @@ def run(["transfer_hashtags"]) do with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do for hashtag_record <- hashtag_records do with {:ok, _} <- - Ecto.Adapters.SQL.query( - Repo, - "insert into hashtags_objects(hashtag_id, object_id) values " <> - "(#{hashtag_record.id}, #{object.id});" + Repo.query( + "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);", + [hashtag_record.id, object.id] ) do :noop else diff --git a/lib/pleroma/repo.ex b/lib/pleroma/repo.ex index 4524bd5e2..78711e6ac 100644 --- a/lib/pleroma/repo.ex +++ b/lib/pleroma/repo.ex @@ -63,8 +63,8 @@ def get_assoc(resource, association) do iex> Pleroma.Repo.chunk_stream(Pleroma.Activity.Queries.by_actor(ap_id), 500, :batches) """ @spec chunk_stream(Ecto.Query.t(), integer(), atom()) :: Enumerable.t() - def chunk_stream(query, chunk_size, returns_as \\ :one) do - # We don't actually need start and end funcitons of resource streaming, + def chunk_stream(query, chunk_size, returns_as \\ :one, query_options \\ []) do + # We don't actually need start and end functions of resource streaming, # but it seems to be the only way to not fetch records one-by-one and # have individual records be the elements of the stream, instead of # lists of records @@ -76,7 +76,7 @@ def chunk_stream(query, chunk_size, returns_as \\ :one) do |> order_by(asc: :id) |> where([r], r.id > ^last_id) |> limit(^chunk_size) - |> all() + |> all(query_options) |> case do [] -> {:halt, last_id} From 303055456f19152821ec5ec1df88d60c03f60905 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Thu, 31 Dec 2020 12:45:23 +0300 Subject: [PATCH 018/174] Alternative implementation of hashtag-filtering queries in ActivityPub. Fixed GROUP BY clause for aggregation on hashtags. 
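For orientation, the option shapes these clauses respond to look roughly like this (the hashtag names are invented; the option keys match the existing fetch_activities tests):

    ActivityPub.fetch_activities([], %{
      type: "Create",
      tag: ["elixir", "pleroma"],   # match any of these
      tag_all: ["elixir"],          # must carry all of these
      tag_reject: ["nsfw"]          # must carry none of these
    })

With the aggregate strategy introduced below, the query is grouped over all named bindings and filtered with HAVING over array_agg(hashtag.name), which is why the GROUP BY clause has to enumerate every joined binding.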
--- lib/pleroma/activity.ex | 2 + lib/pleroma/web/activity_pub/activity_pub.ex | 120 +++++++++++++++---- 2 files changed, 100 insertions(+), 22 deletions(-) diff --git a/lib/pleroma/activity.ex b/lib/pleroma/activity.ex index 9d970a808..df216e4de 100644 --- a/lib/pleroma/activity.ex +++ b/lib/pleroma/activity.ex @@ -113,6 +113,7 @@ def with_preloaded_bookmark(query, %User{} = user) do from([a] in query, left_join: b in Bookmark, on: b.user_id == ^user.id and b.activity_id == a.id, + as: :bookmark, preload: [bookmark: b] ) end @@ -123,6 +124,7 @@ def with_preloaded_report_notes(query) do from([a] in query, left_join: r in ReportNote, on: a.id == r.activity_id, + as: :report_note, preload: [report_notes: r] ) end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 626cad336..339843330 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -713,22 +713,92 @@ defp restrict_tag(query, %{tag: tag}) when is_binary(tag) do defp restrict_tag(query, _), do: query + defp restrict_hashtag(query, opts) do + [tag_any, tag_all, tag_reject] = + [:tag, :tag_all, :tag_reject] + |> Enum.map(&opts[&1]) + |> Enum.map(&List.wrap(&1)) + + has_conditions = Enum.any?([tag_any, tag_all, tag_reject], &Enum.any?(&1)) + + cond do + !has_conditions -> + query + + opts[:skip_preload] -> + raise_on_missing_preload() + + true -> + query + |> group_by_all_bindings() + |> join(:left, [_activity, object], hashtag in assoc(object, :hashtags), as: :hashtag) + |> maybe_restrict_hashtag_any(tag_any) + |> maybe_restrict_hashtag_all(tag_all) + |> maybe_restrict_hashtag_reject_any(tag_reject) + end + end + + # Groups by all bindings to allow aggregation on hashtags + defp group_by_all_bindings(query) do + # Expecting named bindings: :object, :bookmark, :thread_mute, :report_note + cond do + Enum.count(query.aliases) == 4 -> + from([a, o, b3, b4, b5] in query, group_by: [a.id, o.id, b3.id, b4.id, b5.id]) + + Enum.count(query.aliases) == 3 -> + from([a, o, b3, b4] in query, group_by: [a.id, o.id, b3.id, b4.id]) + + Enum.count(query.aliases) == 2 -> + from([a, o, b3] in query, group_by: [a.id, o.id, b3.id]) + + true -> + from([a, o] in query, group_by: [a.id, o.id]) + end + end + + defp maybe_restrict_hashtag_any(query, []) do + query + end + + defp maybe_restrict_hashtag_any(query, tags) do + having( + query, + [hashtag: hashtag], + fragment("array_agg(?) && (?)", hashtag.name, ^tags) + ) + end + + defp maybe_restrict_hashtag_all(query, []) do + query + end + + defp maybe_restrict_hashtag_all(query, tags) do + having( + query, + [hashtag: hashtag], + fragment("array_agg(?) @> (?)", hashtag.name, ^tags) + ) + end + + defp maybe_restrict_hashtag_reject_any(query, []) do + query + end + + defp maybe_restrict_hashtag_reject_any(query, tags) do + having( + query, + [hashtag: hashtag], + fragment("not(array_agg(?) 
&& (?))", hashtag.name, ^tags) + ) + end + defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do raise_on_missing_preload() end defp restrict_hashtag_reject_any(query, %{tag_reject: tags_reject}) when is_list(tags_reject) do - if has_named_binding?(query, :thread_mute) do - from( - [activity, object, thread_mute] in query, - group_by: [activity.id, object.id, thread_mute.id] - ) - else - from( - [activity, object] in query, - group_by: [activity.id, object.id] - ) - end + query + |> group_by_all_bindings() |> join(:left, [_activity, object], hashtag in assoc(object, :hashtags), as: :hashtag) |> having( [hashtag: hashtag], @@ -1167,7 +1237,6 @@ def fetch_activities_query(recipients, opts \\ %{}) do query = Activity - |> distinct([a], true) |> maybe_preload_objects(opts) |> maybe_preload_bookmarks(opts) |> maybe_preload_report_notes(opts) @@ -1199,16 +1268,23 @@ def fetch_activities_query(recipients, opts \\ %{}) do |> exclude_invisible_actors(opts) |> exclude_visibility(opts) - if Config.object_embedded_hashtags?() do - query - |> restrict_tag(opts) - |> restrict_tag_reject(opts) - |> restrict_tag_all(opts) - else - query - |> restrict_hashtag_any(opts) - |> restrict_hashtag_all(opts) - |> restrict_hashtag_reject_any(opts) + cond do + Config.object_embedded_hashtags?() -> + query + |> restrict_tag(opts) + |> restrict_tag_reject(opts) + |> restrict_tag_all(opts) + + # TODO: benchmark (initial approach preferring non-aggregate ops when possible) + Config.get([:instance, :improved_hashtag_timeline]) == :join -> + query + |> distinct([activity], true) + |> restrict_hashtag_any(opts) + |> restrict_hashtag_all(opts) + |> restrict_hashtag_reject_any(opts) + + true -> + restrict_hashtag(query, opts) end end From 0d521022fe6157ce9a346c6915ce38292e653bb3 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Thu, 7 Jan 2021 12:20:29 +0300 Subject: [PATCH 019/174] [#3213] Removed PK from hashtags_objects table. Improved hashtags_transfer mix task (logging of failed ids). 
--- lib/mix/tasks/pleroma/database.ex | 29 +++++++++++-------- lib/pleroma/object.ex | 4 +-- ...20201221203824_create_hashtags_objects.exs | 2 +- 3 files changed, 20 insertions(+), 15 deletions(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index f903cf75b..918752dc2 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -139,6 +139,7 @@ def run(["transfer_hashtags"]) do Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") + # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) from( object in Object, left_join: hashtag in assoc(object, :hashtags), @@ -153,13 +154,10 @@ def run(["transfer_hashtags"]) do |> Stream.each(fn objects -> Logger.info("Processing #{length(objects)} objects starting from id #{hd(objects).id}...") - Enum.map( - objects, - fn object -> - hashtags = - object.tag - |> Jason.decode!() - |> Enum.filter(&is_bitstring(&1)) + failed_ids = + objects + |> Enum.map(fn object -> + hashtags = Object.object_data_hashtags(%{"tag" => Jason.decode!(object.tag)}) Repo.transaction(fn -> with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do @@ -169,7 +167,7 @@ def run(["transfer_hashtags"]) do "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);", [hashtag_record.id, object.id] ) do - :noop + nil else {:error, e} -> error = @@ -177,18 +175,25 @@ def run(["transfer_hashtags"]) do "#{hashtag_record.id}: #{inspect(e)}" Logger.error(error) - Repo.rollback(error) + Repo.rollback(object.id) end end + + object.id else e -> error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}" Logger.error(error) - Repo.rollback(error) + Repo.rollback(object.id) end end) - end - ) + end) + |> Enum.filter(&(elem(&1, 0) == :error)) + |> Enum.map(&elem(&1, 1)) + + if Enum.any?(failed_ids) do + Logger.error("ERROR: transfer_hashtags iteration failed for ids: #{inspect(failed_ids)}") + end end) |> Stream.run() diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 7e79e15ee..61f2ffa19 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -404,7 +404,7 @@ defp embedded_hashtags(%Object{data: data}) do defp embedded_hashtags(_), do: [] - defp object_data_hashtags(%{"tag" => tags}) when is_list(tags) do + def object_data_hashtags(%{"tag" => tags}) when is_list(tags) do tags |> Enum.filter(fn %{"type" => "Hashtag"} = data -> Map.has_key?(data, "name") @@ -419,5 +419,5 @@ defp object_data_hashtags(%{"tag" => tags}) when is_list(tags) do |> Enum.uniq() end - defp object_data_hashtags(_), do: [] + def object_data_hashtags(_), do: [] end diff --git a/priv/repo/migrations/20201221203824_create_hashtags_objects.exs b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs index b2649b4fb..214ea81c3 100644 --- a/priv/repo/migrations/20201221203824_create_hashtags_objects.exs +++ b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs @@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateHashtagsObjects do use Ecto.Migration def change do - create_if_not_exists table(:hashtags_objects) do + create_if_not_exists table(:hashtags_objects, primary_key: false) do add(:hashtag_id, references(:hashtags), null: false) add(:object_id, references(:objects), null: false) end From 8c972de0457199098c5f3378313d08a9dd2d64ce Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 10 Jan 2021 11:44:39 +0300 Subject: [PATCH 020/174] [#3213] transfer_hashtags mix task 
refactoring. --- lib/mix/tasks/pleroma/database.ex | 127 ++++++++++++++---------------- 1 file changed, 59 insertions(+), 68 deletions(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 918752dc2..e9686fc1b 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -132,74 +132,6 @@ def run(["fix_likes_collections"]) do |> Stream.run() end - def run(["transfer_hashtags"]) do - import Ecto.Query - - start_pleroma() - - Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") - - # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) - from( - object in Object, - left_join: hashtag in assoc(object, :hashtags), - where: is_nil(hashtag.id), - where: fragment("(?)->>'tag' != '[]'", object.data), - select: %{ - id: object.id, - tag: fragment("(?)->>'tag'", object.data) - } - ) - |> Repo.chunk_stream(100, :batches, timeout: :infinity) - |> Stream.each(fn objects -> - Logger.info("Processing #{length(objects)} objects starting from id #{hd(objects).id}...") - - failed_ids = - objects - |> Enum.map(fn object -> - hashtags = Object.object_data_hashtags(%{"tag" => Jason.decode!(object.tag)}) - - Repo.transaction(fn -> - with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do - for hashtag_record <- hashtag_records do - with {:ok, _} <- - Repo.query( - "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);", - [hashtag_record.id, object.id] - ) do - nil - else - {:error, e} -> - error = - "ERROR: could not link object #{object.id} and hashtag " <> - "#{hashtag_record.id}: #{inspect(e)}" - - Logger.error(error) - Repo.rollback(object.id) - end - end - - object.id - else - e -> - error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}" - Logger.error(error) - Repo.rollback(object.id) - end - end) - end) - |> Enum.filter(&(elem(&1, 0) == :error)) - |> Enum.map(&elem(&1, 1)) - - if Enum.any?(failed_ids) do - Logger.error("ERROR: transfer_hashtags iteration failed for ids: #{inspect(failed_ids)}") - end - end) - |> Stream.run() - - Logger.info("Done transferring hashtags. Please check logs to ensure no errors.") - end - def run(["vacuum", args]) do start_pleroma() @@ -239,4 +171,63 @@ def run(["ensure_expiration"]) do end) |> Stream.run() end + + def run(["transfer_hashtags"]) do + import Ecto.Query + + start_pleroma() + + Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") + + # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) + from( + object in Object, + left_join: hashtag in assoc(object, :hashtags), + where: is_nil(hashtag.id), + where: + fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data), + select: %{ + id: object.id, + tag: fragment("(?)->'tag'", object.data) + } + ) + |> Repo.chunk_stream(100, :one, timeout: :infinity) + |> Stream.each(&transfer_object_hashtags(&1)) + |> Stream.run() + + Logger.info("Done transferring hashtags. 
Please check logs to ensure no errors.") + end + + defp transfer_object_hashtags(object) do + hashtags = Object.object_data_hashtags(%{"tag" => object.tag}) + + Repo.transaction(fn -> + with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do + for hashtag_record <- hashtag_records do + with {:ok, _} <- + Repo.query( + "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);", + [hashtag_record.id, object.id] + ) do + nil + else + {:error, e} -> + error = + "ERROR: could not link object #{object.id} and hashtag " <> + "#{hashtag_record.id}: #{inspect(e)}" + + Logger.error(error) + Repo.rollback(object.id) + end + end + + object.id + else + e -> + error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}" + Logger.error(error) + Repo.rollback(object.id) + end + end) + end end From 3e4d84729a4ca8d9779d439a9aa2c8c23b3acd1d Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Wed, 13 Jan 2021 22:07:38 +0300 Subject: [PATCH 021/174] [#3213] Prototype of data migrations functionality / HashtagsTableMigrator. --- lib/mix/tasks/pleroma/database.ex | 60 ----- lib/pleroma/application.ex | 3 +- lib/pleroma/config.ex | 4 +- lib/pleroma/data_migration.ex | 46 ++++ lib/pleroma/delivery.ex | 1 - lib/pleroma/ecto_enums.ex | 8 + .../migrators/hashtags_table_migrator.ex | 211 ++++++++++++++++++ lib/pleroma/web/activity_pub/activity_pub.ex | 2 +- .../20210105195018_create_data_migrations.exs | 17 ++ ...gration_create_populate_hashtags_table.exs | 14 ++ ...72254_create_data_migration_failed_ids.exs | 14 ++ 11 files changed, 316 insertions(+), 64 deletions(-) create mode 100644 lib/pleroma/data_migration.ex create mode 100644 lib/pleroma/migrators/hashtags_table_migrator.ex create mode 100644 priv/repo/migrations/20210105195018_create_data_migrations.exs create mode 100644 priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs create mode 100644 priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index e9686fc1b..08ede9eef 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -4,7 +4,6 @@ defmodule Mix.Tasks.Pleroma.Database do alias Pleroma.Conversation - alias Pleroma.Hashtag alias Pleroma.Maintenance alias Pleroma.Object alias Pleroma.Repo @@ -171,63 +170,4 @@ def run(["ensure_expiration"]) do end) |> Stream.run() end - - def run(["transfer_hashtags"]) do - import Ecto.Query - - start_pleroma() - - Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") - - # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) - from( - object in Object, - left_join: hashtag in assoc(object, :hashtags), - where: is_nil(hashtag.id), - where: - fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data), - select: %{ - id: object.id, - tag: fragment("(?)->'tag'", object.data) - } - ) - |> Repo.chunk_stream(100, :one, timeout: :infinity) - |> Stream.each(&transfer_object_hashtags(&1)) - |> Stream.run() - - Logger.info("Done transferring hashtags. 
Please check logs to ensure no errors.") - end - - defp transfer_object_hashtags(object) do - hashtags = Object.object_data_hashtags(%{"tag" => object.tag}) - - Repo.transaction(fn -> - with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do - for hashtag_record <- hashtag_records do - with {:ok, _} <- - Repo.query( - "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);", - [hashtag_record.id, object.id] - ) do - nil - else - {:error, e} -> - error = - "ERROR: could not link object #{object.id} and hashtag " <> - "#{hashtag_record.id}: #{inspect(e)}" - - Logger.error(error) - Repo.rollback(object.id) - end - end - - object.id - else - e -> - error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}" - Logger.error(error) - Repo.rollback(object.id) - end - end) - end end diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index bd568d858..962529dfd 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -104,7 +104,8 @@ def start(_type, _args) do chat_child(chat_enabled?()) ++ [ Pleroma.Web.Endpoint, - Pleroma.Gopher.Server + Pleroma.Gopher.Server, + Pleroma.Migrators.HashtagsTableMigrator ] # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex index ee0167f4e..dbfb114d6 100644 --- a/lib/pleroma/config.ex +++ b/lib/pleroma/config.ex @@ -96,7 +96,9 @@ def restrict_unauthenticated_access?(resource, kind) do end end - def object_embedded_hashtags?, do: !get([:instance, :improved_hashtag_timeline]) + def improved_hashtag_timeline_path, do: [:instance, :improved_hashtag_timeline] + def improved_hashtag_timeline, do: get(improved_hashtag_timeline_path()) + def object_embedded_hashtags?, do: !improved_hashtag_timeline() def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], []) diff --git a/lib/pleroma/data_migration.ex b/lib/pleroma/data_migration.ex new file mode 100644 index 000000000..64fa155ff --- /dev/null +++ b/lib/pleroma/data_migration.ex @@ -0,0 +1,46 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.DataMigration do + use Ecto.Schema + + alias Pleroma.DataMigration + alias Pleroma.DataMigration.State + alias Pleroma.Repo + + import Ecto.Changeset + + schema "data_migrations" do + field(:name, :string) + field(:state, State, default: :pending) + field(:feature_lock, :boolean, default: false) + field(:params, :map, default: %{}) + field(:data, :map, default: %{}) + + timestamps() + end + + def changeset(data_migration, params \\ %{}) do + data_migration + |> cast(params, [:name, :state, :feature_lock, :params, :data]) + |> validate_required([:name]) + |> unique_constraint(:name) + end + + def update(data_migration, params \\ %{}) do + data_migration + |> changeset(params) + |> Repo.update() + end + + def update_state(data_migration, new_state) do + update(data_migration, %{state: new_state}) + end + + def get_by_name(name) do + Repo.get_by(DataMigration, name: name) + end + + def populate_hashtags_table, do: get_by_name("populate_hashtags_table") +end diff --git a/lib/pleroma/delivery.ex b/lib/pleroma/delivery.ex index 0ded2855c..baf79dda7 100644 --- a/lib/pleroma/delivery.ex +++ b/lib/pleroma/delivery.ex @@ -9,7 +9,6 @@ defmodule Pleroma.Delivery do alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User - alias Pleroma.User import Ecto.Changeset import Ecto.Query diff 
--git a/lib/pleroma/ecto_enums.ex b/lib/pleroma/ecto_enums.ex index 6fc47620c..f0ae658a4 100644 --- a/lib/pleroma/ecto_enums.ex +++ b/lib/pleroma/ecto_enums.ex @@ -17,3 +17,11 @@ follow_accept: 2, follow_reject: 3 ) + +defenum(Pleroma.DataMigration.State, + pending: 1, + running: 2, + complete: 3, + failed: 4, + manual: 5 +) diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex new file mode 100644 index 000000000..a7e3de542 --- /dev/null +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -0,0 +1,211 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Migrators.HashtagsTableMigrator do + defmodule State do + use Agent + + @init_state %{} + + def start_link(_) do + Agent.start_link(fn -> @init_state end, name: __MODULE__) + end + + def get do + Agent.get(__MODULE__, & &1) + end + + def put(key, value) do + Agent.update(__MODULE__, fn state -> + Map.put(state, key, value) + end) + end + + def increment(key, increment \\ 1) do + Agent.update(__MODULE__, fn state -> + updated_value = (state[key] || 0) + increment + Map.put(state, key, updated_value) + end) + end + end + + use GenServer + + require Logger + + import Ecto.Query + + alias Pleroma.Config + alias Pleroma.DataMigration + alias Pleroma.Hashtag + alias Pleroma.Object + alias Pleroma.Repo + + defdelegate state(), to: State, as: :get + defdelegate put_state(key, value), to: State, as: :put + defdelegate increment_state(key, increment), to: State, as: :increment + + defdelegate data_migration(), to: DataMigration, as: :populate_hashtags_table + + def start_link(_) do + GenServer.start_link(__MODULE__, nil, name: __MODULE__) + end + + @impl true + def init(_) do + {:ok, nil, {:continue, :init_state}} + end + + @impl true + def handle_continue(:init_state, _state) do + {:ok, _} = State.start_link(nil) + + put_state(:status, :init) + + dm = data_migration() + + cond do + Config.get(:env) == :test -> + put_state(:status, :noop) + + is_nil(dm) -> + put_state(:status, :halt) + put_state(:message, "Data migration does not exist.") + + dm.state == :manual -> + put_state(:status, :noop) + put_state(:message, "Data migration is in manual execution state.") + + dm.state == :complete -> + handle_success() + + true -> + send(self(), :migrate_hashtags) + end + + {:noreply, nil} + end + + @impl true + def handle_info(:migrate_hashtags, state) do + data_migration = data_migration() + + {:ok, data_migration} = DataMigration.update_state(data_migration, :running) + put_state(:status, :running) + + Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") + + max_processed_id = data_migration.data["max_processed_id"] || 0 + + # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) + from( + object in Object, + left_join: hashtag in assoc(object, :hashtags), + where: object.id > ^max_processed_id, + where: is_nil(hashtag.id), + where: + fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data), + select: %{ + id: object.id, + tag: fragment("(?)->'tag'", object.data) + } + ) + |> Repo.chunk_stream(100, :batches, timeout: :infinity) + |> Stream.each(fn objects -> + object_ids = Enum.map(objects, & &1.id) + + failed_ids = + objects + |> Enum.map(&transfer_object_hashtags(&1)) + |> Enum.filter(&(elem(&1, 0) == :error)) + |> Enum.map(&elem(&1, 1)) + + for failed_id <- 
failed_ids do + _ = + Repo.query( + "INSERT INTO data_migration_failed_ids(data_migration_id, record_id) " <> + "VALUES ($1, $2) ON CONFLICT DO NOTHING;", + [data_migration.id, failed_id] + ) + end + + _ = + Repo.query( + "DELETE FROM data_migration_failed_ids WHERE id = ANY($1)", + [object_ids -- failed_ids] + ) + + max_object_id = Enum.at(object_ids, -1) + _ = DataMigration.update(data_migration, %{data: %{"max_processed_id" => max_object_id}}) + + increment_state(:processed_count, length(object_ids)) + increment_state(:failed_count, length(failed_ids)) + + # A quick and dirty approach to controlling the load this background migration imposes + sleep_interval = Config.get([:populate_hashtags_table, :sleep_interval_ms], 0) + Process.sleep(sleep_interval) + end) + |> Stream.run() + + with {:ok, %{rows: [[0]]}} <- + Repo.query( + "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;", + [data_migration.id] + ) do + put_state(:status, :complete) + _ = DataMigration.update_state(data_migration, :complete) + + handle_success() + else + _ -> + put_state(:status, :failed) + put_state(:message, "Please check data_migration_failed_ids records.") + end + + {:noreply, state} + end + + defp transfer_object_hashtags(object) do + hashtags = Object.object_data_hashtags(%{"tag" => object.tag}) + + Repo.transaction(fn -> + with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do + for hashtag_record <- hashtag_records do + with {:ok, _} <- + Repo.query( + "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);", + [hashtag_record.id, object.id] + ) do + nil + else + {:error, e} -> + error = + "ERROR: could not link object #{object.id} and hashtag " <> + "#{hashtag_record.id}: #{inspect(e)}" + + Logger.error(error) + Repo.rollback(object.id) + end + end + + object.id + else + e -> + error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}" + Logger.error(error) + Repo.rollback(object.id) + end + end) + end + + defp handle_success do + put_state(:status, :complete) + + unless Config.improved_hashtag_timeline() do + Config.put(Config.improved_hashtag_timeline_path(), true) + end + + :ok + end +end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 339843330..6131ae85b 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -1276,7 +1276,7 @@ def fetch_activities_query(recipients, opts \\ %{}) do |> restrict_tag_all(opts) # TODO: benchmark (initial approach preferring non-aggregate ops when possible) - Config.get([:instance, :improved_hashtag_timeline]) == :join -> + Config.improved_hashtag_timeline() == :join -> query |> distinct([activity], true) |> restrict_hashtag_any(opts) diff --git a/priv/repo/migrations/20210105195018_create_data_migrations.exs b/priv/repo/migrations/20210105195018_create_data_migrations.exs new file mode 100644 index 000000000..5f2e8d96c --- /dev/null +++ b/priv/repo/migrations/20210105195018_create_data_migrations.exs @@ -0,0 +1,17 @@ +defmodule Pleroma.Repo.Migrations.CreateDataMigrations do + use Ecto.Migration + + def change do + create_if_not_exists table(:data_migrations) do + add(:name, :string, null: false) + add(:state, :integer, default: 1) + add(:feature_lock, :boolean, default: false) + add(:params, :map, default: %{}) + add(:data, :map, default: %{}) + + timestamps() + end + + create_if_not_exists(unique_index(:data_migrations, [:name])) + end +end diff --git 
a/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs b/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs new file mode 100644 index 000000000..2a965f075 --- /dev/null +++ b/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs @@ -0,0 +1,14 @@ +defmodule Pleroma.Repo.Migrations.DataMigrationCreatePopulateHashtagsTable do + use Ecto.Migration + + def up do + dt = NaiveDateTime.utc_now() + + execute( + "INSERT INTO data_migrations(name, inserted_at, updated_at) " <> + "VALUES ('populate_hashtags_table', '#{dt}', '#{dt}') ON CONFLICT DO NOTHING;" + ) + end + + def down, do: :ok +end diff --git a/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs b/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs new file mode 100644 index 000000000..ba0be98af --- /dev/null +++ b/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs @@ -0,0 +1,14 @@ +defmodule Pleroma.Repo.Migrations.CreateDataMigrationFailedIds do + use Ecto.Migration + + def change do + create_if_not_exists table(:data_migration_failed_ids, primary_key: false) do + add(:data_migration_id, references(:data_migrations), null: false) + add(:record_id, :bigint, null: false) + end + + create_if_not_exists( + unique_index(:data_migration_failed_ids, [:data_migration_id, :record_id]) + ) + end +end From f5f267fa764f53ef617bc9504c7ecb68b5d3d7ab Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Thu, 14 Jan 2021 22:41:27 +0300 Subject: [PATCH 022/174] [#3213] Refactoring of HashtagsTableMigrator. --- .../migrators/hashtags_table_migrator.ex | 98 ++++++++++--------- .../hashtags_table_migrator/state.ex | 26 +++++ .../20190711042021_create_safe_jsonb_set.exs | 2 +- 3 files changed, 79 insertions(+), 47 deletions(-) create mode 100644 lib/pleroma/migrators/hashtags_table_migrator/state.ex diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index a7e3de542..9f1a00f9c 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -3,39 +3,13 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Migrators.HashtagsTableMigrator do - defmodule State do - use Agent - - @init_state %{} - - def start_link(_) do - Agent.start_link(fn -> @init_state end, name: __MODULE__) - end - - def get do - Agent.get(__MODULE__, & &1) - end - - def put(key, value) do - Agent.update(__MODULE__, fn state -> - Map.put(state, key, value) - end) - end - - def increment(key, increment \\ 1) do - Agent.update(__MODULE__, fn state -> - updated_value = (state[key] || 0) + increment - Map.put(state, key, updated_value) - end) - end - end - use GenServer require Logger import Ecto.Query + alias __MODULE__.State alias Pleroma.Config alias Pleroma.DataMigration alias Pleroma.Hashtag @@ -43,13 +17,23 @@ def increment(key, increment \\ 1) do alias Pleroma.Repo defdelegate state(), to: State, as: :get - defdelegate put_state(key, value), to: State, as: :put - defdelegate increment_state(key, increment), to: State, as: :increment + defdelegate put_stat(key, value), to: State, as: :put + defdelegate increment_stat(key, increment), to: State, as: :increment defdelegate data_migration(), to: DataMigration, as: :populate_hashtags_table + @reg_name {:global, __MODULE__} + + def whereis, do: GenServer.whereis(@reg_name) + def start_link(_) do - GenServer.start_link(__MODULE__, nil, 
name: __MODULE__) + case whereis() do + nil -> + GenServer.start_link(__MODULE__, nil, name: @reg_name) + + pid -> + {:ok, pid} + end end @impl true @@ -61,21 +45,22 @@ def init(_) do def handle_continue(:init_state, _state) do {:ok, _} = State.start_link(nil) - put_state(:status, :init) + put_stat(:status, :init) dm = data_migration() + manual_migrations = Config.get([:instance, :manual_data_migrations], []) cond do Config.get(:env) == :test -> - put_state(:status, :noop) + put_stat(:status, :noop) is_nil(dm) -> - put_state(:status, :halt) - put_state(:message, "Data migration does not exist.") + put_stat(:status, :halt) + put_stat(:message, "Data migration does not exist.") - dm.state == :manual -> - put_state(:status, :noop) - put_state(:message, "Data migration is in manual execution state.") + dm.state == :manual or dm.name in manual_migrations -> + put_stat(:status, :noop) + put_stat(:message, "Data migration is in manual execution state.") dm.state == :complete -> handle_success() @@ -91,8 +76,12 @@ def handle_continue(:init_state, _state) do def handle_info(:migrate_hashtags, state) do data_migration = data_migration() - {:ok, data_migration} = DataMigration.update_state(data_migration, :running) - put_state(:status, :running) + persistent_data = Map.take(data_migration.data, ["max_processed_id"]) + + {:ok, data_migration} = + DataMigration.update(data_migration, %{state: :running, data: persistent_data}) + + put_stat(:status, :running) Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") @@ -137,10 +126,12 @@ def handle_info(:migrate_hashtags, state) do ) max_object_id = Enum.at(object_ids, -1) - _ = DataMigration.update(data_migration, %{data: %{"max_processed_id" => max_object_id}}) - increment_state(:processed_count, length(object_ids)) - increment_state(:failed_count, length(failed_ids)) + put_stat(:max_processed_id, max_object_id) + increment_stat(:processed_count, length(object_ids)) + increment_stat(:failed_count, length(failed_ids)) + + persist_stats(data_migration) # A quick and dirty approach to controlling the load this background migration imposes sleep_interval = Config.get([:populate_hashtags_table, :sleep_interval_ms], 0) @@ -153,14 +144,15 @@ def handle_info(:migrate_hashtags, state) do "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;", [data_migration.id] ) do - put_state(:status, :complete) _ = DataMigration.update_state(data_migration, :complete) handle_success() else _ -> - put_state(:status, :failed) - put_state(:message, "Please check data_migration_failed_ids records.") + _ = DataMigration.update_state(data_migration, :failed) + + put_stat(:status, :failed) + put_stat(:message, "Please check data_migration_failed_ids records.") end {:noreply, state} @@ -199,8 +191,13 @@ defp transfer_object_hashtags(object) do end) end + defp persist_stats(data_migration) do + runner_state = Map.drop(state(), [:status]) + _ = DataMigration.update(data_migration, %{data: runner_state}) + end + defp handle_success do - put_state(:status, :complete) + put_stat(:status, :complete) unless Config.improved_hashtag_timeline() do Config.put(Config.improved_hashtag_timeline_path(), true) @@ -208,4 +205,13 @@ defp handle_success do :ok end + + def force_continue do + send(whereis(), :migrate_hashtags) + end + + def force_restart do + {:ok, _} = DataMigration.update(data_migration(), %{state: :pending, data: %{}}) + force_continue() + end end diff --git a/lib/pleroma/migrators/hashtags_table_migrator/state.ex 
b/lib/pleroma/migrators/hashtags_table_migrator/state.ex new file mode 100644 index 000000000..79926892c --- /dev/null +++ b/lib/pleroma/migrators/hashtags_table_migrator/state.ex @@ -0,0 +1,26 @@ +defmodule Pleroma.Migrators.HashtagsTableMigrator.State do + use Agent + + @init_state %{} + + def start_link(_) do + Agent.start_link(fn -> @init_state end, name: __MODULE__) + end + + def get do + Agent.get(__MODULE__, & &1) + end + + def put(key, value) do + Agent.update(__MODULE__, fn state -> + Map.put(state, key, value) + end) + end + + def increment(key, increment \\ 1) do + Agent.update(__MODULE__, fn state -> + updated_value = (state[key] || 0) + increment + Map.put(state, key, updated_value) + end) + end +end diff --git a/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs b/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs index 43d616705..bfac09f9e 100644 --- a/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs +++ b/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs @@ -9,7 +9,7 @@ def change do begin result := jsonb_set(target, path, coalesce(new_value, 'null'::jsonb), create_missing); if result is NULL then - raise 'jsonb_set tried to wipe the object, please report this incindent to Pleroma bug tracker. https://git.pleroma.social/pleroma/pleroma/issues/new'; + raise 'jsonb_set tried to wipe the object, please report this incident to Pleroma bug tracker. https://git.pleroma.social/pleroma/pleroma/issues/new'; return target; else return result; From 48b399cedb7d46ea0f08181cfbe4df222861f65b Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sat, 16 Jan 2021 20:22:14 +0300 Subject: [PATCH 023/174] [#3213] Refactoring of HashtagsTableMigrator. Hashtag timeline performance optimization (auto switch to non-aggregate join strategy when efficient). --- CHANGELOG.md | 1 + config/description.exs | 6 ++ .../migrators/hashtags_table_migrator.ex | 47 +++++++----- .../hashtags_table_migrator/state.ex | 9 +-- lib/pleroma/web/activity_pub/activity_pub.ex | 72 +++++++++++-------- .../web/activity_pub/activity_pub_test.exs | 4 +- 6 files changed, 86 insertions(+), 53 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 25b24bf07..9a053156f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Search: When using Postgres 11+, Pleroma will use the `websearch_to_tsvector` function to parse search queries. - Emoji: Support the full Unicode 13.1 set of Emoji for reactions, plus regional indicators. - Admin API: Reports now ordered by newest +- Extracted object hashtags into separate table in order to improve hashtag timeline performance (via background migration in `Pleroma.Migrators.HashtagsTableMigrator`). ### Added diff --git a/config/description.exs b/config/description.exs index f438a88ab..c73d50f7d 100644 --- a/config/description.exs +++ b/config/description.exs @@ -941,6 +941,12 @@ key: :show_reactions, type: :boolean, description: "Let favourites and emoji reactions be viewed through the API." + }, + %{ + key: :improved_hashtag_timeline, + type: :keyword, + description: + "If `true` / `:prefer_aggregation` / `:avoid_aggregation`, hashtags table and selected strategy will be used for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when HashtagsTableMigrator completes." 
} ] }, diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 9f1a00f9c..b40578d50 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -45,25 +45,23 @@ def init(_) do def handle_continue(:init_state, _state) do {:ok, _} = State.start_link(nil) - put_stat(:status, :init) + update_status(:init) - dm = data_migration() + data_migration = data_migration() manual_migrations = Config.get([:instance, :manual_data_migrations], []) cond do Config.get(:env) == :test -> - put_stat(:status, :noop) + update_status(:noop) - is_nil(dm) -> - put_stat(:status, :halt) - put_stat(:message, "Data migration does not exist.") + is_nil(data_migration) -> + update_status(:halt, "Data migration does not exist.") - dm.state == :manual or dm.name in manual_migrations -> - put_stat(:status, :noop) - put_stat(:message, "Data migration is in manual execution state.") + data_migration.state == :manual or data_migration.name in manual_migrations -> + update_status(:noop, "Data migration is in manual execution state.") - dm.state == :complete -> - handle_success() + data_migration.state == :complete -> + handle_success(data_migration) true -> send(self(), :migrate_hashtags) @@ -81,7 +79,7 @@ def handle_info(:migrate_hashtags, state) do {:ok, data_migration} = DataMigration.update(data_migration, %{state: :running, data: persistent_data}) - put_stat(:status, :running) + update_status(:running) Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") @@ -146,13 +144,12 @@ def handle_info(:migrate_hashtags, state) do ) do _ = DataMigration.update_state(data_migration, :complete) - handle_success() + handle_success(data_migration) else _ -> _ = DataMigration.update_state(data_migration, :failed) - put_stat(:status, :failed) - put_stat(:message, "Please check data_migration_failed_ids records.") + update_status(:failed, "Please check data_migration_failed_ids records.") end {:noreply, state} @@ -196,16 +193,25 @@ defp persist_stats(data_migration) do _ = DataMigration.update(data_migration, %{data: runner_state}) end - defp handle_success do - put_stat(:status, :complete) + defp handle_success(data_migration) do + update_status(:complete) - unless Config.improved_hashtag_timeline() do + unless data_migration.feature_lock || Config.improved_hashtag_timeline() do Config.put(Config.improved_hashtag_timeline_path(), true) end :ok end + def failed_objects_query do + from(o in Object) + |> join(:inner, [o], dmf in fragment("SELECT * FROM data_migration_failed_ids"), + on: dmf.record_id == o.id + ) + |> where([_o, dmf], dmf.data_migration_id == ^data_migration().id) + |> order_by([o], asc: o.id) + end + def force_continue do send(whereis(), :migrate_hashtags) end @@ -214,4 +220,9 @@ def force_restart do {:ok, _} = DataMigration.update(data_migration(), %{state: :pending, data: %{}}) force_continue() end + + defp update_status(status, message \\ nil) do + put_stat(:status, status) + put_stat(:message, message) + end end diff --git a/lib/pleroma/migrators/hashtags_table_migrator/state.ex b/lib/pleroma/migrators/hashtags_table_migrator/state.ex index 79926892c..c1a2709fc 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator/state.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator/state.ex @@ -2,23 +2,24 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator.State do use Agent @init_state %{} + @reg_name {:global, __MODULE__} def start_link(_) do - Agent.start_link(fn -> 
@init_state end, name: __MODULE__) + Agent.start_link(fn -> @init_state end, name: @reg_name) end def get do - Agent.get(__MODULE__, & &1) + Agent.get(@reg_name, & &1) end def put(key, value) do - Agent.update(__MODULE__, fn state -> + Agent.update(@reg_name, fn state -> Map.put(state, key, value) end) end def increment(key, increment \\ 1) do - Agent.update(__MODULE__, fn state -> + Agent.update(@reg_name, fn state -> updated_value = (state[key] || 0) + increment Map.put(state, key, updated_value) end) diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index f5563b0fd..0609827ec 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -669,63 +669,66 @@ defp restrict_since(query, %{since_id: since_id}) do defp restrict_since(query, _), do: query - defp restrict_tag_reject(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + defp restrict_embedded_tag_reject(_query, %{tag_reject: _tag_reject, skip_preload: true}) do raise_on_missing_preload() end - defp restrict_tag_reject(query, %{tag_reject: tag_reject}) when is_list(tag_reject) do + defp restrict_embedded_tag_reject(query, %{tag_reject: tag_reject}) when is_list(tag_reject) do from( [_activity, object] in query, where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) ) end - defp restrict_tag_reject(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do - restrict_tag_reject(query, %{tag_reject: [tag_reject]}) + defp restrict_embedded_tag_reject(query, %{tag_reject: tag_reject}) + when is_binary(tag_reject) do + restrict_embedded_tag_reject(query, %{tag_reject: [tag_reject]}) end - defp restrict_tag_reject(query, _), do: query + defp restrict_embedded_tag_reject(query, _), do: query - defp restrict_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do + defp restrict_embedded_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do raise_on_missing_preload() end - defp restrict_tag_all(query, %{tag_all: tag_all}) when is_list(tag_all) do + defp restrict_embedded_tag_all(query, %{tag_all: tag_all}) when is_list(tag_all) do from( [_activity, object] in query, where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all) ) end - defp restrict_tag_all(query, %{tag_all: tag}) when is_binary(tag) do - restrict_tag(query, %{tag: tag}) + defp restrict_embedded_tag_all(query, %{tag_all: tag}) when is_binary(tag) do + restrict_embedded_tag(query, %{tag: tag}) end - defp restrict_tag_all(query, _), do: query + defp restrict_embedded_tag_all(query, _), do: query - defp restrict_tag(_query, %{tag: _tag, skip_preload: true}) do + defp restrict_embedded_tag(_query, %{tag: _tag, skip_preload: true}) do raise_on_missing_preload() end - defp restrict_tag(query, %{tag: tag}) when is_list(tag) do + defp restrict_embedded_tag(query, %{tag: tag}) when is_list(tag) do from( [_activity, object] in query, where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag) ) end - defp restrict_tag(query, %{tag: tag}) when is_binary(tag) do - restrict_tag(query, %{tag: [tag]}) + defp restrict_embedded_tag(query, %{tag: tag}) when is_binary(tag) do + restrict_embedded_tag(query, %{tag: [tag]}) end - defp restrict_tag(query, _), do: query + defp restrict_embedded_tag(query, _), do: query - defp restrict_hashtag(query, opts) do - [tag_any, tag_all, tag_reject] = - [:tag, :tag_all, :tag_reject] - |> Enum.map(&opts[&1]) - |> Enum.map(&List.wrap(&1)) + defp hashtag_conditions(opts) do + [:tag, :tag_all, :tag_reject] + |> 
Enum.map(&opts[&1]) + |> Enum.map(&List.wrap(&1)) + end + defp restrict_hashtag_agg(query, opts) do + [tag_any, tag_all, tag_reject] = hashtag_conditions(opts) has_conditions = Enum.any?([tag_any, tag_all, tag_reject], &Enum.any?(&1)) cond do @@ -1275,15 +1278,19 @@ def fetch_activities_query(recipients, opts \\ %{}) do |> exclude_invisible_actors(opts) |> exclude_visibility(opts) - cond do - Config.object_embedded_hashtags?() -> - query - |> restrict_tag(opts) - |> restrict_tag_reject(opts) - |> restrict_tag_all(opts) + hashtag_timeline_strategy = Config.improved_hashtag_timeline() - # TODO: benchmark (initial approach preferring non-aggregate ops when possible) - Config.improved_hashtag_timeline() == :join -> + cond do + !hashtag_timeline_strategy -> + query + |> restrict_embedded_tag(opts) + |> restrict_embedded_tag_reject(opts) + |> restrict_embedded_tag_all(opts) + + hashtag_timeline_strategy == :prefer_aggregation -> + restrict_hashtag_agg(query, opts) + + hashtag_timeline_strategy == :avoid_aggregation or avoid_hashtags_aggregation?(opts) -> query |> distinct([activity], true) |> restrict_hashtag_any(opts) @@ -1291,10 +1298,17 @@ def fetch_activities_query(recipients, opts \\ %{}) do |> restrict_hashtag_reject_any(opts) true -> - restrict_hashtag(query, opts) + restrict_hashtag_agg(query, opts) end end + defp avoid_hashtags_aggregation?(opts) do + [tag_any, tag_all, tag_reject] = hashtag_conditions(opts) + + joins_count = length(tag_all) + if Enum.any?(tag_any), do: 1, else: 0 + Enum.empty?(tag_reject) and joins_count <= 2 + end + def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do list_memberships = Pleroma.List.memberships(opts[:user]) diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index f86d0a265..36fd65c76 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -217,8 +217,8 @@ test "it fetches the appropriate tag-restricted posts" do {:ok, status_two} = CommonAPI.post(user, %{status: ". #essais"}) {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #reject"}) - for new_timeline_enabled <- [true, false] do - clear_config([:instance, :improved_hashtag_timeline], new_timeline_enabled) + for hashtag_timeline_strategy <- [true, :prefer_aggregation, :avoid_aggregation, false] do + clear_config([:instance, :improved_hashtag_timeline], hashtag_timeline_strategy) fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) From 85f7ef4d13adea9d64d279d1395d17c6ebc20678 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 17 Jan 2021 10:57:06 +0300 Subject: [PATCH 024/174] [#3213] Feature lock adjustment for HashtagsTableMigrator. 
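With this change the improved hashtag timeline is auto-enabled on successful
migration only when the data migration record is not feature-locked and the
option has not been set yet, so an operator-provided value is never
overridden. A sketch of pinning the option explicitly (illustrative config
snippet; the config path shown is the one used at this point in the series):

    config :pleroma, :instance, improved_hashtag_timeline: false
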
--- lib/pleroma/migrators/hashtags_table_migrator.ex | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index b40578d50..47de5e134 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -196,11 +196,17 @@ defp persist_stats(data_migration) do defp handle_success(data_migration) do update_status(:complete) - unless data_migration.feature_lock || Config.improved_hashtag_timeline() do - Config.put(Config.improved_hashtag_timeline_path(), true) - end + cond do + data_migration.feature_lock -> + :noop - :ok + not is_nil(Config.improved_hashtag_timeline()) -> + :noop + + true -> + Config.put(Config.improved_hashtag_timeline_path(), true) + :ok + end end def failed_objects_query do From 9d28a7ebfbc7bd8fb893cf1e2ad555ed71f4c812 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 17 Jan 2021 21:58:15 +0300 Subject: [PATCH 025/174] [#3213] Missing copyright header for HashtagsTableMigrator.State. --- lib/pleroma/migrators/hashtags_table_migrator/state.ex | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/pleroma/migrators/hashtags_table_migrator/state.ex b/lib/pleroma/migrators/hashtags_table_migrator/state.ex index c1a2709fc..43f7270e2 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator/state.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator/state.ex @@ -1,3 +1,7 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + defmodule Pleroma.Migrators.HashtagsTableMigrator.State do use Agent From 7f07909a7b56eb368b3f8aab4752def1551c12fe Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Tue, 19 Jan 2021 21:13:32 +0300 Subject: [PATCH 026/174] [#3213] Added `HashtagsTableMigrator.count/1`. 
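count/1 reports how many objects with embedded hashtags have not been
transferred yet; the value is cached in the migrator state and only recomputed
when `force` is true. Illustrative remote-console usage (the returned numbers
are hypothetical):

    iex> Pleroma.Migrators.HashtagsTableMigrator.count()
    1050305

    iex> Pleroma.Migrators.HashtagsTableMigrator.count(true)
    1050197
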
--- .../migrators/hashtags_table_migrator.ex | 42 +++++++++++++------ 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 47de5e134..048f3c8ee 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -85,19 +85,8 @@ def handle_info(:migrate_hashtags, state) do max_processed_id = data_migration.data["max_processed_id"] || 0 - # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) - from( - object in Object, - left_join: hashtag in assoc(object, :hashtags), - where: object.id > ^max_processed_id, - where: is_nil(hashtag.id), - where: - fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data), - select: %{ - id: object.id, - tag: fragment("(?)->'tag'", object.data) - } - ) + query() + |> where([object], object.id > ^max_processed_id) |> Repo.chunk_stream(100, :batches, timeout: :infinity) |> Stream.each(fn objects -> object_ids = Enum.map(objects, & &1.id) @@ -155,6 +144,21 @@ def handle_info(:migrate_hashtags, state) do {:noreply, state} end + defp query do + # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) + from( + object in Object, + left_join: hashtag in assoc(object, :hashtags), + where: is_nil(hashtag.id), + where: + fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data), + select: %{ + id: object.id, + tag: fragment("(?)->'tag'", object.data) + } + ) + end + defp transfer_object_hashtags(object) do hashtags = Object.object_data_hashtags(%{"tag" => object.tag}) @@ -188,6 +192,18 @@ defp transfer_object_hashtags(object) do end) end + def count(force \\ false) do + stored_count = state()[:count] + + if stored_count && !force do + stored_count + else + count = Repo.aggregate(query(), :count, :id) + put_stat(:count, count) + count + end + end + defp persist_stats(data_migration) do runner_state = Map.drop(state(), [:status]) _ = DataMigration.update(data_migration, %{data: runner_state}) From f0f0f2af00e8b73a7013c1308289795961b23f4b Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Tue, 19 Jan 2021 21:17:06 +0300 Subject: [PATCH 027/174] [#3213] `timeout` option for `HashtagsTableMigrator.count/_`. --- lib/pleroma/migrators/hashtags_table_migrator.ex | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 048f3c8ee..6a6a7b5b8 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -192,13 +192,13 @@ defp transfer_object_hashtags(object) do end) end - def count(force \\ false) do + def count(force \\ false, timeout \\ :infinity) do stored_count = state()[:count] if stored_count && !force do stored_count else - count = Repo.aggregate(query(), :count, :id) + count = Repo.aggregate(query(), :count, :id, timeout: timeout) put_stat(:count, count) count end From b830605577f369d6b1a8730a5b3476ceea4fef5a Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Tue, 19 Jan 2021 22:03:25 +0300 Subject: [PATCH 028/174] [#3213] Performance-related stat in HashtagsTableMigrator. Reworked `count/_` to indicate approximate total count for current iteration. 
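The migrator now stores the iteration start time and a derived
records_per_second figure in its state, and count/_ adds the already-processed
records to the approximate total for the current iteration. A quick way to
check throughput from a console (illustrative; the number is hypothetical):

    iex> Pleroma.Migrators.HashtagsTableMigrator.State.get()[:records_per_second]
    251.7
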
--- lib/pleroma/migrators/hashtags_table_migrator.ex | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 6a6a7b5b8..e9dd9b70c 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -80,6 +80,7 @@ def handle_info(:migrate_hashtags, state) do DataMigration.update(data_migration, %{state: :running, data: persistent_data}) update_status(:running) + put_stat(:started_at, NaiveDateTime.utc_now()) Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") @@ -118,6 +119,12 @@ def handle_info(:migrate_hashtags, state) do increment_stat(:processed_count, length(object_ids)) increment_stat(:failed_count, length(failed_ids)) + put_stat( + :records_per_second, + state()[:processed_count] / + Enum.max([NaiveDateTime.diff(NaiveDateTime.utc_now(), state()[:started_at]), 1]) + ) + persist_stats(data_migration) # A quick and dirty approach to controlling the load this background migration imposes @@ -192,13 +199,18 @@ defp transfer_object_hashtags(object) do end) end + @doc "Approximate count for current iteration (including processed records count)" def count(force \\ false, timeout \\ :infinity) do stored_count = state()[:count] if stored_count && !force do stored_count else - count = Repo.aggregate(query(), :count, :id, timeout: timeout) + processed_count = state()[:processed_count] || 0 + max_processed_id = data_migration().data["max_processed_id"] || 0 + query = where(query(), [object], object.id > ^max_processed_id) + + count = Repo.aggregate(query, :count, :id, timeout: timeout) + processed_count put_stat(:count, count) count end From c041e9c6300726a40a00146bba04d3ec752219d9 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Thu, 21 Jan 2021 20:19:09 +0300 Subject: [PATCH 029/174] [#3213] HashtagsTableMigrator: failures handling fix, retry function. Changed default hashtags filtering strategy to non-aggregate approach. --- config/description.exs | 2 +- .../migrators/hashtags_table_migrator.ex | 52 +++++++++++++++---- lib/pleroma/web/activity_pub/activity_pub.ex | 13 +---- .../web/activity_pub/activity_pub_test.exs | 2 +- 4 files changed, 47 insertions(+), 22 deletions(-) diff --git a/config/description.exs b/config/description.exs index b48616b22..46f085c70 100644 --- a/config/description.exs +++ b/config/description.exs @@ -940,7 +940,7 @@ key: :improved_hashtag_timeline, type: :keyword, description: - "If `true` / `:prefer_aggregation` / `:avoid_aggregation`, hashtags table and selected strategy will be used for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when HashtagsTableMigrator completes." + "If `true` / `:prefer_aggregation`, hashtags table and selected strategy will be used for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when HashtagsTableMigrator completes." 
} ] }, diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index e9dd9b70c..8ad2c8c73 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -109,8 +109,9 @@ def handle_info(:migrate_hashtags, state) do _ = Repo.query( - "DELETE FROM data_migration_failed_ids WHERE id = ANY($1)", - [object_ids -- failed_ids] + "DELETE FROM data_migration_failed_ids " <> + "WHERE data_migration_id = $1 AND record_id = ANY($2)", + [data_migration.id, object_ids -- failed_ids] ) max_object_id = Enum.at(object_ids, -1) @@ -133,12 +134,8 @@ def handle_info(:migrate_hashtags, state) do end) |> Stream.run() - with {:ok, %{rows: [[0]]}} <- - Repo.query( - "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;", - [data_migration.id] - ) do - _ = DataMigration.update_state(data_migration, :complete) + with 0 <- failures_count(data_migration.id) do + {:ok, data_migration} = DataMigration.update_state(data_migration, :complete) handle_success(data_migration) else @@ -167,7 +164,8 @@ defp query do end defp transfer_object_hashtags(object) do - hashtags = Object.object_data_hashtags(%{"tag" => object.tag}) + embedded_tags = (Map.has_key?(object, :tag) && object.tag) || object.data["tag"] + hashtags = Object.object_data_hashtags(%{"tag" => embedded_tags}) Repo.transaction(fn -> with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do @@ -246,6 +244,36 @@ def failed_objects_query do |> order_by([o], asc: o.id) end + def failures_count(data_migration_id \\ nil) do + data_migration_id = data_migration_id || data_migration().id + + with {:ok, %{rows: [[count]]}} <- + Repo.query( + "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;", + [data_migration_id] + ) do + count + end + end + + def retry_failed do + data_migration = data_migration() + + failed_objects_query() + |> Repo.chunk_stream(100, :one) + |> Stream.each(fn object -> + with {:ok, _} <- transfer_object_hashtags(object) do + _ = + Repo.query( + "DELETE FROM data_migration_failed_ids " <> + "WHERE data_migration_id = $1 AND record_id = $2", + [data_migration.id, object.id] + ) + end + end) + |> Stream.run() + end + def force_continue do send(whereis(), :migrate_hashtags) end @@ -255,6 +283,12 @@ def force_restart do force_continue() end + def force_complete do + {:ok, data_migration} = DataMigration.update_state(data_migration(), :complete) + + handle_success(data_migration) + end + defp update_status(status, message \\ nil) do put_stat(:status, status) put_stat(:message, message) diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 0609827ec..dbfd3839d 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -727,6 +727,7 @@ defp hashtag_conditions(opts) do |> Enum.map(&List.wrap(&1)) end + # Note: times out on larger instances (with default timeout), intended for complex queries defp restrict_hashtag_agg(query, opts) do [tag_any, tag_all, tag_reject] = hashtag_conditions(opts) has_conditions = Enum.any?([tag_any, tag_all, tag_reject], &Enum.any?(&1)) @@ -1290,25 +1291,15 @@ def fetch_activities_query(recipients, opts \\ %{}) do hashtag_timeline_strategy == :prefer_aggregation -> restrict_hashtag_agg(query, opts) - hashtag_timeline_strategy == :avoid_aggregation or avoid_hashtags_aggregation?(opts) -> + true -> query |> distinct([activity], true) 
|> restrict_hashtag_any(opts) |> restrict_hashtag_all(opts) |> restrict_hashtag_reject_any(opts) - - true -> - restrict_hashtag_agg(query, opts) end end - defp avoid_hashtags_aggregation?(opts) do - [tag_any, tag_all, tag_reject] = hashtag_conditions(opts) - - joins_count = length(tag_all) + if Enum.any?(tag_any), do: 1, else: 0 - Enum.empty?(tag_reject) and joins_count <= 2 - end - def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do list_memberships = Pleroma.List.memberships(opts[:user]) diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index 36fd65c76..1fcaf74d3 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -217,7 +217,7 @@ test "it fetches the appropriate tag-restricted posts" do {:ok, status_two} = CommonAPI.post(user, %{status: ". #essais"}) {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #reject"}) - for hashtag_timeline_strategy <- [true, :prefer_aggregation, :avoid_aggregation, false] do + for hashtag_timeline_strategy <- [true, :prefer_aggregation, false] do clear_config([:instance, :improved_hashtag_timeline], hashtag_timeline_strategy) fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) From ca7f24064304945587fc232325dce4b834ff6c94 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Thu, 21 Jan 2021 20:50:06 +0300 Subject: [PATCH 030/174] [#3213] Ignoring of blank elements from objects.data->tag. --- lib/pleroma/object.ex | 2 ++ test/pleroma/hashtag_test.exs | 17 +++++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 test/pleroma/hashtag_test.exs diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 5102be1de..9b5c1bec1 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -420,6 +420,8 @@ def object_data_hashtags(%{"tag" => tags}) when is_list(tags) do hashtag when is_bitstring(hashtag) -> String.downcase(hashtag) end) |> Enum.uniq() + # Note: "" elements (plain text) might occur in `data.tag` for incoming objects + |> Enum.filter(&(&1 not in [nil, ""])) end def object_data_hashtags(_), do: [] diff --git a/test/pleroma/hashtag_test.exs b/test/pleroma/hashtag_test.exs new file mode 100644 index 000000000..0264dea0b --- /dev/null +++ b/test/pleroma/hashtag_test.exs @@ -0,0 +1,17 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.HashtagTest do + use Pleroma.DataCase + + alias Pleroma.Hashtag + + describe "changeset validations" do + test "ensure non-blank :name" do + changeset = Hashtag.changeset(%Hashtag{}, %{name: ""}) + + assert {:name, {"can't be blank", [validation: :required]}} in changeset.errors + end + end +end From f264d930cc00c463d0f506a94f6f6b494aab7022 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 24 Jan 2021 23:27:02 +0300 Subject: [PATCH 031/174] [#3213] Speedup of HashtagsTableMigrator (query optimization). State handling fix. 
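State is now cleared before each run, and the candidate-objects query joins
the bare hashtags_objects table instead of the hashtags association, so the
hashtags table is not touched while scanning. The generated query is expected
to look roughly like the SQL below (an approximation, not the exact Ecto
output):

    SELECT o.id, o.data->'tag'
    FROM objects AS o
    LEFT JOIN (SELECT object_id FROM hashtags_objects) AS ho
      ON ho.object_id = o.id
    WHERE o.data->'tag' IS NOT NULL
      AND o.data->'tag' != '[]'::jsonb
      AND ho.object_id IS NULL
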
--- .../migrators/hashtags_table_migrator.ex | 18 +++++++++++++++--- .../migrators/hashtags_table_migrator/state.ex | 4 ++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 8ad2c8c73..6a1c9592c 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -72,6 +72,8 @@ def handle_continue(:init_state, _state) do @impl true def handle_info(:migrate_hashtags, state) do + State.clear() + data_migration = data_migration() persistent_data = Map.take(data_migration.data, ["max_processed_id"]) @@ -152,8 +154,6 @@ defp query do # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) from( object in Object, - left_join: hashtag in assoc(object, :hashtags), - where: is_nil(hashtag.id), where: fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data), select: %{ @@ -161,12 +161,24 @@ defp query do tag: fragment("(?)->'tag'", object.data) } ) + |> join(:left, [o], hashtags_objects in fragment("SELECT object_id FROM hashtags_objects"), + on: hashtags_objects.object_id == o.id + ) + |> where([_o, hashtags_objects], is_nil(hashtags_objects.object_id)) end defp transfer_object_hashtags(object) do - embedded_tags = (Map.has_key?(object, :tag) && object.tag) || object.data["tag"] + embedded_tags = if Map.has_key?(object, :tag), do: object.tag, else: object.data["tag"] hashtags = Object.object_data_hashtags(%{"tag" => embedded_tags}) + if Enum.any?(hashtags) do + transfer_object_hashtags(object, hashtags) + else + {:ok, object.id} + end + end + + defp transfer_object_hashtags(object, hashtags) do Repo.transaction(fn -> with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do for hashtag_record <- hashtag_records do diff --git a/lib/pleroma/migrators/hashtags_table_migrator/state.ex b/lib/pleroma/migrators/hashtags_table_migrator/state.ex index 43f7270e2..901563426 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator/state.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator/state.ex @@ -12,6 +12,10 @@ def start_link(_) do Agent.start_link(fn -> @init_state end, name: @reg_name) end + def clear do + Agent.update(@reg_name, fn _state -> @init_state end) + end + def get do Agent.get(@reg_name, & &1) end From ea4785213a449f3bcd68bcb4ecb3bb6d794736b1 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Mon, 25 Jan 2021 20:12:09 +0300 Subject: [PATCH 032/174] [#3213] Switched to using embedded hashtags in Object.hashtags/1 (to avoid extra joins / preload in timeline queries). 
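Object.hashtags/1 now always reads the embedded "tag" data: embedded hashtags
stay in sync with the hashtags table anyway, and this spares timeline queries
an extra join / preload. Sketch of the resulting behaviour (the input object
is made up for illustration):

    iex> Pleroma.Object.hashtags(%Pleroma.Object{data: %{"tag" => ["Pleroma", "elixir"]}})
    ["pleroma", "elixir"]
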
--- lib/pleroma/config.ex | 1 - lib/pleroma/object.ex | 18 +++++------------- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex index ceb8c8b5a..0a6ac0ad0 100644 --- a/lib/pleroma/config.ex +++ b/lib/pleroma/config.ex @@ -98,7 +98,6 @@ def restrict_unauthenticated_access?(resource, kind) do def improved_hashtag_timeline_path, do: [:instance, :improved_hashtag_timeline] def improved_hashtag_timeline, do: get(improved_hashtag_timeline_path()) - def object_embedded_hashtags?, do: !improved_hashtag_timeline() def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], []) diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 9b5c1bec1..9edf43e04 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -388,24 +388,16 @@ def tags(%Object{data: %{"tag" => tags}}) when is_list(tags), do: tags def tags(_), do: [] def hashtags(%Object{} = object) do - cond do - Config.object_embedded_hashtags?() -> - embedded_hashtags(object) - - object.id == "pleroma:fake_object_id" -> - [] - - true -> - hashtag_records = Repo.preload(object, :hashtags).hashtags - Enum.map(hashtag_records, & &1.name) - end + # Note: always using embedded hashtags regardless whether they are migrated to hashtags table + # (embedded hashtags stay in sync anyways, and we avoid extra joins and preload hassle) + embedded_hashtags(object) end - defp embedded_hashtags(%Object{data: data}) do + def embedded_hashtags(%Object{data: data}) do object_data_hashtags(data) end - defp embedded_hashtags(_), do: [] + def embedded_hashtags(_), do: [] def object_data_hashtags(%{"tag" => tags}) when is_list(tags) do tags From e7864a32d7c9930e5f6c62bd77cef64c68f1eb21 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Mon, 25 Jan 2021 22:31:23 +0300 Subject: [PATCH 033/174] [#3213] Removed DISTINCT clause from ActivityPub.fetch_activities_query/2. --- lib/pleroma/web/activity_pub/activity_pub.ex | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index fbda89a25..be81e0833 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -1293,7 +1293,6 @@ def fetch_activities_query(recipients, opts \\ %{}) do true -> query - |> distinct([activity], true) |> restrict_hashtag_any(opts) |> restrict_hashtag_all(opts) |> restrict_hashtag_reject_any(opts) From 380d0cce6b802baf4d13031a4a39f169dd65bffd Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Fri, 29 Jan 2021 00:17:33 +0300 Subject: [PATCH 034/174] [#3213] Reinstated DISTINCT clause for hashtag "any" filtering with 2+ terms. Added test. 
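With two or more hashtags in the "any" filter, the join against the hashtags
association can produce the same activity once per matching tag, so DISTINCT
is applied again in that case (a single tag cannot duplicate rows). The added
test covers exactly this, e.g.:

    ActivityPub.fetch_activities([], %{type: "Create", tag: ["any1", "any2"], limit: 2})

Without DISTINCT, a status tagged with both #any1 and #any2 would fill both
slots of the :limit before Ecto deduplicates the preloaded rows.
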
--- lib/pleroma/web/activity_pub/activity_pub.ex | 17 ++++++++++++----- .../controllers/timeline_controller.ex | 2 +- .../web/activity_pub/activity_pub_test.exs | 17 +++++++++++++++-- 3 files changed, 28 insertions(+), 8 deletions(-) diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index be81e0833..0a21ac2f2 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -846,11 +846,18 @@ defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do end defp restrict_hashtag_any(query, %{tag: tags}) when is_list(tags) do - from( - [_activity, object] in query, - join: hashtag in assoc(object, :hashtags), - where: hashtag.name in ^tags - ) + query = + from( + [_activity, object] in query, + join: hashtag in assoc(object, :hashtags), + where: hashtag.name in ^tags + ) + + if length(tags) > 1 do + distinct(query, [activity], true) + else + query + end end defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do diff --git a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex index 08e6f23b9..1fb954a9b 100644 --- a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex @@ -134,9 +134,9 @@ defp hashtag_fetching(params, user, local_only) do tags = [params[:tag], params[:any]] |> List.flatten() - |> Enum.uniq() |> Enum.reject(&is_nil/1) |> Enum.map(&String.downcase/1) + |> Enum.uniq() tag_all = params diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index 1fcaf74d3..0b18269cd 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -217,6 +217,9 @@ test "it fetches the appropriate tag-restricted posts" do {:ok, status_two} = CommonAPI.post(user, %{status: ". #essais"}) {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #reject"}) + {:ok, status_four} = CommonAPI.post(user, %{status: ". #any1 #any2"}) + {:ok, status_five} = CommonAPI.post(user, %{status: ". #any2 #any1"}) + for hashtag_timeline_strategy <- [true, :prefer_aggregation, false] do clear_config([:instance, :improved_hashtag_timeline], hashtag_timeline_strategy) @@ -238,8 +241,17 @@ test "it fetches the appropriate tag-restricted posts" do tag_all: ["test", "reject"] }) - [fetch_one, fetch_two, fetch_three, fetch_four] = - Enum.map([fetch_one, fetch_two, fetch_three, fetch_four], fn statuses -> + # This test would fail if JOIN with 2+ terms in "any" clause is done without DISTINCT. + # The :limit is important (w/o DISTINCT 2 records are deduped by Ecto to 1 b/c of preload). 
+ fetch_five = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["any1", "any2"], + limit: 2 + }) + + [fetch_one, fetch_two, fetch_three, fetch_four, fetch_five] = + Enum.map([fetch_one, fetch_two, fetch_three, fetch_four, fetch_five], fn statuses -> Enum.map(statuses, fn s -> Repo.preload(s, object: :hashtags) end) end) @@ -247,6 +259,7 @@ test "it fetches the appropriate tag-restricted posts" do assert fetch_two == [status_one, status_two, status_three] assert fetch_three == [status_one, status_two] assert fetch_four == [status_three] + assert fetch_five == [status_four, status_five] end end From 9948ff3356f9e9e214584207a53eba614c73383c Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 31 Jan 2021 18:24:19 +0300 Subject: [PATCH 035/174] [#3213] Added HashtagsCleanupWorker periodic job. --- config/config.exs | 2 + config/description.exs | 1 + .../migrators/hashtags_table_migrator.ex | 1 + lib/pleroma/object.ex | 1 + .../workers/cron/hashtags_cleanup_worker.ex | 57 +++++++++++++++++++ 5 files changed, 62 insertions(+) create mode 100644 lib/pleroma/workers/cron/hashtags_cleanup_worker.ex diff --git a/config/config.exs b/config/config.exs index c4a690799..dfd2fc434 100644 --- a/config/config.exs +++ b/config/config.exs @@ -553,10 +553,12 @@ remote_fetcher: 2, attachments_cleanup: 1, new_users_digest: 1, + hashtags_cleanup: 1, mute_expire: 5 ], plugins: [Oban.Plugins.Pruner], crontab: [ + {"0 1 * * *", Pleroma.Workers.Cron.HashtagsCleanupWorker}, {"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker}, {"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker} ] diff --git a/config/description.exs b/config/description.exs index 46f085c70..147c1930c 100644 --- a/config/description.exs +++ b/config/description.exs @@ -1943,6 +1943,7 @@ type: {:list, :tuple}, description: "Settings for cron background jobs", suggestions: [ + {"0 1 * * *", Pleroma.Workers.Cron.HashtagsCleanupWorker}, {"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker}, {"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker} ] diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 6a1c9592c..07b42a7f4 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -152,6 +152,7 @@ def handle_info(:migrate_hashtags, state) do defp query do # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) + # Note: not checking activity type; HashtagsCleanupWorker should clean up unused records later from( object in Object, where: diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 9edf43e04..52b77e41c 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -65,6 +65,7 @@ def change(struct, params \\ %{}) do |> maybe_handle_hashtags_change(struct) end + # Note: not checking activity type; HashtagsCleanupWorker should clean up unused records later defp maybe_handle_hashtags_change(changeset, struct) do with data_hashtags_change = get_change(changeset, :data), true <- hashtags_changed?(struct, data_hashtags_change), diff --git a/lib/pleroma/workers/cron/hashtags_cleanup_worker.ex b/lib/pleroma/workers/cron/hashtags_cleanup_worker.ex new file mode 100644 index 000000000..b319067ca --- /dev/null +++ b/lib/pleroma/workers/cron/hashtags_cleanup_worker.ex @@ -0,0 +1,57 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: 
AGPL-3.0-only + +defmodule Pleroma.Workers.Cron.HashtagsCleanupWorker do + @moduledoc """ + The worker to clean up unused hashtags_objects and hashtags. + """ + + use Oban.Worker, queue: "hashtags_cleanup" + + alias Pleroma.Repo + + require Logger + + @hashtags_objects_query """ + DELETE FROM hashtags_objects WHERE object_id IN + (SELECT DISTINCT objects.id FROM objects + JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities + ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = + (objects.data->>'id') + AND activities.data->>'type' = 'Create' + WHERE activities.id IS NULL); + """ + + @hashtags_query """ + DELETE FROM hashtags WHERE id IN + (SELECT hashtags.id FROM hashtags + LEFT OUTER JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id + WHERE hashtags_objects.hashtag_id IS NULL AND hashtags.inserted_at < $1); + """ + + @impl Oban.Worker + def perform(_job) do + Logger.info("Cleaning up unused `hashtags_objects` records...") + + {:ok, %{num_rows: hashtags_objects_count}} = + Repo.query(@hashtags_objects_query, [], timeout: :infinity) + + Logger.info("Deleted #{hashtags_objects_count} unused `hashtags_objects` records.") + + Logger.info("Cleaning up unused `hashtags` records...") + + # Note: ignoring recently created hashtags since references are added after hashtag is created + {:ok, %{num_rows: hashtags_count}} = + Repo.query(@hashtags_query, [NaiveDateTime.add(NaiveDateTime.utc_now(), -3600 * 24)], + timeout: :infinity + ) + + Logger.info("Deleted #{hashtags_count} unused `hashtags` records.") + + Logger.info("HashtagsCleanupWorker complete.") + + :ok + end +end From 6fd4163ab60be07b1a20ac8911e105ddca8e2095 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 31 Jan 2021 20:37:33 +0300 Subject: [PATCH 036/174] [#3213] ActivityPub: implemented subqueries-based hashtags filtering, removed aggregation-based hashtags filtering. --- config/description.exs | 2 +- lib/pleroma/web/activity_pub/activity_pub.ex | 228 ++++++------------ .../web/activity_pub/activity_pub_test.exs | 5 +- 3 files changed, 81 insertions(+), 154 deletions(-) diff --git a/config/description.exs b/config/description.exs index 147c1930c..ead541724 100644 --- a/config/description.exs +++ b/config/description.exs @@ -940,7 +940,7 @@ key: :improved_hashtag_timeline, type: :keyword, description: - "If `true` / `:prefer_aggregation`, hashtags table and selected strategy will be used for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when HashtagsTableMigrator completes." + "If `true`, hashtags will be fetched from `hashtags` table for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when HashtagsTableMigrator completes." 
} ] }, diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 0a21ac2f2..cda8d3f54 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -669,24 +669,6 @@ defp restrict_since(query, %{since_id: since_id}) do defp restrict_since(query, _), do: query - defp restrict_embedded_tag_reject(_query, %{tag_reject: _tag_reject, skip_preload: true}) do - raise_on_missing_preload() - end - - defp restrict_embedded_tag_reject(query, %{tag_reject: tag_reject}) when is_list(tag_reject) do - from( - [_activity, object] in query, - where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) - ) - end - - defp restrict_embedded_tag_reject(query, %{tag_reject: tag_reject}) - when is_binary(tag_reject) do - restrict_embedded_tag_reject(query, %{tag_reject: [tag_reject]}) - end - - defp restrict_embedded_tag_reject(query, _), do: query - defp restrict_embedded_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do raise_on_missing_preload() end @@ -699,139 +681,65 @@ defp restrict_embedded_tag_all(query, %{tag_all: tag_all}) when is_list(tag_all) end defp restrict_embedded_tag_all(query, %{tag_all: tag}) when is_binary(tag) do - restrict_embedded_tag(query, %{tag: tag}) + restrict_embedded_tag_any(query, %{tag: tag}) end defp restrict_embedded_tag_all(query, _), do: query - defp restrict_embedded_tag(_query, %{tag: _tag, skip_preload: true}) do + defp restrict_embedded_tag_any(_query, %{tag: _tag, skip_preload: true}) do raise_on_missing_preload() end - defp restrict_embedded_tag(query, %{tag: tag}) when is_list(tag) do + defp restrict_embedded_tag_any(query, %{tag: tag}) when is_list(tag) do from( [_activity, object] in query, where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag) ) end - defp restrict_embedded_tag(query, %{tag: tag}) when is_binary(tag) do - restrict_embedded_tag(query, %{tag: [tag]}) + defp restrict_embedded_tag_any(query, %{tag: tag}) when is_binary(tag) do + restrict_embedded_tag_any(query, %{tag: [tag]}) end - defp restrict_embedded_tag(query, _), do: query + defp restrict_embedded_tag_any(query, _), do: query - defp hashtag_conditions(opts) do - [:tag, :tag_all, :tag_reject] - |> Enum.map(&opts[&1]) - |> Enum.map(&List.wrap(&1)) - end - - # Note: times out on larger instances (with default timeout), intended for complex queries - defp restrict_hashtag_agg(query, opts) do - [tag_any, tag_all, tag_reject] = hashtag_conditions(opts) - has_conditions = Enum.any?([tag_any, tag_all, tag_reject], &Enum.any?(&1)) - - cond do - !has_conditions -> - query - - opts[:skip_preload] -> - raise_on_missing_preload() - - true -> - query - |> group_by_all_bindings() - |> join(:left, [_activity, object], hashtag in assoc(object, :hashtags), as: :hashtag) - |> maybe_restrict_hashtag_any(tag_any) - |> maybe_restrict_hashtag_all(tag_all) - |> maybe_restrict_hashtag_reject_any(tag_reject) - end - end - - # Groups by all bindings to allow aggregation on hashtags - defp group_by_all_bindings(query) do - # Expecting named bindings: :object, :bookmark, :thread_mute, :report_note - cond do - Enum.count(query.aliases) == 4 -> - from([a, o, b3, b4, b5] in query, group_by: [a.id, o.id, b3.id, b4.id, b5.id]) - - Enum.count(query.aliases) == 3 -> - from([a, o, b3, b4] in query, group_by: [a.id, o.id, b3.id, b4.id]) - - Enum.count(query.aliases) == 2 -> - from([a, o, b3] in query, group_by: [a.id, o.id, b3.id]) - - true -> - from([a, o] in query, group_by: [a.id, o.id]) - end - end - - defp 
maybe_restrict_hashtag_any(query, []) do - query - end - - defp maybe_restrict_hashtag_any(query, tags) do - having( - query, - [hashtag: hashtag], - fragment("array_agg(?) && (?)", hashtag.name, ^tags) - ) - end - - defp maybe_restrict_hashtag_all(query, []) do - query - end - - defp maybe_restrict_hashtag_all(query, tags) do - having( - query, - [hashtag: hashtag], - fragment("array_agg(?) @> (?)", hashtag.name, ^tags) - ) - end - - defp maybe_restrict_hashtag_reject_any(query, []) do - query - end - - defp maybe_restrict_hashtag_reject_any(query, tags) do - having( - query, - [hashtag: hashtag], - fragment("not(array_agg(?) && (?))", hashtag.name, ^tags) - ) - end - - defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + defp restrict_embedded_tag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do raise_on_missing_preload() end - defp restrict_hashtag_reject_any(query, %{tag_reject: tags_reject}) when is_list(tags_reject) do - query - |> group_by_all_bindings() - |> join(:left, [_activity, object], hashtag in assoc(object, :hashtags), as: :hashtag) - |> having( - [hashtag: hashtag], - fragment("not(array_agg(?) && (?))", hashtag.name, ^tags_reject) + defp restrict_embedded_tag_reject_any(query, %{tag_reject: tag_reject}) + when is_list(tag_reject) do + from( + [_activity, object] in query, + where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) ) end - defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do - restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]}) + defp restrict_embedded_tag_reject_any(query, %{tag_reject: tag_reject}) + when is_binary(tag_reject) do + restrict_embedded_tag_reject_any(query, %{tag_reject: [tag_reject]}) end - defp restrict_hashtag_reject_any(query, _), do: query + defp restrict_embedded_tag_reject_any(query, _), do: query defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do raise_on_missing_preload() end defp restrict_hashtag_all(query, %{tag_all: tags}) when is_list(tags) do - Enum.reduce( - tags, - query, - fn tag, acc -> restrict_hashtag_any(acc, %{tag: tag}) end + from( + [_activity, object] in query, + where: + fragment( + """ + (SELECT array_agg(hashtags.name) FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + AND hashtags_objects.object_id = ?) @> ? + """, + ^tags, + object.id, + ^tags + ) ) end @@ -846,18 +754,19 @@ defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do end defp restrict_hashtag_any(query, %{tag: tags}) when is_list(tags) do - query = - from( - [_activity, object] in query, - join: hashtag in assoc(object, :hashtags), - where: hashtag.name in ^tags - ) - - if length(tags) > 1 do - distinct(query, [activity], true) - else - query - end + from( + [_activity, object] in query, + where: + fragment( + """ + EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + AND hashtags_objects.object_id = ? 
LIMIT 1) + """, + ^tags, + object.id + ) + ) end defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do @@ -866,6 +775,32 @@ defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do defp restrict_hashtag_any(query, _), do: query + defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: tags_reject}) when is_list(tags_reject) do + from( + [_activity, object] in query, + where: + fragment( + """ + NOT EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + AND hashtags_objects.object_id = ? LIMIT 1) + """, + ^tags_reject, + object.id + ) + ) + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do + restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]}) + end + + defp restrict_hashtag_reject_any(query, _), do: query + defp raise_on_missing_preload do raise "Can't use the child object without preloading!" end @@ -1286,23 +1221,16 @@ def fetch_activities_query(recipients, opts \\ %{}) do |> exclude_invisible_actors(opts) |> exclude_visibility(opts) - hashtag_timeline_strategy = Config.improved_hashtag_timeline() - - cond do - !hashtag_timeline_strategy -> - query - |> restrict_embedded_tag(opts) - |> restrict_embedded_tag_reject(opts) - |> restrict_embedded_tag_all(opts) - - hashtag_timeline_strategy == :prefer_aggregation -> - restrict_hashtag_agg(query, opts) - - true -> - query - |> restrict_hashtag_any(opts) - |> restrict_hashtag_all(opts) - |> restrict_hashtag_reject_any(opts) + if Config.improved_hashtag_timeline() do + query + |> restrict_hashtag_any(opts) + |> restrict_hashtag_all(opts) + |> restrict_hashtag_reject_any(opts) + else + query + |> restrict_embedded_tag_any(opts) + |> restrict_embedded_tag_all(opts) + |> restrict_embedded_tag_reject_any(opts) end end diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index 0b18269cd..c2cc5a9af 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -220,7 +220,7 @@ test "it fetches the appropriate tag-restricted posts" do {:ok, status_four} = CommonAPI.post(user, %{status: ". #any1 #any2"}) {:ok, status_five} = CommonAPI.post(user, %{status: ". #any2 #any1"}) - for hashtag_timeline_strategy <- [true, :prefer_aggregation, false] do + for hashtag_timeline_strategy <- [true, false] do clear_config([:instance, :improved_hashtag_timeline], hashtag_timeline_strategy) fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) @@ -241,8 +241,7 @@ test "it fetches the appropriate tag-restricted posts" do tag_all: ["test", "reject"] }) - # This test would fail if JOIN with 2+ terms in "any" clause is done without DISTINCT. - # The :limit is important (w/o DISTINCT 2 records are deduped by Ecto to 1 b/c of preload). + # Testing that deduplication (if needed) is done on DB (not Ecto) level; :limit is important fetch_five = ActivityPub.fetch_activities([], %{ type: "Create", From 108e90b18edcfb57b9839e7c5d6d444a63ae2069 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 31 Jan 2021 22:03:59 +0300 Subject: [PATCH 037/174] [#3213] Explicitly defined PKs in hashtags_objects and data_migration_failed_ids. Added "pleroma.database rollback" task to revert a single migration. 
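Illustrative use of the new rollback task (the version prefix here is the
hashtags_objects migration from this patch series); it reverts only that
migration and leaves later ones applied:

    mix pleroma.database rollback 20201221203824
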
--- lib/mix/tasks/pleroma/database.ex | 24 +++++++++++++++++++ ...20201221203824_create_hashtags_objects.exs | 4 ++-- ...gration_create_populate_hashtags_table.exs | 4 +++- ...72254_create_data_migration_failed_ids.exs | 4 ++-- 4 files changed, 31 insertions(+), 5 deletions(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 4ddace9c9..30c0d2bf1 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -20,6 +20,30 @@ defmodule Mix.Tasks.Pleroma.Database do @shortdoc "A collection of database related tasks" @moduledoc File.read!("docs/administration/CLI_tasks/database.md") + # Rolls back a specific migration (leaving subsequent migrations applied) + # Based on https://stackoverflow.com/a/53825840 + def run(["rollback", version]) do + start_pleroma() + + version = String.to_integer(version) + re = ~r/^#{version}_.*\.exs/ + path = Application.app_dir(:pleroma, Path.join(["priv", "repo", "migrations"])) + + result = + with {:find, "" <> file} <- {:find, Enum.find(File.ls!(path), &String.match?(&1, re))}, + {:compile, [{mod, _} | _]} <- {:compile, Code.compile_file(Path.join(path, file))}, + {:rollback, :ok} <- {:rollback, Ecto.Migrator.down(Repo, version, mod)} do + {:ok, "Reversed migration: #{file}"} + else + {:find, _} -> {:error, "No migration found with version prefix: #{version}"} + {:compile, e} -> {:error, "Problem compiling migration module: #{inspect(e)}"} + {:rollback, e} -> {:error, "Problem reversing migration: #{inspect(e)}"} + e -> {:error, "Something unexpected happened: #{inspect(e)}"} + end + + IO.inspect(result) + end + def run(["remove_embedded_objects" | args]) do {options, [], []} = OptionParser.parse( diff --git a/priv/repo/migrations/20201221203824_create_hashtags_objects.exs b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs index 214ea81c3..efd60369d 100644 --- a/priv/repo/migrations/20201221203824_create_hashtags_objects.exs +++ b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs @@ -3,8 +3,8 @@ defmodule Pleroma.Repo.Migrations.CreateHashtagsObjects do def change do create_if_not_exists table(:hashtags_objects, primary_key: false) do - add(:hashtag_id, references(:hashtags), null: false) - add(:object_id, references(:objects), null: false) + add(:hashtag_id, references(:hashtags), null: false, primary_key: true) + add(:object_id, references(:objects), null: false, primary_key: true) end create_if_not_exists(unique_index(:hashtags_objects, [:hashtag_id, :object_id])) diff --git a/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs b/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs index 2a965f075..cf3cf26a0 100644 --- a/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs +++ b/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs @@ -10,5 +10,7 @@ def up do ) end - def down, do: :ok + def down do + execute("DELETE FROM data_migrations WHERE name = 'populate_hashtags_table';") + end end diff --git a/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs b/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs index ba0be98af..18afa74ac 100644 --- a/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs +++ b/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs @@ -3,8 +3,8 @@ defmodule Pleroma.Repo.Migrations.CreateDataMigrationFailedIds do def change do 
create_if_not_exists table(:data_migration_failed_ids, primary_key: false) do - add(:data_migration_id, references(:data_migrations), null: false) - add(:record_id, :bigint, null: false) + add(:data_migration_id, references(:data_migrations), null: false, primary_key: true) + add(:record_id, :bigint, null: false, primary_key: true) end create_if_not_exists( From 10207f840ce3515dddfde36288575f203c52840f Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 31 Jan 2021 22:36:46 +0300 Subject: [PATCH 038/174] [#3213] ActivityPub: temporarily reverted to previous hashtags filtering implementation due to blank results issue. --- lib/pleroma/web/activity_pub/activity_pub.ex | 106 ++++++++++--------- 1 file changed, 54 insertions(+), 52 deletions(-) diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index cda8d3f54..fd0144aad 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -722,24 +722,53 @@ defp restrict_embedded_tag_reject_any(query, %{tag_reject: tag_reject}) defp restrict_embedded_tag_reject_any(query, _), do: query + # Groups by all bindings to allow aggregation on hashtags + defp group_by_all_bindings(query) do + # Expecting named bindings: :object, :bookmark, :thread_mute, :report_note + cond do + Enum.count(query.aliases) == 4 -> + from([a, o, b3, b4, b5] in query, group_by: [a.id, o.id, b3.id, b4.id, b5.id]) + + Enum.count(query.aliases) == 3 -> + from([a, o, b3, b4] in query, group_by: [a.id, o.id, b3.id, b4.id]) + + Enum.count(query.aliases) == 2 -> + from([a, o, b3] in query, group_by: [a.id, o.id, b3.id]) + + true -> + from([a, o] in query, group_by: [a.id, o.id]) + end + end + + defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: tags_reject}) when is_list(tags_reject) do + query + |> group_by_all_bindings() + |> join(:left, [_activity, object], hashtag in assoc(object, :hashtags), as: :hashtag) + |> having( + [hashtag: hashtag], + fragment("not(array_agg(?) && (?))", hashtag.name, ^tags_reject) + ) + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do + restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]}) + end + + defp restrict_hashtag_reject_any(query, _), do: query + defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do raise_on_missing_preload() end defp restrict_hashtag_all(query, %{tag_all: tags}) when is_list(tags) do - from( - [_activity, object] in query, - where: - fragment( - """ - (SELECT array_agg(hashtags.name) FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) - AND hashtags_objects.object_id = ?) @> ? - """, - ^tags, - object.id, - ^tags - ) + Enum.reduce( + tags, + query, + fn tag, acc -> restrict_hashtag_any(acc, %{tag: tag}) end ) end @@ -754,19 +783,18 @@ defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do end defp restrict_hashtag_any(query, %{tag: tags}) when is_list(tags) do - from( - [_activity, object] in query, - where: - fragment( - """ - EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) - AND hashtags_objects.object_id = ? 
LIMIT 1) - """, - ^tags, - object.id - ) - ) + query = + from( + [_activity, object] in query, + join: hashtag in assoc(object, :hashtags), + where: hashtag.name in ^tags + ) + + if length(tags) > 1 do + distinct(query, [activity], true) + else + query + end end defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do @@ -775,32 +803,6 @@ defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do defp restrict_hashtag_any(query, _), do: query - defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do - raise_on_missing_preload() - end - - defp restrict_hashtag_reject_any(query, %{tag_reject: tags_reject}) when is_list(tags_reject) do - from( - [_activity, object] in query, - where: - fragment( - """ - NOT EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) - AND hashtags_objects.object_id = ? LIMIT 1) - """, - ^tags_reject, - object.id - ) - ) - end - - defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do - restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]}) - end - - defp restrict_hashtag_reject_any(query, _), do: query - defp raise_on_missing_preload do raise "Can't use the child object without preloading!" end From cf4765af4098098fa4d6996193432bd19c439a75 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 31 Jan 2021 23:06:38 +0300 Subject: [PATCH 039/174] [#3213] ActivityPub: fixed subquery-based hashtags filtering implementation (addressed empty list options issue). Added regression test. --- lib/pleroma/web/activity_pub/activity_pub.ex | 117 +++++++++--------- .../web/activity_pub/activity_pub_test.exs | 11 ++ 2 files changed, 68 insertions(+), 60 deletions(-) diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index fd0144aad..6cf4093fb 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -673,7 +673,7 @@ defp restrict_embedded_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) raise_on_missing_preload() end - defp restrict_embedded_tag_all(query, %{tag_all: tag_all}) when is_list(tag_all) do + defp restrict_embedded_tag_all(query, %{tag_all: [_ | _] = tag_all}) do from( [_activity, object] in query, where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all) @@ -690,7 +690,7 @@ defp restrict_embedded_tag_any(_query, %{tag: _tag, skip_preload: true}) do raise_on_missing_preload() end - defp restrict_embedded_tag_any(query, %{tag: tag}) when is_list(tag) do + defp restrict_embedded_tag_any(query, %{tag: [_ | _] = tag}) do from( [_activity, object] in query, where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag) @@ -707,8 +707,7 @@ defp restrict_embedded_tag_reject_any(_query, %{tag_reject: _tag_reject, skip_pr raise_on_missing_preload() end - defp restrict_embedded_tag_reject_any(query, %{tag_reject: tag_reject}) - when is_list(tag_reject) do + defp restrict_embedded_tag_reject_any(query, %{tag_reject: [_ | _] = tag_reject}) do from( [_activity, object] in query, where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) @@ -722,53 +721,24 @@ defp restrict_embedded_tag_reject_any(query, %{tag_reject: tag_reject}) defp restrict_embedded_tag_reject_any(query, _), do: query - # Groups by all bindings to allow aggregation on hashtags - defp group_by_all_bindings(query) do - # Expecting named bindings: :object, :bookmark, :thread_mute, 
:report_note - cond do - Enum.count(query.aliases) == 4 -> - from([a, o, b3, b4, b5] in query, group_by: [a.id, o.id, b3.id, b4.id, b5.id]) - - Enum.count(query.aliases) == 3 -> - from([a, o, b3, b4] in query, group_by: [a.id, o.id, b3.id, b4.id]) - - Enum.count(query.aliases) == 2 -> - from([a, o, b3] in query, group_by: [a.id, o.id, b3.id]) - - true -> - from([a, o] in query, group_by: [a.id, o.id]) - end - end - - defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do - raise_on_missing_preload() - end - - defp restrict_hashtag_reject_any(query, %{tag_reject: tags_reject}) when is_list(tags_reject) do - query - |> group_by_all_bindings() - |> join(:left, [_activity, object], hashtag in assoc(object, :hashtags), as: :hashtag) - |> having( - [hashtag: hashtag], - fragment("not(array_agg(?) && (?))", hashtag.name, ^tags_reject) - ) - end - - defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do - restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]}) - end - - defp restrict_hashtag_reject_any(query, _), do: query - defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do raise_on_missing_preload() end - defp restrict_hashtag_all(query, %{tag_all: tags}) when is_list(tags) do - Enum.reduce( - tags, - query, - fn tag, acc -> restrict_hashtag_any(acc, %{tag: tag}) end + defp restrict_hashtag_all(query, %{tag_all: [_ | _] = tags}) do + from( + [_activity, object] in query, + where: + fragment( + """ + (SELECT array_agg(hashtags.name) FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + AND hashtags_objects.object_id = ?) @> ? + """, + ^tags, + object.id, + ^tags + ) ) end @@ -782,19 +752,20 @@ defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do raise_on_missing_preload() end - defp restrict_hashtag_any(query, %{tag: tags}) when is_list(tags) do - query = - from( - [_activity, object] in query, - join: hashtag in assoc(object, :hashtags), - where: hashtag.name in ^tags - ) - - if length(tags) > 1 do - distinct(query, [activity], true) - else - query - end + defp restrict_hashtag_any(query, %{tag: [_ | _] = tags}) do + from( + [_activity, object] in query, + where: + fragment( + """ + EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + AND hashtags_objects.object_id = ? LIMIT 1) + """, + ^tags, + object.id + ) + ) end defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do @@ -803,6 +774,32 @@ defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do defp restrict_hashtag_any(query, _), do: query + defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: [_ | _] = tags_reject}) do + from( + [_activity, object] in query, + where: + fragment( + """ + NOT EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + AND hashtags_objects.object_id = ? 
LIMIT 1) + """, + ^tags_reject, + object.id + ) + ) + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do + restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]}) + end + + defp restrict_hashtag_reject_any(query, _), do: query + defp raise_on_missing_preload do raise "Can't use the child object without preloading!" end diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index 5b9fc061e..04fd1def3 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -249,6 +249,17 @@ test "it fetches the appropriate tag-restricted posts" do limit: 2 }) + fetch_six = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["any1", "any2"], + tag_all: [], + tag_reject: [] + }) + + # Regression test: passing empty lists as filter options shouldn't affect the results + assert fetch_five == fetch_six + [fetch_one, fetch_two, fetch_three, fetch_four, fetch_five] = Enum.map([fetch_one, fetch_two, fetch_three, fetch_four, fetch_five], fn statuses -> Enum.map(statuses, fn s -> Repo.preload(s, object: :hashtags) end) From d1c6dd97aa503ca7c897d67d98fe8c924e113a61 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sun, 7 Feb 2021 22:24:12 +0300 Subject: [PATCH 040/174] [#3213] Partially addressed code review points. migration rollback task changes, hashtags-related config handling tweaks, `hashtags.data` deletion (unused). --- config/description.exs | 20 ++++--- lib/mix/tasks/pleroma/database.ex | 53 ++++++++++--------- lib/pleroma/config.ex | 3 -- lib/pleroma/hashtag.ex | 5 +- .../migrators/hashtags_table_migrator.ex | 4 +- lib/pleroma/web/activity_pub/activity_pub.ex | 2 +- .../20201221202251_create_hashtags.exs | 1 - ...201221202252_remove_data_from_hashtags.exs | 15 ++++++ .../web/activity_pub/activity_pub_test.exs | 2 +- 9 files changed, 63 insertions(+), 42 deletions(-) create mode 100644 priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs diff --git a/config/description.exs b/config/description.exs index ed3a534a0..02cdf2ff3 100644 --- a/config/description.exs +++ b/config/description.exs @@ -495,6 +495,20 @@ } ] }, + %{ + group: :pleroma, + key: :database, + type: :group, + description: "Database-related settings", + children: [ + %{ + key: :improved_hashtag_timeline, + type: :keyword, + description: + "If `true`, hashtags will be fetched from `hashtags` table for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when HashtagsTableMigrator completes." + } + ] + }, %{ group: :pleroma, key: :instance, @@ -941,12 +955,6 @@ key: :show_reactions, type: :boolean, description: "Let favourites and emoji reactions be viewed through the API." - }, - %{ - key: :improved_hashtag_timeline, - type: :keyword, - description: - "If `true`, hashtags will be fetched from `hashtags` table for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when HashtagsTableMigrator completes." 
} ] }, diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 30c0d2bf1..7c4f54141 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -20,30 +20,6 @@ defmodule Mix.Tasks.Pleroma.Database do @shortdoc "A collection of database related tasks" @moduledoc File.read!("docs/administration/CLI_tasks/database.md") - # Rolls back a specific migration (leaving subsequent migrations applied) - # Based on https://stackoverflow.com/a/53825840 - def run(["rollback", version]) do - start_pleroma() - - version = String.to_integer(version) - re = ~r/^#{version}_.*\.exs/ - path = Application.app_dir(:pleroma, Path.join(["priv", "repo", "migrations"])) - - result = - with {:find, "" <> file} <- {:find, Enum.find(File.ls!(path), &String.match?(&1, re))}, - {:compile, [{mod, _} | _]} <- {:compile, Code.compile_file(Path.join(path, file))}, - {:rollback, :ok} <- {:rollback, Ecto.Migrator.down(Repo, version, mod)} do - {:ok, "Reversed migration: #{file}"} - else - {:find, _} -> {:error, "No migration found with version prefix: #{version}"} - {:compile, e} -> {:error, "Problem compiling migration module: #{inspect(e)}"} - {:rollback, e} -> {:error, "Problem reversing migration: #{inspect(e)}"} - e -> {:error, "Something unexpected happened: #{inspect(e)}"} - end - - IO.inspect(result) - end - def run(["remove_embedded_objects" | args]) do {options, [], []} = OptionParser.parse( @@ -194,4 +170,33 @@ def run(["ensure_expiration"]) do end) |> Stream.run() end + + # Rolls back a specific migration (leaving subsequent migrations applied). + # WARNING: imposes a risk of unrecoverable data loss — proceed at your own responsibility. + # Based on https://stackoverflow.com/a/53825840 + def run(["rollback", version]) do + prompt = "SEVERE WARNING: this operation may result in unrecoverable data loss. Continue?" 
+ + if shell_prompt(prompt, "n") in ~w(Yn Y y) do + {_, result, _} = + Ecto.Migrator.with_repo(Pleroma.Repo, fn repo -> + version = String.to_integer(version) + re = ~r/^#{version}_.*\.exs/ + path = Ecto.Migrator.migrations_path(repo) + + with {:find, "" <> file} <- {:find, Enum.find(File.ls!(path), &String.match?(&1, re))}, + {:compile, [{mod, _} | _]} <- {:compile, Code.compile_file(Path.join(path, file))}, + {:rollback, :ok} <- {:rollback, Ecto.Migrator.down(repo, version, mod)} do + {:ok, "Reversed migration: #{file}"} + else + {:find, _} -> {:error, "No migration found with version prefix: #{version}"} + {:compile, e} -> {:error, "Problem compiling migration module: #{inspect(e)}"} + {:rollback, e} -> {:error, "Problem reversing migration: #{inspect(e)}"} + e -> {:error, "Something unexpected happened: #{inspect(e)}"} + end + end) + + IO.inspect(result) + end + end end diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex index 0a6ac0ad0..f17e14128 100644 --- a/lib/pleroma/config.ex +++ b/lib/pleroma/config.ex @@ -96,9 +96,6 @@ def restrict_unauthenticated_access?(resource, kind) do end end - def improved_hashtag_timeline_path, do: [:instance, :improved_hashtag_timeline] - def improved_hashtag_timeline, do: get(improved_hashtag_timeline_path()) - def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], []) def oauth_consumer_enabled?, do: oauth_consumer_strategies() != [] diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex index b05927563..9e4c6c894 100644 --- a/lib/pleroma/hashtag.ex +++ b/lib/pleroma/hashtag.ex @@ -10,11 +10,8 @@ defmodule Pleroma.Hashtag do alias Pleroma.Hashtag alias Pleroma.Repo - @derive {Jason.Encoder, only: [:data]} - schema "hashtags" do field(:name, :string) - field(:data, :map, default: %{}) many_to_many(:objects, Pleroma.Object, join_through: "hashtags_objects", on_replace: :delete) @@ -50,7 +47,7 @@ def get_or_create_by_names(names) when is_list(names) do def changeset(%Hashtag{} = struct, params) do struct - |> cast(params, [:name, :data]) + |> cast(params, [:name]) |> update_change(:name, &String.downcase/1) |> validate_required([:name]) |> unique_constraint(:name) diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 07b42a7f4..9a036e0b2 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -239,11 +239,11 @@ defp handle_success(data_migration) do data_migration.feature_lock -> :noop - not is_nil(Config.improved_hashtag_timeline()) -> + not is_nil(Config.get([:database, :improved_hashtag_timeline])) -> :noop true -> - Config.put(Config.improved_hashtag_timeline_path(), true) + Config.put([:database, :improved_hashtag_timeline], true) :ok end end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 573b4243c..7ac18e5c5 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -1227,7 +1227,7 @@ def fetch_activities_query(recipients, opts \\ %{}) do |> exclude_invisible_actors(opts) |> exclude_visibility(opts) - if Config.improved_hashtag_timeline() do + if Config.get([:database, :improved_hashtag_timeline]) do query |> restrict_hashtag_any(opts) |> restrict_hashtag_all(opts) diff --git a/priv/repo/migrations/20201221202251_create_hashtags.exs b/priv/repo/migrations/20201221202251_create_hashtags.exs index afc522002..8d2e9ae66 100644 --- 
a/priv/repo/migrations/20201221202251_create_hashtags.exs +++ b/priv/repo/migrations/20201221202251_create_hashtags.exs @@ -4,7 +4,6 @@ defmodule Pleroma.Repo.Migrations.CreateHashtags do def change do create_if_not_exists table(:hashtags) do add(:name, :citext, null: false) - add(:data, :map, default: %{}) timestamps() end diff --git a/priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs b/priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs new file mode 100644 index 000000000..0442c3b87 --- /dev/null +++ b/priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs @@ -0,0 +1,15 @@ +defmodule Pleroma.Repo.Migrations.RemoveDataFromHashtags do + use Ecto.Migration + + def up do + alter table(:hashtags) do + remove_if_exists(:data, :map) + end + end + + def down do + alter table(:hashtags) do + add_if_not_exists(:data, :map, default: %{}) + end + end +end diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index 04fd1def3..bab5a199c 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -221,7 +221,7 @@ test "it fetches the appropriate tag-restricted posts" do {:ok, status_five} = CommonAPI.post(user, %{status: ". #any2 #any1"}) for hashtag_timeline_strategy <- [true, false] do - clear_config([:instance, :improved_hashtag_timeline], hashtag_timeline_strategy) + clear_config([:database, :improved_hashtag_timeline], hashtag_timeline_strategy) fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) From a996ab46a54acbfa7a19da3eae12c78ed6466a1a Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Thu, 11 Feb 2021 19:30:21 +0300 Subject: [PATCH 041/174] [#3213] Reorganized hashtags cleanup. Transaction-wrapped Hashtag.get_or_create_by_names/1. Misc. improvements. 
--- config/config.exs | 1 - config/description.exs | 1 - lib/pleroma/hashtag.ex | 58 +++++++++++++--- .../migrators/hashtags_table_migrator.ex | 68 +++++++++++++------ lib/pleroma/object.ex | 27 +++++--- .../workers/cron/hashtags_cleanup_worker.ex | 57 ---------------- 6 files changed, 112 insertions(+), 100 deletions(-) delete mode 100644 lib/pleroma/workers/cron/hashtags_cleanup_worker.ex diff --git a/config/config.exs b/config/config.exs index 36c609936..91888c512 100644 --- a/config/config.exs +++ b/config/config.exs @@ -560,7 +560,6 @@ ], plugins: [Oban.Plugins.Pruner], crontab: [ - {"0 1 * * *", Pleroma.Workers.Cron.HashtagsCleanupWorker}, {"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker}, {"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker} ] diff --git a/config/description.exs b/config/description.exs index 02cdf2ff3..b2f301e2d 100644 --- a/config/description.exs +++ b/config/description.exs @@ -1964,7 +1964,6 @@ type: {:list, :tuple}, description: "Settings for cron background jobs", suggestions: [ - {"0 1 * * *", Pleroma.Workers.Cron.HashtagsCleanupWorker}, {"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker}, {"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker} ] diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex index 9e4c6c894..de52c4dae 100644 --- a/lib/pleroma/hashtag.ex +++ b/lib/pleroma/hashtag.ex @@ -6,14 +6,17 @@ defmodule Pleroma.Hashtag do use Ecto.Schema import Ecto.Changeset + import Ecto.Query + alias Ecto.Multi alias Pleroma.Hashtag + alias Pleroma.Object alias Pleroma.Repo schema "hashtags" do field(:name, :string) - many_to_many(:objects, Pleroma.Object, join_through: "hashtags_objects", on_replace: :delete) + many_to_many(:objects, Object, join_through: "hashtags_objects", on_replace: :delete) timestamps() end @@ -34,15 +37,27 @@ def get_or_create_by_name(name) when is_bitstring(name) do end def get_or_create_by_names(names) when is_list(names) do - Enum.reduce_while(names, {:ok, []}, fn name, {:ok, list} -> - case get_or_create_by_name(name) do - {:ok, %Hashtag{} = hashtag} -> - {:cont, {:ok, list ++ [hashtag]}} + timestamp = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second) - error -> - {:halt, error} - end - end) + structs = + Enum.map(names, fn name -> + %Hashtag{} + |> changeset(%{name: name}) + |> Map.get(:changes) + |> Map.merge(%{inserted_at: timestamp, updated_at: timestamp}) + end) + + with {:ok, %{query_op: hashtags}} <- + Multi.new() + |> Multi.insert_all(:insert_all_op, Hashtag, structs, on_conflict: :nothing) + |> Multi.run(:query_op, fn _repo, _changes -> + {:ok, Repo.all(from(ht in Hashtag, where: ht.name in ^names))} + end) + |> Repo.transaction() do + {:ok, hashtags} + else + {:error, _name, value, _changes_so_far} -> {:error, value} + end end def changeset(%Hashtag{} = struct, params) do @@ -52,4 +67,29 @@ def changeset(%Hashtag{} = struct, params) do |> validate_required([:name]) |> unique_constraint(:name) end + + def unlink(%Object{id: object_id}) do + with {_, hashtag_ids} <- + from(hto in "hashtags_objects", + where: hto.object_id == ^object_id, + select: hto.hashtag_id + ) + |> Repo.delete_all() do + delete_unreferenced(hashtag_ids) + end + end + + @delete_unreferenced_query """ + DELETE FROM hashtags WHERE id IN + (SELECT hashtags.id FROM hashtags + LEFT OUTER JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id + WHERE hashtags_objects.hashtag_id IS NULL AND hashtags.id = ANY($1)); + """ + + def delete_unreferenced(ids) do + with {:ok, %{num_rows: deleted_count}} <- 
Repo.query(@delete_unreferenced_query, [ids]) do + {:ok, deleted_count} + end + end end diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 9a036e0b2..c53f6be12 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -74,16 +74,15 @@ def handle_continue(:init_state, _state) do def handle_info(:migrate_hashtags, state) do State.clear() - data_migration = data_migration() + update_status(:running) + put_stat(:started_at, NaiveDateTime.utc_now()) + data_migration = data_migration() persistent_data = Map.take(data_migration.data, ["max_processed_id"]) {:ok, data_migration} = DataMigration.update(data_migration, %{state: :running, data: persistent_data}) - update_status(:running) - put_stat(:started_at, NaiveDateTime.utc_now()) - Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") max_processed_id = data_migration.data["max_processed_id"] || 0 @@ -137,6 +136,8 @@ def handle_info(:migrate_hashtags, state) do |> Stream.run() with 0 <- failures_count(data_migration.id) do + _ = delete_non_create_activities_hashtags() + {:ok, data_migration} = DataMigration.update_state(data_migration, :complete) handle_success(data_migration) @@ -150,9 +151,37 @@ def handle_info(:migrate_hashtags, state) do {:noreply, state} end + @hashtags_objects_cleanup_query """ + DELETE FROM hashtags_objects WHERE object_id IN + (SELECT DISTINCT objects.id FROM objects + JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities + ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = + (objects.data->>'id') + AND activities.data->>'type' = 'Create' + WHERE activities.id IS NULL); + """ + + @hashtags_cleanup_query """ + DELETE FROM hashtags WHERE id IN + (SELECT hashtags.id FROM hashtags + LEFT OUTER JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id + WHERE hashtags_objects.hashtag_id IS NULL); + """ + + def delete_non_create_activities_hashtags do + {:ok, %{num_rows: hashtags_objects_count}} = + Repo.query(@hashtags_objects_cleanup_query, [], timeout: :infinity) + + {:ok, %{num_rows: hashtags_count}} = + Repo.query(@hashtags_cleanup_query, [], timeout: :infinity) + + {:ok, hashtags_objects_count, hashtags_count} + end + defp query do # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) - # Note: not checking activity type; HashtagsCleanupWorker should clean up unused records later + # Note: not checking activity type, expecting remove_non_create_objects_hashtags/_ to clean up from( object in Object, where: @@ -182,25 +211,20 @@ defp transfer_object_hashtags(object) do defp transfer_object_hashtags(object, hashtags) do Repo.transaction(fn -> with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do - for hashtag_record <- hashtag_records do - with {:ok, _} <- - Repo.query( - "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);", - [hashtag_record.id, object.id] - ) do - nil - else - {:error, e} -> - error = - "ERROR: could not link object #{object.id} and hashtag " <> - "#{hashtag_record.id}: #{inspect(e)}" + maps = Enum.map(hashtag_records, &%{hashtag_id: &1.id, object_id: object.id}) + expected_rows = length(hashtag_records) - Logger.error(error) - Repo.rollback(object.id) - end + with {^expected_rows, _} <- Repo.insert_all("hashtags_objects", maps) do + object.id + else + e -> + error = + "ERROR when inserting 
#{expected_rows} hashtags_objects " <> + "for object #{object.id}: #{inspect(e)}" + + Logger.error(error) + Repo.rollback(object.id) end - - object.id else e -> error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}" diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 52b77e41c..3ba749d1a 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -62,27 +62,30 @@ def change(struct, params \\ %{}) do |> cast(params, [:data]) |> validate_required([:data]) |> unique_constraint(:ap_id, name: :objects_unique_apid_index) + # Expecting `maybe_handle_hashtags_change/1` to run last: |> maybe_handle_hashtags_change(struct) end - # Note: not checking activity type; HashtagsCleanupWorker should clean up unused records later + # Note: not checking activity type (assuming non-legacy objects are associated with Create act.) defp maybe_handle_hashtags_change(changeset, struct) do - with data_hashtags_change = get_change(changeset, :data), - true <- hashtags_changed?(struct, data_hashtags_change), + with %Ecto.Changeset{valid?: true} <- changeset, + data_hashtags_change = get_change(changeset, :data), + {_, true} <- {:changed, hashtags_changed?(struct, data_hashtags_change)}, {:ok, hashtag_records} <- data_hashtags_change |> object_data_hashtags() |> Hashtag.get_or_create_by_names() do put_assoc(changeset, :hashtags, hashtag_records) else - false -> + %{valid?: false} -> changeset - {:error, hashtag_changeset} -> - failed_hashtag = get_field(hashtag_changeset, :name) + {:changed, false} -> + changeset + {:error, _} -> validate_change(changeset, :data, fn _, _ -> - [data: "error referencing hashtag: #{failed_hashtag}"] + [data: "error referencing hashtags"] end) end end @@ -221,9 +224,13 @@ def make_tombstone(%Object{data: %{"id" => id, "type" => type}}, deleted \\ Date def swap_object_with_tombstone(object) do tombstone = make_tombstone(object) - object - |> Object.change(%{data: tombstone}) - |> Repo.update() + with {:ok, object} <- + object + |> Object.change(%{data: tombstone}) + |> Repo.update() do + Hashtag.unlink(object) + {:ok, object} + end end def delete(%Object{data: %{"id" => id}} = object) do diff --git a/lib/pleroma/workers/cron/hashtags_cleanup_worker.ex b/lib/pleroma/workers/cron/hashtags_cleanup_worker.ex deleted file mode 100644 index b319067ca..000000000 --- a/lib/pleroma/workers/cron/hashtags_cleanup_worker.ex +++ /dev/null @@ -1,57 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Workers.Cron.HashtagsCleanupWorker do - @moduledoc """ - The worker to clean up unused hashtags_objects and hashtags. 
- """ - - use Oban.Worker, queue: "hashtags_cleanup" - - alias Pleroma.Repo - - require Logger - - @hashtags_objects_query """ - DELETE FROM hashtags_objects WHERE object_id IN - (SELECT DISTINCT objects.id FROM objects - JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities - ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = - (objects.data->>'id') - AND activities.data->>'type' = 'Create' - WHERE activities.id IS NULL); - """ - - @hashtags_query """ - DELETE FROM hashtags WHERE id IN - (SELECT hashtags.id FROM hashtags - LEFT OUTER JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id - WHERE hashtags_objects.hashtag_id IS NULL AND hashtags.inserted_at < $1); - """ - - @impl Oban.Worker - def perform(_job) do - Logger.info("Cleaning up unused `hashtags_objects` records...") - - {:ok, %{num_rows: hashtags_objects_count}} = - Repo.query(@hashtags_objects_query, [], timeout: :infinity) - - Logger.info("Deleted #{hashtags_objects_count} unused `hashtags_objects` records.") - - Logger.info("Cleaning up unused `hashtags` records...") - - # Note: ignoring recently created hashtags since references are added after hashtag is created - {:ok, %{num_rows: hashtags_count}} = - Repo.query(@hashtags_query, [NaiveDateTime.add(NaiveDateTime.utc_now(), -3600 * 24)], - timeout: :infinity - ) - - Logger.info("Deleted #{hashtags_count} unused `hashtags` records.") - - Logger.info("HashtagsCleanupWorker complete.") - - :ok - end -end From 349b8b0f4fb1c2b86f913e1840f15c052ff43c24 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Sat, 13 Feb 2021 22:01:11 +0300 Subject: [PATCH 042/174] [#3213] `rescue` around potentially-raising `Repo.insert_all/_` calls. Misc. improvements (docs etc.). --- CHANGELOG.md | 2 +- config/config.exs | 1 - config/description.exs | 14 +++++++++ docs/configuration/cheatsheet.md | 6 ++++ lib/pleroma/hashtag.ex | 29 +++++++++++-------- .../migrators/hashtags_table_migrator.ex | 21 +++++++++----- 6 files changed, 51 insertions(+), 22 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 23567a97c..a7b5f6ac0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,7 +33,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Admin API: Reports now ordered by newest </details> -- Extracted object hashtags into separate table in order to improve hashtag timeline performance (via background migration in `Pleroma.Migrators.HashtagsTableMigrator`). +- Improved hashtag timeline performance (requires a background migration). ### Added diff --git a/config/config.exs b/config/config.exs index 8a7c466d3..0fbca06f3 100644 --- a/config/config.exs +++ b/config/config.exs @@ -556,7 +556,6 @@ remote_fetcher: 2, attachments_cleanup: 1, new_users_digest: 1, - hashtags_cleanup: 1, mute_expire: 5 ], plugins: [Oban.Plugins.Pruner], diff --git a/config/description.exs b/config/description.exs index 2e96024f5..29fc5fbd4 100644 --- a/config/description.exs +++ b/config/description.exs @@ -473,6 +473,20 @@ } ] }, + %{ + group: :pleroma, + key: :populate_hashtags_table, + type: :group, + description: "`populate_hashtags_table` background migration settings", + children: [ + %{ + key: :sleep_interval_ms, + type: :integer, + description: + "Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be keep default on most instances)." 
+ } + ] + }, %{ group: :pleroma, key: :instance, diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index ad5768465..68a5a3c7f 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -65,6 +65,12 @@ To add configuration to your config file, you can copy it from the base config. * `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`). * `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day). +## :database +* `improved_hashtag_timeline`: If `true`, hashtags will be fetched from `hashtags` table for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when `HashtagsTableMigrator` completes. + +## Background migrations +* `populate_hashtags_table/sleep_interval_ms`: Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be keep default on most instances). + ## Welcome * `direct_message`: - welcome message sent as a direct message. * `enabled`: Enables the send a direct message to a newly registered user. Defaults to `false`. diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex index de52c4dae..0d6a4d09e 100644 --- a/lib/pleroma/hashtag.ex +++ b/lib/pleroma/hashtag.ex @@ -47,16 +47,20 @@ def get_or_create_by_names(names) when is_list(names) do |> Map.merge(%{inserted_at: timestamp, updated_at: timestamp}) end) - with {:ok, %{query_op: hashtags}} <- - Multi.new() - |> Multi.insert_all(:insert_all_op, Hashtag, structs, on_conflict: :nothing) - |> Multi.run(:query_op, fn _repo, _changes -> - {:ok, Repo.all(from(ht in Hashtag, where: ht.name in ^names))} - end) - |> Repo.transaction() do - {:ok, hashtags} - else - {:error, _name, value, _changes_so_far} -> {:error, value} + try do + with {:ok, %{query_op: hashtags}} <- + Multi.new() + |> Multi.insert_all(:insert_all_op, Hashtag, structs, on_conflict: :nothing) + |> Multi.run(:query_op, fn _repo, _changes -> + {:ok, Repo.all(from(ht in Hashtag, where: ht.name in ^names))} + end) + |> Repo.transaction() do + {:ok, hashtags} + else + {:error, _name, value, _changes_so_far} -> {:error, value} + end + rescue + e -> {:error, e} end end @@ -74,8 +78,9 @@ def unlink(%Object{id: object_id}) do where: hto.object_id == ^object_id, select: hto.hashtag_id ) - |> Repo.delete_all() do - delete_unreferenced(hashtag_ids) + |> Repo.delete_all(), + {:ok, unreferenced_count} <- delete_unreferenced(hashtag_ids) do + {:ok, length(hashtag_ids), unreferenced_count} end end diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index c53f6be12..432c3401a 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -214,15 +214,20 @@ defp transfer_object_hashtags(object, hashtags) do maps = Enum.map(hashtag_records, &%{hashtag_id: &1.id, object_id: object.id}) expected_rows = length(hashtag_records) - with {^expected_rows, _} <- Repo.insert_all("hashtags_objects", maps) do - object.id - else - e -> - error = - "ERROR when inserting #{expected_rows} hashtags_objects " <> - "for object #{object.id}: #{inspect(e)}" + base_error = + "ERROR when inserting #{expected_rows} hashtags_objects for obj. 
#{object.id}" - Logger.error(error) + try do + with {^expected_rows, _} <- Repo.insert_all("hashtags_objects", maps) do + object.id + else + e -> + Logger.error("#{base_error}: #{inspect(e)}") + Repo.rollback(object.id) + end + rescue + e -> + Logger.error("#{base_error}: #{inspect(e)}") Repo.rollback(object.id) end else From 1dac7d14623f36744953a523650211540d90d1fc Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Mon, 15 Feb 2021 21:13:14 +0300 Subject: [PATCH 043/174] [#3213] Fixed `hashtags.name` lookup (must use `citext` type to do index scan). Fixed embedded hashtags lookup (lowercasing), adjusted tests. --- lib/pleroma/hashtag.ex | 8 +++++-- lib/pleroma/web/activity_pub/activity_pub.ex | 22 ++++++++++++++----- .../web/activity_pub/activity_pub_test.exs | 18 +++++++-------- 3 files changed, 31 insertions(+), 17 deletions(-) diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex index 0d6a4d09e..a6d033816 100644 --- a/lib/pleroma/hashtag.ex +++ b/lib/pleroma/hashtag.ex @@ -22,7 +22,9 @@ defmodule Pleroma.Hashtag do end def get_by_name(name) do - Repo.get_by(Hashtag, name: name) + from(h in Hashtag) + |> where([h], fragment("name = ?::citext", ^String.downcase(name))) + |> Repo.one() end def get_or_create_by_name(name) when is_bitstring(name) do @@ -37,6 +39,7 @@ def get_or_create_by_name(name) when is_bitstring(name) do end def get_or_create_by_names(names) when is_list(names) do + names = Enum.map(names, &String.downcase/1) timestamp = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second) structs = @@ -52,7 +55,8 @@ def get_or_create_by_names(names) when is_list(names) do Multi.new() |> Multi.insert_all(:insert_all_op, Hashtag, structs, on_conflict: :nothing) |> Multi.run(:query_op, fn _repo, _changes -> - {:ok, Repo.all(from(ht in Hashtag, where: ht.name in ^names))} + {:ok, + Repo.all(from(ht in Hashtag, where: ht.name in fragment("?::citext[]", ^names)))} end) |> Repo.transaction() do {:ok, hashtags} diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 9623e635a..e012f2779 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -698,6 +698,8 @@ defp restrict_embedded_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) end defp restrict_embedded_tag_all(query, %{tag_all: [_ | _] = tag_all}) do + tag_all = Enum.map(tag_all, &String.downcase/1) + from( [_activity, object] in query, where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all) @@ -714,10 +716,12 @@ defp restrict_embedded_tag_any(_query, %{tag: _tag, skip_preload: true}) do raise_on_missing_preload() end - defp restrict_embedded_tag_any(query, %{tag: [_ | _] = tag}) do + defp restrict_embedded_tag_any(query, %{tag: [_ | _] = tag_any}) do + tag_any = Enum.map(tag_any, &String.downcase/1) + from( [_activity, object] in query, - where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag) + where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag_any) ) end @@ -732,6 +736,8 @@ defp restrict_embedded_tag_reject_any(_query, %{tag_reject: _tag_reject, skip_pr end defp restrict_embedded_tag_reject_any(query, %{tag_reject: [_ | _] = tag_reject}) do + tag_reject = Enum.map(tag_reject, &String.downcase/1) + from( [_activity, object] in query, where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) @@ -749,6 +755,10 @@ defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do raise_on_missing_preload() end + defp restrict_hashtag_all(query, 
%{tag_all: [single_tag]}) do + restrict_hashtag_any(query, %{tag: single_tag}) + end + defp restrict_hashtag_all(query, %{tag_all: [_ | _] = tags}) do from( [_activity, object] in query, @@ -756,7 +766,7 @@ defp restrict_hashtag_all(query, %{tag_all: [_ | _] = tags}) do fragment( """ (SELECT array_agg(hashtags.name) FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?::citext[]) AND hashtags_objects.object_id = ?) @> ? """, ^tags, @@ -767,7 +777,7 @@ defp restrict_hashtag_all(query, %{tag_all: [_ | _] = tags}) do end defp restrict_hashtag_all(query, %{tag_all: tag}) when is_binary(tag) do - restrict_hashtag_any(query, %{tag: tag}) + restrict_hashtag_all(query, %{tag_all: [tag]}) end defp restrict_hashtag_all(query, _), do: query @@ -783,7 +793,7 @@ defp restrict_hashtag_any(query, %{tag: [_ | _] = tags}) do fragment( """ EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?::citext[]) AND hashtags_objects.object_id = ? LIMIT 1) """, ^tags, @@ -809,7 +819,7 @@ defp restrict_hashtag_reject_any(query, %{tag_reject: [_ | _] = tags_reject}) do fragment( """ NOT EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?::citext[]) AND hashtags_objects.object_id = ? LIMIT 1) """, ^tags_reject, diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index bab5a199c..c41c8a5dd 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -213,24 +213,24 @@ test "works for guppe actors" do test "it fetches the appropriate tag-restricted posts" do user = insert(:user) - {:ok, status_one} = CommonAPI.post(user, %{status: ". #test"}) + {:ok, status_one} = CommonAPI.post(user, %{status: ". #TEST"}) {:ok, status_two} = CommonAPI.post(user, %{status: ". #essais"}) - {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #reject"}) + {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #Reject"}) - {:ok, status_four} = CommonAPI.post(user, %{status: ". #any1 #any2"}) - {:ok, status_five} = CommonAPI.post(user, %{status: ". #any2 #any1"}) + {:ok, status_four} = CommonAPI.post(user, %{status: ". #Any1 #any2"}) + {:ok, status_five} = CommonAPI.post(user, %{status: ". 
#Any2 #any1"}) for hashtag_timeline_strategy <- [true, false] do clear_config([:database, :improved_hashtag_timeline], hashtag_timeline_strategy) fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) - fetch_two = ActivityPub.fetch_activities([], %{type: "Create", tag: ["test", "essais"]}) + fetch_two = ActivityPub.fetch_activities([], %{type: "Create", tag: ["TEST", "essais"]}) fetch_three = ActivityPub.fetch_activities([], %{ type: "Create", - tag: ["test", "essais"], + tag: ["test", "Essais"], tag_reject: ["reject"] }) @@ -238,21 +238,21 @@ test "it fetches the appropriate tag-restricted posts" do ActivityPub.fetch_activities([], %{ type: "Create", tag: ["test"], - tag_all: ["test", "reject"] + tag_all: ["test", "REJECT"] }) # Testing that deduplication (if needed) is done on DB (not Ecto) level; :limit is important fetch_five = ActivityPub.fetch_activities([], %{ type: "Create", - tag: ["any1", "any2"], + tag: ["ANY1", "any2"], limit: 2 }) fetch_six = ActivityPub.fetch_activities([], %{ type: "Create", - tag: ["any1", "any2"], + tag: ["any1", "Any2"], tag_all: [], tag_reject: [] }) From 938823c73040f6b55896581daf5baf732f859f02 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Tue, 16 Feb 2021 23:14:15 +0300 Subject: [PATCH 044/174] [#3213] HashtagsTableMigrator state management refactoring & improvements (proper stats serialization etc.). --- lib/pleroma/data_migration.ex | 15 ++-- .../migrators/hashtags_table_migrator.ex | 87 ++++++++----------- .../hashtags_table_migrator/state.ex | 87 ++++++++++++++++--- 3 files changed, 122 insertions(+), 67 deletions(-) diff --git a/lib/pleroma/data_migration.ex b/lib/pleroma/data_migration.ex index 64fa155ff..1377af16e 100644 --- a/lib/pleroma/data_migration.ex +++ b/lib/pleroma/data_migration.ex @@ -10,6 +10,7 @@ defmodule Pleroma.DataMigration do alias Pleroma.Repo import Ecto.Changeset + import Ecto.Query schema "data_migrations" do field(:name, :string) @@ -28,14 +29,12 @@ def changeset(data_migration, params \\ %{}) do |> unique_constraint(:name) end - def update(data_migration, params \\ %{}) do - data_migration - |> changeset(params) - |> Repo.update() - end - - def update_state(data_migration, new_state) do - update(data_migration, %{state: new_state}) + def update_one_by_id(id, params \\ %{}) do + with {1, _} <- + from(dm in DataMigration, where: dm.id == ^id) + |> Repo.update_all(set: params) do + :ok + end end def get_by_name(name) do diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 432c3401a..a226d9d29 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -11,16 +11,16 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do alias __MODULE__.State alias Pleroma.Config - alias Pleroma.DataMigration alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Repo - defdelegate state(), to: State, as: :get - defdelegate put_stat(key, value), to: State, as: :put - defdelegate increment_stat(key, increment), to: State, as: :increment + defdelegate data_migration(), to: State - defdelegate data_migration(), to: DataMigration, as: :populate_hashtags_table + defdelegate state(), to: State + defdelegate get_stat(key, value), to: State, as: :get_data_key + defdelegate put_stat(key, value), to: State, as: :put_data_key + defdelegate increment_stat(key, increment), to: State, as: :increment_data_key @reg_name {:global, __MODULE__} @@ -45,7 +45,7 @@ def init(_) do def 
handle_continue(:init_state, _state) do {:ok, _} = State.start_link(nil) - update_status(:init) + update_status(:pending) data_migration = data_migration() manual_migrations = Config.get([:instance, :manual_data_migrations], []) @@ -55,13 +55,13 @@ def handle_continue(:init_state, _state) do update_status(:noop) is_nil(data_migration) -> - update_status(:halt, "Data migration does not exist.") + update_status(:failed, "Data migration does not exist.") data_migration.state == :manual or data_migration.name in manual_migrations -> - update_status(:noop, "Data migration is in manual execution state.") + update_status(:manual, "Data migration is in manual execution state.") data_migration.state == :complete -> - handle_success(data_migration) + on_complete(data_migration) true -> send(self(), :migrate_hashtags) @@ -72,20 +72,15 @@ def handle_continue(:init_state, _state) do @impl true def handle_info(:migrate_hashtags, state) do - State.clear() + State.reinit() update_status(:running) put_stat(:started_at, NaiveDateTime.utc_now()) - data_migration = data_migration() - persistent_data = Map.take(data_migration.data, ["max_processed_id"]) + %{id: data_migration_id} = data_migration() + max_processed_id = get_stat(:max_processed_id, 0) - {:ok, data_migration} = - DataMigration.update(data_migration, %{state: :running, data: persistent_data}) - - Logger.info("Starting transferring object embedded hashtags to `hashtags` table...") - - max_processed_id = data_migration.data["max_processed_id"] || 0 + Logger.info("Transferring embedded hashtags to `hashtags` (from oid: #{max_processed_id})...") query() |> where([object], object.id > ^max_processed_id) @@ -104,7 +99,7 @@ def handle_info(:migrate_hashtags, state) do Repo.query( "INSERT INTO data_migration_failed_ids(data_migration_id, record_id) " <> "VALUES ($1, $2) ON CONFLICT DO NOTHING;", - [data_migration.id, failed_id] + [data_migration_id, failed_id] ) end @@ -112,7 +107,7 @@ def handle_info(:migrate_hashtags, state) do Repo.query( "DELETE FROM data_migration_failed_ids " <> "WHERE data_migration_id = $1 AND record_id = ANY($2)", - [data_migration.id, object_ids -- failed_ids] + [data_migration_id, object_ids -- failed_ids] ) max_object_id = Enum.at(object_ids, -1) @@ -120,14 +115,8 @@ def handle_info(:migrate_hashtags, state) do put_stat(:max_processed_id, max_object_id) increment_stat(:processed_count, length(object_ids)) increment_stat(:failed_count, length(failed_ids)) - - put_stat( - :records_per_second, - state()[:processed_count] / - Enum.max([NaiveDateTime.diff(NaiveDateTime.utc_now(), state()[:started_at]), 1]) - ) - - persist_stats(data_migration) + put_stat(:records_per_second, records_per_second()) + _ = State.persist_to_db() # A quick and dirty approach to controlling the load this background migration imposes sleep_interval = Config.get([:populate_hashtags_table, :sleep_interval_ms], 0) @@ -135,22 +124,25 @@ def handle_info(:migrate_hashtags, state) do end) |> Stream.run() - with 0 <- failures_count(data_migration.id) do + with 0 <- failures_count(data_migration_id) do _ = delete_non_create_activities_hashtags() - - {:ok, data_migration} = DataMigration.update_state(data_migration, :complete) - - handle_success(data_migration) + set_complete() else _ -> - _ = DataMigration.update_state(data_migration, :failed) - update_status(:failed, "Please check data_migration_failed_ids records.") end {:noreply, state} end + defp records_per_second do + get_stat(:processed_count, 0) / Enum.max([running_time(), 1]) + end + + defp running_time do 
+ NaiveDateTime.diff(NaiveDateTime.utc_now(), get_stat(:started_at, NaiveDateTime.utc_now())) + end + @hashtags_objects_cleanup_query """ DELETE FROM hashtags_objects WHERE object_id IN (SELECT DISTINCT objects.id FROM objects @@ -169,6 +161,10 @@ def handle_info(:migrate_hashtags, state) do WHERE hashtags_objects.hashtag_id IS NULL); """ + @doc """ + Deletes `hashtags_objects` for legacy objects not asoociated with Create activity. + Also deletes unreferenced `hashtags` records (might occur after deletion of `hashtags_objects`). + """ def delete_non_create_activities_hashtags do {:ok, %{num_rows: hashtags_objects_count}} = Repo.query(@hashtags_objects_cleanup_query, [], timeout: :infinity) @@ -256,14 +252,7 @@ def count(force \\ false, timeout \\ :infinity) do end end - defp persist_stats(data_migration) do - runner_state = Map.drop(state(), [:status]) - _ = DataMigration.update(data_migration, %{data: runner_state}) - end - - defp handle_success(data_migration) do - update_status(:complete) - + defp on_complete(data_migration) do cond do data_migration.feature_lock -> :noop @@ -321,18 +310,18 @@ def force_continue do end def force_restart do - {:ok, _} = DataMigration.update(data_migration(), %{state: :pending, data: %{}}) + :ok = State.reset() force_continue() end - def force_complete do - {:ok, data_migration} = DataMigration.update_state(data_migration(), :complete) - - handle_success(data_migration) + def set_complete do + update_status(:complete) + _ = State.persist_to_db() + on_complete(data_migration()) end defp update_status(status, message \\ nil) do - put_stat(:status, status) + put_stat(:state, status) put_stat(:message, message) end end diff --git a/lib/pleroma/migrators/hashtags_table_migrator/state.ex b/lib/pleroma/migrators/hashtags_table_migrator/state.ex index 901563426..ed9848824 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator/state.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator/state.ex @@ -5,31 +5,98 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator.State do use Agent - @init_state %{} + alias Pleroma.DataMigration + + defdelegate data_migration(), to: DataMigration, as: :populate_hashtags_table + @reg_name {:global, __MODULE__} def start_link(_) do - Agent.start_link(fn -> @init_state end, name: @reg_name) + Agent.start_link(fn -> load_state_from_db() end, name: @reg_name) end - def clear do - Agent.update(@reg_name, fn _state -> @init_state end) + defp load_state_from_db do + data_migration = data_migration() + + data = + if data_migration do + Map.new(data_migration.data, fn {k, v} -> {String.to_atom(k), v} end) + else + %{} + end + + %{ + data_migration_id: data_migration && data_migration.id, + data: data + } end - def get do + def persist_to_db do + %{data_migration_id: data_migration_id, data: data} = state() + + if data_migration_id do + DataMigration.update_one_by_id(data_migration_id, data: data) + else + {:error, :nil_data_migration_id} + end + end + + def reset do + %{data_migration_id: data_migration_id} = state() + + with false <- is_nil(data_migration_id), + :ok <- + DataMigration.update_one_by_id(data_migration_id, + state: :pending, + data: %{} + ) do + reinit() + else + true -> {:error, :nil_data_migration_id} + e -> e + end + end + + def reinit do + Agent.update(@reg_name, fn _state -> load_state_from_db() end) + end + + def state do Agent.get(@reg_name, & &1) end - def put(key, value) do + def get_data_key(key, default \\ nil) do + get_in(state(), [:data, key]) || default + end + + def put_data_key(key, value) do + _ = 
persist_non_data_change(key, value) + Agent.update(@reg_name, fn state -> - Map.put(state, key, value) + put_in(state, [:data, key], value) end) end - def increment(key, increment \\ 1) do + def increment_data_key(key, increment \\ 1) do Agent.update(@reg_name, fn state -> - updated_value = (state[key] || 0) + increment - Map.put(state, key, updated_value) + initial_value = get_in(state, [:data, key]) || 0 + updated_value = initial_value + increment + put_in(state, [:data, key], updated_value) end) end + + defp persist_non_data_change(:state, value) do + with true <- get_data_key(:state) != value, + true <- value in Pleroma.DataMigration.State.__valid_values__(), + %{data_migration_id: data_migration_id} when not is_nil(data_migration_id) <- state() do + DataMigration.update_one_by_id(data_migration_id, state: value) + else + false -> :ok + _ -> {:error, :nil_data_migration_id} + end + end + + defp persist_non_data_change(_, _) do + nil + end end From 854ea1aefb5ff4e03e9e9af6e8dd50f66c61c913 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Wed, 17 Feb 2021 09:23:35 +0300 Subject: [PATCH 045/174] [#3213] Fixed `HashtagsTableMigrator.count/1`. --- lib/pleroma/migrators/hashtags_table_migrator.ex | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index a226d9d29..ac17f91cc 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -18,7 +18,8 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do defdelegate data_migration(), to: State defdelegate state(), to: State - defdelegate get_stat(key, value), to: State, as: :get_data_key + defdelegate persist_state(), to: State, as: :persist_to_db + defdelegate get_stat(key, value \\ nil), to: State, as: :get_data_key defdelegate put_stat(key, value), to: State, as: :put_data_key defdelegate increment_stat(key, increment), to: State, as: :increment_data_key @@ -116,7 +117,7 @@ def handle_info(:migrate_hashtags, state) do increment_stat(:processed_count, length(object_ids)) increment_stat(:failed_count, length(failed_ids)) put_stat(:records_per_second, records_per_second()) - _ = State.persist_to_db() + persist_state() # A quick and dirty approach to controlling the load this background migration imposes sleep_interval = Config.get([:populate_hashtags_table, :sleep_interval_ms], 0) @@ -237,17 +238,19 @@ defp transfer_object_hashtags(object, hashtags) do @doc "Approximate count for current iteration (including processed records count)" def count(force \\ false, timeout \\ :infinity) do - stored_count = state()[:count] + stored_count = get_stat(:count) if stored_count && !force do stored_count else - processed_count = state()[:processed_count] || 0 - max_processed_id = data_migration().data["max_processed_id"] || 0 + processed_count = get_stat(:processed_count, 0) + max_processed_id = get_stat(:max_processed_id, 0) query = where(query(), [object], object.id > ^max_processed_id) count = Repo.aggregate(query, :count, :id, timeout: timeout) + processed_count put_stat(:count, count) + persist_state() + count end end @@ -316,7 +319,7 @@ def force_restart do def set_complete do update_status(:complete) - _ = State.persist_to_db() + persist_state() on_complete(data_migration()) end From b981edad8a7d8f27b231bc6164fc0546efbdb646 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Thu, 18 Feb 2021 20:40:10 +0300 
Subject: [PATCH 046/174] [#3213] HashtagsTableMigrator: fault rate allowance to enable the feature (defaults to 1%), counting of affected objects, misc. tweaks. --- config/config.exs | 2 + config/description.exs | 7 ++ docs/configuration/cheatsheet.md | 1 + .../migrators/hashtags_table_migrator.ex | 101 ++++++++++++------ .../hashtags_table_migrator/state.ex | 4 +- 5 files changed, 84 insertions(+), 31 deletions(-) diff --git a/config/config.exs b/config/config.exs index 0fbca06f3..c371c397c 100644 --- a/config/config.exs +++ b/config/config.exs @@ -657,6 +657,8 @@ config :pleroma, :database, rum_enabled: false +config :pleroma, :populate_hashtags_table, fault_rate_allowance: 0.01 + config :pleroma, :env, Mix.env() config :http_signatures, diff --git a/config/description.exs b/config/description.exs index 29fc5fbd4..6ffc71278 100644 --- a/config/description.exs +++ b/config/description.exs @@ -479,6 +479,13 @@ type: :group, description: "`populate_hashtags_table` background migration settings", children: [ + %{ + key: :fault_rate_allowance, + type: :float, + description: + "Max rate of failed objects to actually processed objects in order to enable the feature (any value from 0.0 which tolerates no errors to 1.0 which will enable the feature even if hashtags transfer failed for all records).", + suggestions: [0.01] + }, %{ key: :sleep_interval_ms, type: :integer, diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 68a5a3c7f..6a1031f15 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -70,6 +70,7 @@ To add configuration to your config file, you can copy it from the base config. ## Background migrations * `populate_hashtags_table/sleep_interval_ms`: Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be keep default on most instances). +* `populate_hashtags_table/fault_rate_allowance`: Max rate of failed objects to actually processed objects in order to enable the feature (any value from 0.0 which tolerates no errors to 1.0 which will enable the feature even if hashtags transfer failed for all records). ## Welcome * `direct_message`: - welcome message sent as a direct message. 
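A minimal configuration sketch combining the two `populate_hashtags_table` settings documented above (the values shown are the defaults introduced in this patch; only `sleep_interval_ms` normally needs tuning, on resource-constrained hosts):

```elixir
# Minimal sketch of tuning the background hashtag migration; both keys are
# read by Pleroma.Migrators.HashtagsTableMigrator in the patches above.
import Config

config :pleroma, :populate_hashtags_table,
  # max fraction of failed objects (0.0..1.0) for which the feature is still enabled
  fault_rate_allowance: 0.01,
  # pause between processed chunks, in milliseconds; 0 disables throttling
  sleep_interval_ms: 0
```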
diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index ac17f91cc..45dab8470 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -15,7 +15,8 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do alias Pleroma.Object alias Pleroma.Repo - defdelegate data_migration(), to: State + defdelegate data_migration(), to: Pleroma.DataMigration, as: :populate_hashtags_table + defdelegate data_migration_id(), to: State defdelegate state(), to: State defdelegate persist_state(), to: State, as: :persist_to_db @@ -23,10 +24,13 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do defdelegate put_stat(key, value), to: State, as: :put_data_key defdelegate increment_stat(key, increment), to: State, as: :increment_data_key + @feature_config_path [:database, :improved_hashtag_timeline] @reg_name {:global, __MODULE__} def whereis, do: GenServer.whereis(@reg_name) + def feature_state, do: Config.get(@feature_config_path) + def start_link(_) do case whereis() do nil -> @@ -46,8 +50,6 @@ def init(_) do def handle_continue(:init_state, _state) do {:ok, _} = State.start_link(nil) - update_status(:pending) - data_migration = data_migration() manual_migrations = Config.get([:instance, :manual_data_migrations], []) @@ -56,10 +58,14 @@ def handle_continue(:init_state, _state) do update_status(:noop) is_nil(data_migration) -> - update_status(:failed, "Data migration does not exist.") + message = "Data migration does not exist." + update_status(:failed, message) + Logger.error("#{__MODULE__}: #{message}") data_migration.state == :manual or data_migration.name in manual_migrations -> - update_status(:manual, "Data migration is in manual execution state.") + message = "Data migration is in manual execution or manual fix mode." 
+ update_status(:manual, message) + Logger.warn("#{__MODULE__}: #{message}") data_migration.state == :complete -> on_complete(data_migration) @@ -78,7 +84,7 @@ def handle_info(:migrate_hashtags, state) do update_status(:running) put_stat(:started_at, NaiveDateTime.utc_now()) - %{id: data_migration_id} = data_migration() + data_migration_id = data_migration_id() max_processed_id = get_stat(:max_processed_id, 0) Logger.info("Transferring embedded hashtags to `hashtags` (from oid: #{max_processed_id})...") @@ -89,12 +95,19 @@ def handle_info(:migrate_hashtags, state) do |> Stream.each(fn objects -> object_ids = Enum.map(objects, & &1.id) + results = Enum.map(objects, &transfer_object_hashtags(&1)) + failed_ids = - objects - |> Enum.map(&transfer_object_hashtags(&1)) + results |> Enum.filter(&(elem(&1, 0) == :error)) |> Enum.map(&elem(&1, 1)) + # Count of objects with hashtags (`{:noop, id}` is returned for objects having other AS2 tags) + chunk_affected_count = + results + |> Enum.filter(&(elem(&1, 0) == :ok)) + |> length() + for failed_id <- failed_ids do _ = Repo.query( @@ -116,6 +129,7 @@ def handle_info(:migrate_hashtags, state) do put_stat(:max_processed_id, max_object_id) increment_stat(:processed_count, length(object_ids)) increment_stat(:failed_count, length(failed_ids)) + increment_stat(:affected_count, chunk_affected_count) put_stat(:records_per_second, records_per_second()) persist_state() @@ -125,17 +139,42 @@ def handle_info(:migrate_hashtags, state) do end) |> Stream.run() - with 0 <- failures_count(data_migration_id) do - _ = delete_non_create_activities_hashtags() - set_complete() - else - _ -> - update_status(:failed, "Please check data_migration_failed_ids records.") + fault_rate = fault_rate() + put_stat(:fault_rate, fault_rate) + fault_rate_allowance = Config.get([:populate_hashtags_table, :fault_rate_allowance], 0) + + cond do + fault_rate == 0 -> + set_complete() + + is_float(fault_rate) and fault_rate <= fault_rate_allowance -> + message = """ + Done with fault rate of #{fault_rate} which doesn't exceed #{fault_rate_allowance}. + Putting data migration to manual fix mode. Check `retry_failed/0`. + """ + + Logger.warn("#{__MODULE__}: #{message}") + update_status(:manual, message) + on_complete(data_migration()) + + true -> + message = "Too many failures. Check data_migration_failed_ids records / `retry_failed/0`." 
+ Logger.error("#{__MODULE__}: #{message}") + update_status(:failed, message) end + persist_state() {:noreply, state} end + def fault_rate do + with failures_count when is_integer(failures_count) <- failures_count() do + failures_count / Enum.max([get_stat(:affected_count, 0), 1]) + else + _ -> :error + end + end + defp records_per_second do get_stat(:processed_count, 0) / Enum.max([running_time(), 1]) end @@ -194,6 +233,7 @@ defp query do |> where([_o, hashtags_objects], is_nil(hashtags_objects.object_id)) end + @spec transfer_object_hashtags(Map.t()) :: {:noop | :ok | :error, integer()} defp transfer_object_hashtags(object) do embedded_tags = if Map.has_key?(object, :tag), do: object.tag, else: object.data["tag"] hashtags = Object.object_data_hashtags(%{"tag" => embedded_tags}) @@ -201,7 +241,7 @@ defp transfer_object_hashtags(object) do if Enum.any?(hashtags) do transfer_object_hashtags(object, hashtags) else - {:ok, object.id} + {:noop, object.id} end end @@ -209,13 +249,11 @@ defp transfer_object_hashtags(object, hashtags) do Repo.transaction(fn -> with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do maps = Enum.map(hashtag_records, &%{hashtag_id: &1.id, object_id: object.id}) - expected_rows = length(hashtag_records) - - base_error = - "ERROR when inserting #{expected_rows} hashtags_objects for obj. #{object.id}" + base_error = "ERROR when inserting hashtags_objects for object with id #{object.id}" try do - with {^expected_rows, _} <- Repo.insert_all("hashtags_objects", maps) do + with {rows_count, _} when is_integer(rows_count) <- + Repo.insert_all("hashtags_objects", maps, on_conflict: :nothing) do object.id else e -> @@ -260,11 +298,11 @@ defp on_complete(data_migration) do data_migration.feature_lock -> :noop - not is_nil(Config.get([:database, :improved_hashtag_timeline])) -> + not is_nil(feature_state()) -> :noop true -> - Config.put([:database, :improved_hashtag_timeline], true) + Config.put(@feature_config_path, true) :ok end end @@ -274,38 +312,41 @@ def failed_objects_query do |> join(:inner, [o], dmf in fragment("SELECT * FROM data_migration_failed_ids"), on: dmf.record_id == o.id ) - |> where([_o, dmf], dmf.data_migration_id == ^data_migration().id) + |> where([_o, dmf], dmf.data_migration_id == ^data_migration_id()) |> order_by([o], asc: o.id) end - def failures_count(data_migration_id \\ nil) do - data_migration_id = data_migration_id || data_migration().id - + def failures_count do with {:ok, %{rows: [[count]]}} <- Repo.query( "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;", - [data_migration_id] + [data_migration_id()] ) do count end end def retry_failed do - data_migration = data_migration() + data_migration_id = data_migration_id() failed_objects_query() |> Repo.chunk_stream(100, :one) |> Stream.each(fn object -> - with {:ok, _} <- transfer_object_hashtags(object) do + with {res, _} when res != :error <- transfer_object_hashtags(object) do _ = Repo.query( "DELETE FROM data_migration_failed_ids " <> "WHERE data_migration_id = $1 AND record_id = $2", - [data_migration.id, object.id] + [data_migration_id, object.id] ) end end) |> Stream.run() + + put_stat(:failed_count, failures_count()) + persist_state() + + force_continue() end def force_continue do diff --git a/lib/pleroma/migrators/hashtags_table_migrator/state.ex b/lib/pleroma/migrators/hashtags_table_migrator/state.ex index ed9848824..ee0009b2e 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator/state.ex +++ 
b/lib/pleroma/migrators/hashtags_table_migrator/state.ex @@ -7,7 +7,7 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator.State do alias Pleroma.DataMigration - defdelegate data_migration(), to: DataMigration, as: :populate_hashtags_table + defdelegate data_migration(), to: Pleroma.Migrators.HashtagsTableMigrator @reg_name {:global, __MODULE__} @@ -99,4 +99,6 @@ defp persist_non_data_change(:state, value) do defp persist_non_data_change(_, _) do nil end + + def data_migration_id, do: Map.get(state(), :data_migration_id) end From 998437d4a4111055e019f28dd84a8af1f9a27047 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Thu, 18 Feb 2021 21:03:06 +0300 Subject: [PATCH 047/174] [#3213] Experimental / debug feature: `database: [improved_hashtag_timeline: :preselect_hashtag_ids]`. --- lib/pleroma/web/activity_pub/activity_pub.ex | 47 +++++++++++++++----- 1 file changed, 35 insertions(+), 12 deletions(-) diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index e012f2779..5392ce7c9 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -787,19 +787,42 @@ defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do end defp restrict_hashtag_any(query, %{tag: [_ | _] = tags}) do - from( - [_activity, object] in query, - where: - fragment( - """ - EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?::citext[]) - AND hashtags_objects.object_id = ? LIMIT 1) - """, - ^tags, - object.id + # TODO: refactor: debug / experimental feature + if Config.get([:database, :improved_hashtag_timeline]) == :preselect_hashtag_ids do + hashtag_ids = + from(ht in Pleroma.Hashtag, + where: fragment("name = ANY(?::citext[])", ^tags), + select: ht.id ) - ) + |> Repo.all() + + from( + [_activity, object] in query, + where: + fragment( + """ + EXISTS ( + SELECT 1 FROM hashtags_objects WHERE hashtag_id = ANY(?) AND object_id = ? LIMIT 1) + """, + ^hashtag_ids, + object.id + ) + ) + else + from( + [_activity, object] in query, + where: + fragment( + """ + EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?::citext[]) + AND hashtags_objects.object_id = ? LIMIT 1) + """, + ^tags, + object.id + ) + ) + end end defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do From 6531eddf361fa52db3906ab011a4e33c7a5f9552 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Mon, 22 Feb 2021 23:26:07 +0300 Subject: [PATCH 048/174] [#3213] `hashtags`: altered `name` type to `text`. `hashtags_objects`: removed unused index. HashtagsTableMigrator: records_per_second calculation fix. ActivityPub: hashtags-related options normalization. 
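With `name` stored as plain `text`, case-insensitivity is handled in Elixir rather than by
citext. A rough sketch of the intended behaviour (illustrative input, not an exhaustive spec):

    iex> Pleroma.Hashtag.normalize_name(" NSFW ")
    "nsfw"

Hashtag creation (`get_or_create_by_names/1`) and the `tag` / `tag_all` / `tag_reject`
timeline options both go through this normalization, so comparisons against `hashtags.name`
can use plain equality instead of `::citext[]` casts.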
--- lib/pleroma/hashtag.ex | 22 ++++-- .../migrators/hashtags_table_migrator.ex | 4 +- lib/pleroma/web/activity_pub/activity_pub.ex | 75 ++++++++----------- ...20201221203824_create_hashtags_objects.exs | 2 +- ...emove_hashtags_objects_duplicate_index.exs | 11 +++ ...222184616_change_hashtags_name_to_text.exs | 15 ++++ 6 files changed, 76 insertions(+), 53 deletions(-) create mode 100644 priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs create mode 100644 priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex index a6d033816..e9d143fb1 100644 --- a/lib/pleroma/hashtag.ex +++ b/lib/pleroma/hashtag.ex @@ -21,10 +21,14 @@ defmodule Pleroma.Hashtag do timestamps() end + def normalize_name(name) do + name + |> String.downcase() + |> String.trim() + end + def get_by_name(name) do - from(h in Hashtag) - |> where([h], fragment("name = ?::citext", ^String.downcase(name))) - |> Repo.one() + Repo.get_by(Hashtag, name: normalize_name(name)) end def get_or_create_by_name(name) when is_bitstring(name) do @@ -39,7 +43,7 @@ def get_or_create_by_name(name) when is_bitstring(name) do end def get_or_create_by_names(names) when is_list(names) do - names = Enum.map(names, &String.downcase/1) + names = Enum.map(names, &normalize_name/1) timestamp = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second) structs = @@ -53,10 +57,12 @@ def get_or_create_by_names(names) when is_list(names) do try do with {:ok, %{query_op: hashtags}} <- Multi.new() - |> Multi.insert_all(:insert_all_op, Hashtag, structs, on_conflict: :nothing) + |> Multi.insert_all(:insert_all_op, Hashtag, structs, + on_conflict: :nothing, + conflict_target: :name + ) |> Multi.run(:query_op, fn _repo, _changes -> - {:ok, - Repo.all(from(ht in Hashtag, where: ht.name in fragment("?::citext[]", ^names)))} + {:ok, Repo.all(from(ht in Hashtag, where: ht.name in ^names))} end) |> Repo.transaction() do {:ok, hashtags} @@ -71,7 +77,7 @@ def get_or_create_by_names(names) when is_list(names) do def changeset(%Hashtag{} = struct, params) do struct |> cast(params, [:name]) - |> update_change(:name, &String.downcase/1) + |> update_change(:name, &normalize_name/1) |> validate_required([:name]) |> unique_constraint(:name) end diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 45dab8470..07bb9aeb2 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -82,6 +82,7 @@ def handle_info(:migrate_hashtags, state) do State.reinit() update_status(:running) + put_stat(:iteration_processed_count, 0) put_stat(:started_at, NaiveDateTime.utc_now()) data_migration_id = data_migration_id() @@ -127,6 +128,7 @@ def handle_info(:migrate_hashtags, state) do max_object_id = Enum.at(object_ids, -1) put_stat(:max_processed_id, max_object_id) + increment_stat(:iteration_processed_count, length(object_ids)) increment_stat(:processed_count, length(object_ids)) increment_stat(:failed_count, length(failed_ids)) increment_stat(:affected_count, chunk_affected_count) @@ -176,7 +178,7 @@ def fault_rate do end defp records_per_second do - get_stat(:processed_count, 0) / Enum.max([running_time(), 1]) + get_stat(:iteration_processed_count, 0) / Enum.max([running_time(), 1]) end defp running_time do diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 5392ce7c9..8182bc205 100644 --- 
a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do alias Pleroma.Conversation alias Pleroma.Conversation.Participation alias Pleroma.Filter + alias Pleroma.Hashtag alias Pleroma.Maps alias Pleroma.Notification alias Pleroma.Object @@ -698,8 +699,6 @@ defp restrict_embedded_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) end defp restrict_embedded_tag_all(query, %{tag_all: [_ | _] = tag_all}) do - tag_all = Enum.map(tag_all, &String.downcase/1) - from( [_activity, object] in query, where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all) @@ -717,8 +716,6 @@ defp restrict_embedded_tag_any(_query, %{tag: _tag, skip_preload: true}) do end defp restrict_embedded_tag_any(query, %{tag: [_ | _] = tag_any}) do - tag_any = Enum.map(tag_any, &String.downcase/1) - from( [_activity, object] in query, where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag_any) @@ -736,8 +733,6 @@ defp restrict_embedded_tag_reject_any(_query, %{tag_reject: _tag_reject, skip_pr end defp restrict_embedded_tag_reject_any(query, %{tag_reject: [_ | _] = tag_reject}) do - tag_reject = Enum.map(tag_reject, &String.downcase/1) - from( [_activity, object] in query, where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) @@ -766,7 +761,7 @@ defp restrict_hashtag_all(query, %{tag_all: [_ | _] = tags}) do fragment( """ (SELECT array_agg(hashtags.name) FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?::citext[]) + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) AND hashtags_objects.object_id = ?) @> ? """, ^tags, @@ -787,42 +782,19 @@ defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do end defp restrict_hashtag_any(query, %{tag: [_ | _] = tags}) do - # TODO: refactor: debug / experimental feature - if Config.get([:database, :improved_hashtag_timeline]) == :preselect_hashtag_ids do - hashtag_ids = - from(ht in Pleroma.Hashtag, - where: fragment("name = ANY(?::citext[])", ^tags), - select: ht.id + from( + [_activity, object] in query, + where: + fragment( + """ + EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + AND hashtags_objects.object_id = ? LIMIT 1) + """, + ^tags, + object.id ) - |> Repo.all() - - from( - [_activity, object] in query, - where: - fragment( - """ - EXISTS ( - SELECT 1 FROM hashtags_objects WHERE hashtag_id = ANY(?) AND object_id = ? LIMIT 1) - """, - ^hashtag_ids, - object.id - ) - ) - else - from( - [_activity, object] in query, - where: - fragment( - """ - EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?::citext[]) - AND hashtags_objects.object_id = ? LIMIT 1) - """, - ^tags, - object.id - ) - ) - end + ) end defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do @@ -842,7 +814,7 @@ defp restrict_hashtag_reject_any(query, %{tag_reject: [_ | _] = tags_reject}) do fragment( """ NOT EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?::citext[]) + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) AND hashtags_objects.object_id = ? 
LIMIT 1) """, ^tags_reject, @@ -1220,6 +1192,21 @@ defp maybe_order(query, %{order: :asc}) do defp maybe_order(query, _), do: query + defp normalize_fetch_activities_query_opts(opts) do + Enum.reduce([:tag, :tag_all, :tag_reject], opts, fn key, opts -> + case opts[key] do + value when is_bitstring(value) -> + Map.put(opts, key, Hashtag.normalize_name(value)) + + value when is_list(value) -> + Map.put(opts, key, Enum.map(value, &Hashtag.normalize_name/1)) + + _ -> + opts + end + end) + end + defp fetch_activities_query_ap_ids_ops(opts) do source_user = opts[:muting_user] ap_id_relationships = if source_user, do: [:mute, :reblog_mute], else: [] @@ -1243,6 +1230,8 @@ defp fetch_activities_query_ap_ids_ops(opts) do end def fetch_activities_query(recipients, opts \\ %{}) do + opts = normalize_fetch_activities_query_opts(opts) + {restrict_blocked_opts, restrict_muted_opts, restrict_muted_reblogs_opts} = fetch_activities_query_ap_ids_ops(opts) diff --git a/priv/repo/migrations/20201221203824_create_hashtags_objects.exs b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs index efd60369d..581f32b3c 100644 --- a/priv/repo/migrations/20201221203824_create_hashtags_objects.exs +++ b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs @@ -7,7 +7,7 @@ def change do add(:object_id, references(:objects), null: false, primary_key: true) end - create_if_not_exists(unique_index(:hashtags_objects, [:hashtag_id, :object_id])) + # Note: PK index: "hashtags_objects_pkey" PRIMARY KEY, btree (hashtag_id, object_id) create_if_not_exists(index(:hashtags_objects, [:object_id])) end end diff --git a/priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs b/priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs new file mode 100644 index 000000000..6c4a2dfdc --- /dev/null +++ b/priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs @@ -0,0 +1,11 @@ +defmodule Pleroma.Repo.Migrations.RemoveHashtagsObjectsDuplicateIndex do + use Ecto.Migration + + @moduledoc "Removes `hashtags_objects_hashtag_id_object_id_index` index (duplicate of PK index)." 
+ + def up do + drop_if_exists(unique_index(:hashtags_objects, [:hashtag_id, :object_id])) + end + + def down, do: nil +end diff --git a/priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs b/priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs new file mode 100644 index 000000000..8940b6ca3 --- /dev/null +++ b/priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs @@ -0,0 +1,15 @@ +defmodule Pleroma.Repo.Migrations.ChangeHashtagsNameToText do + use Ecto.Migration + + def up do + alter table(:hashtags) do + modify(:name, :text) + end + end + + def down do + alter table(:hashtags) do + modify(:name, :citext) + end + end +end From a98c4423f374c6be8202ae884989e708e7d8ca3b Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantbusiness@gmail.com> Date: Mon, 22 Feb 2021 20:41:57 +0000 Subject: [PATCH 049/174] Apply i1t's suggestion(s) to 1 file(s) --- config/description.exs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/description.exs b/config/description.exs index 6ffc71278..e280ed8cf 100644 --- a/config/description.exs +++ b/config/description.exs @@ -483,7 +483,7 @@ key: :fault_rate_allowance, type: :float, description: - "Max rate of failed objects to actually processed objects in order to enable the feature (any value from 0.0 which tolerates no errors to 1.0 which will enable the feature even if hashtags transfer failed for all records).", + "Max accepted rate of objects that failed in the migration. Any value from 0.0 which tolerates no errors to 1.0 which will enable the feature even if hashtags transfer failed for all records.", suggestions: [0.01] }, %{ From 77f3da035894e2add911101466bfe41b99ee481e Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Tue, 23 Feb 2021 13:52:28 +0300 Subject: [PATCH 050/174] [#3213] Misc. tweaks: proper upsert in Hashtag, better feature toggle management. --- config/config.exs | 2 ++ config/description.exs | 9 +++++---- docs/configuration/cheatsheet.md | 2 +- lib/pleroma/config.ex | 4 ++++ lib/pleroma/hashtag.ex | 20 ++++++++----------- .../migrators/hashtags_table_migrator.ex | 18 +++++++---------- lib/pleroma/web/activity_pub/activity_pub.ex | 2 +- .../web/activity_pub/activity_pub_test.exs | 4 ++-- 8 files changed, 30 insertions(+), 31 deletions(-) diff --git a/config/config.exs b/config/config.exs index c371c397c..05acbf169 100644 --- a/config/config.exs +++ b/config/config.exs @@ -657,6 +657,8 @@ config :pleroma, :database, rum_enabled: false +config :pleroma, :features, improved_hashtag_timeline: :auto + config :pleroma, :populate_hashtags_table, fault_rate_allowance: 0.01 config :pleroma, :env, Mix.env() diff --git a/config/description.exs b/config/description.exs index e280ed8cf..41e5e4056 100644 --- a/config/description.exs +++ b/config/description.exs @@ -461,15 +461,16 @@ }, %{ group: :pleroma, - key: :database, + key: :features, type: :group, - description: "Database-related settings", + description: "Customizable features", children: [ %{ key: :improved_hashtag_timeline, - type: :keyword, + type: {:dropdown, :atom}, description: - "If `true`, hashtags will be fetched from `hashtags` table for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when HashtagsTableMigrator completes." + "Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. 
`:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).", + suggestions: [:auto, :enabled, :disabled] } ] }, diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 6a1031f15..db1deb665 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -66,7 +66,7 @@ To add configuration to your config file, you can copy it from the base config. * `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day). ## :database -* `improved_hashtag_timeline`: If `true`, hashtags will be fetched from `hashtags` table for hashtags timeline. When `false`, object-embedded hashtags will be used (slower). Is auto-set to `true` (unless overridden) when `HashtagsTableMigrator` completes. +* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes). ## Background migrations * `populate_hashtags_table/sleep_interval_ms`: Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be keep default on most instances). diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex index f17e14128..e057d8c02 100644 --- a/lib/pleroma/config.ex +++ b/lib/pleroma/config.ex @@ -111,4 +111,8 @@ def oauth_admin_scopes(scopes) when is_list(scopes) do end ) end + + def feature_enabled?(feature_name) do + get([:features, feature_name]) not in [nil, false, :disabled, :auto] + end end diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex index e9d143fb1..53e2e9c89 100644 --- a/lib/pleroma/hashtag.ex +++ b/lib/pleroma/hashtag.ex @@ -27,19 +27,15 @@ def normalize_name(name) do |> String.trim() end - def get_by_name(name) do - Repo.get_by(Hashtag, name: normalize_name(name)) - end + def get_or_create_by_name(name) do + changeset = changeset(%Hashtag{}, %{name: name}) - def get_or_create_by_name(name) when is_bitstring(name) do - with %Hashtag{} = hashtag <- get_by_name(name) do - {:ok, hashtag} - else - _ -> - %Hashtag{} - |> changeset(%{name: name}) - |> Repo.insert() - end + Repo.insert( + changeset, + on_conflict: [set: [name: get_field(changeset, :name)]], + conflict_target: :name, + returning: true + ) end def get_or_create_by_names(names) when is_list(names) do diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 07bb9aeb2..6123c88e0 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -24,7 +24,7 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do defdelegate put_stat(key, value), to: State, as: :put_data_key defdelegate increment_stat(key, increment), to: State, as: :increment_data_key - @feature_config_path [:database, :improved_hashtag_timeline] + @feature_config_path [:features, :improved_hashtag_timeline] @reg_name {:global, __MODULE__} def whereis, do: GenServer.whereis(@reg_name) @@ -296,16 +296,12 @@ def count(force \\ false, timeout \\ :infinity) do end defp on_complete(data_migration) do - cond do - data_migration.feature_lock -> - :noop - - not 
is_nil(feature_state()) -> - :noop - - true -> - Config.put(@feature_config_path, true) - :ok + if data_migration.feature_lock || feature_state() == :disabled do + Logger.warn("#{__MODULE__}: migration complete but feature is locked; consider enabling.") + :noop + else + Config.put(@feature_config_path, :enabled) + :ok end end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 8182bc205..9d557c2cd 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -1273,7 +1273,7 @@ def fetch_activities_query(recipients, opts \\ %{}) do |> exclude_invisible_actors(opts) |> exclude_visibility(opts) - if Config.get([:database, :improved_hashtag_timeline]) do + if Config.feature_enabled?(:improved_hashtag_timeline) do query |> restrict_hashtag_any(opts) |> restrict_hashtag_all(opts) diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index c41c8a5dd..f92323abe 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -220,8 +220,8 @@ test "it fetches the appropriate tag-restricted posts" do {:ok, status_four} = CommonAPI.post(user, %{status: ". #Any1 #any2"}) {:ok, status_five} = CommonAPI.post(user, %{status: ". #Any2 #any1"}) - for hashtag_timeline_strategy <- [true, false] do - clear_config([:database, :improved_hashtag_timeline], hashtag_timeline_strategy) + for hashtag_timeline_strategy <- [:eanbled, :disabled] do + clear_config([:features, :improved_hashtag_timeline], hashtag_timeline_strategy) fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) From 40d4362261abaf0856a1b4397a4bff6344137120 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Tue, 23 Feb 2021 18:11:25 +0300 Subject: [PATCH 051/174] [#3213] `mix pleroma.database rollback` tweaks. 
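The task still finds the migration file by version prefix, compiles it and hands it to
`Ecto.Migrator.down/3`; the result is now reported via `shell_info/1` instead of
`IO.inspect/1`. A hypothetical invocation (version borrowed from the hashtag migrations
added earlier in this series):

    mix pleroma.database rollback 20210222184616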
--- lib/mix/tasks/pleroma/database.ex | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 2136ddb02..e7f4b67a4 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -231,19 +231,18 @@ def run(["rollback", version]) do re = ~r/^#{version}_.*\.exs/ path = Ecto.Migrator.migrations_path(repo) - with {:find, "" <> file} <- {:find, Enum.find(File.ls!(path), &String.match?(&1, re))}, - {:compile, [{mod, _} | _]} <- {:compile, Code.compile_file(Path.join(path, file))}, - {:rollback, :ok} <- {:rollback, Ecto.Migrator.down(repo, version, mod)} do + with {_, "" <> file} <- {:find, Enum.find(File.ls!(path), &String.match?(&1, re))}, + {_, [{mod, _} | _]} <- {:compile, Code.compile_file(Path.join(path, file))}, + {_, :ok} <- {:rollback, Ecto.Migrator.down(repo, version, mod)} do {:ok, "Reversed migration: #{file}"} else {:find, _} -> {:error, "No migration found with version prefix: #{version}"} {:compile, e} -> {:error, "Problem compiling migration module: #{inspect(e)}"} {:rollback, e} -> {:error, "Problem reversing migration: #{inspect(e)}"} - e -> {:error, "Something unexpected happened: #{inspect(e)}"} end end) - IO.inspect(result) + shell_info(inspect(result)) end end end From 3bc7d122712b5cc35ba509542bde63ca130d6a40 Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" <contact@hacktivis.me> Date: Mon, 28 Dec 2020 23:21:53 +0100 Subject: [PATCH 052/174] Remove sensitive-property setting #nsfw, create HashtagPolicy --- CHANGELOG.md | 2 +- config/config.exs | 5 + docs/configuration/cheatsheet.md | 10 ++ lib/pleroma/web/activity_pub/mrf.ex | 4 +- .../web/activity_pub/mrf/hashtag_policy.ex | 116 ++++++++++++++++++ .../web/activity_pub/mrf/simple_policy.ex | 12 +- .../web/activity_pub/mrf/tag_policy.ex | 13 +- .../web/activity_pub/transmogrifier.ex | 11 -- lib/pleroma/web/common_api/activity_draft.ex | 2 +- lib/pleroma/web/common_api/utils.ex | 8 -- .../activity_pub/mrf/hashtag_policy_test.exs | 31 +++++ .../activity_pub/mrf/simple_policy_test.exs | 10 +- .../web/activity_pub/mrf/tag_policy_test.exs | 2 +- test/pleroma/web/activity_pub/mrf_test.exs | 14 ++- .../web/activity_pub/transmogrifier_test.exs | 9 -- 15 files changed, 187 insertions(+), 62 deletions(-) create mode 100644 lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex create mode 100644 test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs diff --git a/CHANGELOG.md b/CHANGELOG.md index a7b5f6ac0..52fdcb932 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Breaking**: Changed `mix pleroma.user toggle_confirmed` to `mix pleroma.user confirm` - **Breaking**: Changed `mix pleroma.user toggle_activated` to `mix pleroma.user activate/deactivate` +- **Breaking:** NSFW hashtag is no longer added on sensitive posts - Polls now always return a `voters_count`, even if they are single-choice. - Admin Emails: The ap id is used as the user link in emails now. - Improved registration workflow for email confirmation and account approval modes. 
@@ -489,7 +490,6 @@ switched to a new configuration mechanism, however it was not officially removed - Static-FE: Fix remote posts not being sanitized ### Fixed -======= - Rate limiter crashes when there is no explicitly specified ip in the config - 500 errors when no `Accept` header is present if Static-FE is enabled - Instance panel not being updated immediately due to wrong `Cache-Control` headers diff --git a/config/config.exs b/config/config.exs index c371c397c..97e440fee 100644 --- a/config/config.exs +++ b/config/config.exs @@ -391,6 +391,11 @@ federated_timeline_removal: [], replace: [] +config :pleroma, :mrf_hashtag, + sensitive: ["nsfw"], + reject: [], + federated_timeline_removal: [] + config :pleroma, :mrf_subchain, match_actor: %{} config :pleroma, :mrf_activity_expiration, days: 365 diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 6a1031f15..f3eee3e67 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -210,6 +210,16 @@ config :pleroma, :mrf_user_allowlist, %{ * `days`: Default global expiration time for all local Create activities (in days) +#### :mrf_hashtag + +* `sensitive`: List of hashtags to mark activities as sensitive (default: `nsfw`) +* `federated_timeline_removal`: List of hashtags to remove activities from the federated timeline (aka TWNK) +* `reject`: List of hashtags to reject activities from + +Notes: +- The hashtags in the configuration do not have a leading `#`. +- This MRF Policy is always enabled, if you want to disable it you have to set empty lists + ### :activitypub * `unfollow_blocked`: Whether blocks result in people getting unfollowed * `outgoing_blocks`: Whether to federate blocks to other instances diff --git a/lib/pleroma/web/activity_pub/mrf.ex b/lib/pleroma/web/activity_pub/mrf.ex index ef5a09a93..f2fec3ff6 100644 --- a/lib/pleroma/web/activity_pub/mrf.ex +++ b/lib/pleroma/web/activity_pub/mrf.ex @@ -92,7 +92,9 @@ def pipeline_filter(%{} = message, meta) do end def get_policies do - Pleroma.Config.get([:mrf, :policies], []) |> get_policies() + Pleroma.Config.get([:mrf, :policies], []) + |> get_policies() + |> Enum.concat([Pleroma.Web.ActivityPub.MRF.HashtagPolicy]) end defp get_policies(policy) when is_atom(policy), do: [policy] diff --git a/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex b/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex new file mode 100644 index 000000000..def0c437c --- /dev/null +++ b/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex @@ -0,0 +1,116 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicy do + require Pleroma.Constants + + alias Pleroma.Config + alias Pleroma.Object + + @moduledoc """ + Reject, TWKN-remove or Set-Sensitive messsages with specific hashtags (without the leading #) + + Note: This MRF Policy is always enabled, if you want to disable it you have to set empty lists. 
+ """ + + @behaviour Pleroma.Web.ActivityPub.MRF + + defp check_reject(message, hashtags) do + if Enum.any?(Config.get([:mrf_hashtag, :reject]), fn match -> match in hashtags end) do + {:reject, "[HashtagPolicy] Matches with rejected keyword"} + else + {:ok, message} + end + end + + defp check_ftl_removal(%{"to" => to} = message, hashtags) do + if Pleroma.Constants.as_public() in to and + Enum.any?(Config.get([:mrf_hashtag, :federated_timeline_removal]), fn match -> + match in hashtags + end) do + to = List.delete(to, Pleroma.Constants.as_public()) + cc = [Pleroma.Constants.as_public() | message["cc"] || []] + + message = + message + |> Map.put("to", to) + |> Map.put("cc", cc) + |> Kernel.put_in(["object", "to"], to) + |> Kernel.put_in(["object", "cc"], cc) + + {:ok, message} + else + {:ok, message} + end + end + + defp check_ftl_removal(message, _hashtags), do: {:ok, message} + + defp check_sensitive(message, hashtags) do + if Enum.any?(Config.get([:mrf_hashtag, :sensitive]), fn match -> match in hashtags end) do + {:ok, Kernel.put_in(message, ["object", "sensitive"], true)} + else + {:ok, message} + end + end + + @impl true + def filter(%{"type" => "Create", "object" => object} = message) do + hashtags = Object.hashtags(%Object{data: object}) + + if hashtags != [] do + with {:ok, message} <- check_reject(message, hashtags), + {:ok, message} <- check_ftl_removal(message, hashtags), + {:ok, message} <- check_sensitive(message, hashtags) do + {:ok, message} + end + else + {:ok, message} + end + end + + @impl true + def filter(message), do: {:ok, message} + + @impl true + def describe do + mrf_hashtag = + Config.get(:mrf_hashtag) + |> Enum.into(%{}) + + {:ok, %{mrf_hashtag: mrf_hashtag}} + end + + @impl true + def config_description do + %{ + key: :mrf_hashtag, + related_policy: "Pleroma.Web.ActivityPub.MRF.HashtagPolicy", + label: "MRF Hashtag", + description: @moduledoc, + children: [ + %{ + key: :reject, + type: {:list, :string}, + description: "A list of hashtags which result in message being rejected.", + suggestions: ["foo"] + }, + %{ + key: :federated_timeline_removal, + type: {:list, :string}, + description: + "A list of hashtags which result in message being removed from federated timelines (a.k.a unlisted).", + suggestions: ["foo"] + }, + %{ + key: :sensitive, + type: {:list, :string}, + description: + "A list of hashtags which result in message being set as sensitive (a.k.a NSFW/R-18)", + suggestions: ["nsfw", "r18"] + } + ] + } + end +end diff --git a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex index 0b1be8c51..62024c58c 100644 --- a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex @@ -64,22 +64,16 @@ defp check_media_nsfw( %{host: actor_host} = _actor_info, %{ "type" => "Create", - "object" => child_object + "object" => %{} = _child_object } = object - ) - when is_map(child_object) do + ) do media_nsfw = Config.get([:mrf_simple, :media_nsfw]) |> MRF.subdomains_regex() object = if MRF.subdomain_match?(media_nsfw, actor_host) do - child_object = - child_object - |> Map.put("tag", (child_object["tag"] || []) ++ ["nsfw"]) - |> Map.put("sensitive", true) - - Map.put(object, "object", child_object) + Kernel.put_in(object, ["object", "sensitive"], true) else object end diff --git a/lib/pleroma/web/activity_pub/mrf/tag_policy.ex b/lib/pleroma/web/activity_pub/mrf/tag_policy.ex index 5739cee63..528093ac0 100644 --- a/lib/pleroma/web/activity_pub/mrf/tag_policy.ex +++ 
b/lib/pleroma/web/activity_pub/mrf/tag_policy.ex @@ -28,20 +28,11 @@ defp process_tag( "mrf_tag:media-force-nsfw", %{ "type" => "Create", - "object" => %{"attachment" => child_attachment} = object + "object" => %{"attachment" => child_attachment} } = message ) when length(child_attachment) > 0 do - tags = (object["tag"] || []) ++ ["nsfw"] - - object = - object - |> Map.put("tag", tags) - |> Map.put("sensitive", true) - - message = Map.put(message, "object", object) - - {:ok, message} + {:ok, Kernel.put_in(message, ["object", "sensitive"], true)} end defp process_tag( diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 0a701334f..8c7d6a747 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -40,7 +40,6 @@ def fix_object(object, options \\ []) do |> fix_in_reply_to(options) |> fix_emoji() |> fix_tag() - |> set_sensitive() |> fix_content_map() |> fix_addressing() |> fix_summary() @@ -741,7 +740,6 @@ def replies(_), do: [] # Prepares the object of an outgoing create activity. def prepare_object(object) do object - |> set_sensitive |> add_hashtags |> add_mention_tags |> add_emoji_tags @@ -932,15 +930,6 @@ def set_conversation(object) do Map.put(object, "conversation", object["context"]) end - def set_sensitive(%{"sensitive" => _} = object) do - object - end - - def set_sensitive(object) do - tags = object["tag"] || [] - Map.put(object, "sensitive", "nsfw" in tags) - end - def set_type(%{"type" => "Answer"} = object) do Map.put(object, "type", "Note") end diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex index fb059c27c..da726a690 100644 --- a/lib/pleroma/web/common_api/activity_draft.ex +++ b/lib/pleroma/web/common_api/activity_draft.ex @@ -179,7 +179,7 @@ defp context(draft) do end defp sensitive(draft) do - sensitive = draft.params[:sensitive] || Enum.member?(draft.tags, {"#nsfw", "nsfw"}) + sensitive = draft.params[:sensitive] %__MODULE__{draft | sensitive: sensitive} end diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex index 9587dfa25..4e6a3feb0 100644 --- a/lib/pleroma/web/common_api/utils.ex +++ b/lib/pleroma/web/common_api/utils.ex @@ -217,7 +217,6 @@ def make_content_html(%ActivityDraft{} = draft) do draft.status |> format_input(content_type, options) |> maybe_add_attachments(draft.attachments, attachment_links) - |> maybe_add_nsfw_tag(draft.params) end defp get_content_type(content_type) do @@ -228,13 +227,6 @@ defp get_content_type(content_type) do end end - defp maybe_add_nsfw_tag({text, mentions, tags}, %{"sensitive" => sensitive}) - when sensitive in [true, "True", "true", "1"] do - {text, mentions, [{"#nsfw", "nsfw"} | tags]} - end - - defp maybe_add_nsfw_tag(data, _), do: data - def make_context(_, %Participation{} = participation) do Repo.preload(participation, :conversation).conversation.ap_id end diff --git a/test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs b/test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs new file mode 100644 index 000000000..13415bb79 --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs @@ -0,0 +1,31 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicyTest do + use Oban.Testing, repo: Pleroma.Repo + use Pleroma.DataCase + + alias 
Pleroma.Web.ActivityPub.Transmogrifier + alias Pleroma.Web.CommonAPI + + import Pleroma.Factory + + test "it sets the sensitive property with relevant hashtags" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "#nsfw hey"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) + + assert modified["object"]["sensitive"] + end + + test "it doesn't sets the sensitive property with irrelevant hashtags" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "#cofe hey"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) + + refute modified["object"]["sensitive"] + end +end diff --git a/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs b/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs index f48e5b39b..5c0aff26e 100644 --- a/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs @@ -75,10 +75,7 @@ test "has a matching host" do local_message = build_local_message() assert SimplePolicy.filter(media_message) == - {:ok, - media_message - |> put_in(["object", "tag"], ["foo", "nsfw"]) - |> put_in(["object", "sensitive"], true)} + {:ok, put_in(media_message, ["object", "sensitive"], true)} assert SimplePolicy.filter(local_message) == {:ok, local_message} end @@ -89,10 +86,7 @@ test "match with wildcard domain" do local_message = build_local_message() assert SimplePolicy.filter(media_message) == - {:ok, - media_message - |> put_in(["object", "tag"], ["foo", "nsfw"]) - |> put_in(["object", "sensitive"], true)} + {:ok, put_in(media_message, ["object", "sensitive"], true)} assert SimplePolicy.filter(local_message) == {:ok, local_message} end diff --git a/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs b/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs index 66e98b7ee..faaadff79 100644 --- a/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs @@ -114,7 +114,7 @@ test "Mark as sensitive on presence of attachments" do except_message = %{ "actor" => actor.ap_id, "type" => "Create", - "object" => %{"tag" => ["test", "nsfw"], "attachment" => ["file1"], "sensitive" => true} + "object" => %{"tag" => ["test"], "attachment" => ["file1"], "sensitive" => true} } assert TagPolicy.filter(message) == {:ok, except_message} diff --git a/test/pleroma/web/activity_pub/mrf_test.exs b/test/pleroma/web/activity_pub/mrf_test.exs index 7c1eef7e0..61d308b97 100644 --- a/test/pleroma/web/activity_pub/mrf_test.exs +++ b/test/pleroma/web/activity_pub/mrf_test.exs @@ -68,7 +68,12 @@ test "it works as expected with noop policy" do clear_config([:mrf, :policies], [Pleroma.Web.ActivityPub.MRF.NoOpPolicy]) expected = %{ - mrf_policies: ["NoOpPolicy"], + mrf_policies: ["NoOpPolicy", "HashtagPolicy"], + mrf_hashtag: %{ + federated_timeline_removal: [], + reject: [], + sensitive: ["nsfw"] + }, exclusions: false } @@ -79,8 +84,13 @@ test "it works as expected with mock policy" do clear_config([:mrf, :policies], [MRFModuleMock]) expected = %{ - mrf_policies: ["MRFModuleMock"], + mrf_policies: ["MRFModuleMock", "HashtagPolicy"], mrf_module_mock: "some config data", + mrf_hashtag: %{ + federated_timeline_removal: [], + reject: [], + sensitive: ["nsfw"] + }, exclusions: false } diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs index 7c97fa8f8..a7894a8d0 100644 --- a/test/pleroma/web/activity_pub/transmogrifier_test.exs +++ 
b/test/pleroma/web/activity_pub/transmogrifier_test.exs @@ -153,15 +153,6 @@ test "it turns mentions into tags" do end end - test "it adds the sensitive property" do - user = insert(:user) - - {:ok, activity} = CommonAPI.post(user, %{status: "#nsfw hey"}) - {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) - - assert modified["object"]["sensitive"] - end - test "it adds the json-ld context and the conversation property" do user = insert(:user) From 3aae5231b2c8f669eadba9228cece254349dd2aa Mon Sep 17 00:00:00 2001 From: Egor Kislitsyn <egor@kislitsyn.com> Date: Tue, 2 Mar 2021 20:49:17 +0400 Subject: [PATCH 053/174] Add OpenAPI spec for AdminAPI.UserController --- CHANGELOG.md | 1 + lib/pleroma/user.ex | 7 - .../admin_api/controllers/user_controller.ex | 128 +++--- .../web/admin_api/views/account_view.ex | 19 +- .../operations/admin/user_operation.ex | 389 ++++++++++++++++++ lib/pleroma/web/router.ex | 2 +- .../controllers/user_controller_test.exs | 118 +++--- 7 files changed, 539 insertions(+), 125 deletions(-) create mode 100644 lib/pleroma/web/api_spec/operations/admin/user_operation.ex diff --git a/CHANGELOG.md b/CHANGELOG.md index 812816f48..78f21e69f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -64,6 +64,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). <details> <summary>API Changes</summary> - Admin API: (`GET /api/pleroma/admin/users`) filter users by `unconfirmed` status and `actor_type`. +- Admin API: OpenAPI spec for the user-related operations - Pleroma API: `GET /api/v2/pleroma/chats` added. It is exactly like `GET /api/v1/pleroma/chats` except supports pagination. - Pleroma API: Add `idempotency_key` to the chat message entity that can be used for optimistic message sending. - Pleroma API: (`GET /api/v1/pleroma/federation_status`) Add a way to get a list of unreachable instances. diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index 9942617d8..c1aa0f716 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -2255,13 +2255,6 @@ def update_background(user, background) do |> update_and_set_cache() end - def roles(%{is_moderator: is_moderator, is_admin: is_admin}) do - %{ - admin: is_admin, - moderator: is_moderator - } - end - def validate_fields(changeset, remote? 
\\ false) do limit_name = if remote?, do: :max_remote_account_fields, else: :max_account_fields limit = Config.get([:instance, limit_name], 0) diff --git a/lib/pleroma/web/admin_api/controllers/user_controller.ex b/lib/pleroma/web/admin_api/controllers/user_controller.ex index 65bc63cb9..d3e4c18a3 100644 --- a/lib/pleroma/web/admin_api/controllers/user_controller.ex +++ b/lib/pleroma/web/admin_api/controllers/user_controller.ex @@ -13,16 +13,17 @@ defmodule Pleroma.Web.AdminAPI.UserController do alias Pleroma.Web.ActivityPub.Builder alias Pleroma.Web.ActivityPub.Pipeline alias Pleroma.Web.AdminAPI - alias Pleroma.Web.AdminAPI.AccountView alias Pleroma.Web.AdminAPI.Search alias Pleroma.Web.Plugs.OAuthScopesPlug @users_page_size 50 + plug(Pleroma.Web.ApiSpec.CastAndValidate) + plug( OAuthScopesPlug, %{scopes: ["admin:read:accounts"]} - when action in [:list, :show] + when action in [:index, :show] ) plug( @@ -44,13 +45,19 @@ defmodule Pleroma.Web.AdminAPI.UserController do when action in [:follow, :unfollow] ) + plug(:put_view, Pleroma.Web.AdminAPI.AccountView) + action_fallback(AdminAPI.FallbackController) - def delete(conn, %{"nickname" => nickname}) do - delete(conn, %{"nicknames" => [nickname]}) + defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.UserOperation + + def delete(conn, %{nickname: nickname}) do + conn + |> Map.put(:body_params, %{nicknames: [nickname]}) + |> delete(%{}) end - def delete(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do + def delete(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do users = Enum.map(nicknames, &User.get_cached_by_nickname/1) Enum.each(users, fn user -> @@ -67,10 +74,16 @@ def delete(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do json(conn, nicknames) end - def follow(%{assigns: %{user: admin}} = conn, %{ - "follower" => follower_nick, - "followed" => followed_nick - }) do + def follow( + %{ + assigns: %{user: admin}, + body_params: %{ + follower: follower_nick, + followed: followed_nick + } + } = conn, + _ + ) do with %User{} = follower <- User.get_cached_by_nickname(follower_nick), %User{} = followed <- User.get_cached_by_nickname(followed_nick) do User.follow(follower, followed) @@ -86,10 +99,16 @@ def follow(%{assigns: %{user: admin}} = conn, %{ json(conn, "ok") end - def unfollow(%{assigns: %{user: admin}} = conn, %{ - "follower" => follower_nick, - "followed" => followed_nick - }) do + def unfollow( + %{ + assigns: %{user: admin}, + body_params: %{ + follower: follower_nick, + followed: followed_nick + } + } = conn, + _ + ) do with %User{} = follower <- User.get_cached_by_nickname(follower_nick), %User{} = followed <- User.get_cached_by_nickname(followed_nick) do User.unfollow(follower, followed) @@ -105,9 +124,10 @@ def unfollow(%{assigns: %{user: admin}} = conn, %{ json(conn, "ok") end - def create(%{assigns: %{user: admin}} = conn, %{"users" => users}) do + def create(%{assigns: %{user: admin}, body_params: %{users: users}} = conn, _) do changesets = - Enum.map(users, fn %{"nickname" => nickname, "email" => email, "password" => password} -> + users + |> Enum.map(fn %{nickname: nickname, email: email, password: password} -> user_data = %{ nickname: nickname, name: nickname, @@ -124,52 +144,49 @@ def create(%{assigns: %{user: admin}} = conn, %{"users" => users}) do end) case Pleroma.Repo.transaction(changesets) do - {:ok, users} -> - res = - users + {:ok, users_map} -> + users = + users_map |> Map.values() |> Enum.map(fn user -> {:ok, user} = 
User.post_register_action(user) user end) - |> Enum.map(&AccountView.render("created.json", %{user: &1})) ModerationLog.insert_log(%{ actor: admin, - subjects: Map.values(users), + subjects: users, action: "create" }) - json(conn, res) + render(conn, "created_many.json", users: users) {:error, id, changeset, _} -> - res = + changesets = Enum.map(changesets.operations, fn - {current_id, {:changeset, _current_changeset, _}} when current_id == id -> - AccountView.render("create-error.json", %{changeset: changeset}) + {^id, {:changeset, _current_changeset, _}} -> + changeset {_, {:changeset, current_changeset, _}} -> - AccountView.render("create-error.json", %{changeset: current_changeset}) + current_changeset end) conn |> put_status(:conflict) - |> json(res) + |> render("create_errors.json", changesets: changesets) end end - def show(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do + def show(%{assigns: %{user: admin}} = conn, %{nickname: nickname}) do with %User{} = user <- User.get_cached_by_nickname_or_id(nickname, for: admin) do - conn - |> put_view(AccountView) - |> render("show.json", %{user: user}) + render(conn, "show.json", %{user: user}) else _ -> {:error, :not_found} end end - def toggle_activation(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do + def toggle_activation(%{assigns: %{user: admin}} = conn, %{nickname: nickname}) do user = User.get_cached_by_nickname(nickname) {:ok, updated_user} = User.set_activation(user, !user.is_active) @@ -182,12 +199,10 @@ def toggle_activation(%{assigns: %{user: admin}} = conn, %{"nickname" => nicknam action: action }) - conn - |> put_view(AccountView) - |> render("show.json", %{user: updated_user}) + render(conn, "show.json", user: updated_user) end - def activate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do + def activate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do users = Enum.map(nicknames, &User.get_cached_by_nickname/1) {:ok, updated_users} = User.set_activation(users, true) @@ -197,12 +212,10 @@ def activate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do action: "activate" }) - conn - |> put_view(AccountView) - |> render("index.json", %{users: Keyword.values(updated_users)}) + render(conn, "index.json", users: Keyword.values(updated_users)) end - def deactivate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do + def deactivate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do users = Enum.map(nicknames, &User.get_cached_by_nickname/1) {:ok, updated_users} = User.set_activation(users, false) @@ -212,12 +225,10 @@ def deactivate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) d action: "deactivate" }) - conn - |> put_view(AccountView) - |> render("index.json", %{users: Keyword.values(updated_users)}) + render(conn, "index.json", users: Keyword.values(updated_users)) end - def approve(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do + def approve(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do users = Enum.map(nicknames, &User.get_cached_by_nickname/1) {:ok, updated_users} = User.approve(users) @@ -227,36 +238,27 @@ def approve(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do action: "approve" }) - conn - |> put_view(AccountView) - |> render("index.json", %{users: updated_users}) + render(conn, "index.json", users: updated_users) end - def list(conn, params) do + def index(conn, params) do {page, page_size} 
= page_params(params) - filters = maybe_parse_filters(params["filters"]) + filters = maybe_parse_filters(params[:filters]) search_params = %{ - query: params["query"], + query: params[:query], page: page, page_size: page_size, - tags: params["tags"], - name: params["name"], - email: params["email"], - actor_types: params["actor_types"] + tags: params[:tags], + name: params[:name], + email: params[:email], + actor_types: params[:actor_types] } |> Map.merge(filters) with {:ok, users, count} <- Search.user(search_params) do - json( - conn, - AccountView.render("index.json", - users: users, - count: count, - page_size: page_size - ) - ) + render(conn, "index.json", users: users, count: count, page_size: page_size) end end @@ -274,8 +276,8 @@ defp maybe_parse_filters(filters) do defp page_params(params) do { - fetch_integer_param(params, "page", 1), - fetch_integer_param(params, "page_size", @users_page_size) + fetch_integer_param(params, :page, 1), + fetch_integer_param(params, :page_size, @users_page_size) } end end diff --git a/lib/pleroma/web/admin_api/views/account_view.ex b/lib/pleroma/web/admin_api/views/account_view.ex index d7c63d385..e053a9b67 100644 --- a/lib/pleroma/web/admin_api/views/account_view.ex +++ b/lib/pleroma/web/admin_api/views/account_view.ex @@ -75,7 +75,7 @@ def render("show.json", %{user: user}) do "display_name" => display_name, "is_active" => user.is_active, "local" => user.local, - "roles" => User.roles(user), + "roles" => roles(user), "tags" => user.tags || [], "is_confirmed" => user.is_confirmed, "is_approved" => user.is_approved, @@ -85,6 +85,10 @@ def render("show.json", %{user: user}) do } end + def render("created_many.json", %{users: users}) do + render_many(users, AccountView, "created.json", as: :user) + end + def render("created.json", %{user: user}) do %{ type: "success", @@ -96,7 +100,11 @@ def render("created.json", %{user: user}) do } end - def render("create-error.json", %{changeset: %Ecto.Changeset{changes: changes, errors: errors}}) do + def render("create_errors.json", %{changesets: changesets}) do + render_many(changesets, AccountView, "create_error.json", as: :changeset) + end + + def render("create_error.json", %{changeset: %Ecto.Changeset{changes: changes, errors: errors}}) do %{ type: "error", code: 409, @@ -140,4 +148,11 @@ defp parse_error(errors) do defp image_url(%{"url" => [%{"href" => href} | _]}), do: href defp image_url(_), do: nil + + defp roles(%{is_moderator: is_moderator, is_admin: is_admin}) do + %{ + admin: is_admin, + moderator: is_moderator + } + end end diff --git a/lib/pleroma/web/api_spec/operations/admin/user_operation.ex b/lib/pleroma/web/api_spec/operations/admin/user_operation.ex new file mode 100644 index 000000000..183c61236 --- /dev/null +++ b/lib/pleroma/web/api_spec/operations/admin/user_operation.ex @@ -0,0 +1,389 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ApiSpec.Admin.UserOperation do + alias OpenApiSpex.Operation + alias OpenApiSpex.Schema + alias Pleroma.Web.ApiSpec.Schemas.ActorType + alias Pleroma.Web.ApiSpec.Schemas.ApiError + + import Pleroma.Web.ApiSpec.Helpers + + def open_api_operation(action) do + operation = String.to_existing_atom("#{action}_operation") + apply(__MODULE__, operation, []) + end + + def index_operation do + %Operation{ + tags: ["Users"], + summary: "List users", + operationId: "AdminAPI.UserController.index", + security: [%{"oAuth" => 
["admin:read:accounts"]}], + parameters: [ + Operation.parameter(:filters, :query, :string, "Comma separated list of filters"), + Operation.parameter(:query, :query, :string, "Search users query"), + Operation.parameter(:name, :query, :string, "Search by display name"), + Operation.parameter(:email, :query, :string, "Search by email"), + Operation.parameter(:page, :query, :integer, "Page Number"), + Operation.parameter(:page_size, :query, :integer, "Number of users to return per page"), + Operation.parameter( + :actor_types, + :query, + %Schema{type: :array, items: ActorType}, + "Filter by actor type" + ), + Operation.parameter( + :tags, + :query, + %Schema{type: :array, items: %Schema{type: :string}}, + "Filter by tags" + ) + | admin_api_params() + ], + responses: %{ + 200 => + Operation.response( + "Response", + "application/json", + %Schema{ + type: :object, + properties: %{ + users: %Schema{type: :array, items: user()}, + count: %Schema{type: :integer}, + page_size: %Schema{type: :integer} + } + } + ), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def create_operation do + %Operation{ + tags: ["Users"], + summary: "Create a single or multiple users", + operationId: "AdminAPI.UserController.create", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for creating users", + type: :object, + properties: %{ + users: %Schema{ + type: :array, + items: %Schema{ + type: :object, + properties: %{ + nickname: %Schema{type: :string}, + email: %Schema{type: :string}, + password: %Schema{type: :string} + } + } + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + type: :array, + items: %Schema{ + type: :object, + properties: %{ + code: %Schema{type: :integer}, + type: %Schema{type: :string}, + data: %Schema{ + type: :object, + properties: %{ + email: %Schema{type: :string, format: :email}, + nickname: %Schema{type: :string} + } + } + } + } + }), + 403 => Operation.response("Forbidden", "application/json", ApiError), + 409 => + Operation.response("Conflict", "application/json", %Schema{ + type: :array, + items: %Schema{ + type: :object, + properties: %{ + code: %Schema{type: :integer}, + error: %Schema{type: :string}, + type: %Schema{type: :string}, + data: %Schema{ + type: :object, + properties: %{ + email: %Schema{type: :string, format: :email}, + nickname: %Schema{type: :string} + } + } + } + } + }) + } + } + end + + def show_operation do + %Operation{ + tags: ["Users"], + summary: "Show user", + operationId: "AdminAPI.UserController.show", + security: [%{"oAuth" => ["admin:read:accounts"]}], + parameters: [ + Operation.parameter( + :nickname, + :path, + :string, + "User nickname or ID" + ) + | admin_api_params() + ], + responses: %{ + 200 => Operation.response("Response", "application/json", user()), + 403 => Operation.response("Forbidden", "application/json", ApiError), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + def follow_operation do + %Operation{ + tags: ["Users"], + summary: "Follow", + operationId: "AdminAPI.UserController.follow", + security: [%{"oAuth" => ["admin:write:follows"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + type: :object, + properties: %{ + follower: %Schema{type: :string, description: "Follower nickname"}, + followed: %Schema{type: :string, description: "Followed 
nickname"} + } + } + ), + responses: %{ + 200 => Operation.response("Response", "application/json", %Schema{type: :string}), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def unfollow_operation do + %Operation{ + tags: ["Users"], + summary: "Unfollow", + operationId: "AdminAPI.UserController.unfollow", + security: [%{"oAuth" => ["admin:write:follows"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + type: :object, + properties: %{ + follower: %Schema{type: :string, description: "Follower nickname"}, + followed: %Schema{type: :string, description: "Followed nickname"} + } + } + ), + responses: %{ + 200 => Operation.response("Response", "application/json", %Schema{type: :string}), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def approve_operation do + %Operation{ + tags: ["Users"], + summary: "Approve multiple users", + operationId: "AdminAPI.UserController.approve", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for deleting multiple users", + type: :object, + properties: %{ + nicknames: %Schema{ + type: :array, + items: %Schema{type: :string} + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + type: :object, + properties: %{user: %Schema{type: :array, items: user()}} + }), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def toggle_activation_operation do + %Operation{ + tags: ["Users"], + summary: "Toggle user activation", + operationId: "AdminAPI.UserController.toggle_activation", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: [ + Operation.parameter(:nickname, :path, :string, "User nickname") + | admin_api_params() + ], + responses: %{ + 200 => Operation.response("Response", "application/json", user()), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def activate_operation do + %Operation{ + tags: ["Users"], + summary: "Activate multiple users", + operationId: "AdminAPI.UserController.activate", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for deleting multiple users", + type: :object, + properties: %{ + nicknames: %Schema{ + type: :array, + items: %Schema{type: :string} + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + type: :object, + properties: %{user: %Schema{type: :array, items: user()}} + }), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def deactivate_operation do + %Operation{ + tags: ["Users"], + summary: "Deactivates multiple users", + operationId: "AdminAPI.UserController.deactivate", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for deleting multiple users", + type: :object, + properties: %{ + nicknames: %Schema{ + type: :array, + items: %Schema{type: :string} + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + type: :object, + properties: %{user: %Schema{type: :array, items: user()}} + }), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } 
+ end + + def delete_operation do + %Operation{ + tags: ["Users"], + summary: "Removes a single or multiple users", + operationId: "AdminAPI.UserController.delete", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: [ + Operation.parameter( + :nickname, + :query, + :string, + "User nickname" + ) + | admin_api_params() + ], + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for deleting multiple users", + type: :object, + properties: %{ + nicknames: %Schema{ + type: :array, + items: %Schema{type: :string} + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + description: "Array of nicknames", + type: :array, + items: %Schema{type: :string} + }), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + defp user do + %Schema{ + type: :object, + properties: %{ + id: %Schema{type: :string}, + email: %Schema{type: :string, format: :email}, + avatar: %Schema{type: :string, format: :uri}, + nickname: %Schema{type: :string}, + display_name: %Schema{type: :string}, + is_active: %Schema{type: :boolean}, + local: %Schema{type: :boolean}, + roles: %Schema{ + type: :object, + properties: %{ + admin: %Schema{type: :boolean}, + moderator: %Schema{type: :boolean} + } + }, + tags: %Schema{type: :array, items: %Schema{type: :string}}, + is_confirmed: %Schema{type: :boolean}, + is_approved: %Schema{type: :boolean}, + url: %Schema{type: :string, format: :uri}, + registration_reason: %Schema{type: :string, nullable: true}, + actor_type: %Schema{type: :string} + } + } + end +end diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index 72ad14f05..de0bd27d7 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -204,7 +204,7 @@ defmodule Pleroma.Web.Router do get("/users/:nickname/credentials", AdminAPIController, :show_user_credentials) patch("/users/:nickname/credentials", AdminAPIController, :update_user_credentials) - get("/users", UserController, :list) + get("/users", UserController, :index) get("/users/:nickname", UserController, :show) get("/users/:nickname/statuses", AdminAPIController, :list_user_statuses) get("/users/:nickname/chats", AdminAPIController, :list_user_chats) diff --git a/test/pleroma/web/admin_api/controllers/user_controller_test.exs b/test/pleroma/web/admin_api/controllers/user_controller_test.exs index beb8a5d58..31319b5e5 100644 --- a/test/pleroma/web/admin_api/controllers/user_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/user_controller_test.exs @@ -44,7 +44,7 @@ test "with valid `admin_token` query parameter, skips OAuth scopes check" do conn = get(build_conn(), "/api/pleroma/admin/users/#{user.nickname}?admin_token=password123") - assert json_response(conn, 200) + assert json_response_and_validate_schema(conn, 200) end test "GET /api/pleroma/admin/users/:nickname requires admin:read:accounts or broader scope", @@ -67,7 +67,7 @@ test "GET /api/pleroma/admin/users/:nickname requires admin:read:accounts or bro |> assign(:token, good_token) |> get(url) - assert json_response(conn, 200) + assert json_response_and_validate_schema(conn, 200) end for good_token <- [good_token1, good_token2, good_token3] do @@ -87,7 +87,7 @@ test "GET /api/pleroma/admin/users/:nickname requires admin:read:accounts or bro |> assign(:token, bad_token) |> get(url) - assert json_response(conn, :forbidden) + assert json_response_and_validate_schema(conn, :forbidden) end end @@ -131,7 +131,7 @@ test "single user", %{admin: 
admin, conn: conn} do assert ModerationLog.get_log_entry_message(log_entry) == "@#{admin.nickname} deleted users: @#{user.nickname}" - assert json_response(conn, 200) == [user.nickname] + assert json_response_and_validate_schema(conn, 200) == [user.nickname] user = Repo.get(User, user.id) refute user.is_active @@ -152,28 +152,30 @@ test "multiple users", %{admin: admin, conn: conn} do user_one = insert(:user) user_two = insert(:user) - conn = + response = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> delete("/api/pleroma/admin/users", %{ nicknames: [user_one.nickname, user_two.nickname] }) + |> json_response_and_validate_schema(200) log_entry = Repo.one(ModerationLog) assert ModerationLog.get_log_entry_message(log_entry) == "@#{admin.nickname} deleted users: @#{user_one.nickname}, @#{user_two.nickname}" - response = json_response(conn, 200) assert response -- [user_one.nickname, user_two.nickname] == [] end end describe "/api/pleroma/admin/users" do test "Create", %{conn: conn} do - conn = + response = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users", %{ "users" => [ %{ @@ -188,8 +190,9 @@ test "Create", %{conn: conn} do } ] }) + |> json_response_and_validate_schema(200) + |> Enum.map(&Map.get(&1, "type")) - response = json_response(conn, 200) |> Enum.map(&Map.get(&1, "type")) assert response == ["success", "success"] log_entry = Repo.one(ModerationLog) @@ -203,6 +206,7 @@ test "Cannot create user with existing email", %{conn: conn} do conn = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users", %{ "users" => [ %{ @@ -213,7 +217,7 @@ test "Cannot create user with existing email", %{conn: conn} do ] }) - assert json_response(conn, 409) == [ + assert json_response_and_validate_schema(conn, 409) == [ %{ "code" => 409, "data" => %{ @@ -232,6 +236,7 @@ test "Cannot create user with existing nickname", %{conn: conn} do conn = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users", %{ "users" => [ %{ @@ -242,7 +247,7 @@ test "Cannot create user with existing nickname", %{conn: conn} do ] }) - assert json_response(conn, 409) == [ + assert json_response_and_validate_schema(conn, 409) == [ %{ "code" => 409, "data" => %{ @@ -261,6 +266,7 @@ test "Multiple user creation works in transaction", %{conn: conn} do conn = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users", %{ "users" => [ %{ @@ -276,7 +282,7 @@ test "Multiple user creation works in transaction", %{conn: conn} do ] }) - assert json_response(conn, 409) == [ + assert json_response_and_validate_schema(conn, 409) == [ %{ "code" => 409, "data" => %{ @@ -307,7 +313,7 @@ test "Show", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users/#{user.nickname}") - assert user_response(user) == json_response(conn, 200) + assert user_response(user) == json_response_and_validate_schema(conn, 200) end test "when the user doesn't exist", %{conn: conn} do @@ -315,7 +321,7 @@ test "when the user doesn't exist", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users/#{user.nickname}") - assert %{"error" => "Not found"} == json_response(conn, 404) + assert %{"error" => "Not found"} == json_response_and_validate_schema(conn, 
404) end end @@ -326,6 +332,7 @@ test "allows to force-follow another user", %{admin: admin, conn: conn} do conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users/follow", %{ "follower" => follower.nickname, "followed" => user.nickname @@ -352,6 +359,7 @@ test "allows to force-unfollow another user", %{admin: admin, conn: conn} do conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users/unfollow", %{ "follower" => follower.nickname, "followed" => user.nickname @@ -395,7 +403,7 @@ test "renders users array for the first page", %{conn: conn, admin: admin} do ] |> Enum.sort_by(& &1["nickname"]) - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 3, "page_size" => 50, "users" => users @@ -410,7 +418,7 @@ test "pagination works correctly with service users", %{conn: conn} do assert %{"count" => 26, "page_size" => 10, "users" => users1} = conn |> get("/api/pleroma/admin/users?page=1&filters=", %{page_size: "10"}) - |> json_response(200) + |> json_response_and_validate_schema(200) assert Enum.count(users1) == 10 assert service1 not in users1 @@ -418,7 +426,7 @@ test "pagination works correctly with service users", %{conn: conn} do assert %{"count" => 26, "page_size" => 10, "users" => users2} = conn |> get("/api/pleroma/admin/users?page=2&filters=", %{page_size: "10"}) - |> json_response(200) + |> json_response_and_validate_schema(200) assert Enum.count(users2) == 10 assert service1 not in users2 @@ -426,7 +434,7 @@ test "pagination works correctly with service users", %{conn: conn} do assert %{"count" => 26, "page_size" => 10, "users" => users3} = conn |> get("/api/pleroma/admin/users?page=3&filters=", %{page_size: "10"}) - |> json_response(200) + |> json_response_and_validate_schema(200) assert Enum.count(users3) == 6 assert service1 not in users3 @@ -437,7 +445,7 @@ test "renders empty array for the second page", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?page=2") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 2, "page_size" => 50, "users" => [] @@ -449,7 +457,7 @@ test "regular search", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?query=bo") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user, %{"local" => true})] @@ -462,7 +470,7 @@ test "search by domain", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?query=domain.com") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -475,7 +483,7 @@ test "search by full nickname", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?query=nickname@domain.com") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -488,7 +496,7 @@ test "search by display name", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?name=display") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -501,7 +509,7 @@ test "search by email", %{conn: conn} do conn = get(conn, 
"/api/pleroma/admin/users?email=email@example.com") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -514,7 +522,7 @@ test "regular search with page size", %{conn: conn} do conn1 = get(conn, "/api/pleroma/admin/users?query=a&page_size=1&page=1") - assert json_response(conn1, 200) == %{ + assert json_response_and_validate_schema(conn1, 200) == %{ "count" => 2, "page_size" => 1, "users" => [user_response(user)] @@ -522,7 +530,7 @@ test "regular search with page size", %{conn: conn} do conn2 = get(conn, "/api/pleroma/admin/users?query=a&page_size=1&page=2") - assert json_response(conn2, 200) == %{ + assert json_response_and_validate_schema(conn2, 200) == %{ "count" => 2, "page_size" => 1, "users" => [user_response(user2)] @@ -542,7 +550,7 @@ test "only local users" do |> assign(:token, token) |> get("/api/pleroma/admin/users?query=bo&filters=local") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -570,7 +578,7 @@ test "only local users with no query", %{conn: conn, admin: old_admin} do ] |> Enum.sort_by(& &1["nickname"]) - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 3, "page_size" => 50, "users" => users @@ -587,7 +595,7 @@ test "only unconfirmed users", %{conn: conn} do result = conn |> get("/api/pleroma/admin/users?filters=unconfirmed") - |> json_response(200) + |> json_response_and_validate_schema(200) users = Enum.map([old_user, sad_user], fn user -> @@ -620,7 +628,7 @@ test "only unapproved users", %{conn: conn} do ) ] - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => users @@ -647,7 +655,7 @@ test "load only admins", %{conn: conn, admin: admin} do ] |> Enum.sort_by(& &1["nickname"]) - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 2, "page_size" => 50, "users" => users @@ -661,7 +669,7 @@ test "load only moderators", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?filters=is_moderator") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [ @@ -682,8 +690,8 @@ test "load users with actor_type is Person", %{admin: admin, conn: conn} do response = conn - |> get(user_path(conn, :list), %{actor_types: ["Person"]}) - |> json_response(200) + |> get(user_path(conn, :index), %{actor_types: ["Person"]}) + |> json_response_and_validate_schema(200) users = [ @@ -705,8 +713,8 @@ test "load users with actor_type is Person and Service", %{admin: admin, conn: c response = conn - |> get(user_path(conn, :list), %{actor_types: ["Person", "Service"]}) - |> json_response(200) + |> get(user_path(conn, :index), %{actor_types: ["Person", "Service"]}) + |> json_response_and_validate_schema(200) users = [ @@ -728,8 +736,8 @@ test "load users with actor_type is Service", %{conn: conn} do response = conn - |> get(user_path(conn, :list), %{actor_types: ["Service"]}) - |> json_response(200) + |> get(user_path(conn, :index), %{actor_types: ["Service"]}) + |> json_response_and_validate_schema(200) users = [user_response(user_service, %{"actor_type" => "Service"})] @@ -751,7 +759,7 @@ test "load users with tags list", %{conn: conn} do ] |> Enum.sort_by(& 
&1["nickname"]) - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 2, "page_size" => 50, "users" => users @@ -776,7 +784,7 @@ test "`active` filters out users pending approval", %{token: token} do %{"id" => ^admin_id}, %{"id" => ^user_id} ] - } = json_response(conn, 200) + } = json_response_and_validate_schema(conn, 200) end test "it works with multiple filters" do @@ -793,7 +801,7 @@ test "it works with multiple filters" do |> assign(:token, token) |> get("/api/pleroma/admin/users?filters=deactivated,external") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -805,7 +813,7 @@ test "it omits relay user", %{admin: admin, conn: conn} do conn = get(conn, "/api/pleroma/admin/users") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [ @@ -820,13 +828,14 @@ test "PATCH /api/pleroma/admin/users/activate", %{admin: admin, conn: conn} do user_two = insert(:user, is_active: false) conn = - patch( - conn, + conn + |> put_req_header("content-type", "application/json") + |> patch( "/api/pleroma/admin/users/activate", %{nicknames: [user_one.nickname, user_two.nickname]} ) - response = json_response(conn, 200) + response = json_response_and_validate_schema(conn, 200) assert Enum.map(response["users"], & &1["is_active"]) == [true, true] log_entry = Repo.one(ModerationLog) @@ -840,13 +849,14 @@ test "PATCH /api/pleroma/admin/users/deactivate", %{admin: admin, conn: conn} do user_two = insert(:user, is_active: true) conn = - patch( - conn, + conn + |> put_req_header("content-type", "application/json") + |> patch( "/api/pleroma/admin/users/deactivate", %{nicknames: [user_one.nickname, user_two.nickname]} ) - response = json_response(conn, 200) + response = json_response_and_validate_schema(conn, 200) assert Enum.map(response["users"], & &1["is_active"]) == [false, false] log_entry = Repo.one(ModerationLog) @@ -860,13 +870,14 @@ test "PATCH /api/pleroma/admin/users/approve", %{admin: admin, conn: conn} do user_two = insert(:user, is_approved: false) conn = - patch( - conn, + conn + |> put_req_header("content-type", "application/json") + |> patch( "/api/pleroma/admin/users/approve", %{nicknames: [user_one.nickname, user_two.nickname]} ) - response = json_response(conn, 200) + response = json_response_and_validate_schema(conn, 200) assert Enum.map(response["users"], & &1["is_approved"]) == [true, true] log_entry = Repo.one(ModerationLog) @@ -878,9 +889,12 @@ test "PATCH /api/pleroma/admin/users/approve", %{admin: admin, conn: conn} do test "PATCH /api/pleroma/admin/users/:nickname/toggle_activation", %{admin: admin, conn: conn} do user = insert(:user) - conn = patch(conn, "/api/pleroma/admin/users/#{user.nickname}/toggle_activation") + conn = + conn + |> put_req_header("content-type", "application/json") + |> patch("/api/pleroma/admin/users/#{user.nickname}/toggle_activation") - assert json_response(conn, 200) == + assert json_response_and_validate_schema(conn, 200) == user_response( user, %{"is_active" => !user.is_active} From 85b2387f665045a303486d10e6879a46a7ab922e Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 2 Mar 2021 11:37:37 -0600 Subject: [PATCH 054/174] Fix build_application/1 match --- lib/pleroma/web/mastodon_api/views/status_view.ex | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index bac897a57..a7e762ac1 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -536,6 +536,8 @@ defp build_emoji_map(emoji, users, current_user) do end @spec build_application(map() | nil) :: map() | nil - defp build_application(%{type: _type, name: name, url: url}), do: %{name: name, website: url} + defp build_application(%{"type" => _type, "name" => name, "url" => url}), + do: %{name: name, website: url} + defp build_application(_), do: nil end From f0208980e48ee361f9eaa40352f519a1b95ace28 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 2 Mar 2021 12:29:16 -0600 Subject: [PATCH 055/174] Test both ingestion of post in the status controller and the correct response during the view --- .../controllers/status_controller_test.exs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs index bd385bccd..634ebf79c 100644 --- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs @@ -376,6 +376,17 @@ test "discloses application metadata when enabled" do "status" => "cofe is my copilot" }) + assert %{ + "content" => "cofe is my copilot" + } = json_response_and_validate_schema(result, 200) + + activity = result.assigns.activity.id + + result = + conn + |> put_req_header("content-type", "application/json") + |> get("api/v1/statuses/#{activity}") + assert %{ "content" => "cofe is my copilot", "application" => %{ From ccbf162088951e4b7f28291ca4cd9b9280b40857 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 2 Mar 2021 12:33:32 -0600 Subject: [PATCH 056/174] Actually test viewing status after ingestion --- .../controllers/status_controller_test.exs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs index 634ebf79c..39ab90ba6 100644 --- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs @@ -407,6 +407,16 @@ test "hides application metadata when disabled" do "status" => "club mate is my wingman" }) + assert %{"content" => "club mate is my wingman"} = + json_response_and_validate_schema(result, 200) + + activity = result.assigns.activity.id + + result = + conn + |> put_req_header("content-type", "application/json") + |> get("api/v1/statuses/#{activity}") + assert %{ "content" => "club mate is my wingman", "application" => nil From 913d53b7d7301445fdb0fc8dbe5ecf8b59aafa43 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 2 Mar 2021 14:04:50 -0600 Subject: [PATCH 057/174] Remove useless header on the get request --- .../web/mastodon_api/controllers/status_controller_test.exs | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs index 39ab90ba6..f616f405e 100644 --- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs @@ -384,7 +384,6 @@ test "discloses application metadata when 
enabled" do result = conn - |> put_req_header("content-type", "application/json") |> get("api/v1/statuses/#{activity}") assert %{ @@ -414,7 +413,6 @@ test "hides application metadata when disabled" do result = conn - |> put_req_header("content-type", "application/json") |> get("api/v1/statuses/#{activity}") assert %{ From 8d601d3b234cfe2a6a942dd156712cc400af8500 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 2 Mar 2021 14:14:38 -0600 Subject: [PATCH 058/174] Make the object reference in both render("show.json", _) functions consistently named --- lib/pleroma/web/mastodon_api/views/status_view.ex | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index a7e762ac1..f3f54e03d 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -124,16 +124,16 @@ def render( ) do user = CommonAPI.get_user(activity.data["actor"]) created_at = Utils.to_masto_date(activity.data["published"]) - activity_object = Object.normalize(activity, fetch: false) + object = Object.normalize(activity, fetch: false) reblogged_parent_activity = if opts[:parent_activities] do Activity.Queries.find_by_object_ap_id( opts[:parent_activities], - activity_object.data["id"] + object.data["id"] ) else - Activity.create_by_object_ap_id(activity_object.data["id"]) + Activity.create_by_object_ap_id(object.data["id"]) |> Activity.with_preloaded_bookmark(opts[:for]) |> Activity.with_set_thread_muted_field(opts[:for]) |> Repo.one() @@ -142,7 +142,7 @@ def render( reblog_rendering_opts = Map.put(opts, :activity, reblogged_parent_activity) reblogged = render("show.json", reblog_rendering_opts) - favorited = opts[:for] && opts[:for].ap_id in (activity_object.data["likes"] || []) + favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || []) bookmarked = Activity.get_bookmark(reblogged_parent_activity, opts[:for]) != nil @@ -154,8 +154,8 @@ def render( %{ id: to_string(activity.id), - uri: activity_object.data["id"], - url: activity_object.data["id"], + uri: object.data["id"], + url: object.data["id"], account: AccountView.render("show.json", %{ user: user, @@ -180,7 +180,7 @@ def render( media_attachments: reblogged[:media_attachments] || [], mentions: mentions, tags: reblogged[:tags] || [], - application: build_application(activity_object.data["generator"]), + application: build_application(object.data["generator"]), language: nil, emojis: [], pleroma: %{ From 5b8cceba09bda6a01adee4939e3c2521c2ea037e Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 2 Mar 2021 18:17:32 -0600 Subject: [PATCH 059/174] Fix migration in cases where database name has a hyphen --- .../20210121080964_add_default_text_search_config.exs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/priv/repo/migrations/20210121080964_add_default_text_search_config.exs b/priv/repo/migrations/20210121080964_add_default_text_search_config.exs index 09b6cccc9..27f600b70 100644 --- a/priv/repo/migrations/20210121080964_add_default_text_search_config.exs +++ b/priv/repo/migrations/20210121080964_add_default_text_search_config.exs @@ -4,7 +4,7 @@ defmodule Pleroma.Repo.Migrations.AddDefaultTextSearchConfig do def change do execute("DO $$ BEGIN - execute 'ALTER DATABASE '||current_database()||' SET default_text_search_config = ''english'' '; + execute 'ALTER DATABASE \"'||current_database()||'\" SET 
default_text_search_config = ''english'' '; END $$;") end From c5352e90be363f88f011ed5a63129caf3ee1a9fc Mon Sep 17 00:00:00 2001 From: lain <lain@soykaf.club> Date: Wed, 3 Mar 2021 13:56:40 +0100 Subject: [PATCH 060/174] Changelog, mix: merge in stable --- CHANGELOG.md | 4 ++++ mix.exs | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a55ebbf8a..40c423273 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). +## Unreleased + +## Unreleased (Patch) + ## [2.3.0] - 2020-03-01 ### Security diff --git a/mix.exs b/mix.exs index 436381f32..ec6e92df7 100644 --- a/mix.exs +++ b/mix.exs @@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do def project do [ app: :pleroma, - version: version("2.3.0"), + version: version("2.3.50"), elixir: "~> 1.9", elixirc_paths: elixirc_paths(Mix.env()), compilers: [:phoenix, :gettext] ++ Mix.compilers(), From 2e296c079f0666a8239a0d3ce5b5fba6baf45a29 Mon Sep 17 00:00:00 2001 From: lain <lain@soykaf.club> Date: Wed, 3 Mar 2021 15:33:06 +0100 Subject: [PATCH 061/174] Revert "StatusController: Deactivate application support for now." This reverts commit 024c11c18d289d4acd65d749f939ad3684f31905. --- .../controllers/status_controller.ex | 20 +++++++++---------- .../controllers/status_controller_test.exs | 1 - 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex index d1a58d5e1..b051fca74 100644 --- a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex @@ -21,7 +21,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do alias Pleroma.Web.CommonAPI alias Pleroma.Web.MastodonAPI.AccountView alias Pleroma.Web.MastodonAPI.ScheduledActivityView - # alias Pleroma.Web.OAuth.Token + alias Pleroma.Web.OAuth.Token alias Pleroma.Web.Plugs.OAuthScopesPlug alias Pleroma.Web.Plugs.RateLimiter @@ -420,16 +420,14 @@ def bookmarks(%{assigns: %{user: user}} = conn, params) do ) end - # Deactivated for 2.3.0 - # defp put_application(params, - # %{assigns: %{token: %Token{user: %User{} = user} = token}} = _conn) do - # if user.disclose_client do - # %{client_name: client_name, website: website} = Repo.preload(token, :app).app - # Map.put(params, :generator, %{type: "Application", name: client_name, url: website}) - # else - # Map.put(params, :generator, nil) - # end - # end + defp put_application(params, %{assigns: %{token: %Token{user: %User{} = user} = token}} = _conn) do + if user.disclose_client do + %{client_name: client_name, website: website} = Repo.preload(token, :app).app + Map.put(params, :generator, %{type: "Application", name: client_name, url: website}) + else + Map.put(params, :generator, nil) + end + end defp put_application(params, _), do: Map.put(params, :generator, nil) end diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs index e76c2760d..bd385bccd 100644 --- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs @@ -358,7 +358,6 @@ test "posting a direct status", %{conn: conn} do assert activity.data["cc"] == [] end - @tag :skip test "discloses application metadata when 
enabled" do user = insert(:user, disclose_client: true) %{user: _user, token: token, conn: conn} = oauth_access(["write:statuses"], user: user) From 10f402af6d0f088aa6ad8a3f26b5e226a2287634 Mon Sep 17 00:00:00 2001 From: lain <lain@soykaf.club> Date: Wed, 3 Mar 2021 15:35:25 +0100 Subject: [PATCH 062/174] Changelog: Re-add application support --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 40c423273..ed08701fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ## Unreleased +- The `application` metadata returned with statuses is no longer hardcoded. Apps that want to display these details will now have valid data for new posts after this change. + ## Unreleased (Patch) ## [2.3.0] - 2020-03-01 From 5856f51717c12f4c6b0b89e480ff689c8480393d Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Wed, 3 Mar 2021 23:09:30 +0300 Subject: [PATCH 063/174] [#3213] ActivityPub hashtags filtering refactoring. Test fix. --- lib/pleroma/repo.ex | 2 ++ lib/pleroma/web/activity_pub/activity_pub.ex | 29 ++++++------------- mix.exs | 1 + mix.lock | 1 + .../web/activity_pub/activity_pub_test.exs | 2 +- 5 files changed, 14 insertions(+), 21 deletions(-) diff --git a/lib/pleroma/repo.ex b/lib/pleroma/repo.ex index 61b64ed3e..b8ea06e33 100644 --- a/lib/pleroma/repo.ex +++ b/lib/pleroma/repo.ex @@ -8,6 +8,8 @@ defmodule Pleroma.Repo do adapter: Ecto.Adapters.Postgres, migration_timestamps: [type: :naive_datetime_usec] + use Ecto.Explain + import Ecto.Query require Logger diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 9d557c2cd..a4b48ec9b 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -746,6 +746,13 @@ defp restrict_embedded_tag_reject_any(query, %{tag_reject: tag_reject}) defp restrict_embedded_tag_reject_any(query, _), do: query + defp object_ids_query_for_tags(tags) do + from(hto in "hashtags_objects") + |> join(:inner, [hto], ht in Pleroma.Hashtag, on: hto.hashtag_id == ht.id) + |> where([hto, ht], ht.name in ^tags) + |> select([hto], hto.object_id) + end + defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do raise_on_missing_preload() end @@ -784,16 +791,7 @@ defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do defp restrict_hashtag_any(query, %{tag: [_ | _] = tags}) do from( [_activity, object] in query, - where: - fragment( - """ - EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) - AND hashtags_objects.object_id = ? LIMIT 1) - """, - ^tags, - object.id - ) + where: object.id in subquery(object_ids_query_for_tags(tags)) ) end @@ -810,16 +808,7 @@ defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload defp restrict_hashtag_reject_any(query, %{tag_reject: [_ | _] = tags_reject}) do from( [_activity, object] in query, - where: - fragment( - """ - NOT EXISTS (SELECT 1 FROM hashtags JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) - AND hashtags_objects.object_id = ? 
LIMIT 1) - """, - ^tags_reject, - object.id - ) + where: object.id not in subquery(object_ids_query_for_tags(tags_reject)) ) end diff --git a/mix.exs b/mix.exs index 50d4b4080..c06e27314 100644 --- a/mix.exs +++ b/mix.exs @@ -121,6 +121,7 @@ defp deps do {:phoenix_pubsub, "~> 2.0"}, {:phoenix_ecto, "~> 4.0"}, {:ecto_enum, "~> 1.4"}, + {:ecto_explain, "~> 0.1.2"}, {:ecto_sql, "~> 3.4.4"}, {:postgrex, ">= 0.15.5"}, {:oban, "~> 2.3.4"}, diff --git a/mix.lock b/mix.lock index 3e5631c72..cb09ffead 100644 --- a/mix.lock +++ b/mix.lock @@ -31,6 +31,7 @@ "earmark_parser": {:hex, :earmark_parser, "1.4.10", "6603d7a603b9c18d3d20db69921527f82ef09990885ed7525003c7fe7dc86c56", [:mix], [], "hexpm", "8e2d5370b732385db2c9b22215c3f59c84ac7dda7ed7e544d7c459496ae519c0"}, "ecto": {:hex, :ecto, "3.4.6", "08f7afad3257d6eb8613309af31037e16c36808dfda5a3cd0cb4e9738db030e4", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6f13a9e2a62e75c2dcfc7207bfc65645ab387af8360db4c89fee8b5a4bf3f70b"}, "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"}, + "ecto_explain": {:hex, :ecto_explain, "0.1.2", "a9d504cbd4adc809911f796d5ef7ebb17a576a6d32286c3d464c015bd39d5541", [:mix], [], "hexpm", "1d0e7798ae30ecf4ce34e912e5354a0c1c832b7ebceba39298270b9a9f316330"}, "ecto_sql": {:hex, :ecto_sql, "3.4.5", "30161f81b167d561a9a2df4329c10ae05ff36eca7ccc84628f2c8b9fa1e43323", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "31990c6a3579b36a3c0841d34a94c275e727de8b84f58509da5f1b2032c98ac2"}, "eimp": {:hex, :eimp, "1.0.14", "fc297f0c7e2700457a95a60c7010a5f1dcb768a083b6d53f49cd94ab95a28f22", [:rebar3], [{:p1_utils, "1.0.18", [hex: :p1_utils, repo: "hexpm", optional: false]}], "hexpm", "501133f3112079b92d9e22da8b88bf4f0e13d4d67ae9c15c42c30bd25ceb83b6"}, "elixir_make": {:hex, :elixir_make, "0.6.2", "7dffacd77dec4c37b39af867cedaabb0b59f6a871f89722c25b28fcd4bd70530", [:mix], [], "hexpm", "03e49eadda22526a7e5279d53321d1cced6552f344ba4e03e619063de75348d9"}, diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index f92323abe..1e1e74074 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -220,7 +220,7 @@ test "it fetches the appropriate tag-restricted posts" do {:ok, status_four} = CommonAPI.post(user, %{status: ". #Any1 #any2"}) {:ok, status_five} = CommonAPI.post(user, %{status: ". 
#Any2 #any1"}) - for hashtag_timeline_strategy <- [:eanbled, :disabled] do + for hashtag_timeline_strategy <- [:enabled, :disabled] do clear_config([:features, :improved_hashtag_timeline], hashtag_timeline_strategy) fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) From 9876fa8e902e66a77193ebeef674a9f0e9f37657 Mon Sep 17 00:00:00 2001 From: Egor Kislitsyn <egor@kislitsyn.com> Date: Thu, 4 Mar 2021 21:13:53 +0400 Subject: [PATCH 064/174] Add UserOperation to Redoc --- lib/pleroma/web/api_spec.ex | 5 +++-- .../operations/admin/user_operation.ex | 20 +++++++++---------- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/lib/pleroma/web/api_spec.ex b/lib/pleroma/web/api_spec.ex index adc8762dc..528cd9cf4 100644 --- a/lib/pleroma/web/api_spec.ex +++ b/lib/pleroma/web/api_spec.ex @@ -92,9 +92,10 @@ def spec(opts \\ []) do "Invites", "MediaProxy cache", "OAuth application managment", - "Report managment", "Relays", - "Status administration" + "Report managment", + "Status administration", + "User administration" ] }, %{"name" => "Applications", "tags" => ["Applications", "Push subscriptions"]}, diff --git a/lib/pleroma/web/api_spec/operations/admin/user_operation.ex b/lib/pleroma/web/api_spec/operations/admin/user_operation.ex index 183c61236..c9d0bfd7c 100644 --- a/lib/pleroma/web/api_spec/operations/admin/user_operation.ex +++ b/lib/pleroma/web/api_spec/operations/admin/user_operation.ex @@ -17,7 +17,7 @@ def open_api_operation(action) do def index_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "List users", operationId: "AdminAPI.UserController.index", security: [%{"oAuth" => ["admin:read:accounts"]}], @@ -63,7 +63,7 @@ def index_operation do def create_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "Create a single or multiple users", operationId: "AdminAPI.UserController.create", security: [%{"oAuth" => ["admin:write:accounts"]}], @@ -134,7 +134,7 @@ def create_operation do def show_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "Show user", operationId: "AdminAPI.UserController.show", security: [%{"oAuth" => ["admin:read:accounts"]}], @@ -157,7 +157,7 @@ def show_operation do def follow_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "Follow", operationId: "AdminAPI.UserController.follow", security: [%{"oAuth" => ["admin:write:follows"]}], @@ -182,7 +182,7 @@ def follow_operation do def unfollow_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "Unfollow", operationId: "AdminAPI.UserController.unfollow", security: [%{"oAuth" => ["admin:write:follows"]}], @@ -207,7 +207,7 @@ def unfollow_operation do def approve_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "Approve multiple users", operationId: "AdminAPI.UserController.approve", security: [%{"oAuth" => ["admin:write:accounts"]}], @@ -239,7 +239,7 @@ def approve_operation do def toggle_activation_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "Toggle user activation", operationId: "AdminAPI.UserController.toggle_activation", security: [%{"oAuth" => ["admin:write:accounts"]}], @@ -256,7 +256,7 @@ def toggle_activation_operation do def activate_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "Activate multiple users", operationId: "AdminAPI.UserController.activate", security: [%{"oAuth" => 
["admin:write:accounts"]}], @@ -288,7 +288,7 @@ def activate_operation do def deactivate_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "Deactivates multiple users", operationId: "AdminAPI.UserController.deactivate", security: [%{"oAuth" => ["admin:write:accounts"]}], @@ -320,7 +320,7 @@ def deactivate_operation do def delete_operation do %Operation{ - tags: ["Users"], + tags: ["User administration"], summary: "Removes a single or multiple users", operationId: "AdminAPI.UserController.delete", security: [%{"oAuth" => ["admin:write:accounts"]}], From 92ab72dbbb4f56a0e0c3d0882ce29d54739437c1 Mon Sep 17 00:00:00 2001 From: Egor Kislitsyn <egor@kislitsyn.com> Date: Fri, 5 Mar 2021 15:51:29 +0400 Subject: [PATCH 065/174] Update OpenApiSpex dependency --- lib/pleroma/web/api_spec/cast_and_validate.ex | 31 ++++++++----------- .../api_spec/operations/status_operation.ex | 2 +- .../web/api_spec/schemas/boolean_like.ex | 10 ++++-- .../controllers/instance_controller.ex | 2 +- .../controllers/backup_controller.ex | 2 +- .../controllers/chat_controller.ex | 2 +- .../controllers/user_import_controller.ex | 2 +- mix.exs | 4 +-- mix.lock | 2 +- test/support/conn_case.ex | 8 ++--- 10 files changed, 30 insertions(+), 35 deletions(-) diff --git a/lib/pleroma/web/api_spec/cast_and_validate.ex b/lib/pleroma/web/api_spec/cast_and_validate.ex index a3da856ff..d23a7dcb6 100644 --- a/lib/pleroma/web/api_spec/cast_and_validate.ex +++ b/lib/pleroma/web/api_spec/cast_and_validate.ex @@ -15,6 +15,7 @@ defmodule Pleroma.Web.ApiSpec.CastAndValidate do @behaviour Plug + alias OpenApiSpex.Plug.PutApiSpec alias Plug.Conn @impl Plug @@ -25,12 +26,10 @@ def init(opts) do end @impl Plug - def call(%{private: %{open_api_spex: private_data}} = conn, %{ - operation_id: operation_id, - render_error: render_error - }) do - spec = private_data.spec - operation = private_data.operation_lookup[operation_id] + + def call(conn, %{operation_id: operation_id, render_error: render_error}) do + {spec, operation_lookup} = PutApiSpec.get_spec_and_operation_lookup(conn) + operation = operation_lookup[operation_id] content_type = case Conn.get_req_header(conn, "content-type") do @@ -43,8 +42,7 @@ def call(%{private: %{open_api_spex: private_data}} = conn, %{ "application/json" end - private_data = Map.put(private_data, :operation_id, operation_id) - conn = Conn.put_private(conn, :open_api_spex, private_data) + conn = Conn.put_private(conn, :operation_id, operation_id) case cast_and_validate(spec, operation, conn, content_type, strict?()) do {:ok, conn} -> @@ -64,25 +62,22 @@ def call( private: %{ phoenix_controller: controller, phoenix_action: action, - open_api_spex: private_data + open_api_spex: %{spec_module: spec_module} } } = conn, opts ) do + {spec, operation_lookup} = PutApiSpec.get_spec_and_operation_lookup(conn) + operation = - case private_data.operation_lookup[{controller, action}] do + case operation_lookup[{controller, action}] do nil -> operation_id = controller.open_api_operation(action).operationId - operation = private_data.operation_lookup[operation_id] + operation = operation_lookup[operation_id] - operation_lookup = - private_data.operation_lookup - |> Map.put({controller, action}, operation) + operation_lookup = Map.put(operation_lookup, {controller, action}, operation) - OpenApiSpex.Plug.Cache.adapter().put( - private_data.spec_module, - {private_data.spec, operation_lookup} - ) + OpenApiSpex.Plug.Cache.adapter().put(spec_module, {spec, operation_lookup}) operation diff --git 
a/lib/pleroma/web/api_spec/operations/status_operation.ex b/lib/pleroma/web/api_spec/operations/status_operation.ex index 40edc747d..4bdb8e281 100644 --- a/lib/pleroma/web/api_spec/operations/status_operation.ex +++ b/lib/pleroma/web/api_spec/operations/status_operation.ex @@ -59,7 +59,7 @@ def create_operation do Operation.response( "Status. When `scheduled_at` is present, ScheduledStatus is returned instead", "application/json", - %Schema{oneOf: [Status, ScheduledStatus]} + %Schema{anyOf: [Status, ScheduledStatus]} ), 422 => Operation.response("Bad Request / MRF Rejection", "application/json", ApiError) } diff --git a/lib/pleroma/web/api_spec/schemas/boolean_like.ex b/lib/pleroma/web/api_spec/schemas/boolean_like.ex index eb001c5bb..778158f66 100644 --- a/lib/pleroma/web/api_spec/schemas/boolean_like.ex +++ b/lib/pleroma/web/api_spec/schemas/boolean_like.ex @@ -3,6 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.ApiSpec.Schemas.BooleanLike do + alias OpenApiSpex.Cast alias OpenApiSpex.Schema require OpenApiSpex @@ -27,10 +28,13 @@ defmodule Pleroma.Web.ApiSpec.Schemas.BooleanLike do %Schema{type: :boolean}, %Schema{type: :string}, %Schema{type: :integer} - ] + ], + "x-validate": __MODULE__ }) - def after_cast(value, _schmea) do - {:ok, Pleroma.Web.ControllerHelper.truthy_param?(value)} + def cast(%Cast{value: value} = context) do + context + |> Map.put(:value, Pleroma.Web.ControllerHelper.truthy_param?(value)) + |> Cast.ok() end end diff --git a/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex b/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex index 267d0f03b..c7a5267d4 100644 --- a/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex @@ -5,7 +5,7 @@ defmodule Pleroma.Web.MastodonAPI.InstanceController do use Pleroma.Web, :controller - plug(OpenApiSpex.Plug.CastAndValidate) + plug(Pleroma.Web.ApiSpec.CastAndValidate) plug( :skip_plug, diff --git a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex index 315657e9c..fc5d16771 100644 --- a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex @@ -10,7 +10,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupController do action_fallback(Pleroma.Web.MastodonAPI.FallbackController) plug(OAuthScopesPlug, %{scopes: ["read:accounts"]} when action in [:index, :create]) - plug(OpenApiSpex.Plug.CastAndValidate, render_error: Pleroma.Web.ApiSpec.RenderError) + plug(Pleroma.Web.ApiSpec.CastAndValidate) defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaBackupOperation diff --git a/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex b/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex index 4adc685fe..dcd54b1af 100644 --- a/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex @@ -38,7 +38,7 @@ defmodule Pleroma.Web.PleromaAPI.ChatController do %{scopes: ["read:chats"]} when action in [:messages, :index, :index2, :show] ) - plug(OpenApiSpex.Plug.CastAndValidate, render_error: Pleroma.Web.ApiSpec.RenderError) + plug(Pleroma.Web.ApiSpec.CastAndValidate) defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.ChatOperation diff --git a/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex b/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex index 
6d9a11fb6..078d470d9 100644 --- a/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex @@ -15,7 +15,7 @@ defmodule Pleroma.Web.PleromaAPI.UserImportController do plug(OAuthScopesPlug, %{scopes: ["follow", "write:blocks"]} when action == :blocks) plug(OAuthScopesPlug, %{scopes: ["follow", "write:mutes"]} when action == :mutes) - plug(OpenApiSpex.Plug.CastAndValidate) + plug(Pleroma.Web.ApiSpec.CastAndValidate) defdelegate open_api_operation(action), to: ApiSpec.UserImportOperation def follow(%{body_params: %{list: %Plug.Upload{path: path}}} = conn, _) do diff --git a/mix.exs b/mix.exs index ec6e92df7..7f8665ea1 100644 --- a/mix.exs +++ b/mix.exs @@ -195,9 +195,7 @@ defp deps do {:majic, git: "https://git.pleroma.social/pleroma/elixir-libraries/majic.git", ref: "289cda1b6d0d70ccb2ba508a2b0bd24638db2880"}, - {:open_api_spex, - git: "https://git.pleroma.social/pleroma/elixir-libraries/open_api_spex.git", - ref: "f296ac0924ba3cf79c7a588c4c252889df4c2edd"}, + {:open_api_spex, "~> 3.10"}, ## dev & test {:ex_doc, "~> 0.22", only: :dev, runtime: false}, diff --git a/mix.lock b/mix.lock index 99be81826..61c79a7f9 100644 --- a/mix.lock +++ b/mix.lock @@ -82,7 +82,7 @@ "nimble_pool": {:hex, :nimble_pool, "0.1.0", "ffa9d5be27eee2b00b0c634eb649aa27f97b39186fec3c493716c2a33e784ec6", [:mix], [], "hexpm", "343a1eaa620ddcf3430a83f39f2af499fe2370390d4f785cd475b4df5acaf3f9"}, "nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]}, "oban": {:hex, :oban, "2.3.4", "ec7509b9af2524d55f529cb7aee93d36131ae0bf0f37706f65d2fe707f4d9fd8", [:mix], [{:ecto_sql, ">= 3.4.3", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c70ca0434758fd1805422ea4446af5e910ddc697c0c861549c8f0eb0cfbd2fdf"}, - "open_api_spex": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/open_api_spex.git", "f296ac0924ba3cf79c7a588c4c252889df4c2edd", [ref: "f296ac0924ba3cf79c7a588c4c252889df4c2edd"]}, + "open_api_spex": {:hex, :open_api_spex, "3.10.0", "94e9521ad525b3fcf6dc77da7c45f87fdac24756d4de588cb0816b413e7c1844", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.1", [hex: :poison, repo: "hexpm", optional: true]}], "hexpm", "2dbb2bde3d2b821f06936e8dfaf3284331186556291946d84eeba3750ac28765"}, "p1_utils": {:hex, :p1_utils, "1.0.18", "3fe224de5b2e190d730a3c5da9d6e8540c96484cf4b4692921d1e28f0c32b01c", [:rebar3], [], "hexpm", "1fc8773a71a15553b179c986b22fbeead19b28fe486c332d4929700ffeb71f88"}, "parse_trans": {:git, "https://github.com/uwiger/parse_trans.git", "76abb347c3c1d00fb0ccf9e4b43e22b3d2288484", [tag: "3.3.0"]}, "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "1.2.1", "9cbe354b58121075bd20eb83076900a3832324b7dd171a6895fab57b6bb2752c", [:mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}], "hexpm", "d3b40a4a4630f0b442f19eca891fcfeeee4c40871936fed2f68e1c4faa30481f"}, diff --git a/test/support/conn_case.ex b/test/support/conn_case.ex index 953aa010a..deee98599 100644 --- a/test/support/conn_case.ex +++ b/test/support/conn_case.ex @@ -67,13 +67,11 @@ defp empty_json_response(conn) do end defp 
json_response_and_validate_schema(
-         %{
-           private: %{
-             open_api_spex: %{operation_id: op_id, operation_lookup: lookup, spec: spec}
-           }
-         } = conn,
+         %{private: %{operation_id: op_id}} = conn,
          status
        ) do
+    {spec, lookup} = OpenApiSpex.Plug.PutApiSpec.get_spec_and_operation_lookup(conn)
+
     content_type =
       conn
       |> Plug.Conn.get_resp_header("content-type")

From e97b34f65d71b3dd11aab151fe7ce6def315635a Mon Sep 17 00:00:00 2001
From: Mark Felder <feld@feld.me>
Date: Fri, 5 Mar 2021 13:18:37 -0600
Subject: [PATCH 066/174] Add simple way to decode fully qualified mediaproxy URLs

---
 lib/pleroma/web/media_proxy.ex | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/lib/pleroma/web/media_proxy.ex b/lib/pleroma/web/media_proxy.ex
index 27f337138..d0d4bb4b3 100644
--- a/lib/pleroma/web/media_proxy.ex
+++ b/lib/pleroma/web/media_proxy.ex
@@ -121,6 +121,11 @@ def decode_url(sig, url) do
     end
   end
 
+  def decode_url(encoded) do
+    [_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/")
+    decode_url(sig, base64)
+  end
+
   defp signed_url(url) do
     :crypto.hmac(:sha, Config.get([Web.Endpoint, :secret_key_base]), url)
   end

From eaaa20e0f1ac56fee0a8a0eb6a21bc7bf11dbe48 Mon Sep 17 00:00:00 2001
From: Mark Felder <feld@feld.me>
Date: Fri, 5 Mar 2021 13:21:22 -0600
Subject: [PATCH 067/174] Make tests use it

---
 test/pleroma/web/media_proxy_test.exs | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/test/pleroma/web/media_proxy_test.exs b/test/pleroma/web/media_proxy_test.exs
index 7411d0a7a..b5ee6328d 100644
--- a/test/pleroma/web/media_proxy_test.exs
+++ b/test/pleroma/web/media_proxy_test.exs
@@ -11,8 +11,7 @@ defmodule Pleroma.Web.MediaProxyTest do
   alias Pleroma.Web.MediaProxy
 
   defp decode_result(encoded) do
-    [_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/")
-    {:ok, decoded} = MediaProxy.decode_url(sig, base64)
+    {:ok, decoded} = MediaProxy.decode_url(encoded)
     decoded
   end

From 7f8785fd9be11fbb09283c2dbd32aeb7903a4f58 Mon Sep 17 00:00:00 2001
From: Ivan Tashkinov <ivantashkinov@gmail.com>
Date: Sun, 7 Mar 2021 11:33:21 +0300
Subject: [PATCH 068/174] [#3213] Performance optimization of filtering by hashtags ("any" condition).
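
The old "any of these tags" filter ran a correlated EXISTS subquery (hashtags joined
to hashtags_objects, re-evaluated for every object row), and the pagination layer then
added an extra "activities.id DESC NULLS LAST" sort on top, which pushed the planner
toward a poor plan on large objects tables. This change resolves the hashtag ids once,
filters through a plain inner join on "hashtags_objects", and lets tag queries skip the
extra ordering. A rough sketch of the new query shape (the function name here is
illustrative only; the real change is to ActivityPub.restrict_hashtag_any/2 in the diff
below, and it assumes `import Ecto.Query` as in that module):

    # Resolve hashtag ids up front, then filter via a join instead of a
    # per-row correlated EXISTS subquery.
    defp restrict_hashtag_any_sketch(query, tags) do
      hashtag_ids =
        from(ht in Pleroma.Hashtag, where: ht.name in ^tags, select: ht.id)
        |> Pleroma.Repo.all()

      from([_activity, object] in query,
        join: hto in "hashtags_objects",
        on: hto.object_id == object.id,
        where: hto.hashtag_id in ^hashtag_ids,
        # De-duplicate objects that carry several of the requested hashtags,
        # keeping the objects.id ordering the planner can satisfy from an index.
        distinct: [desc: object.id],
        order_by: [desc: object.id]
      )
    end
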
--- lib/pleroma/pagination.ex | 3 ++ lib/pleroma/web/activity_pub/activity_pub.ex | 47 ++++++++++++++----- .../controllers/timeline_controller.ex | 39 ++++++--------- 3 files changed, 53 insertions(+), 36 deletions(-) diff --git a/lib/pleroma/pagination.ex b/lib/pleroma/pagination.ex index 0d24e1010..33e45a0eb 100644 --- a/lib/pleroma/pagination.ex +++ b/lib/pleroma/pagination.ex @@ -93,6 +93,7 @@ defp cast_params(params) do max_id: :string, offset: :integer, limit: :integer, + skip_extra_order: :boolean, skip_order: :boolean } @@ -114,6 +115,8 @@ defp restrict(query, :max_id, %{max_id: max_id}, table_binding) do defp restrict(query, :order, %{skip_order: true}, _), do: query + defp restrict(%{order_bys: [_ | _]} = query, :order, %{skip_extra_order: true}, _), do: query + defp restrict(query, :order, %{min_id: _}, table_binding) do order_by( query, diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index a4b48ec9b..230faf024 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -466,6 +466,23 @@ def fetch_latest_direct_activity_id_for_context(context, opts \\ %{}) do |> Repo.one() end + defp fetch_paginated_optimized(query, opts, pagination) do + # Note: tag-filtering funcs may apply "ORDER BY objects.id DESC", + # and extra sorting on "activities.id DESC NULLS LAST" would worse the query plan + opts = Map.put(opts, :skip_extra_order, true) + + Pagination.fetch_paginated(query, opts, pagination) + end + + def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do + list_memberships = Pleroma.List.memberships(opts[:user]) + + fetch_activities_query(recipients ++ list_memberships, opts) + |> fetch_paginated_optimized(opts, pagination) + |> Enum.reverse() + |> maybe_update_cc(list_memberships, opts[:user]) + end + @spec fetch_public_or_unlisted_activities(map(), Pagination.type()) :: [Activity.t()] def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do opts = Map.delete(opts, :user) @@ -473,7 +490,7 @@ def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do [Constants.as_public()] |> fetch_activities_query(opts) |> restrict_unlisted(opts) - |> Pagination.fetch_paginated(opts, pagination) + |> fetch_paginated_optimized(opts, pagination) end @spec fetch_public_activities(map(), Pagination.type()) :: [Activity.t()] @@ -751,6 +768,7 @@ defp object_ids_query_for_tags(tags) do |> join(:inner, [hto], ht in Pleroma.Hashtag, on: hto.hashtag_id == ht.id) |> where([hto, ht], ht.name in ^tags) |> select([hto], hto.object_id) + |> distinct([hto], true) end defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do @@ -789,9 +807,18 @@ defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do end defp restrict_hashtag_any(query, %{tag: [_ | _] = tags}) do + hashtag_ids = + from(ht in Hashtag, where: ht.name in ^tags, select: ht.id) + |> Repo.all() + + # Note: NO extra ordering should be done on "activities.id desc nulls last" for optimal plan from( [_activity, object] in query, - where: object.id in subquery(object_ids_query_for_tags(tags)) + join: hto in "hashtags_objects", + on: hto.object_id == object.id, + where: hto.hashtag_id in ^hashtag_ids, + distinct: [desc: object.id], + order_by: [desc: object.id] ) end @@ -1188,7 +1215,12 @@ defp normalize_fetch_activities_query_opts(opts) do Map.put(opts, key, Hashtag.normalize_name(value)) value when is_list(value) -> - Map.put(opts, key, Enum.map(value, 
&Hashtag.normalize_name/1)) + normalized_value = + value + |> Enum.map(&Hashtag.normalize_name/1) + |> Enum.uniq() + + Map.put(opts, key, normalized_value) _ -> opts @@ -1275,15 +1307,6 @@ def fetch_activities_query(recipients, opts \\ %{}) do end end - def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do - list_memberships = Pleroma.List.memberships(opts[:user]) - - fetch_activities_query(recipients ++ list_memberships, opts) - |> Pagination.fetch_paginated(opts, pagination) - |> Enum.reverse() - |> maybe_update_cc(list_memberships, opts[:user]) - end - @doc """ Fetch favorites activities of user with order by sort adds to favorites """ diff --git a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex index 87effa00b..c611958be 100644 --- a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex @@ -133,34 +133,25 @@ defp fail_on_bad_auth(conn) do end defp hashtag_fetching(params, user, local_only) do - tags = + # Note: not sanitizing tag options at this stage (may be mix-cased, have duplicates etc.) + tags_any = [params[:tag], params[:any]] |> List.flatten() - |> Enum.reject(&is_nil/1) - |> Enum.map(&String.downcase/1) - |> Enum.uniq() + |> Enum.filter(& &1) - tag_all = - params - |> Map.get(:all, []) - |> Enum.map(&String.downcase/1) + tag_all = Map.get(params, :all, []) + tag_reject = Map.get(params, :none, []) - tag_reject = - params - |> Map.get(:none, []) - |> Enum.map(&String.downcase/1) - - _activities = - params - |> Map.put(:type, "Create") - |> Map.put(:local_only, local_only) - |> Map.put(:blocking_user, user) - |> Map.put(:muting_user, user) - |> Map.put(:user, user) - |> Map.put(:tag, tags) - |> Map.put(:tag_all, tag_all) - |> Map.put(:tag_reject, tag_reject) - |> ActivityPub.fetch_public_activities() + params + |> Map.put(:type, "Create") + |> Map.put(:local_only, local_only) + |> Map.put(:blocking_user, user) + |> Map.put(:muting_user, user) + |> Map.put(:user, user) + |> Map.put(:tag, tags_any) + |> Map.put(:tag_all, tag_all) + |> Map.put(:tag_reject, tag_reject) + |> ActivityPub.fetch_public_activities() end # GET /api/v1/timelines/tag/:tag From 8feeb672c8ec0b916d94fb516ea05b464342e19b Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Wed, 10 Mar 2021 13:03:14 -0600 Subject: [PATCH 069/174] Ensure we fetch deps during spec-build stage --- .gitlab-ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c7e8291d8..68644660c 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -40,6 +40,7 @@ spec-build: paths: - spec.json script: + - mix deps.get - mix pleroma.openapi_spec spec.json benchmark: @@ -393,4 +394,4 @@ docker-adhoc: tags: - dind only: - - /^build-docker/.*$/@pleroma/pleroma \ No newline at end of file + - /^build-docker/.*$/@pleroma/pleroma From 502d166b7e44e36a94974df4770de6c6a239ad75 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Wed, 10 Mar 2021 16:19:18 -0600 Subject: [PATCH 070/174] See if switching to same image as releases fixes the build --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 68644660c..ea6611947 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,4 @@ -image: elixir:1.9.4 +image: elixir:1.10.3 variables: &global_variables POSTGRES_DB: pleroma_test From fa75f11ca138e69952cf1a1483a8b848b3b0d1b9 Mon Sep 17 
00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Wed, 10 Mar 2021 16:37:24 -0600 Subject: [PATCH 071/174] Revert "See if switching to same image as releases fixes the build" This reverts commit 502d166b7e44e36a94974df4770de6c6a239ad75. --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ea6611947..68644660c 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,4 @@ -image: elixir:1.10.3 +image: elixir:1.9.4 variables: &global_variables POSTGRES_DB: pleroma_test From 8246db2a968943a0cab615b8b5c1439aa4cb2547 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Sat, 6 Mar 2021 12:02:32 -0600 Subject: [PATCH 072/174] Workaround for URI.merge/2 bug https://github.com/elixir-lang/elixir/issues/10771 If we avoid URI.merge unless we know we need it we reduce the edge cases we could encounter. The site would need to both have "//" in the %URI{:path} and the image needs to be a relative URL. --- lib/pleroma/web/mastodon_api/views/status_view.ex | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index f3f54e03d..cf8037abb 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -380,9 +380,15 @@ def render("card.json", %{rich_media: rich_media, page_url: page_url}) do page_url = page_url_data |> to_string image_url = - if is_binary(rich_media["image"]) do - URI.merge(page_url_data, URI.parse(rich_media["image"])) - |> to_string + cond do + !is_binary(rich_media["image"]) -> + nil + + String.starts_with?(rich_media["image"], "http") -> + rich_media["image"] + + true -> + URI.merge(page_url_data, URI.parse(rich_media["image"])) |> to_string end %{ From 029ff6538972b59c6259dd7345ad9c4465fb3f73 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Thu, 11 Mar 2021 09:20:29 -0600 Subject: [PATCH 073/174] Leverage function pattern matching instead --- .../web/mastodon_api/views/status_view.ex | 36 +++++++++++++------ 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index cf8037abb..581b4e952 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -379,18 +379,15 @@ def render("card.json", %{rich_media: rich_media, page_url: page_url}) do page_url = page_url_data |> to_string - image_url = - cond do - !is_binary(rich_media["image"]) -> - nil - - String.starts_with?(rich_media["image"], "http") -> - rich_media["image"] - - true -> - URI.merge(page_url_data, URI.parse(rich_media["image"])) |> to_string + image_url_data = + if is_binary(rich_media["image"]) do + URI.parse(rich_media["image"]) + else + nil end + image_url = get_image_url(image_url_data, page_url_data) + %{ type: "link", provider_name: page_url_data.host, @@ -546,4 +543,23 @@ defp build_application(%{"type" => _type, "name" => name, "url" => url}), do: %{name: name, website: url} defp build_application(_), do: nil + + # Workaround for Elixir issue #10771 + # Avoid applying URI.merge unless necessary + # TODO: revert to always attempting URI.merge(image_url_data, page_url_data) + # when Elixir 1.12 is the minimum supported version + @spec get_image_url(struct() | nil, struct()) :: String.t() | nil + defp get_image_url( + %URI{scheme: image_scheme, host: image_host} = 
image_url_data, + %URI{} = _page_url_data + ) + when not is_nil(image_scheme) and not is_nil(image_host) do + image_url_data |> to_string + end + + defp get_image_url(%URI{} = image_url_data, %URI{} = page_url_data) do + URI.merge(page_url_data, image_url_data) |> to_string + end + + defp get_image_url(_, _), do: nil end From 884584772bd7ff52825bbb3bd38ca7c6190c084a Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Thu, 11 Mar 2021 09:40:40 -0600 Subject: [PATCH 074/174] Execute mix deps.get earlier and avoid duplicate invocations if possible --- .gitlab-ci.yml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 68644660c..2bc571971 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -25,13 +25,13 @@ before_script: - apt-get update && apt-get install -y cmake - mix local.hex --force - mix local.rebar --force + - mix deps.get - apt-get -qq update - apt-get install -y libmagic-dev build: stage: build script: - - mix deps.get - mix compile --force spec-build: @@ -40,7 +40,6 @@ spec-build: paths: - spec.json script: - - mix deps.get - mix pleroma.openapi_spec spec.json benchmark: @@ -53,7 +52,6 @@ benchmark: alias: postgres command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"] script: - - mix deps.get - mix ecto.create - mix ecto.migrate - mix pleroma.load_testing @@ -71,7 +69,6 @@ unit-testing: command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"] script: - apt-get update && apt-get install -y libimage-exiftool-perl ffmpeg - - mix deps.get - mix ecto.create - mix ecto.migrate - mix coveralls --preload-modules @@ -105,7 +102,6 @@ unit-testing-rum: RUM_ENABLED: "true" script: - apt-get update && apt-get install -y libimage-exiftool-perl ffmpeg - - mix deps.get - mix ecto.create - mix ecto.migrate - "mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/" @@ -121,7 +117,6 @@ analysis: stage: test cache: *testing_cache_policy script: - - mix deps.get - mix credo --strict --only=warnings,todo,fixme,consistency,readability docs-deploy: From 3edf45021eb6c3fba06bc083b346f7db54cd073f Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Fri, 12 Mar 2021 12:18:11 +0300 Subject: [PATCH 075/174] [#3213] Background migration infrastructure refactoring. Extracted BaseMigrator and BaseMigratorState. 
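With BaseMigrator/BaseMigratorState extracted, HashtagsTableMigrator shrinks to the
migration-specific parts (the query, the per-record transfer, retrying failures) while
the GenServer loop, state persistence, fault-rate handling and feature toggling live in
the shared modules. A new background migrator would then only fill in the callbacks; the
skeleton below is purely illustrative (ExampleMigrator, the :example_feature path and the
reused data-migration record are placeholders, not something this patch adds):

    defmodule Pleroma.Migrators.ExampleMigrator do
      defmodule State do
        use Pleroma.Migrators.Support.BaseMigratorState

        @impl Pleroma.Migrators.Support.BaseMigratorState
        # placeholder: point this at the migration's own data_migrations record
        defdelegate data_migration(), to: Pleroma.DataMigration, as: :populate_hashtags_table
      end

      use Pleroma.Migrators.Support.BaseMigrator

      @impl Pleroma.Migrators.Support.BaseMigrator
      def feature_config_path, do: [:features, :example_feature]

      @impl Pleroma.Migrators.Support.BaseMigrator
      def fault_rate_allowance, do: 0

      @impl Pleroma.Migrators.Support.BaseMigrator
      def query, do: from(o in Pleroma.Object)

      @impl Pleroma.Migrators.Support.BaseMigrator
      def perform do
        # chunked iteration over query(), reporting progress through the
        # put_stat/2 and increment_stat/2 helpers the base module delegates
        :ok
      end

      @impl Pleroma.Migrators.Support.BaseMigrator
      def retry_failed, do: :ok
    end
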
--- lib/pleroma/application.ex | 11 +- .../migrators/hashtags_table_migrator.ex | 265 ++++-------------- .../hashtags_table_migrator/state.ex | 104 ------- .../migrators/support/base_migrator.ex | 210 ++++++++++++++ .../migrators/support/base_migrator_state.ex | 116 ++++++++ 5 files changed, 385 insertions(+), 321 deletions(-) delete mode 100644 lib/pleroma/migrators/hashtags_table_migrator/state.ex create mode 100644 lib/pleroma/migrators/support/base_migrator.ex create mode 100644 lib/pleroma/migrators/support/base_migrator_state.ex diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index 2ff7562e2..06d399b2e 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -103,10 +103,7 @@ def start(_type, _args) do task_children(@mix_env) ++ dont_run_in_test(@mix_env) ++ chat_child(chat_enabled?()) ++ - [ - Pleroma.Migrators.HashtagsTableMigrator, - Pleroma.Gopher.Server - ] + [Pleroma.Gopher.Server] # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html # for other strategies and supported options @@ -231,6 +228,12 @@ defp dont_run_in_test(_) do keys: :duplicate, partitions: System.schedulers_online() ]} + ] ++ background_migrators() + end + + defp background_migrators do + [ + Pleroma.Migrators.HashtagsTableMigrator ] end diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex index 6123c88e0..b84058e11 100644 --- a/lib/pleroma/migrators/hashtags_table_migrator.ex +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -3,88 +3,27 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Migrators.HashtagsTableMigrator do - use GenServer + defmodule State do + use Pleroma.Migrators.Support.BaseMigratorState - require Logger + @impl Pleroma.Migrators.Support.BaseMigratorState + defdelegate data_migration(), to: Pleroma.DataMigration, as: :populate_hashtags_table + end - import Ecto.Query + use Pleroma.Migrators.Support.BaseMigrator - alias __MODULE__.State - alias Pleroma.Config alias Pleroma.Hashtag + alias Pleroma.Migrators.Support.BaseMigrator alias Pleroma.Object - alias Pleroma.Repo - defdelegate data_migration(), to: Pleroma.DataMigration, as: :populate_hashtags_table - defdelegate data_migration_id(), to: State + @impl BaseMigrator + def feature_config_path, do: [:features, :improved_hashtag_timeline] - defdelegate state(), to: State - defdelegate persist_state(), to: State, as: :persist_to_db - defdelegate get_stat(key, value \\ nil), to: State, as: :get_data_key - defdelegate put_stat(key, value), to: State, as: :put_data_key - defdelegate increment_stat(key, increment), to: State, as: :increment_data_key - - @feature_config_path [:features, :improved_hashtag_timeline] - @reg_name {:global, __MODULE__} - - def whereis, do: GenServer.whereis(@reg_name) - - def feature_state, do: Config.get(@feature_config_path) - - def start_link(_) do - case whereis() do - nil -> - GenServer.start_link(__MODULE__, nil, name: @reg_name) - - pid -> - {:ok, pid} - end - end - - @impl true - def init(_) do - {:ok, nil, {:continue, :init_state}} - end - - @impl true - def handle_continue(:init_state, _state) do - {:ok, _} = State.start_link(nil) - - data_migration = data_migration() - manual_migrations = Config.get([:instance, :manual_data_migrations], []) - - cond do - Config.get(:env) == :test -> - update_status(:noop) - - is_nil(data_migration) -> - message = "Data migration does not exist." 
- update_status(:failed, message) - Logger.error("#{__MODULE__}: #{message}") - - data_migration.state == :manual or data_migration.name in manual_migrations -> - message = "Data migration is in manual execution or manual fix mode." - update_status(:manual, message) - Logger.warn("#{__MODULE__}: #{message}") - - data_migration.state == :complete -> - on_complete(data_migration) - - true -> - send(self(), :migrate_hashtags) - end - - {:noreply, nil} - end - - @impl true - def handle_info(:migrate_hashtags, state) do - State.reinit() - - update_status(:running) - put_stat(:iteration_processed_count, 0) - put_stat(:started_at, NaiveDateTime.utc_now()) + @impl BaseMigrator + def fault_rate_allowance, do: Config.get([:populate_hashtags_table, :fault_rate_allowance], 0) + @impl BaseMigrator + def perform do data_migration_id = data_migration_id() max_processed_id = get_stat(:max_processed_id, 0) @@ -103,7 +42,7 @@ def handle_info(:migrate_hashtags, state) do |> Enum.filter(&(elem(&1, 0) == :error)) |> Enum.map(&elem(&1, 1)) - # Count of objects with hashtags (`{:noop, id}` is returned for objects having other AS2 tags) + # Count of objects with hashtags: `{:noop, id}` is returned for objects having other AS2 tags chunk_affected_count = results |> Enum.filter(&(elem(&1, 0) == :ok)) @@ -140,84 +79,10 @@ def handle_info(:migrate_hashtags, state) do Process.sleep(sleep_interval) end) |> Stream.run() - - fault_rate = fault_rate() - put_stat(:fault_rate, fault_rate) - fault_rate_allowance = Config.get([:populate_hashtags_table, :fault_rate_allowance], 0) - - cond do - fault_rate == 0 -> - set_complete() - - is_float(fault_rate) and fault_rate <= fault_rate_allowance -> - message = """ - Done with fault rate of #{fault_rate} which doesn't exceed #{fault_rate_allowance}. - Putting data migration to manual fix mode. Check `retry_failed/0`. - """ - - Logger.warn("#{__MODULE__}: #{message}") - update_status(:manual, message) - on_complete(data_migration()) - - true -> - message = "Too many failures. Check data_migration_failed_ids records / `retry_failed/0`." - Logger.error("#{__MODULE__}: #{message}") - update_status(:failed, message) - end - - persist_state() - {:noreply, state} end - def fault_rate do - with failures_count when is_integer(failures_count) <- failures_count() do - failures_count / Enum.max([get_stat(:affected_count, 0), 1]) - else - _ -> :error - end - end - - defp records_per_second do - get_stat(:iteration_processed_count, 0) / Enum.max([running_time(), 1]) - end - - defp running_time do - NaiveDateTime.diff(NaiveDateTime.utc_now(), get_stat(:started_at, NaiveDateTime.utc_now())) - end - - @hashtags_objects_cleanup_query """ - DELETE FROM hashtags_objects WHERE object_id IN - (SELECT DISTINCT objects.id FROM objects - JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities - ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = - (objects.data->>'id') - AND activities.data->>'type' = 'Create' - WHERE activities.id IS NULL); - """ - - @hashtags_cleanup_query """ - DELETE FROM hashtags WHERE id IN - (SELECT hashtags.id FROM hashtags - LEFT OUTER JOIN hashtags_objects - ON hashtags_objects.hashtag_id = hashtags.id - WHERE hashtags_objects.hashtag_id IS NULL); - """ - - @doc """ - Deletes `hashtags_objects` for legacy objects not asoociated with Create activity. - Also deletes unreferenced `hashtags` records (might occur after deletion of `hashtags_objects`). 
- """ - def delete_non_create_activities_hashtags do - {:ok, %{num_rows: hashtags_objects_count}} = - Repo.query(@hashtags_objects_cleanup_query, [], timeout: :infinity) - - {:ok, %{num_rows: hashtags_count}} = - Repo.query(@hashtags_cleanup_query, [], timeout: :infinity) - - {:ok, hashtags_objects_count, hashtags_count} - end - - defp query do + @impl BaseMigrator + def query do # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) # Note: not checking activity type, expecting remove_non_create_objects_hashtags/_ to clean up from( @@ -276,54 +141,7 @@ defp transfer_object_hashtags(object, hashtags) do end) end - @doc "Approximate count for current iteration (including processed records count)" - def count(force \\ false, timeout \\ :infinity) do - stored_count = get_stat(:count) - - if stored_count && !force do - stored_count - else - processed_count = get_stat(:processed_count, 0) - max_processed_id = get_stat(:max_processed_id, 0) - query = where(query(), [object], object.id > ^max_processed_id) - - count = Repo.aggregate(query, :count, :id, timeout: timeout) + processed_count - put_stat(:count, count) - persist_state() - - count - end - end - - defp on_complete(data_migration) do - if data_migration.feature_lock || feature_state() == :disabled do - Logger.warn("#{__MODULE__}: migration complete but feature is locked; consider enabling.") - :noop - else - Config.put(@feature_config_path, :enabled) - :ok - end - end - - def failed_objects_query do - from(o in Object) - |> join(:inner, [o], dmf in fragment("SELECT * FROM data_migration_failed_ids"), - on: dmf.record_id == o.id - ) - |> where([_o, dmf], dmf.data_migration_id == ^data_migration_id()) - |> order_by([o], asc: o.id) - end - - def failures_count do - with {:ok, %{rows: [[count]]}} <- - Repo.query( - "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;", - [data_migration_id()] - ) do - count - end - end - + @impl BaseMigrator def retry_failed do data_migration_id = data_migration_id() @@ -347,23 +165,44 @@ def retry_failed do force_continue() end - def force_continue do - send(whereis(), :migrate_hashtags) + defp failed_objects_query do + from(o in Object) + |> join(:inner, [o], dmf in fragment("SELECT * FROM data_migration_failed_ids"), + on: dmf.record_id == o.id + ) + |> where([_o, dmf], dmf.data_migration_id == ^data_migration_id()) + |> order_by([o], asc: o.id) end - def force_restart do - :ok = State.reset() - force_continue() - end + @doc """ + Service func to delete `hashtags_objects` for legacy objects not associated with Create activity. + Also deletes unreferenced `hashtags` records (might occur after deletion of `hashtags_objects`). 
+ """ + def delete_non_create_activities_hashtags do + hashtags_objects_cleanup_query = """ + DELETE FROM hashtags_objects WHERE object_id IN + (SELECT DISTINCT objects.id FROM objects + JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities + ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = + (objects.data->>'id') + AND activities.data->>'type' = 'Create' + WHERE activities.id IS NULL); + """ - def set_complete do - update_status(:complete) - persist_state() - on_complete(data_migration()) - end + hashtags_cleanup_query = """ + DELETE FROM hashtags WHERE id IN + (SELECT hashtags.id FROM hashtags + LEFT OUTER JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id + WHERE hashtags_objects.hashtag_id IS NULL); + """ - defp update_status(status, message \\ nil) do - put_stat(:state, status) - put_stat(:message, message) + {:ok, %{num_rows: hashtags_objects_count}} = + Repo.query(hashtags_objects_cleanup_query, [], timeout: :infinity) + + {:ok, %{num_rows: hashtags_count}} = + Repo.query(hashtags_cleanup_query, [], timeout: :infinity) + + {:ok, hashtags_objects_count, hashtags_count} end end diff --git a/lib/pleroma/migrators/hashtags_table_migrator/state.ex b/lib/pleroma/migrators/hashtags_table_migrator/state.ex deleted file mode 100644 index ee0009b2e..000000000 --- a/lib/pleroma/migrators/hashtags_table_migrator/state.ex +++ /dev/null @@ -1,104 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Migrators.HashtagsTableMigrator.State do - use Agent - - alias Pleroma.DataMigration - - defdelegate data_migration(), to: Pleroma.Migrators.HashtagsTableMigrator - - @reg_name {:global, __MODULE__} - - def start_link(_) do - Agent.start_link(fn -> load_state_from_db() end, name: @reg_name) - end - - defp load_state_from_db do - data_migration = data_migration() - - data = - if data_migration do - Map.new(data_migration.data, fn {k, v} -> {String.to_atom(k), v} end) - else - %{} - end - - %{ - data_migration_id: data_migration && data_migration.id, - data: data - } - end - - def persist_to_db do - %{data_migration_id: data_migration_id, data: data} = state() - - if data_migration_id do - DataMigration.update_one_by_id(data_migration_id, data: data) - else - {:error, :nil_data_migration_id} - end - end - - def reset do - %{data_migration_id: data_migration_id} = state() - - with false <- is_nil(data_migration_id), - :ok <- - DataMigration.update_one_by_id(data_migration_id, - state: :pending, - data: %{} - ) do - reinit() - else - true -> {:error, :nil_data_migration_id} - e -> e - end - end - - def reinit do - Agent.update(@reg_name, fn _state -> load_state_from_db() end) - end - - def state do - Agent.get(@reg_name, & &1) - end - - def get_data_key(key, default \\ nil) do - get_in(state(), [:data, key]) || default - end - - def put_data_key(key, value) do - _ = persist_non_data_change(key, value) - - Agent.update(@reg_name, fn state -> - put_in(state, [:data, key], value) - end) - end - - def increment_data_key(key, increment \\ 1) do - Agent.update(@reg_name, fn state -> - initial_value = get_in(state, [:data, key]) || 0 - updated_value = initial_value + increment - put_in(state, [:data, key], updated_value) - end) - end - - defp persist_non_data_change(:state, value) do - with true <- get_data_key(:state) != value, - true <- value in Pleroma.DataMigration.State.__valid_values__(), - 
%{data_migration_id: data_migration_id} when not is_nil(data_migration_id) <- state() do - DataMigration.update_one_by_id(data_migration_id, state: value) - else - false -> :ok - _ -> {:error, :nil_data_migration_id} - end - end - - defp persist_non_data_change(_, _) do - nil - end - - def data_migration_id, do: Map.get(state(), :data_migration_id) -end diff --git a/lib/pleroma/migrators/support/base_migrator.ex b/lib/pleroma/migrators/support/base_migrator.ex new file mode 100644 index 000000000..1f8a5402b --- /dev/null +++ b/lib/pleroma/migrators/support/base_migrator.ex @@ -0,0 +1,210 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Migrators.Support.BaseMigrator do + @moduledoc """ + Base background migrator functionality. + """ + + @callback perform() :: any() + @callback retry_failed() :: any() + @callback feature_config_path() :: list(atom()) + @callback query() :: Ecto.Query.t() + @callback fault_rate_allowance() :: integer() | float() + + defmacro __using__(_opts) do + quote do + use GenServer + + require Logger + + import Ecto.Query + + alias __MODULE__.State + alias Pleroma.Config + alias Pleroma.Repo + + @behaviour Pleroma.Migrators.Support.BaseMigrator + + defdelegate data_migration(), to: State + defdelegate data_migration_id(), to: State + defdelegate state(), to: State + defdelegate persist_state(), to: State, as: :persist_to_db + defdelegate get_stat(key, value \\ nil), to: State, as: :get_data_key + defdelegate put_stat(key, value), to: State, as: :put_data_key + defdelegate increment_stat(key, increment), to: State, as: :increment_data_key + + @reg_name {:global, __MODULE__} + + def whereis, do: GenServer.whereis(@reg_name) + + def start_link(_) do + case whereis() do + nil -> + GenServer.start_link(__MODULE__, nil, name: @reg_name) + + pid -> + {:ok, pid} + end + end + + @impl true + def init(_) do + {:ok, nil, {:continue, :init_state}} + end + + @impl true + def handle_continue(:init_state, _state) do + {:ok, _} = State.start_link(nil) + + data_migration = data_migration() + manual_migrations = Config.get([:instance, :manual_data_migrations], []) + + cond do + Config.get(:env) == :test -> + update_status(:noop) + + is_nil(data_migration) -> + message = "Data migration does not exist." + update_status(:failed, message) + Logger.error("#{__MODULE__}: #{message}") + + data_migration.state == :manual or data_migration.name in manual_migrations -> + message = "Data migration is in manual execution or manual fix mode." + update_status(:manual, message) + Logger.warn("#{__MODULE__}: #{message}") + + data_migration.state == :complete -> + on_complete(data_migration) + + true -> + send(self(), :perform) + end + + {:noreply, nil} + end + + @impl true + def handle_info(:perform, state) do + State.reinit() + + update_status(:running) + put_stat(:iteration_processed_count, 0) + put_stat(:started_at, NaiveDateTime.utc_now()) + + perform() + + fault_rate = fault_rate() + put_stat(:fault_rate, fault_rate) + fault_rate_allowance = fault_rate_allowance() + + cond do + fault_rate == 0 -> + set_complete() + + is_float(fault_rate) and fault_rate <= fault_rate_allowance -> + message = """ + Done with fault rate of #{fault_rate} which doesn't exceed #{fault_rate_allowance}. + Putting data migration to manual fix mode. Try running `#{__MODULE__}.retry_failed/0`. 
+ """ + + Logger.warn("#{__MODULE__}: #{message}") + update_status(:manual, message) + on_complete(data_migration()) + + true -> + message = "Too many failures. Try running `#{__MODULE__}.retry_failed/0`." + Logger.error("#{__MODULE__}: #{message}") + update_status(:failed, message) + end + + persist_state() + {:noreply, state} + end + + defp on_complete(data_migration) do + if data_migration.feature_lock || feature_state() == :disabled do + Logger.warn( + "#{__MODULE__}: migration complete but feature is locked; consider enabling." + ) + + :noop + else + Config.put(feature_config_path(), :enabled) + :ok + end + end + + @doc "Approximate count for current iteration (including processed records count)" + def count(force \\ false, timeout \\ :infinity) do + stored_count = get_stat(:count) + + if stored_count && !force do + stored_count + else + processed_count = get_stat(:processed_count, 0) + max_processed_id = get_stat(:max_processed_id, 0) + query = where(query(), [entity], entity.id > ^max_processed_id) + + count = Repo.aggregate(query, :count, :id, timeout: timeout) + processed_count + put_stat(:count, count) + persist_state() + + count + end + end + + def failures_count do + with {:ok, %{rows: [[count]]}} <- + Repo.query( + "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;", + [data_migration_id()] + ) do + count + end + end + + def feature_state, do: Config.get(feature_config_path()) + + def force_continue do + send(whereis(), :perform) + end + + def force_restart do + :ok = State.reset() + force_continue() + end + + def set_complete do + update_status(:complete) + persist_state() + on_complete(data_migration()) + end + + defp update_status(status, message \\ nil) do + put_stat(:state, status) + put_stat(:message, message) + end + + defp fault_rate do + with failures_count when is_integer(failures_count) <- failures_count() do + failures_count / Enum.max([get_stat(:affected_count, 0), 1]) + else + _ -> :error + end + end + + defp records_per_second do + get_stat(:iteration_processed_count, 0) / Enum.max([running_time(), 1]) + end + + defp running_time do + NaiveDateTime.diff( + NaiveDateTime.utc_now(), + get_stat(:started_at, NaiveDateTime.utc_now()) + ) + end + end + end +end diff --git a/lib/pleroma/migrators/support/base_migrator_state.ex b/lib/pleroma/migrators/support/base_migrator_state.ex new file mode 100644 index 000000000..69724ae79 --- /dev/null +++ b/lib/pleroma/migrators/support/base_migrator_state.ex @@ -0,0 +1,116 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Migrators.Support.BaseMigratorState do + @moduledoc """ + Base background migrator state functionality. 
+ """ + + @callback data_migration() :: Pleroma.DataMigration.t() + + defmacro __using__(_opts) do + quote do + use Agent + + alias Pleroma.DataMigration + + @behaviour Pleroma.Migrators.Support.BaseMigratorState + @reg_name {:global, __MODULE__} + + def start_link(_) do + Agent.start_link(fn -> load_state_from_db() end, name: @reg_name) + end + + def data_migration, do: raise("data_migration/0 is not implemented") + defoverridable data_migration: 0 + + defp load_state_from_db do + data_migration = data_migration() + + data = + if data_migration do + Map.new(data_migration.data, fn {k, v} -> {String.to_atom(k), v} end) + else + %{} + end + + %{ + data_migration_id: data_migration && data_migration.id, + data: data + } + end + + def persist_to_db do + %{data_migration_id: data_migration_id, data: data} = state() + + if data_migration_id do + DataMigration.update_one_by_id(data_migration_id, data: data) + else + {:error, :nil_data_migration_id} + end + end + + def reset do + %{data_migration_id: data_migration_id} = state() + + with false <- is_nil(data_migration_id), + :ok <- + DataMigration.update_one_by_id(data_migration_id, + state: :pending, + data: %{} + ) do + reinit() + else + true -> {:error, :nil_data_migration_id} + e -> e + end + end + + def reinit do + Agent.update(@reg_name, fn _state -> load_state_from_db() end) + end + + def state do + Agent.get(@reg_name, & &1) + end + + def get_data_key(key, default \\ nil) do + get_in(state(), [:data, key]) || default + end + + def put_data_key(key, value) do + _ = persist_non_data_change(key, value) + + Agent.update(@reg_name, fn state -> + put_in(state, [:data, key], value) + end) + end + + def increment_data_key(key, increment \\ 1) do + Agent.update(@reg_name, fn state -> + initial_value = get_in(state, [:data, key]) || 0 + updated_value = initial_value + increment + put_in(state, [:data, key], updated_value) + end) + end + + defp persist_non_data_change(:state, value) do + with true <- get_data_key(:state) != value, + true <- value in Pleroma.DataMigration.State.__valid_values__(), + %{data_migration_id: data_migration_id} when not is_nil(data_migration_id) <- state() do + DataMigration.update_one_by_id(data_migration_id, state: value) + else + false -> :ok + _ -> {:error, :nil_data_migration_id} + end + end + + defp persist_non_data_change(_, _) do + nil + end + + def data_migration_id, do: Map.get(state(), :data_migration_id) + end + end +end From cb734566093f406fc3db12de2408fc166486f417 Mon Sep 17 00:00:00 2001 From: Ivan Tashkinov <ivantashkinov@gmail.com> Date: Fri, 12 Mar 2021 12:25:18 +0300 Subject: [PATCH 076/174] [#3213] Code formatting fix. 
--- lib/pleroma/migrators/support/base_migrator_state.ex | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/migrators/support/base_migrator_state.ex b/lib/pleroma/migrators/support/base_migrator_state.ex index 69724ae79..b698587f2 100644 --- a/lib/pleroma/migrators/support/base_migrator_state.ex +++ b/lib/pleroma/migrators/support/base_migrator_state.ex @@ -98,7 +98,8 @@ def increment_data_key(key, increment \\ 1) do defp persist_non_data_change(:state, value) do with true <- get_data_key(:state) != value, true <- value in Pleroma.DataMigration.State.__valid_values__(), - %{data_migration_id: data_migration_id} when not is_nil(data_migration_id) <- state() do + %{data_migration_id: data_migration_id} when not is_nil(data_migration_id) <- + state() do DataMigration.update_one_by_id(data_migration_id, state: value) else false -> :ok From 2408363e2a5115e4856957ba46231211eec6b338 Mon Sep 17 00:00:00 2001 From: Ben Is <spambenis@fastwebnet.it> Date: Thu, 11 Mar 2021 13:51:22 +0000 Subject: [PATCH 077/174] Translated using Weblate (Italian) Currently translated at 100.0% (106 of 106 strings) Translation: Pleroma/Pleroma backend Translate-URL: https://translate.pleroma.social/projects/pleroma/pleroma/it/ --- priv/gettext/it/LC_MESSAGES/errors.po | 44 +++++++++++++-------------- 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/priv/gettext/it/LC_MESSAGES/errors.po b/priv/gettext/it/LC_MESSAGES/errors.po index cd0cd6c65..6a6ec058e 100644 --- a/priv/gettext/it/LC_MESSAGES/errors.po +++ b/priv/gettext/it/LC_MESSAGES/errors.po @@ -3,8 +3,8 @@ msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2020-06-19 14:33+0000\n" -"PO-Revision-Date: 2020-07-09 14:40+0000\n" -"Last-Translator: Ben Is <srsbzns@cock.li>\n" +"PO-Revision-Date: 2021-03-13 09:40+0000\n" +"Last-Translator: Ben Is <spambenis@fastwebnet.it>\n" "Language-Team: Italian <https://translate.pleroma.social/projects/pleroma/" "pleroma/it/>\n" "Language: it\n" @@ -45,7 +45,7 @@ msgstr "ha una voce invalida" ## From Ecto.Changeset.validate_exclusion/3 msgid "is reserved" -msgstr "è vietato" +msgstr "è riservato" ## From Ecto.Changeset.validate_confirmation/3 msgid "does not match confirmation" @@ -123,7 +123,7 @@ msgstr "Richiesta invalida" #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:425 #, elixir-format msgid "Can't delete object" -msgstr "Non puoi eliminare quest'oggetto" +msgstr "Oggetto non eliminabile" #: lib/pleroma/web/mastodon_api/controllers/status_controller.ex:196 #, elixir-format @@ -160,12 +160,12 @@ msgstr "Non puoi pubblicare un messaggio vuoto senza allegati" #: lib/pleroma/web/common_api/utils.ex:504 #, elixir-format msgid "Comment must be up to %{max_size} characters" -msgstr "I commenti posso al massimo consistere di %{max_size} caratteri" +msgstr "I commenti posso al massimo contenere %{max_size} caratteri" #: lib/pleroma/config/config_db.ex:222 #, elixir-format msgid "Config with params %{params} not found" -msgstr "Configurazione con parametri %{max_size} non trovata" +msgstr "Configurazione con parametri %{params} non trovata" #: lib/pleroma/web/common_api/common_api.ex:95 #, elixir-format @@ -200,7 +200,7 @@ msgstr "Non de-intestato" #: lib/pleroma/web/common_api/common_api.ex:126 #, elixir-format msgid "Could not unrepeat" -msgstr "Non de-ripetuto" +msgstr "Non de-condiviso" #: lib/pleroma/web/common_api/common_api.ex:428 #: lib/pleroma/web/common_api/common_api.ex:437 @@ -310,12 +310,12 @@ msgstr "Il messaggio ha 
superato la lunghezza massima" #: lib/pleroma/plugs/ensure_public_or_authenticated_plug.ex:31 #, elixir-format msgid "This resource requires authentication." -msgstr "Accedi per leggere." +msgstr "Accedi per poter leggere." #: lib/pleroma/plugs/rate_limiter/rate_limiter.ex:206 #, elixir-format msgid "Throttled" -msgstr "Strozzato" +msgstr "Limitato" #: lib/pleroma/web/common_api/common_api.ex:266 #, elixir-format @@ -347,17 +347,17 @@ msgstr "Devi aggiungere un indirizzo email valido" #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:389 #, elixir-format msgid "can't read inbox of %{nickname} as %{as_nickname}" -msgstr "non puoi leggere i messaggi privati di %{nickname} come %{as_nickname}" +msgstr "non puoi leggere i messaggi di %{nickname} come %{as_nickname}" #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:472 #, elixir-format msgid "can't update outbox of %{nickname} as %{as_nickname}" -msgstr "non puoi aggiornare gli inviati di %{nickname} come %{as_nickname}" +msgstr "non puoi inviare da %{nickname} come %{as_nickname}" #: lib/pleroma/web/common_api/common_api.ex:388 #, elixir-format msgid "conversation is already muted" -msgstr "la conversazione è già zittita" +msgstr "la conversazione è già silenziata" #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:316 #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:491 @@ -419,7 +419,7 @@ msgstr "Errore interno" #: lib/pleroma/web/oauth/fallback_controller.ex:29 #, elixir-format msgid "Invalid Username/Password" -msgstr "Nome utente/parola d'ordine invalidi" +msgstr "Nome utente/password invalidi" #: lib/pleroma/web/twitter_api/twitter_api.ex:118 #, elixir-format @@ -455,7 +455,7 @@ msgstr "Gestore OAuth non supportato: %{provider}." #: lib/pleroma/uploaders/uploader.ex:72 #, elixir-format msgid "Uploader callback timeout" -msgstr "Callback caricatmento scaduta" +msgstr "Callback caricamento scaduta" #: lib/pleroma/web/uploader_controller.ex:23 #, elixir-format @@ -496,7 +496,7 @@ msgstr "Parametro mancante: %{name}" #: lib/pleroma/web/oauth/oauth_controller.ex:322 #, elixir-format msgid "Password reset is required" -msgstr "Necessario reimpostare parola d'ordine" +msgstr "Necessario reimpostare password" #: lib/pleroma/tests/auth_test_controller.ex:9 #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:6 lib/pleroma/web/admin_api/admin_api_controller.ex:6 @@ -540,34 +540,32 @@ msgstr "" #: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:210 #, elixir-format msgid "Unexpected error occurred while adding file to pack." -msgstr "Errore inaspettato durante l'aggiunta del file al pacchetto." +msgstr "Errore inatteso durante l'aggiunta del file al pacchetto." #: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:138 #, elixir-format msgid "Unexpected error occurred while creating pack." -msgstr "Errore inaspettato durante la creazione del pacchetto." +msgstr "Errore inatteso durante la creazione del pacchetto." #: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:278 #, elixir-format msgid "Unexpected error occurred while removing file from pack." -msgstr "Errore inaspettato durante la rimozione del file dal pacchetto." +msgstr "Errore inatteso durante la rimozione del file dal pacchetto." #: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:250 #, elixir-format msgid "Unexpected error occurred while updating file in pack." -msgstr "Errore inaspettato durante l'aggiornamento del file nel pacchetto." 
+msgstr "Errore inatteso durante l'aggiornamento del file nel pacchetto." #: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:179 #, elixir-format msgid "Unexpected error occurred while updating pack metadata." -msgstr "Errore inaspettato durante l'aggiornamento dei metadati del pacchetto." +msgstr "Errore inatteso durante l'aggiornamento dei metadati del pacchetto." #: lib/pleroma/plugs/user_is_admin_plug.ex:21 #, elixir-format msgid "User is not an admin." -msgstr "" -"L'utente non è un amministratore." -"OAuth." +msgstr "L'utente non è un amministratore." #: lib/pleroma/web/mastodon_api/controllers/subscription_controller.ex:61 #, elixir-format From b80f868c6b41d1407cf6e4f2df8913bdf7a954c0 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Sat, 13 Mar 2021 12:27:15 -0600 Subject: [PATCH 078/174] Prefer naming this function build_image_url/2 --- lib/pleroma/web/mastodon_api/views/status_view.ex | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 581b4e952..71f659ba0 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -386,7 +386,7 @@ def render("card.json", %{rich_media: rich_media, page_url: page_url}) do nil end - image_url = get_image_url(image_url_data, page_url_data) + image_url = build_image_url(image_url_data, page_url_data) %{ type: "link", @@ -548,8 +548,8 @@ defp build_application(_), do: nil # Avoid applying URI.merge unless necessary # TODO: revert to always attempting URI.merge(image_url_data, page_url_data) # when Elixir 1.12 is the minimum supported version - @spec get_image_url(struct() | nil, struct()) :: String.t() | nil - defp get_image_url( + @spec build_image_url(struct() | nil, struct()) :: String.t() | nil + defp build_image_url( %URI{scheme: image_scheme, host: image_host} = image_url_data, %URI{} = _page_url_data ) @@ -557,9 +557,9 @@ defp get_image_url( image_url_data |> to_string end - defp get_image_url(%URI{} = image_url_data, %URI{} = page_url_data) do + defp build_image_url(%URI{} = image_url_data, %URI{} = page_url_data) do URI.merge(page_url_data, image_url_data) |> to_string end - defp get_image_url(_, _), do: nil + defp build_image_url(_, _), do: nil end From b1d4b2b81ec97143c41d16ac3f5bc2825b836f4b Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" <contact@hacktivis.me> Date: Mon, 15 Mar 2021 06:43:12 +0100 Subject: [PATCH 079/174] Add support for actor icon being a list (Bridgy) --- lib/pleroma/web/activity_pub/activity_pub.ex | 28 +++---- test/fixtures/bridgy/actor.json | 80 +++++++++++++++++++ .../web/activity_pub/activity_pub_test.exs | 27 +++++++ 3 files changed, 119 insertions(+), 16 deletions(-) create mode 100644 test/fixtures/bridgy/actor.json diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 5b45e2ca1..ff478f44c 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -1250,21 +1250,17 @@ defp get_actor_url(url) when is_list(url) do defp get_actor_url(_url), do: nil + defp normalize_image(%{"url" => url}) do + %{ + "type" => "Image", + "url" => [%{"href" => url}] + } + end + + defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image() + defp normalize_image(_), do: nil + defp object_to_user_data(data) do - avatar = - data["icon"]["url"] && - %{ - "type" => 
"Image", - "url" => [%{"href" => data["icon"]["url"]}] - } - - banner = - data["image"]["url"] && - %{ - "type" => "Image", - "url" => [%{"href" => data["image"]["url"]}] - } - fields = data |> Map.get("attachment", []) @@ -1308,13 +1304,13 @@ defp object_to_user_data(data) do ap_id: data["id"], uri: get_actor_url(data["url"]), ap_enabled: true, - banner: banner, + banner: normalize_image(data["image"]), fields: fields, emoji: emojis, is_locked: is_locked, is_discoverable: is_discoverable, invisible: invisible, - avatar: avatar, + avatar: normalize_image(data["icon"]), name: data["name"], follower_address: data["followers"], following_address: data["following"], diff --git a/test/fixtures/bridgy/actor.json b/test/fixtures/bridgy/actor.json new file mode 100644 index 000000000..5b2d8982b --- /dev/null +++ b/test/fixtures/bridgy/actor.json @@ -0,0 +1,80 @@ +{ + "id": "https://fed.brid.gy/jk.nipponalba.scot", + "url": "https://fed.brid.gy/r/https://jk.nipponalba.scot", + "urls": [ + { + "value": "https://jk.nipponalba.scot" + }, + { + "value": "https://social.nipponalba.scot/jk" + }, + { + "value": "https://px.nipponalba.scot/jk" + } + ], + "@context": "https://www.w3.org/ns/activitystreams", + "type": "Person", + "name": "J K 🇯🇵🏴", + "image": [ + { + "url": "https://jk.nipponalba.scot/images/profile.jpg", + "type": "Image", + "name": "profile picture" + } + ], + "tag": [ + { + "type": "Tag", + "name": "Craft Beer" + }, + { + "type": "Tag", + "name": "Single Malt Whisky" + }, + { + "type": "Tag", + "name": "Homebrewing" + }, + { + "type": "Tag", + "name": "Scottish Politics" + }, + { + "type": "Tag", + "name": "Scottish History" + }, + { + "type": "Tag", + "name": "Japanese History" + }, + { + "type": "Tag", + "name": "Tech" + }, + { + "type": "Tag", + "name": "Veganism" + }, + { + "type": "Tag", + "name": "Cooking" + } + ], + "icon": [ + { + "url": "https://jk.nipponalba.scot/images/profile.jpg", + "type": "Image", + "name": "profile picture" + } + ], + "preferredUsername": "jk.nipponalba.scot", + "summary": "", + "publicKey": { + "id": "jk.nipponalba.scot", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDdarxwzxnNbJ2hneWOYHkYJowk\npyigQtxlUd0VjgSQHwxU9kWqfbrHBVADyTtcqi/4dAzQd3UnCI1TPNnn4LPZY9PW\noiWd3Zl1/EfLFxO7LU9GS7fcSLQkyj5JNhSlN3I8QPudZbybrgRDVZYooDe1D+52\n5KLGqC2ajrIVOiDRTQIDAQAB\n-----END PUBLIC KEY-----" + }, + "inbox": "https://fed.brid.gy/jk.nipponalba.scot/inbox", + "outbox": "https://fed.brid.gy/jk.nipponalba.scot/outbox", + "following": "https://fed.brid.gy/jk.nipponalba.scot/following", + "followers": "https://fed.brid.gy/jk.nipponalba.scot/followers" +} diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index f4023856c..57f12f821 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -208,6 +208,33 @@ test "works for guppe actors" do assert user.name == "Bernie2020 group" assert user.actor_type == "Group" end + + test "works for bridgy actors" do + user_id = "https://fed.brid.gy/jk.nipponalba.scot" + + Tesla.Mock.mock(fn + %{method: :get, url: ^user_id} -> + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/bridgy/actor.json"), + headers: [{"content-type", "application/activity+json"}] + } + end) + + {:ok, user} = ActivityPub.make_user_from_ap_id(user_id) + + assert user.actor_type == "Person" + + assert user.avatar == %{ + "type" => "Image", + "url" => [%{"href" => 
"https://jk.nipponalba.scot/images/profile.jpg"}] + } + + assert user.banner == %{ + "type" => "Image", + "url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}] + } + end end test "it fetches the appropriate tag-restricted posts" do From d7e51206a251b9da0180a4df3c879531ac302e1a Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Thu, 18 Mar 2021 13:49:03 +0300 Subject: [PATCH 080/174] respect content-type header in finger request --- lib/pleroma/web/web_finger.ex | 90 +++++++++++-------- lib/pleroma/web/xml.ex | 2 +- .../fixtures/tesla_mock/xn--q9jyb4c_host_meta | 4 - test/pleroma/web/web_finger_test.exs | 67 ++++++++++++++ test/support/http_request_mock.ex | 47 +++++----- 5 files changed, 141 insertions(+), 69 deletions(-) delete mode 100644 test/fixtures/tesla_mock/xn--q9jyb4c_host_meta diff --git a/lib/pleroma/web/web_finger.ex b/lib/pleroma/web/web_finger.ex index 15002b29f..21b10e654 100644 --- a/lib/pleroma/web/web_finger.ex +++ b/lib/pleroma/web/web_finger.ex @@ -94,52 +94,56 @@ def represent_user(user, "XML") do |> XmlBuilder.to_doc() end - defp webfinger_from_xml(doc) do - subject = XML.string_from_xpath("//Subject", doc) + defp webfinger_from_xml(body) do + with {:ok, doc} <- XML.parse_document(body) do + subject = XML.string_from_xpath("//Subject", doc) - subscribe_address = - ~s{//Link[@rel="http://ostatus.org/schema/1.0/subscribe"]/@template} - |> XML.string_from_xpath(doc) + subscribe_address = + ~s{//Link[@rel="http://ostatus.org/schema/1.0/subscribe"]/@template} + |> XML.string_from_xpath(doc) - ap_id = - ~s{//Link[@rel="self" and @type="application/activity+json"]/@href} - |> XML.string_from_xpath(doc) + ap_id = + ~s{//Link[@rel="self" and @type="application/activity+json"]/@href} + |> XML.string_from_xpath(doc) - data = %{ - "subject" => subject, - "subscribe_address" => subscribe_address, - "ap_id" => ap_id - } + data = %{ + "subject" => subject, + "subscribe_address" => subscribe_address, + "ap_id" => ap_id + } - {:ok, data} + {:ok, data} + end end - defp webfinger_from_json(doc) do - data = - Enum.reduce(doc["links"], %{"subject" => doc["subject"]}, fn link, data -> - case {link["type"], link["rel"]} do - {"application/activity+json", "self"} -> - Map.put(data, "ap_id", link["href"]) + defp webfinger_from_json(body) do + with {:ok, doc} <- Jason.decode(body) do + data = + Enum.reduce(doc["links"], %{"subject" => doc["subject"]}, fn link, data -> + case {link["type"], link["rel"]} do + {"application/activity+json", "self"} -> + Map.put(data, "ap_id", link["href"]) - {"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"", "self"} -> - Map.put(data, "ap_id", link["href"]) + {"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"", "self"} -> + Map.put(data, "ap_id", link["href"]) - {nil, "http://ostatus.org/schema/1.0/subscribe"} -> - Map.put(data, "subscribe_address", link["template"]) + {nil, "http://ostatus.org/schema/1.0/subscribe"} -> + Map.put(data, "subscribe_address", link["template"]) - _ -> - Logger.debug("Unhandled type: #{inspect(link["type"])}") - data - end - end) + _ -> + Logger.debug("Unhandled type: #{inspect(link["type"])}") + data + end + end) - {:ok, data} + {:ok, data} + end end def get_template_from_xml(body) do xpath = "//Link[@rel='lrdd']/@template" - with doc when doc != :error <- XML.parse_document(body), + with {:ok, doc} <- XML.parse_document(body), template when template != nil <- XML.string_from_xpath(xpath, doc) do {:ok, template} end @@ -192,15 +196,23 @@ 
def finger(account) do address, [{"accept", "application/xrd+xml,application/jrd+json"}] ), - {:ok, %{status: status, body: body}} when status in 200..299 <- response do - doc = XML.parse_document(body) + {:ok, %{status: status, body: body, headers: headers}} when status in 200..299 <- + response do + case List.keyfind(headers, "content-type", 0) do + {_, content_type} -> + case Plug.Conn.Utils.media_type(content_type) do + {:ok, "application", subtype, _} when subtype in ~w(xrd+xml xml) -> + webfinger_from_xml(body) - if doc != :error do - webfinger_from_xml(doc) - else - with {:ok, doc} <- Jason.decode(body) do - webfinger_from_json(doc) - end + {:ok, "application", subtype, _} when subtype in ~w(jrd+json json) -> + webfinger_from_json(body) + + _ -> + {:error, {:content_type, content_type}} + end + + _ -> + {:error, {:content_type, nil}} end else e -> diff --git a/lib/pleroma/web/xml.ex b/lib/pleroma/web/xml.ex index 2b34611ac..0ab6e9d32 100644 --- a/lib/pleroma/web/xml.ex +++ b/lib/pleroma/web/xml.ex @@ -31,7 +31,7 @@ def parse_document(text) do |> :binary.bin_to_list() |> :xmerl_scan.string(quiet: true) - doc + {:ok, doc} rescue _e -> Logger.debug("Couldn't parse XML: #{inspect(text)}") diff --git a/test/fixtures/tesla_mock/xn--q9jyb4c_host_meta b/test/fixtures/tesla_mock/xn--q9jyb4c_host_meta deleted file mode 100644 index 45d260e55..000000000 --- a/test/fixtures/tesla_mock/xn--q9jyb4c_host_meta +++ /dev/null @@ -1,4 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0"> - <Link rel="lrdd" template="https://zetsubou.xn--q9jyb4c/.well-known/webfinger?resource={uri}" type="application/xrd+xml" /> -</XRD> diff --git a/test/pleroma/web/web_finger_test.exs b/test/pleroma/web/web_finger_test.exs index 84477d5a1..2d7b4a40b 100644 --- a/test/pleroma/web/web_finger_test.exs +++ b/test/pleroma/web/web_finger_test.exs @@ -45,6 +45,26 @@ test "returns error for nonsensical input" do assert {:error, _} = WebFinger.finger("pleroma.social") end + test "returns error when there is no content-type header" do + Tesla.Mock.mock(fn + %{url: "http://social.heldscal.la/.well-known/host-meta"} -> + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/social.heldscal.la_host_meta") + }} + + %{ + url: + "https://social.heldscal.la/.well-known/webfinger?resource=acct:invalid_content@social.heldscal.la" + } -> + {:ok, %Tesla.Env{status: 200, body: ""}} + end) + + user = "invalid_content@social.heldscal.la" + assert {:error, {:content_type, nil}} = WebFinger.finger(user) + end + test "returns error when fails parse xml or json" do user = "invalid_content@social.heldscal.la" assert {:error, %Jason.DecodeError{}} = WebFinger.finger(user) @@ -113,5 +133,52 @@ test "it works with idna domains as link" do ap_id = "https://" <> to_string(:idna.encode("zetsubou.みんな")) <> "/users/lain" {:ok, _data} = WebFinger.finger(ap_id) end + + test "respects json content-type" do + Tesla.Mock.mock(fn + %{ + url: + "https://mastodon.social/.well-known/webfinger?resource=acct:emelie@mastodon.social" + } -> + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/webfinger_emelie.json"), + headers: [{"content-type", "application/jrd+json"}] + }} + + %{url: "http://mastodon.social/.well-known/host-meta"} -> + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/mastodon.social_host_meta") + }} + end) + + {:ok, _data} = WebFinger.finger("emelie@mastodon.social") + end + + test "respects xml content-type" do + 
Tesla.Mock.mock(fn + %{ + url: "https://pawoo.net/.well-known/webfinger?resource=acct:pekorino@pawoo.net" + } -> + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/https___pawoo.net_users_pekorino.xml"), + headers: [{"content-type", "application/xrd+xml"}] + }} + + %{url: "http://pawoo.net/.well-known/host-meta"} -> + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/pawoo.net_host_meta") + }} + end) + + {:ok, _data} = WebFinger.finger("pekorino@pawoo.net") + end end end diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex index 1328d6225..1e98020f0 100644 --- a/test/support/http_request_mock.ex +++ b/test/support/http_request_mock.ex @@ -122,7 +122,7 @@ def get( %Tesla.Env{ status: 200, body: File.read!("test/fixtures/tesla_mock/mike@osada.macgirvin.com.json"), - headers: activitypub_object_headers() + headers: [{"content-type", "application/jrd+json"}] }} end @@ -187,7 +187,8 @@ def get( {:ok, %Tesla.Env{ status: 200, - body: File.read!("test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml") + body: File.read!("test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml"), + headers: [{"content-type", "application/xrd+xml"}] }} end @@ -526,22 +527,6 @@ def get( }} end - def get("http://zetsubou.xn--q9jyb4c/.well-known/host-meta", _, _, _) do - {:ok, - %Tesla.Env{ - status: 200, - body: File.read!("test/fixtures/tesla_mock/xn--q9jyb4c_host_meta") - }} - end - - def get("https://zetsubou.xn--q9jyb4c/.well-known/host-meta", _, _, _) do - {:ok, - %Tesla.Env{ - status: 200, - body: File.read!("test/fixtures/tesla_mock/xn--q9jyb4c_host_meta") - }} - end - def get("http://pleroma.soykaf.com/.well-known/host-meta", _, _, _) do {:ok, %Tesla.Env{ @@ -786,7 +771,8 @@ def get( {:ok, %Tesla.Env{ status: 200, - body: File.read!("test/fixtures/tesla_mock/shp@social.heldscal.la.xml") + body: File.read!("test/fixtures/tesla_mock/shp@social.heldscal.la.xml"), + headers: [{"content-type", "application/xrd+xml"}] }} end @@ -796,7 +782,7 @@ def get( _, [{"accept", "application/xrd+xml,application/jrd+json"}] ) do - {:ok, %Tesla.Env{status: 200, body: ""}} + {:ok, %Tesla.Env{status: 200, body: "", headers: [{"content-type", "application/jrd+json"}]}} end def get("http://framatube.org/.well-known/host-meta", _, _, _) do @@ -816,7 +802,7 @@ def get( {:ok, %Tesla.Env{ status: 200, - headers: [{"content-type", "application/json"}], + headers: [{"content-type", "application/jrd+json"}], body: File.read!("test/fixtures/tesla_mock/framasoft@framatube.org.json") }} end @@ -876,7 +862,7 @@ def get( {:ok, %Tesla.Env{ status: 200, - headers: [{"content-type", "application/json"}], + headers: [{"content-type", "application/jrd+json"}], body: File.read!("test/fixtures/tesla_mock/kaniini@gerzilla.de.json") }} end @@ -1074,7 +1060,8 @@ def get( {:ok, %Tesla.Env{ status: 200, - body: File.read!("test/fixtures/lain.xml") + body: File.read!("test/fixtures/lain.xml"), + headers: [{"content-type", "application/xrd+xml"}] }} end @@ -1087,7 +1074,16 @@ def get( {:ok, %Tesla.Env{ status: 200, - body: File.read!("test/fixtures/lain.xml") + body: File.read!("test/fixtures/lain.xml"), + headers: [{"content-type", "application/xrd+xml"}] + }} + end + + def get("http://zetsubou.xn--q9jyb4c/.well-known/host-meta", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/host-meta-zetsubou.xn--q9jyb4c.xml") }} end @@ -1153,7 +1149,8 @@ def get("https://mstdn.jp/.well-known/webfinger?resource=acct:kpherox@mstdn.jp", {:ok, 
%Tesla.Env{ status: 200, - body: File.read!("test/fixtures/tesla_mock/kpherox@mstdn.jp.xml") + body: File.read!("test/fixtures/tesla_mock/kpherox@mstdn.jp.xml"), + headers: [{"content-type", "application/xrd+xml"}] }} end From ef5b0510eb3e2c77c94fc5a6168180141a73361f Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Sat, 20 Mar 2021 08:29:02 +0300 Subject: [PATCH 081/174] updating timex --- mix.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mix.lock b/mix.lock index 6034ce5a8..19b90660b 100644 --- a/mix.lock +++ b/mix.lock @@ -52,7 +52,7 @@ "gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm", "29bd14a88030980849c7ed2447b8db6d6c9278a28b11a44cafe41b791205440f"}, "gen_stage": {:hex, :gen_stage, "0.14.3", "d0c66f1c87faa301c1a85a809a3ee9097a4264b2edf7644bf5c123237ef732bf", [:mix], [], "hexpm"}, "gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"}, - "gettext": {:hex, :gettext, "0.18.0", "406d6b9e0e3278162c2ae1de0a60270452c553536772167e2d701f028116f870", [:mix], [], "hexpm", "c3f850be6367ebe1a08616c2158affe4a23231c70391050bf359d5f92f66a571"}, + "gettext": {:hex, :gettext, "0.18.2", "7df3ea191bb56c0309c00a783334b288d08a879f53a7014341284635850a6e55", [:mix], [], "hexpm", "f9f537b13d4fdd30f3039d33cb80144c3aa1f8d9698e47d7bcbcc8df93b1f5c5"}, "gun": {:git, "https://github.com/ninenines/gun.git", "921c47146b2d9567eac7e9a4d2ccc60fffd4f327", [ref: "921c47146b2d9567eac7e9a4d2ccc60fffd4f327"]}, "hackney": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/hackney.git", "7d7119f0651515d6d7669c78393fd90950a3ec6e", [ref: "7d7119f0651515d6d7669c78393fd90950a3ec6e"]}, "html_entities": {:hex, :html_entities, "0.5.1", "1c9715058b42c35a2ab65edc5b36d0ea66dd083767bef6e3edb57870ef556549", [:mix], [], "hexpm", "30efab070904eb897ff05cd52fa61c1025d7f8ef3a9ca250bc4e6513d16c32de"}, @@ -117,9 +117,9 @@ "syslog": {:hex, :syslog, "1.1.0", "6419a232bea84f07b56dc575225007ffe34d9fdc91abe6f1b2f254fd71d8efc2", [:rebar3], [], "hexpm", "4c6a41373c7e20587be33ef841d3de6f3beba08519809329ecc4d27b15b659e1"}, "telemetry": {:hex, :telemetry, "0.4.2", "2808c992455e08d6177322f14d3bdb6b625fbcfd233a73505870d8738a2f4599", [:rebar3], [], "hexpm", "2d1419bd9dda6a206d7b5852179511722e2b18812310d304620c7bd92a13fcef"}, "tesla": {:hex, :tesla, "1.4.0", "1081bef0124b8bdec1c3d330bbe91956648fb008cf0d3950a369cda466a31a87", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.3", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "bf1374a5569f5fca8e641363b63f7347d680d91388880979a33bc12a6eb3e0aa"}, - "timex": {:hex, :timex, "3.6.2", "845cdeb6119e2fef10751c0b247b6c59d86d78554c83f78db612e3290f819bc2", 
[:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5 or ~> 1.0.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "26030b46199d02a590be61c2394b37ea25a3664c02fafbeca0b24c972025d47a"}, + "timex": {:hex, :timex, "3.7.3", "df8a2ea814749d700d6878ab9eacac9fdb498ecee2f507cb0002ec172bc24d0f", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "8691c1d86ca3a7bc14a156e2199dc8927be95d1a8f0e3b69e4bb2d6262c53ac6"}, "trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"}, - "tzdata": {:hex, :tzdata, "1.0.4", "a3baa4709ea8dba552dca165af6ae97c624a2d6ac14bd265165eaa8e8af94af6", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "b02637db3df1fd66dd2d3c4f194a81633d0e4b44308d36c1b2fdfd1e4e6f169b"}, + "tzdata": {:hex, :tzdata, "1.0.5", "69f1ee029a49afa04ad77801febaf69385f3d3e3d1e4b56b9469025677b89a28", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "55519aa2a99e5d2095c1e61cc74c9be69688f8ab75c27da724eb8279ff402a5a"}, "ueberauth": {:hex, :ueberauth, "0.6.3", "d42ace28b870e8072cf30e32e385579c57b9cc96ec74fa1f30f30da9c14f3cc0", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "afc293d8a1140d6591b53e3eaf415ca92842cb1d32fad3c450c6f045f7f91b60"}, "unicode_util_compat": {:git, "https://github.com/benoitc/unicode_util_compat.git", "38d7bc105f51159e8ea3279c40121db9db1e652f", [tag: "0.3.1"]}, "unsafe": {:hex, :unsafe, "1.0.1", "a27e1874f72ee49312e0a9ec2e0b27924214a05e3ddac90e91727bc76f8613d8", [:mix], [], "hexpm", "6c7729a2d214806450d29766abc2afaa7a2cbecf415be64f36a6691afebb50e5"}, From d3660b24d37862bb58cf309c582cfe7432fd7bb6 Mon Sep 17 00:00:00 2001 From: rinpatch <rin@patch.cx> Date: Mon, 22 Mar 2021 20:07:07 +0300 Subject: [PATCH 082/174] Copy emoji in the subject from parent post Sometimes people put emoji in the subject, which results in the subject looking broken if someone replies to it from a server that does not have the said emoji under the same shortcode. This patch solves the problem by extending the emoji set available in the summary to that of the parent post. 
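In short, the fix filters the parent object's `tag` list down to custom emoji whose shortcode actually appears in the reply's subject and merges those into the reply's emoji map. A condensed, self-contained sketch of that logic (the sample tag list and subject come from the `emoji-in-summary.json` fixture added below; the real implementation is inlined in `ActivityDraft.object/1` in the diff that follows):

```elixir
# Parent object's "tag" entries, as in the fixture below.
parent_tags = [
  %{
    "type" => "Emoji",
    "name" => ":joker_disapprove:",
    "icon" => %{"url" => "https://patch.cx/emoji/custom/joker_disapprove.png"}
  },
  %{
    "type" => "Emoji",
    "name" => ":joker_smile:",
    "icon" => %{"url" => "https://patch.cx/emoji/custom/joker_smile.png"}
  }
]

# The reply's subject (spoiler text).
summary = ":joker_smile: "

summary_emoji =
  Enum.reduce(parent_tags, %{}, fn
    %{"type" => "Emoji", "name" => name, "icon" => %{"url" => url}}, acc ->
      # Keep only emoji whose shortcode occurs in the subject.
      if String.contains?(summary, name), do: Map.put(acc, name, url), else: acc

    _, acc ->
      acc
  end)

# summary_emoji == %{":joker_smile:" => "https://patch.cx/emoji/custom/joker_smile.png"}
# This map is merged into the draft's emoji map, so the reply carries the emoji
# definition even when the replying instance does not have it locally.
```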
--- lib/pleroma/web/common_api/activity_draft.ex | 27 ++++++++++ .../fixtures/tesla_mock/emoji-in-summary.json | 49 +++++++++++++++++++ test/pleroma/web/common_api_test.exs | 26 ++++++++++ test/support/http_request_mock.ex | 9 ++++ 4 files changed, 111 insertions(+) create mode 100644 test/fixtures/tesla_mock/emoji-in-summary.json diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex index 8668b600e..80a9fa7bb 100644 --- a/lib/pleroma/web/common_api/activity_draft.ex +++ b/lib/pleroma/web/common_api/activity_draft.ex @@ -5,6 +5,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do alias Pleroma.Activity alias Pleroma.Conversation.Participation + alias Pleroma.Object alias Pleroma.Web.CommonAPI alias Pleroma.Web.CommonAPI.Utils @@ -186,6 +187,32 @@ defp sensitive(draft) do defp object(draft) do emoji = Map.merge(Pleroma.Emoji.Formatter.get_emoji_map(draft.full_payload), draft.emoji) + # Sometimes people create posts with subject containing emoji, + # since subjects are usually copied this will result in a broken + # subject when someone replies from an instance that does not have + # the emoji or has it under different shortcode. This is an attempt + # to mitigate this by copying emoji from inReplyTo if they are present + # in the subject. + summary_emoji = + with %Activity{} <- draft.in_reply_to, + %Object{data: %{"tag" => [_ | _] = tag}} <- Object.normalize(draft.in_reply_to) do + Enum.reduce(tag, %{}, fn + %{"type" => "Emoji", "name" => name, "icon" => %{"url" => url}}, acc -> + if String.contains?(draft.summary, name) do + Map.put(acc, name, url) + else + acc + end + + _, acc -> + acc + end) + else + _ -> %{} + end + + emoji = Map.merge(emoji, summary_emoji) + object = Utils.make_note_data(draft) |> Map.put("emoji", emoji) diff --git a/test/fixtures/tesla_mock/emoji-in-summary.json b/test/fixtures/tesla_mock/emoji-in-summary.json new file mode 100644 index 000000000..f77c6e2e8 --- /dev/null +++ b/test/fixtures/tesla_mock/emoji-in-summary.json @@ -0,0 +1,49 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://patch.cx/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "actor": "https://patch.cx/users/rin", + "attachment": [], + "attributedTo": "https://patch.cx/users/rin", + "cc": [ + "https://patch.cx/users/rin/followers" + ], + "content": ":joker_disapprove: <br><br>just grabbing a test fixture, nevermind me", + "context": "https://patch.cx/contexts/2c3ce4b4-18b1-4b1a-8965-3932027b5326", + "conversation": "https://patch.cx/contexts/2c3ce4b4-18b1-4b1a-8965-3932027b5326", + "id": "https://patch.cx/objects/a399c28e-c821-4820-bc3e-4afeb044c16f", + "published": "2021-03-22T16:54:46.461939Z", + "sensitive": null, + "source": ":joker_disapprove: \r\n\r\njust grabbing a test fixture, nevermind me", + "summary": ":joker_smile: ", + "tag": [ + { + "icon": { + "type": "Image", + "url": "https://patch.cx/emoji/custom/joker_disapprove.png" + }, + "id": "https://patch.cx/emoji/custom/joker_disapprove.png", + "name": ":joker_disapprove:", + "type": "Emoji", + "updated": "1970-01-01T00:00:00Z" + }, + { + "icon": { + "type": "Image", + "url": "https://patch.cx/emoji/custom/joker_smile.png" + }, + "id": "https://patch.cx/emoji/custom/joker_smile.png", + "name": ":joker_smile:", + "type": "Emoji", + "updated": "1970-01-01T00:00:00Z" + } + ], + "to": [ + "https://www.w3.org/ns/activitystreams#Public" + ], + "type": "Note" +} diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index 
9d005697c..6619f8fc8 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -25,6 +25,11 @@ defmodule Pleroma.Web.CommonAPITest do require Pleroma.Constants + setup_all do + Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end) + :ok + end + setup do: clear_config([:instance, :safe_dm_mentions]) setup do: clear_config([:instance, :limit]) setup do: clear_config([:instance, :max_pinned_statuses]) @@ -517,6 +522,27 @@ test "it adds an emoji on an external site" do assert url == "#{Pleroma.Web.base_url()}/emoji/blank.png" end + test "it copies emoji from the subject of the parent post" do + %Object{} = + object = + Object.normalize("https://patch.cx/objects/a399c28e-c821-4820-bc3e-4afeb044c16f", + fetch: true + ) + + activity = Activity.get_create_by_object_ap_id(object.data["id"]) + user = insert(:user) + + {:ok, reply_activity} = + CommonAPI.post(user, %{ + in_reply_to_id: activity.id, + status: ":joker_disapprove:", + spoiler_text: ":joker_smile:" + }) + + assert Object.normalize(reply_activity).data["emoji"][":joker_smile:"] + refute Object.normalize(reply_activity).data["emoji"][":joker_disapprove:"] + end + test "deactivated users can't post" do user = insert(:user, is_active: false) assert {:error, _} = CommonAPI.post(user, %{status: "ye"}) diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex index 1e98020f0..eb692fab5 100644 --- a/test/support/http_request_mock.ex +++ b/test/support/http_request_mock.ex @@ -1278,6 +1278,15 @@ def get("https://osada.macgirvin.com/", _, "", [{"accept", "text/html"}]) do }} end + def get("https://patch.cx/objects/a399c28e-c821-4820-bc3e-4afeb044c16f", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/emoji-in-summary.json"), + headers: activitypub_object_headers() + }} + end + def get(url, query, body, headers) do {:error, "Mock response not implemented for GET #{inspect(url)}, #{query}, #{inspect(body)}, #{ From 03843a53868860c0b6b2bebcf262bde746482f7e Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Tue, 23 Mar 2021 14:23:37 +0300 Subject: [PATCH 083/174] migrating config to tmp folder --- docs/administration/CLI_tasks/config.md | 10 +++-- lib/mix/tasks/pleroma/config.ex | 49 ++++++++++++++++++------- test/mix/tasks/pleroma/config_test.exs | 38 +++++++++++++++++++ 3 files changed, 80 insertions(+), 17 deletions(-) diff --git a/docs/administration/CLI_tasks/config.md b/docs/administration/CLI_tasks/config.md index 000ed4d98..fc9f3cbd5 100644 --- a/docs/administration/CLI_tasks/config.md +++ b/docs/administration/CLI_tasks/config.md @@ -32,16 +32,20 @@ config :pleroma, configurable_from_database: false ``` -To delete transferred settings from database optional flag `-d` can be used. `<env>` is `prod` by default. +Options: + +- `<path>` - where to save the migrated config. E.g. `--path=/tmp`. If the file is saved to a non-standard folder, you must copy it manually into a directory Pleroma can read. For OTP installs this is `PLEROMA_CONFIG_PATH` or `/etc/pleroma`; for installs from source it is the `config` directory in the Pleroma folder. +- `<env>` - the environment the config is migrated for. Defaults to `prod`.
+- To delete transferred settings from database optional flag `-d` can be used === "OTP" ```sh - ./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d] + ./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d] [--path=<path>] ``` === "From Source" ```sh - mix pleroma.config migrate_from_db [--env=<env>] [-d] + mix pleroma.config migrate_from_db [--env=<env>] [-d] [--path=<path>] ``` ## Dump all of the config settings defined in the database diff --git a/lib/mix/tasks/pleroma/config.ex b/lib/mix/tasks/pleroma/config.ex index 1962154b9..ac89702ae 100644 --- a/lib/mix/tasks/pleroma/config.ex +++ b/lib/mix/tasks/pleroma/config.ex @@ -27,7 +27,7 @@ def run(["migrate_from_db" | options]) do {opts, _} = OptionParser.parse!(options, - strict: [env: :string, delete: :boolean], + strict: [env: :string, delete: :boolean, path: :string], aliases: [d: :delete] ) @@ -259,18 +259,43 @@ defp create(group, settings) do defp migrate_from_db(opts) do env = opts[:env] || Pleroma.Config.get(:env) + filename = "#{env}.exported_from_db.secret.exs" + config_path = - if Pleroma.Config.get(:release) do - :config_path - |> Pleroma.Config.get() - |> Path.dirname() - else - "config" + cond do + opts[:path] -> + opts[:path] + + Pleroma.Config.get(:release) -> + :config_path + |> Pleroma.Config.get() + |> Path.dirname() + + true -> + "config" end - |> Path.join("#{env}.exported_from_db.secret.exs") + |> Path.join(filename) - file = File.open!(config_path, [:write, :utf8]) + with {:ok, file} <- File.open(config_path, [:write, :utf8]) do + write_config(file, config_path, opts) + shell_info("Database configuration settings have been exported to #{config_path}") + else + _ -> + shell_error("Impossible to save settings to this directory #{Path.dirname(config_path)}") + tmp_config_path = Path.join("/tmp", filename) + file = File.open!(tmp_config_path) + shell_info( + "Saving database configuration settings to #{tmp_config_path}. Copy it to the #{ + Path.dirname(config_path) + } manually." 
+ ) + + write_config(file, tmp_config_path, opts) + end + end + + defp write_config(file, path, opts) do IO.write(file, config_header()) ConfigDB @@ -278,11 +303,7 @@ defp migrate_from_db(opts) do |> Enum.each(&write_and_delete(&1, file, opts[:delete])) :ok = File.close(file) - System.cmd("mix", ["format", config_path]) - - shell_info( - "Database configuration settings have been exported to config/#{env}.exported_from_db.secret.exs" - ) + System.cmd("mix", ["format", path]) end if Code.ensure_loaded?(Config.Reader) do diff --git a/test/mix/tasks/pleroma/config_test.exs b/test/mix/tasks/pleroma/config_test.exs index 21f8f2286..3ed1e94b8 100644 --- a/test/mix/tasks/pleroma/config_test.exs +++ b/test/mix/tasks/pleroma/config_test.exs @@ -200,6 +200,44 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil end end + describe "migrate_from_db/1" do + setup do: clear_config(:configurable_from_database, true) + + setup do + insert_config_record(:pleroma, :setting_first, key: "value", key2: ["Activity"]) + insert_config_record(:pleroma, :setting_second, key: "value2", key2: [Repo]) + insert_config_record(:quack, :level, :info) + + path = "test/instance_static" + file_path = Path.join(path, "temp.exported_from_db.secret.exs") + + on_exit(fn -> File.rm!(file_path) end) + + [file_path: file_path] + end + + test "with path parameter", %{file_path: file_path} do + MixTask.run(["migrate_from_db", "--env", "temp", "--path", Path.dirname(file_path)]) + + file = File.read!(file_path) + assert file =~ "config :pleroma, :setting_first," + assert file =~ "config :pleroma, :setting_second," + assert file =~ "config :quack, :level, :info" + end + + test "release", %{file_path: file_path} do + clear_config(:release, true) + clear_config(:config_path, file_path) + + MixTask.run(["migrate_from_db", "--env", "temp"]) + + file = File.read!(file_path) + assert file =~ "config :pleroma, :setting_first," + assert file =~ "config :pleroma, :setting_second," + assert file =~ "config :quack, :level, :info" + end + end + describe "operations on database config" do setup do: clear_config(:configurable_from_database, true) From 4cd34d019764fdd68829ebd4282118abc4534133 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Tue, 23 Mar 2021 17:27:02 +0300 Subject: [PATCH 084/174] suggestion --- lib/mix/tasks/pleroma/config.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mix/tasks/pleroma/config.ex b/lib/mix/tasks/pleroma/config.ex index ac89702ae..22502a522 100644 --- a/lib/mix/tasks/pleroma/config.ex +++ b/lib/mix/tasks/pleroma/config.ex @@ -282,7 +282,7 @@ defp migrate_from_db(opts) do else _ -> shell_error("Impossible to save settings to this directory #{Path.dirname(config_path)}") - tmp_config_path = Path.join("/tmp", filename) + tmp_config_path = Path.join(System.tmp_dir!(), filename) file = File.open!(tmp_config_path) shell_info( From ad907254fb47764869fecd5928bd863182421c8c Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Tue, 23 Mar 2021 19:37:25 +0300 Subject: [PATCH 085/174] changelog entry --- CHANGELOG.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1fa22398..fb26c7a73 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
## Unreleased (Patch) +### Fixed + +- Try to save exported ConfigDB settings (migrate_from_db) in the system temp directory if default location is not writable. + ## [2.3.0] - 2020-03-01 ### Security @@ -51,7 +55,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Pleroma API: Reroute `/api/pleroma/*` to `/api/v1/pleroma/*` </details> -- Improved hashtag timeline performance (requires a background migration). +- Improved hashtag timeline performance (requires a background migration). ### Added From b6a69b5efda5f75ad716252c69ae658a4e885b0a Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Wed, 24 Mar 2021 12:50:05 -0500 Subject: [PATCH 086/174] Return token's primary key with POST /oauth/token --- .../API/differences_in_mastoapi_responses.md | 24 +++++++++++++++++-- lib/pleroma/web/o_auth/o_auth_view.ex | 1 + .../web/o_auth/o_auth_controller_test.exs | 6 +++-- 3 files changed, 27 insertions(+), 4 deletions(-) diff --git a/docs/development/API/differences_in_mastoapi_responses.md b/docs/development/API/differences_in_mastoapi_responses.md index a14fcb416..3552b12fb 100644 --- a/docs/development/API/differences_in_mastoapi_responses.md +++ b/docs/development/API/differences_in_mastoapi_responses.md @@ -255,9 +255,29 @@ This information is returned in the `/api/v1/accounts/verify_credentials` endpoi *Pleroma supports refreshing tokens.* -`POST /oauth/token` +### POST `/oauth/token` -Post here request with `grant_type=refresh_token` to obtain new access token. Returns an access token. +You can obtain access tokens for a user in a few additional ways. + +#### Refreshing a token + +To obtain a new access token from a refresh token, pass `grant_type=refresh_token` with the following extra parameters: + +- `refresh_token`: The refresh token. + +#### Getting a token with a password + +To obtain a token from a user's password, pass `grant_type=password` with the following extra parameters: + +- `username`: Username to authenticate. +- `password`: The user's password. + +#### Response body + +Additional fields are returned in the response: + +- `id`: The primary key of this token in Pleroma's database. +- `me` (user tokens only): The ActivityPub ID of the user who owns the token. 
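For illustration, a hypothetical client-side call using the password grant described above (not part of the patch: the instance URL, app credentials, user credentials, and the Tesla client setup are placeholder assumptions; the request parameters and the extra `id`/`me` response fields are the ones documented here):

```elixir
# Hypothetical stand-alone script; requires Elixir 1.12+ for Mix.install.
Mix.install([{:tesla, "~> 1.4"}, {:jason, "~> 1.2"}])

client =
  Tesla.client([
    {Tesla.Middleware.BaseUrl, "https://pleroma.example"},
    Tesla.Middleware.FormUrlencoded,
    Tesla.Middleware.DecodeJson
  ])

{:ok, %Tesla.Env{status: 200, body: body}} =
  Tesla.post(client, "/oauth/token", %{
    "grant_type" => "password",
    "client_id" => "PLACEHOLDER_CLIENT_ID",
    "client_secret" => "PLACEHOLDER_CLIENT_SECRET",
    "username" => "alice",
    "password" => "placeholder-password"
  })

# Standard OAuth fields (access_token, refresh_token, etc.) plus the extras:
token_db_id = body["id"]  # primary key of the token in Pleroma's database
owner_ap_id = body["me"]  # ActivityPub ID of the token's owner (user tokens only)

# A later refresh uses the same endpoint with the documented parameters:
# %{"grant_type" => "refresh_token", "refresh_token" => body["refresh_token"],
#   "client_id" => "PLACEHOLDER_CLIENT_ID", "client_secret" => "PLACEHOLDER_CLIENT_SECRET"}
```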
## Account Registration diff --git a/lib/pleroma/web/o_auth/o_auth_view.ex b/lib/pleroma/web/o_auth/o_auth_view.ex index 281bbcc3c..1419c96a2 100644 --- a/lib/pleroma/web/o_auth/o_auth_view.ex +++ b/lib/pleroma/web/o_auth/o_auth_view.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Web.OAuth.OAuthView do def render("token.json", %{token: token} = opts) do response = %{ + id: token.id, token_type: "Bearer", access_token: token.token, refresh_token: token.refresh_token, diff --git a/test/pleroma/web/o_auth/o_auth_controller_test.exs b/test/pleroma/web/o_auth/o_auth_controller_test.exs index 312500feb..0fdd5b8e9 100644 --- a/test/pleroma/web/o_auth/o_auth_controller_test.exs +++ b/test/pleroma/web/o_auth/o_auth_controller_test.exs @@ -805,10 +805,12 @@ test "issues a token for `password` grant_type with valid credentials, with full "client_secret" => app.client_secret }) - assert %{"access_token" => token} = json_response(conn, 200) + assert %{"id" => id, "access_token" => access_token} = json_response(conn, 200) - token = Repo.get_by(Token, token: token) + token = Repo.get_by(Token, token: access_token) assert token + assert token.id == id + assert token.token == access_token assert token.scopes == app.scopes end From 3ec1dbd9223aa44205e90967175f07cc532501ab Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Wed, 3 Feb 2021 16:09:28 +0300 Subject: [PATCH 087/174] Let pins federate - save object ids on pin, instead of activity ids - pins federation - removed pinned_activities field from the users table - activityPub endpoint for user pins - pulling remote users pins --- .../API/differences_in_mastoapi_responses.md | 1 + lib/pleroma/activity.ex | 79 +++++++------ lib/pleroma/activity/queries.ex | 5 + lib/pleroma/user.ex | 77 +++++++------ lib/pleroma/web/activity_pub/activity_pub.ex | 60 +++++++++- .../activity_pub/activity_pub_controller.ex | 8 ++ lib/pleroma/web/activity_pub/builder.ex | 32 ++++++ .../web/activity_pub/object_validator.ex | 11 ++ .../object_validators/pin_validator.ex | 42 +++++++ lib/pleroma/web/activity_pub/side_effects.ex | 56 +++++++++- .../web/activity_pub/transmogrifier.ex | 9 ++ .../web/activity_pub/views/user_view.ex | 21 ++++ .../api_spec/operations/status_operation.ex | 46 +++++++- lib/pleroma/web/api_spec/schemas/status.ex | 7 ++ lib/pleroma/web/common_api.ex | 57 +++++++--- .../controllers/fallback_controller.ex | 6 + .../controllers/status_controller.ex | 12 ++ .../web/mastodon_api/views/status_view.ex | 23 +++- lib/pleroma/web/router.ex | 1 + ...0202110641_add_pinned_objects_to_users.exs | 9 ++ ...03141144_add_featured_address_to_users.exs | 23 ++++ ..._pinned_activities_into_pinned_objects.exs | 28 +++++ ...21_remove_pinned_activities_from_users.exs | 15 +++ test/fixtures/collections/featured.json | 39 +++++++ test/fixtures/masto_pin.json | 41 +++++++ test/fixtures/statuses/note.json | 27 +++++ test/fixtures/users_mock/masto_featured.json | 18 +++ test/fixtures/users_mock/user.json | 41 +++++++ test/pleroma/user_test.exs | 45 ++++++++ .../activity_pub_controller_test.exs | 105 ++++++++++++++++++ .../web/activity_pub/activity_pub_test.exs | 77 +++++++++++++ .../web/activity_pub/transmogrifier_test.exs | 74 ++++++++++++ test/pleroma/web/common_api_test.exs | 60 ++++++++-- .../controllers/status_controller_test.exs | 32 ++++-- .../mastodon_api/views/status_view_test.exs | 3 +- .../remote_follow_controller_test.exs | 30 +++++ test/support/factory.ex | 6 +- test/support/http_request_mock.ex | 23 ++++ 38 files changed, 1127 insertions(+), 122 
deletions(-) create mode 100644 lib/pleroma/web/activity_pub/object_validators/pin_validator.ex create mode 100644 priv/repo/migrations/20210202110641_add_pinned_objects_to_users.exs create mode 100644 priv/repo/migrations/20210203141144_add_featured_address_to_users.exs create mode 100644 priv/repo/migrations/20210205145000_move_pinned_activities_into_pinned_objects.exs create mode 100644 priv/repo/migrations/20210206045221_remove_pinned_activities_from_users.exs create mode 100644 test/fixtures/collections/featured.json create mode 100644 test/fixtures/masto_pin.json create mode 100644 test/fixtures/statuses/note.json create mode 100644 test/fixtures/users_mock/masto_featured.json create mode 100644 test/fixtures/users_mock/user.json diff --git a/docs/development/API/differences_in_mastoapi_responses.md b/docs/development/API/differences_in_mastoapi_responses.md index a14fcb416..2ff56d3ca 100644 --- a/docs/development/API/differences_in_mastoapi_responses.md +++ b/docs/development/API/differences_in_mastoapi_responses.md @@ -38,6 +38,7 @@ Has these additional fields under the `pleroma` object: - `thread_muted`: true if the thread the post belongs to is muted - `emoji_reactions`: A list with emoji / reaction maps. The format is `{name: "☕", count: 1, me: true}`. Contains no information about the reacting users, for that use the `/statuses/:id/reactions` endpoint. - `parent_visible`: If the parent of this post is visible to the user or not. +- `pinned_at`: a datetime (iso8601) when status was pinned, `null` otherwise. ## Scheduled statuses diff --git a/lib/pleroma/activity.ex b/lib/pleroma/activity.ex index d59403884..a4cfca4c5 100644 --- a/lib/pleroma/activity.ex +++ b/lib/pleroma/activity.ex @@ -184,40 +184,48 @@ def get_by_ap_id_with_object(ap_id) do |> Repo.one() end - @spec get_by_id(String.t()) :: Activity.t() | nil - def get_by_id(id) do - case FlakeId.flake_id?(id) do - true -> - Activity - |> where([a], a.id == ^id) - |> restrict_deactivated_users() - |> Repo.one() + @doc """ + Gets activity by ID, doesn't load activities from deactivated actors by default. 
+ """ + @spec get_by_id(String.t(), keyword()) :: t() | nil + def get_by_id(id, opts \\ [filter: [:restrict_deactivated]]), do: get_by_id_with_opts(id, opts) - _ -> - nil + @spec get_by_id_with_user_actor(String.t()) :: t() | nil + def get_by_id_with_user_actor(id), do: get_by_id_with_opts(id, preload: [:user_actor]) + + @spec get_by_id_with_object(String.t()) :: t() | nil + def get_by_id_with_object(id), do: get_by_id_with_opts(id, preload: [:object]) + + defp get_by_id_with_opts(id, opts) do + if FlakeId.flake_id?(id) do + query = Queries.by_id(id) + + with_filters_query = + if is_list(opts[:filter]) do + Enum.reduce(opts[:filter], query, fn + {:type, type}, acc -> Queries.by_type(acc, type) + :restrict_deactivated, acc -> restrict_deactivated_users(acc) + _, acc -> acc + end) + else + query + end + + with_preloads_query = + if is_list(opts[:preload]) do + Enum.reduce(opts[:preload], with_filters_query, fn + :user_actor, acc -> with_preloaded_user_actor(acc) + :object, acc -> with_preloaded_object(acc) + _, acc -> acc + end) + else + with_filters_query + end + + Repo.one(with_preloads_query) end end - def get_by_id_with_user_actor(id) do - case FlakeId.flake_id?(id) do - true -> - Activity - |> where([a], a.id == ^id) - |> with_preloaded_user_actor() - |> Repo.one() - - _ -> - nil - end - end - - def get_by_id_with_object(id) do - Activity - |> where(id: ^id) - |> with_preloaded_object() - |> Repo.one() - end - def all_by_ids_with_object(ids) do Activity |> where([a], a.id in ^ids) @@ -269,6 +277,11 @@ def get_create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do def get_create_by_object_ap_id_with_object(_), do: nil + @spec create_by_id_with_object(String.t()) :: t() | nil + def create_by_id_with_object(id) do + get_by_id_with_opts(id, preload: [:object], filter: [type: "Create"]) + end + defp get_in_reply_to_activity_from_object(%Object{data: %{"inReplyTo" => ap_id}}) do get_create_by_object_ap_id_with_object(ap_id) end @@ -368,12 +381,6 @@ def direct_conversation_id(activity, for_user) do end end - @spec pinned_by_actor?(Activity.t()) :: boolean() - def pinned_by_actor?(%Activity{} = activity) do - actor = user_actor(activity) - activity.id in actor.pinned_activities - end - @spec get_by_object_ap_id_with_object(String.t()) :: t() | nil def get_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do ap_id diff --git a/lib/pleroma/activity/queries.ex b/lib/pleroma/activity/queries.ex index a6b02a889..4632651b0 100644 --- a/lib/pleroma/activity/queries.ex +++ b/lib/pleroma/activity/queries.ex @@ -14,6 +14,11 @@ defmodule Pleroma.Activity.Queries do alias Pleroma.Activity alias Pleroma.User + @spec by_id(query(), String.t()) :: query() + def by_id(query \\ Activity, id) do + from(a in query, where: a.id == ^id) + end + @spec by_ap_id(query, String.t()) :: query def by_ap_id(query \\ Activity, ap_id) do from( diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index c1aa0f716..b78777141 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -99,6 +99,7 @@ defmodule Pleroma.User do field(:local, :boolean, default: true) field(:follower_address, :string) field(:following_address, :string) + field(:featured_address, :string) field(:search_rank, :float, virtual: true) field(:search_type, :integer, virtual: true) field(:tags, {:array, :string}, default: []) @@ -130,7 +131,6 @@ defmodule Pleroma.User do field(:hide_followers, :boolean, default: false) field(:hide_follows, :boolean, default: false) field(:hide_favorites, :boolean, default: true) - 
field(:pinned_activities, {:array, :string}, default: []) field(:email_notifications, :map, default: %{"digest" => false}) field(:mascot, :map, default: nil) field(:emoji, :map, default: %{}) @@ -148,6 +148,7 @@ defmodule Pleroma.User do field(:accepts_chat_messages, :boolean, default: nil) field(:last_active_at, :naive_datetime) field(:disclose_client, :boolean, default: true) + field(:pinned_objects, :map, default: %{}) embeds_one( :notification_settings, @@ -372,8 +373,10 @@ def banner_url(user, options \\ []) do end # Should probably be renamed or removed + @spec ap_id(User.t()) :: String.t() def ap_id(%User{nickname: nickname}), do: "#{Web.base_url()}/users/#{nickname}" + @spec ap_followers(User.t()) :: String.t() def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers" @@ -381,6 +384,11 @@ def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers" def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa def ap_following(%User{} = user), do: "#{ap_id(user)}/following" + @spec ap_featured_collection(User.t()) :: String.t() + def ap_featured_collection(%User{featured_address: fa}) when is_binary(fa), do: fa + + def ap_featured_collection(%User{} = user), do: "#{ap_id(user)}/collections/featured" + defp truncate_fields_param(params) do if Map.has_key?(params, :fields) do Map.put(params, :fields, Enum.map(params[:fields], &truncate_field/1)) @@ -443,6 +451,7 @@ def remote_user_changeset(struct \\ %User{local: false}, params) do :uri, :follower_address, :following_address, + :featured_address, :hide_followers, :hide_follows, :hide_followers_count, @@ -454,7 +463,8 @@ def remote_user_changeset(struct \\ %User{local: false}, params) do :invisible, :actor_type, :also_known_as, - :accepts_chat_messages + :accepts_chat_messages, + :pinned_objects ] ) |> cast(params, [:name], empty_values: []) @@ -686,7 +696,7 @@ def register_changeset_ldap(struct, params = %{password: password}) |> validate_format(:nickname, local_nickname_regex()) |> put_ap_id() |> unique_constraint(:ap_id) - |> put_following_and_follower_address() + |> put_following_and_follower_and_featured_address() end def register_changeset(struct, params \\ %{}, opts \\ []) do @@ -747,7 +757,7 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do |> put_password_hash |> put_ap_id() |> unique_constraint(:ap_id) - |> put_following_and_follower_address() + |> put_following_and_follower_and_featured_address() end def maybe_validate_required_email(changeset, true), do: changeset @@ -765,11 +775,16 @@ defp put_ap_id(changeset) do put_change(changeset, :ap_id, ap_id) end - defp put_following_and_follower_address(changeset) do - followers = ap_followers(%User{nickname: get_field(changeset, :nickname)}) + defp put_following_and_follower_and_featured_address(changeset) do + user = %User{nickname: get_field(changeset, :nickname)} + followers = ap_followers(user) + following = ap_following(user) + featured = ap_featured_collection(user) changeset |> put_change(:follower_address, followers) + |> put_change(:following_address, following) + |> put_change(:featured_address, featured) end defp autofollow_users(user) do @@ -2343,45 +2358,35 @@ def approval_changeset(user, set_approval: approved?) 
do cast(user, %{is_approved: approved?}, [:is_approved]) end - def add_pinnned_activity(user, %Pleroma.Activity{id: id}) do - if id not in user.pinned_activities do - max_pinned_statuses = Config.get([:instance, :max_pinned_statuses], 0) - params = %{pinned_activities: user.pinned_activities ++ [id]} - - # if pinned activity was scheduled for deletion, we remove job - if expiration = Pleroma.Workers.PurgeExpiredActivity.get_expiration(id) do - Oban.cancel_job(expiration.id) - end + @spec add_pinned_object_id(User.t(), String.t()) :: {:ok, User.t()} | {:error, term()} + def add_pinned_object_id(%User{} = user, object_id) do + if !user.pinned_objects[object_id] do + params = %{pinned_objects: Map.put(user.pinned_objects, object_id, NaiveDateTime.utc_now())} user - |> cast(params, [:pinned_activities]) - |> validate_length(:pinned_activities, - max: max_pinned_statuses, - message: "You have already pinned the maximum number of statuses" - ) + |> cast(params, [:pinned_objects]) + |> validate_change(:pinned_objects, fn :pinned_objects, pinned_objects -> + max_pinned_statuses = Config.get([:instance, :max_pinned_statuses], 0) + + if Enum.count(pinned_objects) <= max_pinned_statuses do + [] + else + [pinned_objects: "You have already pinned the maximum number of statuses"] + end + end) else change(user) end |> update_and_set_cache() end - def remove_pinnned_activity(user, %Pleroma.Activity{id: id, data: data}) do - params = %{pinned_activities: List.delete(user.pinned_activities, id)} - - # if pinned activity was scheduled for deletion, we reschedule it for deletion - if data["expires_at"] do - # MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in original implementation - {:ok, expires_at} = - data["expires_at"] |> Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime.cast() - - Pleroma.Workers.PurgeExpiredActivity.enqueue(%{ - activity_id: id, - expires_at: expires_at - }) - end - + @spec remove_pinned_object_id(User.t(), String.t()) :: {:ok, t()} | {:error, term()} + def remove_pinned_object_id(%User{} = user, object_id) do user - |> cast(params, [:pinned_activities]) + |> cast( + %{pinned_objects: Map.delete(user.pinned_objects, object_id)}, + [:pinned_objects] + ) |> update_and_set_cache() end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index efbf92c70..d0051d1cb 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -630,7 +630,7 @@ defp fetch_activities_for_user(user, reading_user, params) do |> Map.put(:type, ["Create", "Announce"]) |> Map.put(:user, reading_user) |> Map.put(:actor_id, user.ap_id) - |> Map.put(:pinned_activity_ids, user.pinned_activities) + |> Map.put(:pinned_object_ids, Map.keys(user.pinned_objects)) params = if User.blocks?(reading_user, user) do @@ -1075,8 +1075,18 @@ defp restrict_unlisted(query, %{restrict_unlisted: true}) do defp restrict_unlisted(query, _), do: query - defp restrict_pinned(query, %{pinned: true, pinned_activity_ids: ids}) do - from(activity in query, where: activity.id in ^ids) + defp restrict_pinned(query, %{pinned: true, pinned_object_ids: ids}) do + from( + [activity, object: o] in query, + where: + fragment( + "(?)->>'type' = 'Create' and coalesce((?)->'object'->>'id', (?)->>'object') = any (?)", + activity.data, + activity.data, + activity.data, + ^ids + ) + ) end defp restrict_pinned(query, _), do: query @@ -1419,6 +1429,9 @@ defp object_to_user_data(data) do invisible = data["invisible"] || false actor_type = 
data["type"] || "Person" + featured_address = data["featured"] + {:ok, pinned_objects} = fetch_and_prepare_featured_from_ap_id(featured_address) + public_key = if is_map(data["publicKey"]) && is_binary(data["publicKey"]["publicKeyPem"]) do data["publicKey"]["publicKeyPem"] @@ -1447,13 +1460,15 @@ defp object_to_user_data(data) do name: data["name"], follower_address: data["followers"], following_address: data["following"], + featured_address: featured_address, bio: data["summary"] || "", actor_type: actor_type, also_known_as: Map.get(data, "alsoKnownAs", []), public_key: public_key, inbox: data["inbox"], shared_inbox: shared_inbox, - accepts_chat_messages: accepts_chat_messages + accepts_chat_messages: accepts_chat_messages, + pinned_objects: pinned_objects } # nickname can be nil because of virtual actors @@ -1591,6 +1606,41 @@ def maybe_handle_clashing_nickname(data) do end end + def pin_data_from_featured_collection(%{ + "type" => type, + "orderedItems" => objects + }) + when type in ["OrderedCollection", "Collection"] do + Map.new(objects, fn %{"id" => object_ap_id} -> {object_ap_id, NaiveDateTime.utc_now()} end) + end + + def fetch_and_prepare_featured_from_ap_id(nil) do + {:ok, %{}} + end + + def fetch_and_prepare_featured_from_ap_id(ap_id) do + with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id) do + {:ok, pin_data_from_featured_collection(data)} + else + e -> + Logger.error("Could not decode featured collection at fetch #{ap_id}, #{inspect(e)}") + {:ok, %{}} + end + end + + def pinned_fetch_task(nil), do: nil + + def pinned_fetch_task(%{pinned_objects: pins}) do + if Enum.all?(pins, fn {ap_id, _} -> + Object.get_cached_by_ap_id(ap_id) || + match?({:ok, _object}, Fetcher.fetch_object_from_id(ap_id)) + end) do + :ok + else + :error + end + end + def make_user_from_ap_id(ap_id) do user = User.get_cached_by_ap_id(ap_id) @@ -1598,6 +1648,8 @@ def make_user_from_ap_id(ap_id) do Transmogrifier.upgrade_user_from_ap_id(ap_id) else with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do + {:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end) + if user do user |> User.remote_user_changeset(data) diff --git a/lib/pleroma/web/activity_pub/activity_pub_controller.ex b/lib/pleroma/web/activity_pub/activity_pub_controller.ex index 9d3dcc7f9..5aa3b281a 100644 --- a/lib/pleroma/web/activity_pub/activity_pub_controller.ex +++ b/lib/pleroma/web/activity_pub/activity_pub_controller.ex @@ -543,4 +543,12 @@ def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = |> json(object.data) end end + + def pinned(conn, %{"nickname" => nickname}) do + with %User{} = user <- User.get_cached_by_nickname(nickname) do + conn + |> put_resp_header("content-type", "application/activity+json") + |> json(UserView.render("featured.json", %{user: user})) + end + end end diff --git a/lib/pleroma/web/activity_pub/builder.ex b/lib/pleroma/web/activity_pub/builder.ex index f56bfc600..91a45836f 100644 --- a/lib/pleroma/web/activity_pub/builder.ex +++ b/lib/pleroma/web/activity_pub/builder.ex @@ -273,4 +273,36 @@ defp object_action(actor, object) do "context" => object.data["context"] }, []} end + + @spec pin(User.t(), Object.t()) :: {:ok, map(), keyword()} + def pin(%User{} = user, object) do + {:ok, + %{ + "id" => Utils.generate_activity_id(), + "target" => pinned_url(user.nickname), + "object" => object.data["id"], + "actor" => user.ap_id, + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => [user.follower_address] + }, []} + end + + @spec 
unpin(User.t(), Object.t()) :: {:ok, map, keyword()} + def unpin(%User{} = user, object) do + {:ok, + %{ + "id" => Utils.generate_activity_id(), + "target" => pinned_url(user.nickname), + "object" => object.data["id"], + "actor" => user.ap_id, + "type" => "Remove", + "to" => [Pleroma.Constants.as_public()], + "cc" => [user.follower_address] + }, []} + end + + defp pinned_url(nickname) when is_binary(nickname) do + Pleroma.Web.Router.Helpers.activity_pub_url(Pleroma.Web.Endpoint, :pinned, nickname) + end end diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index 297c19cc0..11432ef38 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -30,6 +30,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.EventValidator alias Pleroma.Web.ActivityPub.ObjectValidators.FollowValidator alias Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator + alias Pleroma.Web.ActivityPub.ObjectValidators.PinValidator alias Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator alias Pleroma.Web.ActivityPub.ObjectValidators.UndoValidator alias Pleroma.Web.ActivityPub.ObjectValidators.UpdateValidator @@ -234,6 +235,16 @@ def validate(%{"type" => "Announce"} = object, meta) do end end + def validate(%{"type" => type} = object, meta) when type in ~w(Add Remove) do + with {:ok, object} <- + object + |> PinValidator.cast_and_validate() + |> Ecto.Changeset.apply_action(:insert) do + object = stringify_keys(object) + {:ok, object, meta} + end + end + def cast_and_apply(%{"type" => "ChatMessage"} = object) do ChatMessageValidator.cast_and_apply(object) end diff --git a/lib/pleroma/web/activity_pub/object_validators/pin_validator.ex b/lib/pleroma/web/activity_pub/object_validators/pin_validator.ex new file mode 100644 index 000000000..dca8cba6f --- /dev/null +++ b/lib/pleroma/web/activity_pub/object_validators/pin_validator.ex @@ -0,0 +1,42 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.ObjectValidators.PinValidator do + use Ecto.Schema + + import Ecto.Changeset + import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations + + alias Pleroma.EctoType.ActivityPub.ObjectValidators + + @primary_key false + + embedded_schema do + field(:id, ObjectValidators.ObjectID, primary_key: true) + field(:target) + field(:object, ObjectValidators.ObjectID) + field(:actor, ObjectValidators.ObjectID) + field(:type) + field(:to, ObjectValidators.Recipients, default: []) + field(:cc, ObjectValidators.Recipients, default: []) + end + + def cast_and_validate(data) do + data + |> cast_data() + |> validate_data() + end + + defp cast_data(data) do + cast(%__MODULE__{}, data, __schema__(:fields)) + end + + defp validate_data(changeset) do + changeset + |> validate_required([:id, :target, :object, :actor, :type, :to, :cc]) + |> validate_inclusion(:type, ~w(Add Remove)) + |> validate_actor_presence() + |> validate_object_presence() + end +end diff --git a/lib/pleroma/web/activity_pub/side_effects.ex b/lib/pleroma/web/activity_pub/side_effects.ex index 0b9a9f0c5..9d22f9d3c 100644 --- a/lib/pleroma/web/activity_pub/side_effects.ex +++ b/lib/pleroma/web/activity_pub/side_effects.ex @@ -276,10 +276,10 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object, result = case deleted_object do 
%Object{} -> - with {:ok, deleted_object, activity} <- Object.delete(deleted_object), + with {:ok, deleted_object, _activity} <- Object.delete(deleted_object), {_, actor} when is_binary(actor) <- {:actor, deleted_object.data["actor"]}, %User{} = user <- User.get_cached_by_ap_id(actor) do - User.remove_pinnned_activity(user, activity) + User.remove_pinned_object_id(user, deleted_object.data["id"]) {:ok, user} = ActivityPub.decrease_note_count_if_public(user, deleted_object) @@ -312,6 +312,58 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object, end end + # Tasks this handles: + # - adds pin to user + # - removes expiration job for pinned activity, if was set for expiration + @impl true + def handle(%{data: %{"type" => "Add"} = data} = object, meta) do + with %User{} = user <- User.get_cached_by_ap_id(data["actor"]), + {:ok, _user} <- User.add_pinned_object_id(user, data["object"]) do + # if pinned activity was scheduled for deletion, we remove job + if expiration = Pleroma.Workers.PurgeExpiredActivity.get_expiration(meta[:activity_id]) do + Oban.cancel_job(expiration.id) + end + + {:ok, object, meta} + else + nil -> + {:error, :user_not_found} + + {:error, changeset} -> + if changeset.errors[:pinned_objects] do + {:error, :pinned_statuses_limit_reached} + else + changeset.errors + end + end + end + + # Tasks this handles: + # - removes pin from user + # - if activity had expiration, recreates activity expiration job + @impl true + def handle(%{data: %{"type" => "Remove"} = data} = object, meta) do + with %User{} = user <- User.get_cached_by_ap_id(data["actor"]), + {:ok, _user} <- User.remove_pinned_object_id(user, data["object"]) do + # if pinned activity was scheduled for deletion, we reschedule it for deletion + if meta[:expires_at] do + # MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in original implementation + {:ok, expires_at} = + Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime.cast(meta[:expires_at]) + + Pleroma.Workers.PurgeExpiredActivity.enqueue(%{ + activity_id: meta[:activity_id], + expires_at: expires_at + }) + end + + {:ok, object, meta} + else + nil -> {:error, :user_not_found} + error -> error + end + end + # Nothing to do @impl true def handle(object, meta) do diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 8c7d6a747..270cea6dc 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -556,6 +556,14 @@ def handle_incoming( end end + def handle_incoming(%{"type" => type} = data, _options) when type in ~w(Add Remove) do + with :ok <- ObjectValidator.fetch_actor_and_object(data), + %Object{} <- Object.normalize(data["object"], fetch: true), + {:ok, activity, _meta} <- Pipeline.common_pipeline(data, local: false) do + {:ok, activity} + end + end + def handle_incoming( %{"type" => "Delete"} = data, _options @@ -1000,6 +1008,7 @@ def upgrade_user_from_ap_id(ap_id) do with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id), {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id), {:ok, user} <- update_user(user, data) do + {:ok, _pid} = Task.start(fn -> ActivityPub.pinned_fetch_task(user) end) TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id}) {:ok, user} else diff --git a/lib/pleroma/web/activity_pub/views/user_view.ex b/lib/pleroma/web/activity_pub/views/user_view.ex index 8adc9878a..462f3b4a7 100644 --- a/lib/pleroma/web/activity_pub/views/user_view.ex +++ 
b/lib/pleroma/web/activity_pub/views/user_view.ex @@ -6,8 +6,10 @@ defmodule Pleroma.Web.ActivityPub.UserView do use Pleroma.Web, :view alias Pleroma.Keys + alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User + alias Pleroma.Web.ActivityPub.ObjectView alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.Endpoint @@ -97,6 +99,7 @@ def render("user.json", %{user: user}) do "followers" => "#{user.ap_id}/followers", "inbox" => "#{user.ap_id}/inbox", "outbox" => "#{user.ap_id}/outbox", + "featured" => "#{user.ap_id}/collections/featured", "preferredUsername" => user.nickname, "name" => user.name, "summary" => user.bio, @@ -245,6 +248,24 @@ def render("activity_collection_page.json", %{ |> Map.merge(pagination) end + def render("featured.json", %{ + user: %{featured_address: featured_address, pinned_objects: pinned_objects} + }) do + objects = + pinned_objects + |> Enum.sort_by(fn {_, pinned_at} -> pinned_at end, &>=/2) + |> Enum.map(fn {id, _} -> + ObjectView.render("object.json", %{object: Object.get_cached_by_ap_id(id)}) + end) + + %{ + "id" => featured_address, + "type" => "OrderedCollection", + "orderedItems" => objects + } + |> Map.merge(Utils.make_json_ld_header()) + end + defp maybe_put_total_items(map, false, _total), do: map defp maybe_put_total_items(map, true, total) do diff --git a/lib/pleroma/web/api_spec/operations/status_operation.ex b/lib/pleroma/web/api_spec/operations/status_operation.ex index 4bdb8e281..802fbef3e 100644 --- a/lib/pleroma/web/api_spec/operations/status_operation.ex +++ b/lib/pleroma/web/api_spec/operations/status_operation.ex @@ -182,7 +182,34 @@ def pin_operation do parameters: [id_param()], responses: %{ 200 => status_response(), - 400 => Operation.response("Error", "application/json", ApiError) + 400 => + Operation.response("Bad Request", "application/json", %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "You have already pinned the maximum number of statuses" + } + }), + 404 => + Operation.response("Not found", "application/json", %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "Record not found" + } + }), + 422 => + Operation.response( + "Unprocessable Entity", + "application/json", + %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "Someone else's status cannot be pinned" + } + } + ) } } end @@ -197,7 +224,22 @@ def unpin_operation do parameters: [id_param()], responses: %{ 200 => status_response(), - 400 => Operation.response("Error", "application/json", ApiError) + 400 => + Operation.response("Bad Request", "application/json", %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "You have already pinned the maximum number of statuses" + } + }), + 404 => + Operation.response("Not found", "application/json", %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "Record not found" + } + }) } } end diff --git a/lib/pleroma/web/api_spec/schemas/status.ex b/lib/pleroma/web/api_spec/schemas/status.ex index 42fa98718..3d042dc19 100644 --- a/lib/pleroma/web/api_spec/schemas/status.ex +++ b/lib/pleroma/web/api_spec/schemas/status.ex @@ -194,6 +194,13 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do parent_visible: %Schema{ type: :boolean, description: "`true` if the parent post is visible to the user" + }, + pinned_at: %Schema{ + type: :string, + format: "date-time", + nullable: true, + description: + "A datetime 
(ISO 8601) that states when the post was pinned or `null` if the post is not pinned" } } }, diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index b003e30c7..d35a0f219 100644 --- a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -411,29 +411,54 @@ def post(user, %{status: _} = data) do end end - def pin(id, %{ap_id: user_ap_id} = user) do - with %Activity{ - actor: ^user_ap_id, - data: %{"type" => "Create"}, - object: %Object{data: %{"type" => object_type}} - } = activity <- Activity.get_by_id_with_object(id), - true <- object_type in ["Note", "Article", "Question"], - true <- Visibility.is_public?(activity), - {:ok, _user} <- User.add_pinnned_activity(user, activity) do + @spec pin(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, term()} + def pin(id, %User{ap_id: actor} = user) do + with %Activity{} = activity <- create_activity_by_id(id), + true <- activity_belongs_to_actor(activity, actor), + true <- object_type_is_allowed_for_pin(activity.object), + true <- activity_is_public(activity), + {:ok, pin_data, _} <- Builder.pin(user, activity.object), + {:ok, _pin, _} <- + Pipeline.common_pipeline(pin_data, local: true, activity_id: id) do {:ok, activity} else - {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err} - _ -> {:error, dgettext("errors", "Could not pin")} + {:error, {:execute_side_effects, error}} -> error + error -> error end end + defp create_activity_by_id(id) do + with nil <- Activity.create_by_id_with_object(id) do + {:error, :not_found} + end + end + + defp activity_belongs_to_actor(%{actor: actor}, actor), do: true + defp activity_belongs_to_actor(_, _), do: {:error, :ownership_error} + + defp object_type_is_allowed_for_pin(%{data: %{"type" => type}}) do + with false <- type in ["Note", "Article", "Question"] do + {:error, :not_allowed} + end + end + + defp activity_is_public(activity) do + with false <- Visibility.is_public?(activity) do + {:error, :visibility_error} + end + end + + @spec unpin(String.t(), User.t()) :: {:ok, User.t()} | {:error, term()} def unpin(id, user) do - with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id(id), - {:ok, _user} <- User.remove_pinnned_activity(user, activity) do + with %Activity{} = activity <- create_activity_by_id(id), + {:ok, unpin_data, _} <- Builder.unpin(user, activity.object), + {:ok, _unpin, _} <- + Pipeline.common_pipeline(unpin_data, + local: true, + activity_id: activity.id, + expires_at: activity.data["expires_at"] + ) do {:ok, activity} - else - {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err} - _ -> {:error, dgettext("errors", "Could not unpin")} end end diff --git a/lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex b/lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex index d25f84837..84621500e 100644 --- a/lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex @@ -30,6 +30,12 @@ def call(conn, {:error, error_message}) do |> json(%{error: error_message}) end + def call(conn, {:error, status, message}) do + conn + |> put_status(status) + |> json(%{error: message}) + end + def call(conn, _) do conn |> put_status(:internal_server_error) diff --git a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex index b051fca74..724dc5c5d 100644 --- a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex +++ 
b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex @@ -260,6 +260,18 @@ def unfavourite(%{assigns: %{user: user}} = conn, %{id: activity_id}) do def pin(%{assigns: %{user: user}} = conn, %{id: ap_id_or_id}) do with {:ok, activity} <- CommonAPI.pin(ap_id_or_id, user) do try_render(conn, "show.json", activity: activity, for: user, as: :activity) + else + {:error, :pinned_statuses_limit_reached} -> + {:error, "You have already pinned the maximum number of statuses"} + + {:error, :ownership_error} -> + {:error, :unprocessable_entity, "Someone else's status cannot be pinned"} + + {:error, :visibility_error} -> + {:error, :unprocessable_entity, "Non-public status cannot be pinned"} + + error -> + error end end diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 3753588f2..d0247fa4a 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -152,6 +152,8 @@ def render( |> Enum.filter(& &1) |> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end) + {pinned?, pinned_at} = pin_data(activity_object, user) + %{ id: to_string(activity.id), uri: object.data["id"], @@ -173,7 +175,7 @@ def render( favourited: present?(favorited), bookmarked: present?(bookmarked), muted: false, - pinned: pinned?(activity, user), + pinned: pinned?, sensitive: false, spoiler_text: "", visibility: get_visibility(activity), @@ -184,7 +186,8 @@ def render( language: nil, emojis: [], pleroma: %{ - local: activity.local + local: activity.local, + pinned_at: pinned_at } } end @@ -316,6 +319,8 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} fn for_user, user -> User.mutes?(for_user, user) end ) + {pinned?, pinned_at} = pin_data(object, user) + %{ id: to_string(activity.id), uri: object.data["id"], @@ -339,7 +344,7 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} favourited: present?(favorited), bookmarked: present?(bookmarked), muted: muted, - pinned: pinned?(activity, user), + pinned: pinned?, sensitive: sensitive, spoiler_text: summary, visibility: get_visibility(object), @@ -360,7 +365,8 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} direct_conversation_id: direct_conversation_id, thread_muted: thread_muted?, emoji_reactions: emoji_reactions, - parent_visible: visible_for_user?(reply_to, opts[:for]) + parent_visible: visible_for_user?(reply_to, opts[:for]), + pinned_at: pinned_at } } end @@ -529,8 +535,13 @@ defp present?(nil), do: false defp present?(false), do: false defp present?(_), do: true - defp pinned?(%Activity{id: id}, %User{pinned_activities: pinned_activities}), - do: id in pinned_activities + defp pin_data(%Object{data: %{"id" => object_id}}, %User{pinned_objects: pinned_objects}) do + if pinned_at = pinned_objects[object_id] do + {true, Utils.to_masto_date(pinned_at)} + else + {false, nil} + end + end defp build_emoji_map(emoji, users, current_user) do %{ diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index de0bd27d7..ccf2ef796 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -704,6 +704,7 @@ defmodule Pleroma.Web.Router do # The following two are S2S as well, see `ActivityPub.fetch_follow_information_for_user/1`: get("/users/:nickname/followers", ActivityPubController, :followers) get("/users/:nickname/following", ActivityPubController, :following) + get("/users/:nickname/collections/featured", 
ActivityPubController, :pinned) end scope "/", Pleroma.Web.ActivityPub do diff --git a/priv/repo/migrations/20210202110641_add_pinned_objects_to_users.exs b/priv/repo/migrations/20210202110641_add_pinned_objects_to_users.exs new file mode 100644 index 000000000..644527246 --- /dev/null +++ b/priv/repo/migrations/20210202110641_add_pinned_objects_to_users.exs @@ -0,0 +1,9 @@ +defmodule Pleroma.Repo.Migrations.AddPinnedObjectsToUsers do + use Ecto.Migration + + def change do + alter table(:users) do + add(:pinned_objects, :map) + end + end +end diff --git a/priv/repo/migrations/20210203141144_add_featured_address_to_users.exs b/priv/repo/migrations/20210203141144_add_featured_address_to_users.exs new file mode 100644 index 000000000..0f6a21611 --- /dev/null +++ b/priv/repo/migrations/20210203141144_add_featured_address_to_users.exs @@ -0,0 +1,23 @@ +defmodule Pleroma.Repo.Migrations.AddFeaturedAddressToUsers do + use Ecto.Migration + + def up do + alter table(:users) do + add(:featured_address, :string) + end + + create(index(:users, [:featured_address])) + + execute(""" + + update users set featured_address = concat(ap_id, '/collections/featured') where local = true and featured_address is null; + + """) + end + + def down do + alter table(:users) do + remove(:featured_address) + end + end +end diff --git a/priv/repo/migrations/20210205145000_move_pinned_activities_into_pinned_objects.exs b/priv/repo/migrations/20210205145000_move_pinned_activities_into_pinned_objects.exs new file mode 100644 index 000000000..9aee545e3 --- /dev/null +++ b/priv/repo/migrations/20210205145000_move_pinned_activities_into_pinned_objects.exs @@ -0,0 +1,28 @@ +defmodule Pleroma.Repo.Migrations.MovePinnedActivitiesIntoPinnedObjects do + use Ecto.Migration + + import Ecto.Query + + alias Pleroma.Repo + alias Pleroma.User + + def up do + from(u in User) + |> select([u], {u.id, fragment("?.pinned_activities", u)}) + |> Repo.stream() + |> Stream.each(fn {user_id, pinned_activities_ids} -> + pinned_activities = Pleroma.Activity.all_by_ids_with_object(pinned_activities_ids) + + pins = + Map.new(pinned_activities, fn %{object: %{data: %{"id" => object_id}}} -> + {object_id, NaiveDateTime.utc_now()} + end) + + from(u in User, where: u.id == ^user_id) + |> Repo.update_all(set: [pinned_objects: pins]) + end) + |> Stream.run() + end + + def down, do: :noop +end diff --git a/priv/repo/migrations/20210206045221_remove_pinned_activities_from_users.exs b/priv/repo/migrations/20210206045221_remove_pinned_activities_from_users.exs new file mode 100644 index 000000000..a3ee93f48 --- /dev/null +++ b/priv/repo/migrations/20210206045221_remove_pinned_activities_from_users.exs @@ -0,0 +1,15 @@ +defmodule Pleroma.Repo.Migrations.RemovePinnedActivitiesFromUsers do + use Ecto.Migration + + def up do + alter table(:users) do + remove(:pinned_activities) + end + end + + def down do + alter table(:users) do + add(:pinned_activities, {:array, :string}, default: []) + end + end +end diff --git a/test/fixtures/collections/featured.json b/test/fixtures/collections/featured.json new file mode 100644 index 000000000..56f8f56fa --- /dev/null +++ b/test/fixtures/collections/featured.json @@ -0,0 +1,39 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://{{domain}}/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "id": "https://{{domain}}/users/{{nickname}}/collections/featured", + "orderedItems": [ + { + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://{{domain}}/schemas/litepub-0.1.jsonld", 
+ { + "@language": "und" + } + ], + "actor": "https://{{domain}}/users/{{nickname}}", + "attachment": [], + "attributedTo": "https://{{domain}}/users/{{nickname}}", + "cc": [ + "https://{{domain}}/users/{{nickname}}/followers" + ], + "content": "", + "id": "https://{{domain}}/objects/{{object_id}}", + "published": "2021-02-12T15:13:43.915429Z", + "sensitive": false, + "source": "", + "summary": "", + "tag": [], + "to": [ + "https://www.w3.org/ns/activitystreams#Public" + ], + "type": "Note" + } + ], + "type": "OrderedCollection" +} diff --git a/test/fixtures/masto_pin.json b/test/fixtures/masto_pin.json new file mode 100644 index 000000000..e57a34375 --- /dev/null +++ b/test/fixtures/masto_pin.json @@ -0,0 +1,41 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + { + "Emoji": "toot:Emoji", + "Hashtag": "as:Hashtag", + "PropertyValue": "schema:PropertyValue", + "alsoKnownAs": { + "@id": "as:alsoKnownAs", + "@type": "@id" + }, + "atomUri": "ostatus:atomUri", + "conversation": "ostatus:conversation", + "featured": { + "@id": "toot:featured", + "@type": "@id" + }, + "focalPoint": { + "@container": "@list", + "@id": "toot:focalPoint" + }, + "inReplyToAtomUri": "ostatus:inReplyToAtomUri", + "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", + "movedTo": { + "@id": "as:movedTo", + "@type": "@id" + }, + "ostatus": "http://ostatus.org#", + "schema": "http://schema.org#", + "sensitive": "as:sensitive", + "toot": "http://joinmastodon.org/ns#", + "value": "schema:value" + } + ], + "id": "https://example.com/users/nickname/statuses/{{id}}", + "actor": "https://example.com/users/nickname", + "object": "https://example.com/users/nickname/statuses/101355175004496751", + "target": "https://example.com/users/nickname/collections/featured", + "type": "{{type}}" +} diff --git a/test/fixtures/statuses/note.json b/test/fixtures/statuses/note.json new file mode 100644 index 000000000..41735cbc5 --- /dev/null +++ b/test/fixtures/statuses/note.json @@ -0,0 +1,27 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://example.com/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "actor": "https://example.com/users/{{nickname}}", + "attachment": [], + "attributedTo": "https://example.com/users/{{nickname}}", + "cc": [ + "https://example.com/users/{{nickname}}/followers" + ], + "content": "Content", + "context": "https://example.com/contexts/e4b180e1-7403-477f-aeb4-de57e7a3fe7f", + "conversation": "https://example.com/contexts/e4b180e1-7403-477f-aeb4-de57e7a3fe7f", + "id": "https://example.com/objects/{{object_id}}", + "published": "2019-12-15T22:00:05.279583Z", + "sensitive": false, + "summary": "", + "tag": [], + "to": [ + "https://www.w3.org/ns/activitystreams#Public" + ], + "type": "Note" +} diff --git a/test/fixtures/users_mock/masto_featured.json b/test/fixtures/users_mock/masto_featured.json new file mode 100644 index 000000000..646a343ad --- /dev/null +++ b/test/fixtures/users_mock/masto_featured.json @@ -0,0 +1,18 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + { + "ostatus": "http://ostatus.org#", + "atomUri": "ostatus:atomUri", + "inReplyToAtomUri": "ostatus:inReplyToAtomUri", + "conversation": "ostatus:conversation", + "sensitive": "as:sensitive", + "toot": "http://joinmastodon.org/ns#", + "votersCount": "toot:votersCount" + } + ], + "id": "https://{{domain}}/users/{{nickname}}/collections/featured", + "type": "OrderedCollection", + "totalItems": 0, + "orderedItems": [] +} diff --git 
a/test/fixtures/users_mock/user.json b/test/fixtures/users_mock/user.json new file mode 100644 index 000000000..da2483d02 --- /dev/null +++ b/test/fixtures/users_mock/user.json @@ -0,0 +1,41 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://example.com/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "attachment": [], + "endpoints": { + "oauthAuthorizationEndpoint": "https://example.com/oauth/authorize", + "oauthRegistrationEndpoint": "https://example.com/api/v1/apps", + "oauthTokenEndpoint": "https://example.com/oauth/token", + "sharedInbox": "https://example.com/inbox" + }, + "followers": "https://example.com/users/{{nickname}}/followers", + "following": "https://example.com/users/{{nickname}}/following", + "icon": { + "type": "Image", + "url": "https://example.com/media/4e914f5b84e4a259a3f6c2d2edc9ab642f2ab05f3e3d9c52c81fc2d984b3d51e.jpg" + }, + "id": "https://example.com/users/{{nickname}}", + "image": { + "type": "Image", + "url": "https://example.com/media/f739efddefeee49c6e67e947c4811fdc911785c16ae43da4c3684051fbf8da6a.jpg?name=f739efddefeee49c6e67e947c4811fdc911785c16ae43da4c3684051fbf8da6a.jpg" + }, + "inbox": "https://example.com/users/{{nickname}}/inbox", + "manuallyApprovesFollowers": false, + "name": "{{nickname}}", + "outbox": "https://example.com/users/{{nickname}}/outbox", + "preferredUsername": "{{nickname}}", + "publicKey": { + "id": "https://example.com/users/{{nickname}}#main-key", + "owner": "https://example.com/users/{{nickname}}", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5DLtwGXNZElJyxFGfcVc\nXANhaMadj/iYYQwZjOJTV9QsbtiNBeIK54PJrYuU0/0YIdrvS1iqheX5IwXRhcwa\nhm3ZyLz7XeN9st7FBni4BmZMBtMpxAuYuu5p/jbWy13qAiYOhPreCx0wrWgm/lBD\n9mkgaxIxPooBE0S4ZWEJIDIV1Vft3AWcRUyWW1vIBK0uZzs6GYshbQZB952S0yo4\nFzI1hABGHncH8UvuFauh4EZ8tY7/X5I0pGRnDOcRN1dAht5w5yTA+6r5kebiFQjP\nIzN/eCO/a9Flrj9YGW7HDNtjSOH0A31PLRGlJtJO3yK57dnf5ppyCZGfL4emShQo\ncQIDAQAB\n-----END PUBLIC KEY-----\n\n" + }, + "summary": "your friendly neighborhood pleroma developer<br>I like cute things and distributed systems, and really hate delete and redrafts", + "tag": [], + "type": "Person", + "url": "https://example.com/users/{{nickname}}" +} diff --git a/test/pleroma/user_test.exs b/test/pleroma/user_test.exs index 6f5bcab57..d81c1b8eb 100644 --- a/test/pleroma/user_test.exs +++ b/test/pleroma/user_test.exs @@ -2338,4 +2338,49 @@ test "active_user_count/1" do assert User.active_user_count(6) == 3 assert User.active_user_count(1) == 1 end + + describe "pins" do + setup do + user = insert(:user) + + [user: user, object_id: object_id_from_created_activity(user)] + end + + test "unique pins", %{user: user, object_id: object_id} do + assert {:ok, %{pinned_objects: %{^object_id => pinned_at1} = pins} = updated_user} = + User.add_pinned_object_id(user, object_id) + + assert Enum.count(pins) == 1 + + assert {:ok, %{pinned_objects: %{^object_id => pinned_at2} = pins}} = + User.add_pinned_object_id(updated_user, object_id) + + assert pinned_at1 == pinned_at2 + + assert Enum.count(pins) == 1 + end + + test "respects max_pinned_statuses limit", %{user: user, object_id: object_id} do + clear_config([:instance, :max_pinned_statuses], 1) + {:ok, updated} = User.add_pinned_object_id(user, object_id) + + object_id2 = object_id_from_created_activity(user) + + {:error, %{errors: errors}} = User.add_pinned_object_id(updated, object_id2) + assert Keyword.has_key?(errors, :pinned_objects) + end + + test "remove_pinned_object_id/2", %{user: user, 
object_id: object_id} do + assert {:ok, updated} = User.add_pinned_object_id(user, object_id) + + {:ok, after_remove} = User.remove_pinned_object_id(updated, object_id) + assert after_remove.pinned_objects == %{} + end + end + + defp object_id_from_created_activity(user) do + %{id: id} = insert(:note_activity, user: user) + %{object: %{data: %{"id" => object_id}}} = Activity.get_by_id_with_object(id) + object_id + end end diff --git a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs index 19e04d472..a9cbf90c3 100644 --- a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs @@ -636,6 +636,86 @@ test "without valid signature, " <> |> post("/inbox", non_create_data) |> json_response(400) end + + test "accepts Add/Remove activities", %{conn: conn} do + object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" + + status = + File.read!("test/fixtures/statuses/note.json") + |> String.replace("{{nickname}}", "lain") + |> String.replace("{{object_id}}", object_id) + + object_url = "https://example.com/objects/#{object_id}" + + user = + File.read!("test/fixtures/users_mock/user.json") + |> String.replace("{{nickname}}", "lain") + + actor = "https://example.com/users/lain" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: status, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user, + headers: [{"content-type", "application/activity+json"}] + } + end) + + data = %{ + "id" => "https://example.com/objects/d61d6733-e256-4fe1-ab13-1e369789423f", + "actor" => actor, + "object" => object_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Add", + "to" => [Pleroma.Constants.as_public()] + } + + assert "ok" == + conn + |> assign(:valid_signature, true) + |> put_req_header("content-type", "application/activity+json") + |> post("/inbox", data) + |> json_response(200) + + ObanHelpers.perform(all_enqueued(worker: ReceiverWorker)) + assert Activity.get_by_ap_id(data["id"]) + user = User.get_cached_by_ap_id(data["actor"]) + assert user.pinned_objects[data["object"]] + + data = %{ + "id" => "https://example.com/objects/d61d6733-e256-4fe1-ab13-1e369789423d", + "actor" => actor, + "object" => object_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Remove", + "to" => [Pleroma.Constants.as_public()] + } + + assert "ok" == + conn + |> assign(:valid_signature, true) + |> put_req_header("content-type", "application/activity+json") + |> post("/inbox", data) + |> json_response(200) + + ObanHelpers.perform(all_enqueued(worker: ReceiverWorker)) + user = refresh_record(user) + refute user.pinned_objects[data["object"]] + end end describe "/users/:nickname/inbox" do @@ -1772,4 +1852,29 @@ test "POST /api/ap/upload_media", %{conn: conn} do |> json_response(403) end end + + test "pinned collection", %{conn: conn} do + clear_config([:instance, :max_pinned_statuses], 2) + user = insert(:user) + objects = insert_list(2, :note, user: user) + + Enum.reduce(objects, user, fn %{data: %{"id" => object_id}}, user -> + {:ok, updated} = User.add_pinned_object_id(user, object_id) + updated + end) + + %{nickname: nickname, featured_address: featured_address, pinned_objects: pinned_objects} = + refresh_record(user) + + %{"id" => ^featured_address, "orderedItems" => items} 
= + conn + |> get("/users/#{nickname}/collections/featured") + |> json_response(200) + + object_ids = Enum.map(items, & &1["id"]) + + assert Enum.all?(pinned_objects, fn {obj_id, _} -> + obj_id in object_ids + end) + end end diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index c7fa452f7..081d00d45 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -235,6 +235,83 @@ test "works for bridgy actors" do "url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}] } end + + test "fetches user featured collection" do + ap_id = "https://example.com/users/lain" + + featured_url = "https://example.com/users/lain/collections/featured" + + user_data = + "test/fixtures/users_mock/user.json" + |> File.read!() + |> String.replace("{{nickname}}", "lain") + |> Jason.decode!() + |> Map.put("featured", featured_url) + |> Jason.encode!() + + object_id = Ecto.UUID.generate() + + featured_data = + "test/fixtures/collections/featured.json" + |> File.read!() + |> String.replace("{{domain}}", "example.com") + |> String.replace("{{nickname}}", "lain") + |> String.replace("{{object_id}}", object_id) + + object_url = "https://example.com/objects/#{object_id}" + + object_data = + "test/fixtures/statuses/note.json" + |> File.read!() + |> String.replace("{{object_id}}", object_id) + |> String.replace("{{nickname}}", "lain") + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^ap_id + } -> + %Tesla.Env{ + status: 200, + body: user_data, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^featured_url + } -> + %Tesla.Env{ + status: 200, + body: featured_data, + headers: [{"content-type", "application/activity+json"}] + } + end) + + Tesla.Mock.mock_global(fn + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: object_data, + headers: [{"content-type", "application/activity+json"}] + } + end) + + {:ok, user} = ActivityPub.make_user_from_ap_id(ap_id) + Process.sleep(50) + + assert user.featured_address == featured_url + assert Map.has_key?(user.pinned_objects, object_url) + + in_db = Pleroma.User.get_by_ap_id(ap_id) + assert in_db.featured_address == featured_url + assert Map.has_key?(user.pinned_objects, object_url) + + assert %{data: %{"id" => ^object_url}} = Object.get_by_ap_id(object_url) + end end test "it fetches the appropriate tag-restricted posts" do diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs index 4c3fcb44a..28d7e1e3c 100644 --- a/test/pleroma/web/activity_pub/transmogrifier_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs @@ -6,6 +6,8 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do use Oban.Testing, repo: Pleroma.Repo use Pleroma.DataCase + require Pleroma.Constants + alias Pleroma.Activity alias Pleroma.Object alias Pleroma.Tests.ObanHelpers @@ -106,6 +108,78 @@ test "it accepts Move activities" do assert activity.data["target"] == new_user.ap_id assert activity.data["type"] == "Move" end + + test "it accepts Add/Remove activities" do + user = + "test/fixtures/users_mock/user.json" + |> File.read!() + |> String.replace("{{nickname}}", "lain") + + object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" + + object = + "test/fixtures/statuses/note.json" + |> File.read!() + |> String.replace("{{nickname}}", "lain") + |> String.replace("{{object_id}}", object_id) + + object_url = 
"https://example.com/objects/#{object_id}" + + actor = "https://example.com/users/lain" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: object, + headers: [{"content-type", "application/activity+json"}] + } + end) + + message = %{ + "id" => "https://example.com/objects/d61d6733-e256-4fe1-ab13-1e369789423f", + "actor" => actor, + "object" => object_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => ["https://example.com/users/lain/followers"] + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(message) + assert activity.data == message + user = User.get_cached_by_ap_id(actor) + assert user.pinned_objects[object_url] + + remove = %{ + "id" => "http://localhost:400/objects/d61d6733-e256-4fe1-ab13-1e369789423d", + "actor" => actor, + "object" => object_url, + "target" => "http://example.com/users/lain/collections/featured", + "type" => "Remove", + "to" => [Pleroma.Constants.as_public()], + "cc" => ["https://example.com/users/lain/followers"] + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(remove) + assert activity.data == remove + + user = refresh_record(user) + refute user.pinned_objects[object_url] + end end describe "prepare outgoing" do diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index 6619f8fc8..fa55c2832 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -827,13 +827,17 @@ test "favoriting a status twice returns ok, but without the like activity" do [user: user, activity: activity] end + test "activity not found error", %{user: user} do + assert {:error, :not_found} = CommonAPI.pin("id", user) + end + test "pin status", %{user: user, activity: activity} do assert {:ok, ^activity} = CommonAPI.pin(activity.id, user) - id = activity.id + %{data: %{"id" => object_id}} = Object.normalize(activity) user = refresh_record(user) - assert %User{pinned_activities: [^id]} = user + assert user.pinned_objects |> Map.keys() == [object_id] end test "pin poll", %{user: user} do @@ -845,10 +849,11 @@ test "pin poll", %{user: user} do assert {:ok, ^activity} = CommonAPI.pin(activity.id, user) - id = activity.id + %{data: %{"id" => object_id}} = Object.normalize(activity) + user = refresh_record(user) - assert %User{pinned_activities: [^id]} = user + assert user.pinned_objects |> Map.keys() == [object_id] end test "unlisted statuses can be pinned", %{user: user} do @@ -859,7 +864,7 @@ test "unlisted statuses can be pinned", %{user: user} do test "only self-authored can be pinned", %{activity: activity} do user = insert(:user) - assert {:error, "Could not pin"} = CommonAPI.pin(activity.id, user) + assert {:error, :ownership_error} = CommonAPI.pin(activity.id, user) end test "max pinned statuses", %{user: user, activity: activity_one} do @@ -869,8 +874,12 @@ test "max pinned statuses", %{user: user, activity: activity_one} do user = refresh_record(user) - assert {:error, "You have already pinned the maximum number of statuses"} = - CommonAPI.pin(activity_two.id, user) + assert {:error, :pinned_statuses_limit_reached} = CommonAPI.pin(activity_two.id, user) + end + + test "only public can be pinned", %{user: user} do + {:ok, activity} = CommonAPI.post(user, %{status: "private status", visibility: 
"private"}) + {:error, :visibility_error} = CommonAPI.pin(activity.id, user) end test "unpin status", %{user: user, activity: activity} do @@ -884,7 +893,7 @@ test "unpin status", %{user: user, activity: activity} do user = refresh_record(user) - assert %User{pinned_activities: []} = user + assert user.pinned_objects == %{} end test "should unpin when deleting a status", %{user: user, activity: activity} do @@ -896,7 +905,40 @@ test "should unpin when deleting a status", %{user: user, activity: activity} do user = refresh_record(user) - assert %User{pinned_activities: []} = user + assert user.pinned_objects == %{} + end + + test "ephemeral activity won't be deleted if was pinned", %{user: user} do + {:ok, activity} = CommonAPI.post(user, %{status: "Hello!", expires_in: 601}) + + assert Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) + + {:ok, _activity} = CommonAPI.pin(activity.id, user) + refute Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) + + user = refresh_record(user) + {:ok, _} = CommonAPI.unpin(activity.id, user) + + # recreates expiration job on unpin + assert Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) + end + + test "ephemeral activity deletion job won't be deleted on pinning error", %{ + user: user, + activity: activity + } do + clear_config([:instance, :max_pinned_statuses], 1) + + {:ok, _activity} = CommonAPI.pin(activity.id, user) + + {:ok, activity2} = CommonAPI.post(user, %{status: "another status", expires_in: 601}) + + assert Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity2.id) + + user = refresh_record(user) + {:error, :pinned_statuses_limit_reached} = CommonAPI.pin(activity2.id, user) + + assert Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity2.id) end end diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs index f616f405e..e0d642910 100644 --- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs @@ -1223,6 +1223,13 @@ test "pin status", %{conn: conn, user: user, activity: activity} do |> json_response_and_validate_schema(200) end + test "non authenticated user", %{activity: activity} do + assert build_conn() + |> put_req_header("content-type", "application/json") + |> post("/api/v1/statuses/#{activity.id}/pin") + |> json_response(403) == %{"error" => "Invalid credentials."} + end + test "/pin: returns 400 error when activity is not public", %{conn: conn, user: user} do {:ok, dm} = CommonAPI.post(user, %{status: "test", visibility: "direct"}) @@ -1231,7 +1238,18 @@ test "/pin: returns 400 error when activity is not public", %{conn: conn, user: |> put_req_header("content-type", "application/json") |> post("/api/v1/statuses/#{dm.id}/pin") - assert json_response_and_validate_schema(conn, 400) == %{"error" => "Could not pin"} + assert json_response_and_validate_schema(conn, 422) == %{ + "error" => "Non-public status cannot be pinned" + } + end + + test "pin by another user", %{activity: activity} do + %{conn: conn} = oauth_access(["write:accounts"]) + + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/v1/statuses/#{activity.id}/pin") + |> json_response(422) == %{"error" => "Someone else's status cannot be pinned"} end test "unpin status", %{conn: conn, user: user, activity: activity} do @@ -1252,13 +1270,11 @@ test "unpin status", %{conn: conn, user: user, activity: 
activity} do |> json_response_and_validate_schema(200) end - test "/unpin: returns 400 error when activity is not exist", %{conn: conn} do - conn = - conn - |> put_req_header("content-type", "application/json") - |> post("/api/v1/statuses/1/unpin") - - assert json_response_and_validate_schema(conn, 400) == %{"error" => "Could not unpin"} + test "/unpin: returns 404 error when activity doesn't exist", %{conn: conn} do + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/v1/statuses/1/unpin") + |> json_response_and_validate_schema(404) == %{"error" => "Record not found"} end test "max pinned statuses", %{conn: conn, user: user, activity: activity_one} do diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs b/test/pleroma/web/mastodon_api/views/status_view_test.exs index 4172cc294..fbea25079 100644 --- a/test/pleroma/web/mastodon_api/views/status_view_test.exs +++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs @@ -286,7 +286,8 @@ test "a note activity" do direct_conversation_id: nil, thread_muted: false, emoji_reactions: [], - parent_visible: false + parent_visible: false, + pinned_at: nil } } diff --git a/test/pleroma/web/twitter_api/remote_follow_controller_test.exs b/test/pleroma/web/twitter_api/remote_follow_controller_test.exs index f389c272b..fa3b29006 100644 --- a/test/pleroma/web/twitter_api/remote_follow_controller_test.exs +++ b/test/pleroma/web/twitter_api/remote_follow_controller_test.exs @@ -27,6 +27,16 @@ test "adds status to pleroma instance if the `acct` is a status", %{conn: conn} body: File.read!("test/fixtures/tesla_mock/status.emelie.json") } + %{method: :get, url: "https://mastodon.social/users/emelie/collections/featured"} -> + %Tesla.Env{ + status: 200, + headers: [{"content-type", "application/activity+json"}], + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.social") + |> String.replace("{{nickname}}", "emelie") + } + %{method: :get, url: "https://mastodon.social/users/emelie"} -> %Tesla.Env{ status: 200, @@ -52,6 +62,16 @@ test "show follow account page if the `acct` is a account link", %{conn: conn} d headers: [{"content-type", "application/activity+json"}], body: File.read!("test/fixtures/tesla_mock/emelie.json") } + + %{method: :get, url: "https://mastodon.social/users/emelie/collections/featured"} -> + %Tesla.Env{ + status: 200, + headers: [{"content-type", "application/activity+json"}], + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.social") + |> String.replace("{{nickname}}", "emelie") + } end) response = @@ -70,6 +90,16 @@ test "show follow page if the `acct` is a account link", %{conn: conn} do headers: [{"content-type", "application/activity+json"}], body: File.read!("test/fixtures/tesla_mock/emelie.json") } + + %{method: :get, url: "https://mastodon.social/users/emelie/collections/featured"} -> + %Tesla.Env{ + status: 200, + headers: [{"content-type", "application/activity+json"}], + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.social") + |> String.replace("{{nickname}}", "emelie") + } end) user = insert(:user) diff --git a/test/support/factory.ex b/test/support/factory.ex index af4fff45b..883cedf3c 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -48,13 +48,15 @@ def user_factory(attrs \\ %{}) do %{ ap_id: ap_id, follower_address: ap_id <> "/followers", - following_address: ap_id <> 
"/following" + following_address: ap_id <> "/following", + featured_address: ap_id <> "/collections/featured" } else %{ ap_id: User.ap_id(user), follower_address: User.ap_followers(user), - following_address: User.ap_following(user) + following_address: User.ap_following(user), + featured_address: User.ap_featured_collection(user) } end diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex index eb692fab5..9e9f1c86c 100644 --- a/test/support/http_request_mock.ex +++ b/test/support/http_request_mock.ex @@ -89,6 +89,18 @@ def get("https://mastodon.sdf.org/users/rinpatch", _, _, _) do }} end + def get("https://mastodon.sdf.org/users/rinpatch/collections/featured", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.sdf.org") + |> String.replace("{{nickname}}", "rinpatch"), + headers: [{"content-type", "application/activity+json"}] + }} + end + def get("https://patch.cx/objects/tesla_mock/poll_attachment", _, _, _) do {:ok, %Tesla.Env{ @@ -905,6 +917,17 @@ def get("https://mastodon.social/users/lambadalambda", _, _, _) do }} end + def get("https://mastodon.social/users/lambadalambda/collections/featured", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.social") + |> String.replace("{{nickname}}", "lambadalambda") + }} + end + def get("https://apfed.club/channel/indio", _, _, _) do {:ok, %Tesla.Env{ From 17f28c0507e3c34ce75e63747eed9abb66713e6e Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Thu, 25 Feb 2021 14:00:44 +0300 Subject: [PATCH 088/174] mastodon pins --- lib/pleroma/object/containment.ex | 8 ++ .../web/activity_pub/transmogrifier.ex | 17 +++- test/fixtures/statuses/masto-note.json | 47 +++++++++++ .../activity_pub_controller_test.exs | 78 +++++++++++++++++++ 4 files changed, 146 insertions(+), 4 deletions(-) create mode 100644 test/fixtures/statuses/masto-note.json diff --git a/lib/pleroma/object/containment.ex b/lib/pleroma/object/containment.ex index fb0398f92..040537acf 100644 --- a/lib/pleroma/object/containment.ex +++ b/lib/pleroma/object/containment.ex @@ -71,6 +71,14 @@ def contain_origin_from_id(id, %{"id" => other_id} = _params) when is_binary(oth compare_uris(id_uri, other_uri) end + # Mastodon pin activities don't have an id, so we check the object field, which will be pinned. 
+ def contain_origin_from_id(id, %{"object" => object}) when is_binary(object) do + id_uri = URI.parse(id) + object_uri = URI.parse(object) + + compare_uris(id_uri, object_uri) + end + def contain_origin_from_id(_id, _data), do: :error def contain_child(%{"object" => %{"id" => id, "attributedTo" => _} = object}), diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 270cea6dc..b662f5379 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -557,10 +557,19 @@ def handle_incoming( end def handle_incoming(%{"type" => type} = data, _options) when type in ~w(Add Remove) do - with :ok <- ObjectValidator.fetch_actor_and_object(data), - %Object{} <- Object.normalize(data["object"], fetch: true), - {:ok, activity, _meta} <- Pipeline.common_pipeline(data, local: false) do - {:ok, activity} + with {:ok, user} <- ObjectValidator.fetch_actor(data), + %Object{} <- Object.normalize(data["object"], fetch: true) do + # Mastodon sends pin/unpin objects without id, to, cc fields + data = + data + |> Map.put_new("id", Utils.generate_activity_id()) + |> Map.put_new("to", [Pleroma.Constants.as_public()]) + |> Map.put_new("cc", [user.follower_address]) + + case Pipeline.common_pipeline(data, local: false) do + {:ok, activity, _meta} -> {:ok, activity} + error -> error + end end end diff --git a/test/fixtures/statuses/masto-note.json b/test/fixtures/statuses/masto-note.json new file mode 100644 index 000000000..6b96de473 --- /dev/null +++ b/test/fixtures/statuses/masto-note.json @@ -0,0 +1,47 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + { + "ostatus": "http://ostatus.org#", + "atomUri": "ostatus:atomUri", + "inReplyToAtomUri": "ostatus:inReplyToAtomUri", + "conversation": "ostatus:conversation", + "sensitive": "as:sensitive", + "toot": "http://joinmastodon.org/ns#", + "votersCount": "toot:votersCount" + } + ], + "id": "https://example.com/users/{{nickname}}/statuses/{{status_id}}", + "type": "Note", + "summary": null, + "inReplyTo": null, + "published": "2021-02-24T12:40:49Z", + "url": "https://example.com/@{{nickname}}/{{status_id}}", + "attributedTo": "https://example.com/users/{{nickname}}", + "to": [ + "https://www.w3.org/ns/activitystreams#Public" + ], + "cc": [ + "https://example.com/users/{{nickname}}/followers" + ], + "sensitive": false, + "atomUri": "https://example.com/users/{{nickname}}/statuses/{{status_id}}", + "inReplyToAtomUri": null, + "conversation": "tag:example.com,2021-02-24:objectId=15:objectType=Conversation", + "content": "<p></p>", + "contentMap": { + "en": "<p></p>" + }, + "attachment": [], + "tag": [], + "replies": { + "id": "https://example.com/users/{{nickname}}/statuses/{{status_id}}/replies", + "type": "Collection", + "first": { + "type": "CollectionPage", + "next": "https://example.com/users/{{nickname}}/statuses/{{status_id}}/replies?only_other_accounts=true&page=true", + "partOf": "https://example.com/users/{{nickname}}/statuses/{{status_id}}/replies", + "items": [] + } + } +} diff --git a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs index a9cbf90c3..d9fa25d94 100644 --- a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs @@ -716,6 +716,84 @@ test "accepts Add/Remove activities", %{conn: conn} do user = refresh_record(user) refute user.pinned_objects[data["object"]] end + + 
test "mastodon pin/unpin", %{conn: conn} do + status_id = "105786274556060421" + + status = + File.read!("test/fixtures/statuses/masto-note.json") + |> String.replace("{{nickname}}", "lain") + |> String.replace("{{status_id}}", status_id) + + status_url = "https://example.com/users/lain/statuses/#{status_id}" + + user = + File.read!("test/fixtures/users_mock/user.json") + |> String.replace("{{nickname}}", "lain") + + actor = "https://example.com/users/lain" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^status_url + } -> + %Tesla.Env{ + status: 200, + body: status, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user, + headers: [{"content-type", "application/activity+json"}] + } + end) + + data = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "actor" => actor, + "object" => status_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Add" + } + + assert "ok" == + conn + |> assign(:valid_signature, true) + |> put_req_header("content-type", "application/activity+json") + |> post("/inbox", data) + |> json_response(200) + + ObanHelpers.perform(all_enqueued(worker: ReceiverWorker)) + assert Activity.get_by_object_ap_id_with_object(data["object"]) + user = User.get_cached_by_ap_id(data["actor"]) + assert user.pinned_objects[data["object"]] + + data = %{ + "actor" => actor, + "object" => status_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Remove" + } + + assert "ok" == + conn + |> assign(:valid_signature, true) + |> put_req_header("content-type", "application/activity+json") + |> post("/inbox", data) + |> json_response(200) + + ObanHelpers.perform(all_enqueued(worker: ReceiverWorker)) + assert Activity.get_by_object_ap_id_with_object(data["object"]) + user = refresh_record(user) + refute user.pinned_objects[data["object"]] + end end describe "/users/:nickname/inbox" do From ff612750b1bae5223bca76b34a39e7d2bd05770c Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Tue, 2 Mar 2021 17:24:06 +0300 Subject: [PATCH 089/174] validator renaming & add validation for target --- lib/pleroma/web/activity_pub/object_validator.ex | 4 ++-- .../{pin_validator.ex => add_remove_validator.ex} | 13 ++++++++++++- .../object_validators/common_validations.ex | 8 ++++++++ .../web/activity_pub/transmogrifier_test.exs | 2 +- .../controllers/status_controller_test.exs | 6 +++--- test/support/http_request_mock.ex | 3 ++- 6 files changed, 28 insertions(+), 8 deletions(-) rename lib/pleroma/web/activity_pub/object_validators/{pin_validator.ex => add_remove_validator.ex} (73%) diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index 11432ef38..14c3e8531 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -17,6 +17,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do alias Pleroma.Object.Containment alias Pleroma.User alias Pleroma.Web.ActivityPub.ObjectValidators.AcceptRejectValidator + alias Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator alias Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator alias Pleroma.Web.ActivityPub.ObjectValidators.AnswerValidator alias Pleroma.Web.ActivityPub.ObjectValidators.ArticleNoteValidator @@ -30,7 +31,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do alias 
Pleroma.Web.ActivityPub.ObjectValidators.EventValidator alias Pleroma.Web.ActivityPub.ObjectValidators.FollowValidator alias Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator - alias Pleroma.Web.ActivityPub.ObjectValidators.PinValidator alias Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator alias Pleroma.Web.ActivityPub.ObjectValidators.UndoValidator alias Pleroma.Web.ActivityPub.ObjectValidators.UpdateValidator @@ -238,7 +238,7 @@ def validate(%{"type" => "Announce"} = object, meta) do def validate(%{"type" => type} = object, meta) when type in ~w(Add Remove) do with {:ok, object} <- object - |> PinValidator.cast_and_validate() + |> AddRemoveValidator.cast_and_validate() |> Ecto.Changeset.apply_action(:insert) do object = stringify_keys(object) {:ok, object, meta} diff --git a/lib/pleroma/web/activity_pub/object_validators/pin_validator.ex b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex similarity index 73% rename from lib/pleroma/web/activity_pub/object_validators/pin_validator.ex rename to lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex index dca8cba6f..73d1c03f0 100644 --- a/lib/pleroma/web/activity_pub/object_validators/pin_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex @@ -2,7 +2,7 @@ # Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> # SPDX-License-Identifier: AGPL-3.0-only -defmodule Pleroma.Web.ActivityPub.ObjectValidators.PinValidator do +defmodule Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator do use Ecto.Schema import Ecto.Changeset @@ -37,6 +37,17 @@ defp validate_data(changeset) do |> validate_required([:id, :target, :object, :actor, :type, :to, :cc]) |> validate_inclusion(:type, ~w(Add Remove)) |> validate_actor_presence() + |> validate_collection_belongs_to_actor() |> validate_object_presence() end + + defp validate_collection_belongs_to_actor(changeset) do + validate_change(changeset, :target, fn :target, target -> + if String.starts_with?(target, changeset.changes[:actor]) do + [] + else + [target: "collection doesn't belong to actor"] + end + end) + end end diff --git a/lib/pleroma/web/activity_pub/object_validators/common_validations.ex b/lib/pleroma/web/activity_pub/object_validators/common_validations.ex index 093549a45..940430588 100644 --- a/lib/pleroma/web/activity_pub/object_validators/common_validations.ex +++ b/lib/pleroma/web/activity_pub/object_validators/common_validations.ex @@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations do alias Pleroma.Object alias Pleroma.User + @spec validate_any_presence(Ecto.Changeset.t(), [atom()]) :: Ecto.Changeset.t() def validate_any_presence(cng, fields) do non_empty = fields @@ -29,6 +30,7 @@ def validate_any_presence(cng, fields) do end end + @spec validate_actor_presence(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t() def validate_actor_presence(cng, options \\ []) do field_name = Keyword.get(options, :field_name, :actor) @@ -47,6 +49,7 @@ def validate_actor_presence(cng, options \\ []) do end) end + @spec validate_object_presence(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t() def validate_object_presence(cng, options \\ []) do field_name = Keyword.get(options, :field_name, :object) allowed_types = Keyword.get(options, :allowed_types, false) @@ -68,6 +71,7 @@ def validate_object_presence(cng, options \\ []) do end) end + @spec validate_object_or_user_presence(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t() def 
validate_object_or_user_presence(cng, options \\ []) do field_name = Keyword.get(options, :field_name, :object) options = Keyword.put(options, :field_name, field_name) @@ -83,6 +87,7 @@ def validate_object_or_user_presence(cng, options \\ []) do if actor_cng.valid?, do: actor_cng, else: object_cng end + @spec validate_host_match(Ecto.Changeset.t(), [atom()]) :: Ecto.Changeset.t() def validate_host_match(cng, fields \\ [:id, :actor]) do if same_domain?(cng, fields) do cng @@ -95,6 +100,7 @@ def validate_host_match(cng, fields \\ [:id, :actor]) do end end + @spec validate_fields_match(Ecto.Changeset.t(), [atom()]) :: Ecto.Changeset.t() def validate_fields_match(cng, fields) do if map_unique?(cng, fields) do cng @@ -122,12 +128,14 @@ defp map_unique?(cng, fields, func \\ & &1) do end) end + @spec same_domain?(Ecto.Changeset.t(), [atom()]) :: boolean() def same_domain?(cng, fields \\ [:actor, :object]) do map_unique?(cng, fields, fn value -> URI.parse(value).host end) end # This figures out if a user is able to create, delete or modify something # based on the domain and superuser status + @spec validate_modification_rights(Ecto.Changeset.t()) :: Ecto.Changeset.t() def validate_modification_rights(cng) do actor = User.get_cached_by_ap_id(get_field(cng, :actor)) diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs index 28d7e1e3c..9bc27f89e 100644 --- a/test/pleroma/web/activity_pub/transmogrifier_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs @@ -168,7 +168,7 @@ test "it accepts Add/Remove activities" do "id" => "http://localhost:400/objects/d61d6733-e256-4fe1-ab13-1e369789423d", "actor" => actor, "object" => object_url, - "target" => "http://example.com/users/lain/collections/featured", + "target" => "https://example.com/users/lain/collections/featured", "type" => "Remove", "to" => [Pleroma.Constants.as_public()], "cc" => ["https://example.com/users/lain/followers"] diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs index e0d642910..99ad87d05 100644 --- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs @@ -1209,15 +1209,15 @@ test "returns 404 error for a wrong id", %{conn: conn} do setup do: clear_config([:instance, :max_pinned_statuses], 1) test "pin status", %{conn: conn, user: user, activity: activity} do - id_str = to_string(activity.id) + id = activity.id - assert %{"id" => ^id_str, "pinned" => true} = + assert %{"id" => ^id, "pinned" => true} = conn |> put_req_header("content-type", "application/json") |> post("/api/v1/statuses/#{activity.id}/pin") |> json_response_and_validate_schema(200) - assert [%{"id" => ^id_str, "pinned" => true}] = + assert [%{"id" => ^id, "pinned" => true}] = conn |> get("/api/v1/accounts/#{user.id}/statuses?pinned=true") |> json_response_and_validate_schema(200) diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex index 9e9f1c86c..8807c2d14 100644 --- a/test/support/http_request_mock.ex +++ b/test/support/http_request_mock.ex @@ -924,7 +924,8 @@ def get("https://mastodon.social/users/lambadalambda/collections/featured", _, _ body: File.read!("test/fixtures/users_mock/masto_featured.json") |> String.replace("{{domain}}", "mastodon.social") - |> String.replace("{{nickname}}", "lambadalambda") + |> String.replace("{{nickname}}", "lambadalambda"), + 
headers: activitypub_object_headers() }} end From d1d2744ee3e6015064cf50ac5725bfe45b682466 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Wed, 3 Mar 2021 15:41:05 +0300 Subject: [PATCH 090/174] featured_address valition in AddRemoveValidator --- .../web/activity_pub/object_validator.ex | 2 +- .../object_validators/add_remove_validator.ex | 12 +++++----- .../web/activity_pub/transmogrifier.ex | 7 ++++-- lib/pleroma/web/common_api.ex | 13 +++++++---- test/fixtures/users_mock/user.json | 1 + .../activity_pub_controller_test.exs | 22 +++++++++++++++++++ .../web/activity_pub/transmogrifier_test.exs | 11 ++++++++++ 7 files changed, 55 insertions(+), 13 deletions(-) diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index 14c3e8531..3ca9136aa 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -238,7 +238,7 @@ def validate(%{"type" => "Announce"} = object, meta) do def validate(%{"type" => type} = object, meta) when type in ~w(Add Remove) do with {:ok, object} <- object - |> AddRemoveValidator.cast_and_validate() + |> AddRemoveValidator.cast_and_validate(meta) |> Ecto.Changeset.apply_action(:insert) do object = stringify_keys(object) {:ok, object, meta} diff --git a/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex index 73d1c03f0..885282f32 100644 --- a/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex @@ -22,28 +22,28 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator do field(:cc, ObjectValidators.Recipients, default: []) end - def cast_and_validate(data) do + def cast_and_validate(data, meta) do data |> cast_data() - |> validate_data() + |> validate_data(meta) end defp cast_data(data) do cast(%__MODULE__{}, data, __schema__(:fields)) end - defp validate_data(changeset) do + defp validate_data(changeset, meta) do changeset |> validate_required([:id, :target, :object, :actor, :type, :to, :cc]) |> validate_inclusion(:type, ~w(Add Remove)) |> validate_actor_presence() - |> validate_collection_belongs_to_actor() + |> validate_collection_belongs_to_actor(meta) |> validate_object_presence() end - defp validate_collection_belongs_to_actor(changeset) do + defp validate_collection_belongs_to_actor(changeset, meta) do validate_change(changeset, :target, fn :target, target -> - if String.starts_with?(target, changeset.changes[:actor]) do + if target == meta[:featured_address] do [] else [target: "collection doesn't belong to actor"] diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index b662f5379..fa62e0db2 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -557,7 +557,7 @@ def handle_incoming( end def handle_incoming(%{"type" => type} = data, _options) when type in ~w(Add Remove) do - with {:ok, user} <- ObjectValidator.fetch_actor(data), + with {:ok, %User{} = user} <- ObjectValidator.fetch_actor(data), %Object{} <- Object.normalize(data["object"], fetch: true) do # Mastodon sends pin/unpin objects without id, to, cc fields data = @@ -566,7 +566,10 @@ def handle_incoming(%{"type" => type} = data, _options) when type in ~w(Add Remo |> Map.put_new("to", [Pleroma.Constants.as_public()]) |> Map.put_new("cc", 
[user.follower_address]) - case Pipeline.common_pipeline(data, local: false) do + case Pipeline.common_pipeline(data, + local: false, + featured_address: user.featured_address + ) do {:ok, activity, _meta} -> {:ok, activity} error -> error end diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index d35a0f219..175d690cc 100644 --- a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -412,14 +412,18 @@ def post(user, %{status: _} = data) do end @spec pin(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, term()} - def pin(id, %User{ap_id: actor} = user) do + def pin(id, %User{} = user) do with %Activity{} = activity <- create_activity_by_id(id), - true <- activity_belongs_to_actor(activity, actor), + true <- activity_belongs_to_actor(activity, user.ap_id), true <- object_type_is_allowed_for_pin(activity.object), true <- activity_is_public(activity), {:ok, pin_data, _} <- Builder.pin(user, activity.object), {:ok, _pin, _} <- - Pipeline.common_pipeline(pin_data, local: true, activity_id: id) do + Pipeline.common_pipeline(pin_data, + local: true, + activity_id: id, + featured_address: user.featured_address + ) do {:ok, activity} else {:error, {:execute_side_effects, error}} -> error @@ -456,7 +460,8 @@ def unpin(id, user) do Pipeline.common_pipeline(unpin_data, local: true, activity_id: activity.id, - expires_at: activity.data["expires_at"] + expires_at: activity.data["expires_at"], + featured_address: user.featured_address ) do {:ok, activity} end diff --git a/test/fixtures/users_mock/user.json b/test/fixtures/users_mock/user.json index da2483d02..c722a1145 100644 --- a/test/fixtures/users_mock/user.json +++ b/test/fixtures/users_mock/user.json @@ -34,6 +34,7 @@ "owner": "https://example.com/users/{{nickname}}", "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5DLtwGXNZElJyxFGfcVc\nXANhaMadj/iYYQwZjOJTV9QsbtiNBeIK54PJrYuU0/0YIdrvS1iqheX5IwXRhcwa\nhm3ZyLz7XeN9st7FBni4BmZMBtMpxAuYuu5p/jbWy13qAiYOhPreCx0wrWgm/lBD\n9mkgaxIxPooBE0S4ZWEJIDIV1Vft3AWcRUyWW1vIBK0uZzs6GYshbQZB952S0yo4\nFzI1hABGHncH8UvuFauh4EZ8tY7/X5I0pGRnDOcRN1dAht5w5yTA+6r5kebiFQjP\nIzN/eCO/a9Flrj9YGW7HDNtjSOH0A31PLRGlJtJO3yK57dnf5ppyCZGfL4emShQo\ncQIDAQAB\n-----END PUBLIC KEY-----\n\n" }, + "featured": "https://example.com/users/{{nickname}}/collections/featured", "summary": "your friendly neighborhood pleroma developer<br>I like cute things and distributed systems, and really hate delete and redrafts", "tag": [], "type": "Person", diff --git a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs index d9fa25d94..cea4b3a97 100644 --- a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs @@ -673,6 +673,17 @@ test "accepts Add/Remove activities", %{conn: conn} do body: user, headers: [{"content-type", "application/activity+json"}] } + + %{method: :get, url: "https://example.com/users/lain/collections/featured"} -> + %Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "example.com") + |> String.replace("{{nickname}}", "lain"), + headers: [{"content-type", "application/activity+json"}] + } end) data = %{ @@ -753,6 +764,17 @@ test "mastodon pin/unpin", %{conn: conn} do body: user, headers: [{"content-type", "application/activity+json"}] } + + %{method: :get, url: "https://example.com/users/lain/collections/featured"} 
-> + %Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "example.com") + |> String.replace("{{nickname}}", "lain"), + headers: [{"content-type", "application/activity+json"}] + } end) data = %{ diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs index 9bc27f89e..fb8284aaf 100644 --- a/test/pleroma/web/activity_pub/transmogrifier_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs @@ -147,6 +147,17 @@ test "it accepts Add/Remove activities" do body: object, headers: [{"content-type", "application/activity+json"}] } + + %{method: :get, url: "https://example.com/users/lain/collections/featured"} -> + %Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "example.com") + |> String.replace("{{nickname}}", "lain"), + headers: [{"content-type", "application/activity+json"}] + } end) message = %{ From 3adb43cc20751540ea590645b31b985807684202 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Wed, 3 Mar 2021 18:04:06 +0300 Subject: [PATCH 091/174] refetch user on incoming add/remove activity if featured_address is nil --- .../web/activity_pub/transmogrifier.ex | 8 ++ .../web/mastodon_api/views/status_view.ex | 2 +- .../web/activity_pub/transmogrifier_test.exs | 78 +++++++++++++++++++ test/support/factory.ex | 4 +- 4 files changed, 90 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index fa62e0db2..c4b11a655 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -558,6 +558,8 @@ def handle_incoming( def handle_incoming(%{"type" => type} = data, _options) when type in ~w(Add Remove) do with {:ok, %User{} = user} <- ObjectValidator.fetch_actor(data), + # maybe locally user doesn't have featured_address + {:ok, user} <- maybe_refetch_user(user), %Object{} <- Object.normalize(data["object"], fetch: true) do # Mastodon sends pin/unpin objects without id, to, cc fields data = @@ -669,6 +671,12 @@ def handle_incoming( def handle_incoming(_, _), do: :error + defp maybe_refetch_user(%User{featured_address: address} = user) when is_binary(address) do + {:ok, user} + end + + defp maybe_refetch_user(%User{ap_id: ap_id}), do: upgrade_user_from_ap_id(ap_id) + @spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil def get_obj_helper(id, options \\ []) do options = Keyword.put(options, :fetch, true) diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index d0247fa4a..814b3d142 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -152,7 +152,7 @@ def render( |> Enum.filter(& &1) |> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end) - {pinned?, pinned_at} = pin_data(activity_object, user) + {pinned?, pinned_at} = pin_data(object, user) %{ id: to_string(activity.id), diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs index fb8284aaf..07ed3920f 100644 --- a/test/pleroma/web/activity_pub/transmogrifier_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs @@ -191,6 +191,84 @@ test "it accepts Add/Remove activities" do user = 
refresh_record(user) refute user.pinned_objects[object_url] end + + test "Add/Remove activities for remote users without featured address" do + user = insert(:user, local: false, domain: "example.com") + + user = + user + |> Ecto.Changeset.change(featured_address: nil) + |> Repo.update!() + + %{host: host} = URI.parse(user.ap_id) + + user_data = + "test/fixtures/users_mock/user.json" + |> File.read!() + |> String.replace("{{nickname}}", user.nickname) + + object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" + + object = + "test/fixtures/statuses/note.json" + |> File.read!() + |> String.replace("{{nickname}}", user.nickname) + |> String.replace("{{object_id}}", object_id) + + object_url = "https://#{host}/objects/#{object_id}" + + actor = "https://#{host}/users/#{user.nickname}" + + featured = "https://#{host}/users/#{user.nickname}/collections/featured" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user_data, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: object, + headers: [{"content-type", "application/activity+json"}] + } + + %{method: :get, url: ^featured} -> + %Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "#{host}") + |> String.replace("{{nickname}}", user.nickname), + headers: [{"content-type", "application/activity+json"}] + } + end) + + message = %{ + "id" => "https://#{host}/objects/d61d6733-e256-4fe1-ab13-1e369789423f", + "actor" => actor, + "object" => object_url, + "target" => "https://#{host}/users/#{user.nickname}/collections/featured", + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => ["https://#{host}/users/#{user.nickname}/followers"] + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(message) + assert activity.data == message + user = User.get_cached_by_ap_id(actor) + assert user.pinned_objects[object_url] + end end describe "prepare outgoing" do diff --git a/test/support/factory.ex b/test/support/factory.ex index 883cedf3c..867076d6a 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -41,7 +41,7 @@ def user_factory(attrs \\ %{}) do urls = if attrs[:local] == false do - base_domain = Enum.random(["domain1.com", "domain2.com", "domain3.com"]) + base_domain = attrs[:domain] || Enum.random(["domain1.com", "domain2.com", "domain3.com"]) ap_id = "https://#{base_domain}/users/#{user.nickname}" @@ -60,6 +60,8 @@ def user_factory(attrs \\ %{}) do } end + attrs = Map.delete(attrs, :domain) + user |> Map.put(:raw_bio, user.bio) |> Map.merge(urls) From 16c96966e9f7a039a969c06bdd6c4e18ab8d432c Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Tue, 9 Mar 2021 08:59:50 +0300 Subject: [PATCH 092/174] not needed --- test/fixtures/masto_pin.json | 41 ------------------------------------ 1 file changed, 41 deletions(-) delete mode 100644 test/fixtures/masto_pin.json diff --git a/test/fixtures/masto_pin.json b/test/fixtures/masto_pin.json deleted file mode 100644 index e57a34375..000000000 --- a/test/fixtures/masto_pin.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "@context": [ - "https://www.w3.org/ns/activitystreams", - "https://w3id.org/security/v1", - { - "Emoji": "toot:Emoji", - "Hashtag": "as:Hashtag", - "PropertyValue": "schema:PropertyValue", - "alsoKnownAs": { - "@id": "as:alsoKnownAs", - "@type": "@id" - }, - "atomUri": "ostatus:atomUri", - "conversation": 
"ostatus:conversation", - "featured": { - "@id": "toot:featured", - "@type": "@id" - }, - "focalPoint": { - "@container": "@list", - "@id": "toot:focalPoint" - }, - "inReplyToAtomUri": "ostatus:inReplyToAtomUri", - "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", - "movedTo": { - "@id": "as:movedTo", - "@type": "@id" - }, - "ostatus": "http://ostatus.org#", - "schema": "http://schema.org#", - "sensitive": "as:sensitive", - "toot": "http://joinmastodon.org/ns#", - "value": "schema:value" - } - ], - "id": "https://example.com/users/nickname/statuses/{{id}}", - "actor": "https://example.com/users/nickname", - "object": "https://example.com/users/nickname/statuses/101355175004496751", - "target": "https://example.com/users/nickname/collections/featured", - "type": "{{type}}" -} From 8f0778166c2e7c76975d14937ef61c05d399b560 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Tue, 9 Mar 2021 09:00:20 +0300 Subject: [PATCH 093/174] moving fixture into mastodon folder --- test/fixtures/{ => mastodon}/collections/featured.json | 0 test/pleroma/web/activity_pub/activity_pub_test.exs | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename test/fixtures/{ => mastodon}/collections/featured.json (100%) diff --git a/test/fixtures/collections/featured.json b/test/fixtures/mastodon/collections/featured.json similarity index 100% rename from test/fixtures/collections/featured.json rename to test/fixtures/mastodon/collections/featured.json diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index 081d00d45..64e12066e 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -252,7 +252,7 @@ test "fetches user featured collection" do object_id = Ecto.UUID.generate() featured_data = - "test/fixtures/collections/featured.json" + "test/fixtures/mastodon/collections/featured.json" |> File.read!() |> String.replace("{{domain}}", "example.com") |> String.replace("{{nickname}}", "lain") From 5ae9b05600dd3dffc628ba25fe01b271f7bc0122 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Tue, 9 Mar 2021 09:00:44 +0300 Subject: [PATCH 094/174] separate test file for featured collection --- .../add_remove_handling_test.exs | 172 ++++++++++++++++++ .../web/activity_pub/transmogrifier_test.exs | 163 ----------------- 2 files changed, 172 insertions(+), 163 deletions(-) create mode 100644 test/pleroma/web/activity_pub/transmogrifier/add_remove_handling_test.exs diff --git a/test/pleroma/web/activity_pub/transmogrifier/add_remove_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/add_remove_handling_test.exs new file mode 100644 index 000000000..fc7757125 --- /dev/null +++ b/test/pleroma/web/activity_pub/transmogrifier/add_remove_handling_test.exs @@ -0,0 +1,172 @@ +defmodule Pleroma.Web.ActivityPub.Transmogrifier.AddRemoveHandlingTest do + use Oban.Testing, repo: Pleroma.Repo + use Pleroma.DataCase, async: true + + require Pleroma.Constants + + import Pleroma.Factory + + alias Pleroma.User + alias Pleroma.Web.ActivityPub.Transmogrifier + + test "it accepts Add/Remove activities" do + user = + "test/fixtures/users_mock/user.json" + |> File.read!() + |> String.replace("{{nickname}}", "lain") + + object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" + + object = + "test/fixtures/statuses/note.json" + |> File.read!() + |> String.replace("{{nickname}}", "lain") + |> String.replace("{{object_id}}", 
object_id) + + object_url = "https://example.com/objects/#{object_id}" + + actor = "https://example.com/users/lain" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: object, + headers: [{"content-type", "application/activity+json"}] + } + + %{method: :get, url: "https://example.com/users/lain/collections/featured"} -> + %Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "example.com") + |> String.replace("{{nickname}}", "lain"), + headers: [{"content-type", "application/activity+json"}] + } + end) + + message = %{ + "id" => "https://example.com/objects/d61d6733-e256-4fe1-ab13-1e369789423f", + "actor" => actor, + "object" => object_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => ["https://example.com/users/lain/followers"] + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(message) + assert activity.data == message + user = User.get_cached_by_ap_id(actor) + assert user.pinned_objects[object_url] + + remove = %{ + "id" => "http://localhost:400/objects/d61d6733-e256-4fe1-ab13-1e369789423d", + "actor" => actor, + "object" => object_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Remove", + "to" => [Pleroma.Constants.as_public()], + "cc" => ["https://example.com/users/lain/followers"] + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(remove) + assert activity.data == remove + + user = refresh_record(user) + refute user.pinned_objects[object_url] + end + + test "Add/Remove activities for remote users without featured address" do + user = insert(:user, local: false, domain: "example.com") + + user = + user + |> Ecto.Changeset.change(featured_address: nil) + |> Repo.update!() + + %{host: host} = URI.parse(user.ap_id) + + user_data = + "test/fixtures/users_mock/user.json" + |> File.read!() + |> String.replace("{{nickname}}", user.nickname) + + object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" + + object = + "test/fixtures/statuses/note.json" + |> File.read!() + |> String.replace("{{nickname}}", user.nickname) + |> String.replace("{{object_id}}", object_id) + + object_url = "https://#{host}/objects/#{object_id}" + + actor = "https://#{host}/users/#{user.nickname}" + + featured = "https://#{host}/users/#{user.nickname}/collections/featured" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user_data, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: object, + headers: [{"content-type", "application/activity+json"}] + } + + %{method: :get, url: ^featured} -> + %Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "#{host}") + |> String.replace("{{nickname}}", user.nickname), + headers: [{"content-type", "application/activity+json"}] + } + end) + + message = %{ + "id" => "https://#{host}/objects/d61d6733-e256-4fe1-ab13-1e369789423f", + "actor" => actor, + "object" => object_url, + "target" => "https://#{host}/users/#{user.nickname}/collections/featured", + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => 
["https://#{host}/users/#{user.nickname}/followers"] + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(message) + assert activity.data == message + user = User.get_cached_by_ap_id(actor) + assert user.pinned_objects[object_url] + end +end diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs index 07ed3920f..4c3fcb44a 100644 --- a/test/pleroma/web/activity_pub/transmogrifier_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs @@ -6,8 +6,6 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do use Oban.Testing, repo: Pleroma.Repo use Pleroma.DataCase - require Pleroma.Constants - alias Pleroma.Activity alias Pleroma.Object alias Pleroma.Tests.ObanHelpers @@ -108,167 +106,6 @@ test "it accepts Move activities" do assert activity.data["target"] == new_user.ap_id assert activity.data["type"] == "Move" end - - test "it accepts Add/Remove activities" do - user = - "test/fixtures/users_mock/user.json" - |> File.read!() - |> String.replace("{{nickname}}", "lain") - - object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" - - object = - "test/fixtures/statuses/note.json" - |> File.read!() - |> String.replace("{{nickname}}", "lain") - |> String.replace("{{object_id}}", object_id) - - object_url = "https://example.com/objects/#{object_id}" - - actor = "https://example.com/users/lain" - - Tesla.Mock.mock(fn - %{ - method: :get, - url: ^actor - } -> - %Tesla.Env{ - status: 200, - body: user, - headers: [{"content-type", "application/activity+json"}] - } - - %{ - method: :get, - url: ^object_url - } -> - %Tesla.Env{ - status: 200, - body: object, - headers: [{"content-type", "application/activity+json"}] - } - - %{method: :get, url: "https://example.com/users/lain/collections/featured"} -> - %Tesla.Env{ - status: 200, - body: - "test/fixtures/users_mock/masto_featured.json" - |> File.read!() - |> String.replace("{{domain}}", "example.com") - |> String.replace("{{nickname}}", "lain"), - headers: [{"content-type", "application/activity+json"}] - } - end) - - message = %{ - "id" => "https://example.com/objects/d61d6733-e256-4fe1-ab13-1e369789423f", - "actor" => actor, - "object" => object_url, - "target" => "https://example.com/users/lain/collections/featured", - "type" => "Add", - "to" => [Pleroma.Constants.as_public()], - "cc" => ["https://example.com/users/lain/followers"] - } - - assert {:ok, activity} = Transmogrifier.handle_incoming(message) - assert activity.data == message - user = User.get_cached_by_ap_id(actor) - assert user.pinned_objects[object_url] - - remove = %{ - "id" => "http://localhost:400/objects/d61d6733-e256-4fe1-ab13-1e369789423d", - "actor" => actor, - "object" => object_url, - "target" => "https://example.com/users/lain/collections/featured", - "type" => "Remove", - "to" => [Pleroma.Constants.as_public()], - "cc" => ["https://example.com/users/lain/followers"] - } - - assert {:ok, activity} = Transmogrifier.handle_incoming(remove) - assert activity.data == remove - - user = refresh_record(user) - refute user.pinned_objects[object_url] - end - - test "Add/Remove activities for remote users without featured address" do - user = insert(:user, local: false, domain: "example.com") - - user = - user - |> Ecto.Changeset.change(featured_address: nil) - |> Repo.update!() - - %{host: host} = URI.parse(user.ap_id) - - user_data = - "test/fixtures/users_mock/user.json" - |> File.read!() - |> String.replace("{{nickname}}", user.nickname) - - object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" - - 
object = - "test/fixtures/statuses/note.json" - |> File.read!() - |> String.replace("{{nickname}}", user.nickname) - |> String.replace("{{object_id}}", object_id) - - object_url = "https://#{host}/objects/#{object_id}" - - actor = "https://#{host}/users/#{user.nickname}" - - featured = "https://#{host}/users/#{user.nickname}/collections/featured" - - Tesla.Mock.mock(fn - %{ - method: :get, - url: ^actor - } -> - %Tesla.Env{ - status: 200, - body: user_data, - headers: [{"content-type", "application/activity+json"}] - } - - %{ - method: :get, - url: ^object_url - } -> - %Tesla.Env{ - status: 200, - body: object, - headers: [{"content-type", "application/activity+json"}] - } - - %{method: :get, url: ^featured} -> - %Tesla.Env{ - status: 200, - body: - "test/fixtures/users_mock/masto_featured.json" - |> File.read!() - |> String.replace("{{domain}}", "#{host}") - |> String.replace("{{nickname}}", user.nickname), - headers: [{"content-type", "application/activity+json"}] - } - end) - - message = %{ - "id" => "https://#{host}/objects/d61d6733-e256-4fe1-ab13-1e369789423f", - "actor" => actor, - "object" => object_url, - "target" => "https://#{host}/users/#{user.nickname}/collections/featured", - "type" => "Add", - "to" => [Pleroma.Constants.as_public()], - "cc" => ["https://#{host}/users/#{user.nickname}/followers"] - } - - assert {:ok, activity} = Transmogrifier.handle_incoming(message) - assert activity.data == message - user = User.get_cached_by_ap_id(actor) - assert user.pinned_objects[object_url] - end end describe "prepare outgoing" do From 8857242c952dcac0bc5363e1c80160efaf7a1638 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Tue, 9 Mar 2021 11:57:20 +0300 Subject: [PATCH 095/174] removeing corresponding add activity --- lib/pleroma/activity.ex | 9 +++++ lib/pleroma/web/activity_pub/side_effects.ex | 5 +++ test/pleroma/activity_test.exs | 22 ++++++++++ test/support/factory.ex | 42 ++++++++++++++++++++ 4 files changed, 78 insertions(+) diff --git a/lib/pleroma/activity.ex b/lib/pleroma/activity.ex index a4cfca4c5..53beca5e6 100644 --- a/lib/pleroma/activity.ex +++ b/lib/pleroma/activity.ex @@ -391,4 +391,13 @@ def get_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do end def get_by_object_ap_id_with_object(_), do: nil + + @spec add_by_params_query(String.t(), String.t(), String.t()) :: Ecto.Query.t() + def add_by_params_query(object_id, actor, target) do + object_id + |> Queries.by_object_id() + |> Queries.by_type("Add") + |> Queries.by_actor(actor) + |> where([a], fragment("?->>'target' = ?", a.data, ^target)) + end end diff --git a/lib/pleroma/web/activity_pub/side_effects.ex b/lib/pleroma/web/activity_pub/side_effects.ex index 9d22f9d3c..5fe143c2b 100644 --- a/lib/pleroma/web/activity_pub/side_effects.ex +++ b/lib/pleroma/web/activity_pub/side_effects.ex @@ -340,11 +340,16 @@ def handle(%{data: %{"type" => "Add"} = data} = object, meta) do # Tasks this handles: # - removes pin from user + # - removes corresponding Add activity # - if activity had expiration, recreates activity expiration job @impl true def handle(%{data: %{"type" => "Remove"} = data} = object, meta) do with %User{} = user <- User.get_cached_by_ap_id(data["actor"]), {:ok, _user} <- User.remove_pinned_object_id(user, data["object"]) do + data["object"] + |> Activity.add_by_params_query(user.ap_id, user.featured_address) + |> Repo.delete_all() + # if pinned activity was scheduled for deletion, we reschedule it for deletion if meta[:expires_at] do # 
MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in original implementation diff --git a/test/pleroma/activity_test.exs b/test/pleroma/activity_test.exs index 390a06344..962bc7e45 100644 --- a/test/pleroma/activity_test.exs +++ b/test/pleroma/activity_test.exs @@ -254,4 +254,26 @@ test "get_by_object_ap_id_with_object/1" do assert %{id: ^id} = Activity.get_by_object_ap_id_with_object(obj_id) end + + test "add_by_params_query/3" do + user = insert(:user) + + note = insert(:note_activity, user: user) + + insert(:add_activity, user: user, note: note) + insert(:add_activity, user: user, note: note) + insert(:add_activity, user: user) + + assert Repo.aggregate(Activity, :count, :id) == 4 + + add_query = + Activity.add_by_params_query(note.data["object"], user.ap_id, user.featured_address) + + assert Repo.aggregate(add_query, :count, :id) == 2 + + Repo.delete_all(add_query) + assert Repo.aggregate(add_query, :count, :id) == 0 + + assert Repo.aggregate(Activity, :count, :id) == 2 + end end diff --git a/test/support/factory.ex b/test/support/factory.ex index 867076d6a..5c4e65c81 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -4,6 +4,9 @@ defmodule Pleroma.Factory do use ExMachina.Ecto, repo: Pleroma.Repo + + require Pleroma.Constants + alias Pleroma.Object alias Pleroma.User @@ -225,6 +228,45 @@ def direct_note_activity_factory do } end + def add_activity_factory(attrs \\ %{}) do + featured_collection_activity(attrs, "Add") + end + + def remove_activity_factor(attrs \\ %{}) do + featured_collection_activity(attrs, "Remove") + end + + defp featured_collection_activity(attrs, type) do + user = attrs[:user] || insert(:user) + note = attrs[:note] || insert(:note, user: user) + + data_attrs = + attrs + |> Map.get(:data_attrs, %{}) + |> Map.put(:type, type) + + attrs = Map.drop(attrs, [:user, :note, :data_attrs]) + + data = + %{ + "id" => Pleroma.Web.ActivityPub.Utils.generate_activity_id(), + "target" => user.featured_address, + "object" => note.data["object"], + "actor" => note.data["actor"], + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => [user.follower_address] + } + |> Map.merge(data_attrs) + + %Pleroma.Activity{ + data: data, + actor: data["actor"], + recipients: data["to"] + } + |> Map.merge(attrs) + end + def note_activity_factory(attrs \\ %{}) do user = attrs[:user] || insert(:user) note = attrs[:note] || insert(:note, user: user) From 2a520ba008f432e7e1fa297954966e0181245f01 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Fri, 19 Mar 2021 17:25:12 +0300 Subject: [PATCH 096/174] expanding AddRemoveValidator --- .../web/activity_pub/object_validator.ex | 2 +- .../object_validators/add_remove_validator.ex | 26 ++++++++++++++----- .../web/activity_pub/transmogrifier.ex | 22 ++++------------ lib/pleroma/web/common_api.ex | 3 +-- 4 files changed, 27 insertions(+), 26 deletions(-) diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index 3ca9136aa..14c3e8531 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -238,7 +238,7 @@ def validate(%{"type" => "Announce"} = object, meta) do def validate(%{"type" => type} = object, meta) when type in ~w(Add Remove) do with {:ok, object} <- object - |> AddRemoveValidator.cast_and_validate(meta) + |> AddRemoveValidator.cast_and_validate() |> Ecto.Changeset.apply_action(:insert) do object = stringify_keys(object) {:ok, object, meta} diff 
--git a/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex index 885282f32..c38f86a0e 100644 --- a/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex @@ -8,6 +8,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator do import Ecto.Changeset import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations + require Pleroma.Constants + alias Pleroma.EctoType.ActivityPub.ObjectValidators @primary_key false @@ -22,28 +24,40 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator do field(:cc, ObjectValidators.Recipients, default: []) end - def cast_and_validate(data, meta) do + def cast_and_validate(data) do data + |> maybe_fix_data_for_mastodon() |> cast_data() - |> validate_data(meta) + |> validate_data() + end + + defp maybe_fix_data_for_mastodon(data) do + {:ok, actor} = Pleroma.User.get_or_fetch_by_ap_id(data["actor"]) + # Mastodon sends pin/unpin objects without id, to, cc fields + data + |> Map.put_new("id", Pleroma.Web.ActivityPub.Utils.generate_activity_id()) + |> Map.put_new("to", [Pleroma.Constants.as_public()]) + |> Map.put_new("cc", [actor.follower_address]) end defp cast_data(data) do cast(%__MODULE__{}, data, __schema__(:fields)) end - defp validate_data(changeset, meta) do + defp validate_data(changeset) do changeset |> validate_required([:id, :target, :object, :actor, :type, :to, :cc]) |> validate_inclusion(:type, ~w(Add Remove)) |> validate_actor_presence() - |> validate_collection_belongs_to_actor(meta) + |> validate_collection_belongs_to_actor() |> validate_object_presence() end - defp validate_collection_belongs_to_actor(changeset, meta) do + defp validate_collection_belongs_to_actor(changeset) do + {:ok, actor} = Pleroma.User.get_or_fetch_by_ap_id(changeset.changes[:actor]) + validate_change(changeset, :target, fn :target, target -> - if target == meta[:featured_address] do + if target == actor.featured_address do [] else [target: "collection doesn't belong to actor"] diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index c4b11a655..2172e7736 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -557,24 +557,12 @@ def handle_incoming( end def handle_incoming(%{"type" => type} = data, _options) when type in ~w(Add Remove) do - with {:ok, %User{} = user} <- ObjectValidator.fetch_actor(data), + with :ok <- ObjectValidator.fetch_actor_and_object(data), + {:ok, actor} <- Pleroma.User.get_or_fetch_by_ap_id(data["actor"]), # maybe locally user doesn't have featured_address - {:ok, user} <- maybe_refetch_user(user), - %Object{} <- Object.normalize(data["object"], fetch: true) do - # Mastodon sends pin/unpin objects without id, to, cc fields - data = - data - |> Map.put_new("id", Utils.generate_activity_id()) - |> Map.put_new("to", [Pleroma.Constants.as_public()]) - |> Map.put_new("cc", [user.follower_address]) - - case Pipeline.common_pipeline(data, - local: false, - featured_address: user.featured_address - ) do - {:ok, activity, _meta} -> {:ok, activity} - error -> error - end + {:ok, _} <- maybe_refetch_user(actor), + {:ok, activity, _meta} <- Pipeline.common_pipeline(data, local: false) do + {:ok, activity} end end diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index 175d690cc..b36be4d2a 100644 --- 
a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -421,8 +421,7 @@ def pin(id, %User{} = user) do {:ok, _pin, _} <- Pipeline.common_pipeline(pin_data, local: true, - activity_id: id, - featured_address: user.featured_address + activity_id: id ) do {:ok, activity} else From 1885268c9c242aca2a51bd15ed839bd65d6a52dc Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Thu, 25 Mar 2021 13:26:54 +0300 Subject: [PATCH 097/174] expanding validator --- .../object_validators/add_remove_validator.ex | 28 +++++++++++++------ .../web/activity_pub/transmogrifier.ex | 18 +----------- 2 files changed, 20 insertions(+), 26 deletions(-) diff --git a/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex index c38f86a0e..f885aabe4 100644 --- a/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex @@ -11,6 +11,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator do require Pleroma.Constants alias Pleroma.EctoType.ActivityPub.ObjectValidators + alias Pleroma.User @primary_key false @@ -25,14 +26,17 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator do end def cast_and_validate(data) do + {:ok, actor} = User.get_or_fetch_by_ap_id(data["actor"]) + + {:ok, actor} = maybe_refetch_user(actor) + data - |> maybe_fix_data_for_mastodon() + |> maybe_fix_data_for_mastodon(actor) |> cast_data() - |> validate_data() + |> validate_data(actor) end - defp maybe_fix_data_for_mastodon(data) do - {:ok, actor} = Pleroma.User.get_or_fetch_by_ap_id(data["actor"]) + defp maybe_fix_data_for_mastodon(data, actor) do # Mastodon sends pin/unpin objects without id, to, cc fields data |> Map.put_new("id", Pleroma.Web.ActivityPub.Utils.generate_activity_id()) @@ -44,18 +48,16 @@ defp cast_data(data) do cast(%__MODULE__{}, data, __schema__(:fields)) end - defp validate_data(changeset) do + defp validate_data(changeset, actor) do changeset |> validate_required([:id, :target, :object, :actor, :type, :to, :cc]) |> validate_inclusion(:type, ~w(Add Remove)) |> validate_actor_presence() - |> validate_collection_belongs_to_actor() + |> validate_collection_belongs_to_actor(actor) |> validate_object_presence() end - defp validate_collection_belongs_to_actor(changeset) do - {:ok, actor} = Pleroma.User.get_or_fetch_by_ap_id(changeset.changes[:actor]) - + defp validate_collection_belongs_to_actor(changeset, actor) do validate_change(changeset, :target, fn :target, target -> if target == actor.featured_address do [] @@ -64,4 +66,12 @@ defp validate_collection_belongs_to_actor(changeset) do end end) end + + defp maybe_refetch_user(%User{featured_address: address} = user) when is_binary(address) do + {:ok, user} + end + + defp maybe_refetch_user(%User{ap_id: ap_id}) do + Pleroma.Web.ActivityPub.Transmogrifier.upgrade_user_from_ap_id(ap_id) + end end diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 2172e7736..c4caeff0a 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -534,7 +534,7 @@ def handle_incoming( end def handle_incoming(%{"type" => type} = data, _options) - when type in ~w{Like EmojiReact Announce} do + when type in ~w{Like EmojiReact Announce Add Remove} do with :ok <- ObjectValidator.fetch_actor_and_object(data), {:ok, activity, _meta} <- 
Pipeline.common_pipeline(data, local: false) do @@ -556,16 +556,6 @@ def handle_incoming( end end - def handle_incoming(%{"type" => type} = data, _options) when type in ~w(Add Remove) do - with :ok <- ObjectValidator.fetch_actor_and_object(data), - {:ok, actor} <- Pleroma.User.get_or_fetch_by_ap_id(data["actor"]), - # maybe locally user doesn't have featured_address - {:ok, _} <- maybe_refetch_user(actor), - {:ok, activity, _meta} <- Pipeline.common_pipeline(data, local: false) do - {:ok, activity} - end - end - def handle_incoming( %{"type" => "Delete"} = data, _options @@ -659,12 +649,6 @@ def handle_incoming( def handle_incoming(_, _), do: :error - defp maybe_refetch_user(%User{featured_address: address} = user) when is_binary(address) do - {:ok, user} - end - - defp maybe_refetch_user(%User{ap_id: ap_id}), do: upgrade_user_from_ap_id(ap_id) - @spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil def get_obj_helper(id, options \\ []) do options = Keyword.put(options, :fetch, true) From 6e108b8603de45d489d4aef7e3e271bc5e8c431d Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Fri, 26 Mar 2021 19:19:19 +0300 Subject: [PATCH 098/174] reading the file, instead of config keyword --- lib/pleroma/config/release_runtime_provider.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/config/release_runtime_provider.ex b/lib/pleroma/config/release_runtime_provider.ex index 8227195dc..70ef3bcc1 100644 --- a/lib/pleroma/config/release_runtime_provider.ex +++ b/lib/pleroma/config/release_runtime_provider.ex @@ -39,7 +39,7 @@ def load(config, _opts) do with_exported = if File.exists?(exported_config_path) do - exported_config = Config.Reader.read!(with_runtime_config) + exported_config = Config.Reader.read!(exported_config_path) Config.Reader.merge(with_runtime_config, exported_config) else with_runtime_config From 4d046afd2769cfdc16b2ee48e8c1d8f7f8e8ffa7 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Sat, 27 Mar 2021 09:05:33 +0300 Subject: [PATCH 099/174] tests for release config provider --- .../config/release_runtime_provider.ex | 17 +++---- mix.exs | 13 +++++- .../config/temp.exported_from_db.secret.exs | 5 ++ .../config/release_runtime_provider_test.exs | 46 +++++++++++++++++++ 4 files changed, 70 insertions(+), 11 deletions(-) create mode 100644 test/fixtures/config/temp.exported_from_db.secret.exs create mode 100644 test/pleroma/config/release_runtime_provider_test.exs diff --git a/lib/pleroma/config/release_runtime_provider.ex b/lib/pleroma/config/release_runtime_provider.ex index 70ef3bcc1..46fa35559 100644 --- a/lib/pleroma/config/release_runtime_provider.ex +++ b/lib/pleroma/config/release_runtime_provider.ex @@ -1,6 +1,6 @@ defmodule Pleroma.Config.ReleaseRuntimeProvider do @moduledoc """ - Imports `runtime.exs` and `{env}.exported_from_db.secret.exs` for elixir releases. + Imports runtime config and `{env}.exported_from_db.secret.exs` for releases. 
""" @behaviour Config.Provider @@ -8,13 +8,13 @@ defmodule Pleroma.Config.ReleaseRuntimeProvider do def init(opts), do: opts @impl true - def load(config, _opts) do + def load(config, opts) do with_defaults = Config.Reader.merge(config, Pleroma.Config.Holder.release_defaults()) - config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs" + config_path = opts[:config_path] with_runtime_config = - if File.exists?(config_path) do + if config_path && File.exists?(config_path) do runtime_config = Config.Reader.read!(config_path) with_defaults @@ -24,7 +24,7 @@ def load(config, _opts) do warning = [ IO.ANSI.red(), IO.ANSI.bright(), - "!!! #{config_path} not found! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file", + "!!! Config path is not declared! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file", IO.ANSI.reset() ] @@ -32,13 +32,10 @@ def load(config, _opts) do with_defaults end - exported_config_path = - config_path - |> Path.dirname() - |> Path.join("prod.exported_from_db.secret.exs") + exported_config_path = opts[:exported_config_path] with_exported = - if File.exists?(exported_config_path) do + if exported_config_path && File.exists?(exported_config_path) do exported_config = Config.Reader.read!(exported_config_path) Config.Reader.merge(with_runtime_config, exported_config) else diff --git a/mix.exs b/mix.exs index ae74f50a3..7328b533b 100644 --- a/mix.exs +++ b/mix.exs @@ -38,7 +38,7 @@ def project do include_executables_for: [:unix], applications: [ex_syslogger: :load, syslog: :load, eldap: :transient], steps: [:assemble, &put_otp_version/1, ©_files/1, ©_nginx_config/1], - config_providers: [{Pleroma.Config.ReleaseRuntimeProvider, nil}] + config_providers: [{Pleroma.Config.ReleaseRuntimeProvider, release_config_paths()}] ] ] ] @@ -67,6 +67,17 @@ def copy_nginx_config(%{path: target_path} = release) do release end + defp release_config_paths do + config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs" + + exported_config_path = + config_path + |> Path.dirname() + |> Path.join("#{Mix.env()}.exported_from_db.secret.exs") + + [config_path: config_path, exported_config_path: exported_config_path] + end + # Configuration for the OTP application. # # Type `mix help compile.app` for more information. diff --git a/test/fixtures/config/temp.exported_from_db.secret.exs b/test/fixtures/config/temp.exported_from_db.secret.exs new file mode 100644 index 000000000..64bee7f32 --- /dev/null +++ b/test/fixtures/config/temp.exported_from_db.secret.exs @@ -0,0 +1,5 @@ +use Mix.Config + +config :pleroma, exported_config_merged: true + +config :pleroma, :first_setting, key: "new value" diff --git a/test/pleroma/config/release_runtime_provider_test.exs b/test/pleroma/config/release_runtime_provider_test.exs new file mode 100644 index 000000000..1921698c5 --- /dev/null +++ b/test/pleroma/config/release_runtime_provider_test.exs @@ -0,0 +1,46 @@ +defmodule Pleroma.Config.ReleaseRuntimeProviderTest do + use ExUnit.Case, async: true + + alias Pleroma.Config.ReleaseRuntimeProvider + + describe "load/2" do + test "loads release defaults config and warns about non-existent runtime config" do + ExUnit.CaptureIO.capture_io(fn -> + merged = ReleaseRuntimeProvider.load([], []) + assert merged == Pleroma.Config.Holder.release_defaults() + IO.inspect(merged) + end) =~ + "!!! Config path is not declared! 
Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file" + end + + test "merged runtime config" do + merged = + ReleaseRuntimeProvider.load([], config_path: "test/fixtures/config/temp.secret.exs") + + assert merged[:pleroma][:first_setting] == [key: "value", key2: [Pleroma.Repo]] + assert merged[:pleroma][:second_setting] == [key: "value2", key2: ["Activity"]] + end + + test "merged exported config" do + ExUnit.CaptureIO.capture_io(fn -> + merged = + ReleaseRuntimeProvider.load([], + exported_config_path: "test/fixtures/config/temp.exported_from_db.secret.exs" + ) + + assert merged[:pleroma][:exported_config_merged] + end) =~ + "!!! Config path is not declared! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file" + end + + test "runtime config is merged with exported config" do + merged = + ReleaseRuntimeProvider.load([], + config_path: "test/fixtures/config/temp.secret.exs", + exported_config_path: "test/fixtures/config/temp.exported_from_db.secret.exs" + ) + + assert merged[:pleroma][:first_setting] == [key2: [Pleroma.Repo], key: "new value"] + end + end +end From 8b81d6222773180c9632b7b53ebe7f5ee19f4f65 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@FreeBSD.org> Date: Thu, 8 Oct 2020 11:55:35 -0500 Subject: [PATCH 100/174] Upstream original followbot implementation --- config/config.exs | 2 + .../web/activity_pub/mrf/follow_bot_policy.ex | 41 +++++++++++++++++++ 2 files changed, 43 insertions(+) create mode 100644 lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex diff --git a/config/config.exs b/config/config.exs index 8d1e17b42..4381068ac 100644 --- a/config/config.exs +++ b/config/config.exs @@ -409,6 +409,8 @@ threshold: 604_800, actions: [:delist, :strip_followers] +config :pleroma, :mrf_follow_bot, follower_nickname: nil + config :pleroma, :rich_media, enabled: true, ignore_hosts: [], diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex new file mode 100644 index 000000000..fb123dbd3 --- /dev/null +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -0,0 +1,41 @@ +defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do + @behaviour Pleroma.Web.ActivityPub.MRF + alias Pleroma.User + alias Pleroma.Web.CommonAPI + require Logger + + @impl true + def filter(message) do + Task.start(fn -> + follower_nickname = Pleroma.Config.get([:mrf_follow_bot, :follower_nickname]) + + with %User{} = follower <- User.get_cached_by_nickname(follower_nickname), + %{"type" => "Create", "object" => %{"type" => "Note"}} <- message do + to = Map.get(message, "to", []) + cc = Map.get(message, "cc", []) + actor = [message["actor"]] + + Enum.concat([to, cc, actor]) + |> List.flatten() + |> User.get_all_by_ap_id() + |> Enum.each(fn user -> + Logger.info("Checking if #{user.nickname} can be followed") + + with false <- User.following?(follower, user), + false <- user.locked, + false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot") do + Logger.info("Following #{user.nickname}") + CommonAPI.follow(follower, user) + end + end) + end + end) + + {:ok, message} + end + + @impl true + def describe do + {:ok, %{}} + end +end From fba770b3ea861d0fdf7811b61a297278a617136b Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@FreeBSD.org> Date: Thu, 8 Oct 2020 12:09:31 -0500 Subject: [PATCH 101/174] Try to handle misconfiguration scenarios gracefully --- .../web/activity_pub/mrf/follow_bot_policy.ex | 55 ++++++++++++------- 1 file changed, 35 
insertions(+), 20 deletions(-) diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex index fb123dbd3..52ac9aef7 100644 --- a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -1,34 +1,49 @@ defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do @behaviour Pleroma.Web.ActivityPub.MRF + alias Pleroma.Config alias Pleroma.User alias Pleroma.Web.CommonAPI require Logger @impl true def filter(message) do + with follower_nickname <- Config.get([:mrf_follow_bot, :follower_nickname]), + %User{} = follower <- User.get_cached_by_nickname(follower_nickname), + %{"type" => "Create", "object" => %{"type" => "Note"}} <- message do + try_follow(follower, message) + else + nil -> + Logger.warn( + "#{__MODULE__} skipped because of missing :mrf_follow_bot, :follower_nickname configuration or the account + does not exist." + ) + + {:ok, message} + + _ -> + {:ok, message} + end + end + + defp try_follow(follower, message) do Task.start(fn -> - follower_nickname = Pleroma.Config.get([:mrf_follow_bot, :follower_nickname]) + to = Map.get(message, "to", []) + cc = Map.get(message, "cc", []) + actor = [message["actor"]] - with %User{} = follower <- User.get_cached_by_nickname(follower_nickname), - %{"type" => "Create", "object" => %{"type" => "Note"}} <- message do - to = Map.get(message, "to", []) - cc = Map.get(message, "cc", []) - actor = [message["actor"]] + Enum.concat([to, cc, actor]) + |> List.flatten() + |> User.get_all_by_ap_id() + |> Enum.each(fn user -> + Logger.info("Checking if #{user.nickname} can be followed") - Enum.concat([to, cc, actor]) - |> List.flatten() - |> User.get_all_by_ap_id() - |> Enum.each(fn user -> - Logger.info("Checking if #{user.nickname} can be followed") - - with false <- User.following?(follower, user), - false <- user.locked, - false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot") do - Logger.info("Following #{user.nickname}") - CommonAPI.follow(follower, user) - end - end) - end + with false <- User.following?(follower, user), + false <- user.locked, + false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot") do + Logger.info("Following #{user.nickname}") + CommonAPI.follow(follower, user) + end + end) end) {:ok, message} From 840dc4b44ba3ea2613b1a8dc110a9008ffc618c3 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 30 Mar 2021 11:10:34 -0500 Subject: [PATCH 102/174] Document :mrf_follow_bot --- docs/configuration/cheatsheet.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 8f2c4347e..6e52cd181 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -124,6 +124,7 @@ To add configuration to your config file, you can copy it from the base config. * `Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy`: Rejects or delists posts based on their age when received. (See [`:mrf_object_age`](#mrf_object_age)). * `Pleroma.Web.ActivityPub.MRF.ActivityExpirationPolicy`: Sets a default expiration on all posts made by users of the local instance. Requires `Pleroma.Workers.PurgeExpiredActivity` to be enabled for processing the scheduled delections. * `Pleroma.Web.ActivityPub.MRF.ForceBotUnlistedPolicy`: Makes all bot posts to disappear from public timelines. + * `Pleroma.Web.ActivityPub.MRF.FollowBotPolicy`: Automatically follows newly discovered users from the specified bot account. 
* `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo). * `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value. @@ -220,6 +221,11 @@ Notes: - The hashtags in the configuration do not have a leading `#`. - This MRF Policy is always enabled, if you want to disable it you have to set empty lists +#### :mrf_follow_bot + +* `follower_nickname`: The name of the bot account to use for following newly discovered users. + + ### :activitypub * `unfollow_blocked`: Whether blocks result in people getting unfollowed * `outgoing_blocks`: Whether to federate blocks to other instances From e78738173aefd512bbce33c12b4ee3372bdc904b Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@FreeBSD.org> Date: Thu, 8 Oct 2020 12:41:01 -0500 Subject: [PATCH 103/174] Enforce that the followbot must be marked as a bot. --- lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex index 52ac9aef7..d10b7b480 100644 --- a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -8,14 +8,15 @@ defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do @impl true def filter(message) do with follower_nickname <- Config.get([:mrf_follow_bot, :follower_nickname]), - %User{} = follower <- User.get_cached_by_nickname(follower_nickname), + %User{actor_type: "Service"} = follower <- + User.get_cached_by_nickname(follower_nickname), %{"type" => "Create", "object" => %{"type" => "Note"}} <- message do try_follow(follower, message) else nil -> Logger.warn( - "#{__MODULE__} skipped because of missing :mrf_follow_bot, :follower_nickname configuration or the account - does not exist." + "#{__MODULE__} skipped because of missing `:mrf_follow_bot, :follower_nickname` configuration, the :follower_nickname + account does not exist, or the account is not correctly configured as a bot." 
) {:ok, message} From 2557e805a3034f363f0dfaa38cb8b174d89e3d1b Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@FreeBSD.org> Date: Thu, 8 Oct 2020 12:46:27 -0500 Subject: [PATCH 104/174] Support for configuration via AdminFE --- config/description.exs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/config/description.exs b/config/description.exs index 41e5e4056..bb1f43305 100644 --- a/config/description.exs +++ b/config/description.exs @@ -2942,6 +2942,23 @@ } ] }, + %{ + group: :pleroma, + key: :mrf_follow_bot, + tab: :mrf, + related_policy: "Pleroma.Web.ActivityPub.MRF.FollowBotPolicy", + label: "MRF FollowBot Policy", + type: :group, + description: "Automatically follows newly discovered accounts.", + children: [ + %{ + key: :follower_nickname, + type: :string, + description: "The name of the bot account to use for following newly discovered users.", + suggestions: ["followbot"] + } + ] + }, %{ group: :pleroma, key: :modules, From 2689463c7e8e99f25964072360b4c6955b7fcea0 Mon Sep 17 00:00:00 2001 From: feld <feld@feld.me> Date: Thu, 8 Oct 2020 19:48:09 +0000 Subject: [PATCH 105/174] Apply 1 suggestion(s) to 1 file(s) --- docs/configuration/cheatsheet.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 6e52cd181..d30f4cbdd 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -223,7 +223,7 @@ Notes: #### :mrf_follow_bot -* `follower_nickname`: The name of the bot account to use for following newly discovered users. +* `follower_nickname`: The name of the bot account to use for following newly discovered users. Using `followbot` or similar is strongly suggested. ### :activitypub From 3949cfdc249bb508c1171851fa2ec076126003cc Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 09:47:25 -0600 Subject: [PATCH 106/174] Make the followbot only dispatch follow requests once per 30 day period --- .../web/activity_pub/mrf/follow_bot_policy.ex | 27 ++++++++++++++++--- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex index d10b7b480..044febe0c 100644 --- a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -1,10 +1,14 @@ defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do @behaviour Pleroma.Web.ActivityPub.MRF + alias Pleroma.Activity.Queries alias Pleroma.Config + alias Pleroma.Repo alias Pleroma.User alias Pleroma.Web.CommonAPI require Logger + import Ecto.Query + @impl true def filter(message) do with follower_nickname <- Config.get([:mrf_follow_bot, :follower_nickname]), @@ -36,12 +40,13 @@ defp try_follow(follower, message) do |> List.flatten() |> User.get_all_by_ap_id() |> Enum.each(fn user -> - Logger.info("Checking if #{user.nickname} can be followed") + since_thirty_days_ago = NaiveDateTime.utc_now() |> NaiveDateTime.add(-(86_400 * 30)) with false <- User.following?(follower, user), - false <- user.locked, - false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot") do - Logger.info("Following #{user.nickname}") + false <- User.locked?(user), + false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot"), + false <- outstanding_follow_request_since?(follower, user, since_thirty_days_ago) do + Logger.info("#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}") 
CommonAPI.follow(follower, user) end end) @@ -50,6 +55,20 @@ defp try_follow(follower, message) do {:ok, message} end + defp outstanding_follow_request_since?( + %User{ap_id: follower_id}, + %User{ap_id: followee_id}, + since_datetime + ) do + followee_id + |> Queries.by_object_id() + |> Queries.by_type("Follow") + |> where([a], a.inserted_at > ^since_datetime) + |> where([a], fragment("? ->> 'state' = 'pending'", a.data)) + |> where([a], a.actor == ^follower_id) + |> Repo.exists?() + end + @impl true def describe do {:ok, %{}} From 3989ec508c00a66d9093ead06deb8b1272b0b985 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 09:59:30 -0600 Subject: [PATCH 107/174] Prevent duplicates from being processed --- lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex index 044febe0c..2fd5d5612 100644 --- a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -38,6 +38,7 @@ defp try_follow(follower, message) do Enum.concat([to, cc, actor]) |> List.flatten() + |> Enum.uniq() |> User.get_all_by_ap_id() |> Enum.each(fn user -> since_thirty_days_ago = NaiveDateTime.utc_now() |> NaiveDateTime.add(-(86_400 * 30)) From a176914c73456eea7926235eb48e342ac1ab112d Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 14:42:20 -0600 Subject: [PATCH 108/174] Better checking of previous follow request attempts --- lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex index 2fd5d5612..c7aaa6386 100644 --- a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -65,7 +65,7 @@ defp outstanding_follow_request_since?( |> Queries.by_object_id() |> Queries.by_type("Follow") |> where([a], a.inserted_at > ^since_datetime) - |> where([a], fragment("? ->> 'state' = 'pending'", a.data)) + |> where([a], fragment("? 
->> 'state' != 'accept'", a.data)) |> where([a], a.actor == ^follower_id) |> Repo.exists?() end From f0dcc1ca692fb5d6a5aca4f8a9ccb255baef9c1d Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 14:55:05 -0600 Subject: [PATCH 109/174] Lint --- lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex index c7aaa6386..441ce553e 100644 --- a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -47,7 +47,10 @@ defp try_follow(follower, message) do false <- User.locked?(user), false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot"), false <- outstanding_follow_request_since?(follower, user, since_thirty_days_ago) do - Logger.info("#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}") + Logger.info( + "#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}" + ) + CommonAPI.follow(follower, user) end end) From 1926d0804ba6ade106a509c027af6bf56e6a8791 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 15:16:55 -0600 Subject: [PATCH 110/174] Add follow_requests_outstanding_since?/3 to Pleroma.Activity --- lib/pleroma/activity.ex | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/lib/pleroma/activity.ex b/lib/pleroma/activity.ex index d59403884..b0f1a900d 100644 --- a/lib/pleroma/activity.ex +++ b/lib/pleroma/activity.ex @@ -345,6 +345,20 @@ def following_requests_for_actor(%User{ap_id: ap_id}) do |> Repo.all() end + def follow_requests_outstanding_since?( + %User{ap_id: follower_id}, + %User{ap_id: followee_id}, + since_datetime + ) do + followee_id + |> Queries.by_object_id() + |> Queries.by_type("Follow") + |> where([a], a.inserted_at > ^since_datetime) + |> where([a], fragment("? 
->> 'state' != 'accept'", a.data)) + |> where([a], a.actor == ^follower_id) + |> Repo.exists?() + end + def restrict_deactivated_users(query) do deactivated_users = from(u in User.Query.build(%{deactivated: true}), select: u.ap_id) From 86182ef8e445ee8a89ce2e49f33cab3dac2d2b12 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 15:17:33 -0600 Subject: [PATCH 111/174] Change module name to FollowbotPolicy --- ...llow_bot_policy.ex => followbot_policy.ex} | 25 ++++--------------- 1 file changed, 5 insertions(+), 20 deletions(-) rename lib/pleroma/web/activity_pub/mrf/{follow_bot_policy.ex => followbot_policy.ex} (72%) diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex similarity index 72% rename from lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex rename to lib/pleroma/web/activity_pub/mrf/followbot_policy.ex index 441ce553e..838d39c88 100644 --- a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex @@ -1,13 +1,11 @@ -defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do +defmodule Pleroma.Web.ActivityPub.MRF.FollowbotPolicy do @behaviour Pleroma.Web.ActivityPub.MRF - alias Pleroma.Activity.Queries + alias Pleroma.Activity alias Pleroma.Config - alias Pleroma.Repo alias Pleroma.User alias Pleroma.Web.CommonAPI - require Logger - import Ecto.Query + require Logger @impl true def filter(message) do @@ -46,7 +44,8 @@ defp try_follow(follower, message) do with false <- User.following?(follower, user), false <- User.locked?(user), false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot"), - false <- outstanding_follow_request_since?(follower, user, since_thirty_days_ago) do + false <- + Activity.follow_requests_outstanding_since?(follower, user, since_thirty_days_ago) do Logger.info( "#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}" ) @@ -59,20 +58,6 @@ defp try_follow(follower, message) do {:ok, message} end - defp outstanding_follow_request_since?( - %User{ap_id: follower_id}, - %User{ap_id: followee_id}, - since_datetime - ) do - followee_id - |> Queries.by_object_id() - |> Queries.by_type("Follow") - |> where([a], a.inserted_at > ^since_datetime) - |> where([a], fragment("? ->> 'state' != 'accept'", a.data)) - |> where([a], a.actor == ^follower_id) - |> Repo.exists?() - end - @impl true def describe do {:ok, %{}} From 778010ef8e1f4509bd554e65556336e5e8457ef6 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 15:25:26 -0600 Subject: [PATCH 112/174] Do not try to follow local users. Their posts are already available locally on the instance. 
--- lib/pleroma/web/activity_pub/mrf/followbot_policy.ex | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex b/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex index 838d39c88..5c8834536 100644 --- a/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex @@ -41,7 +41,8 @@ defp try_follow(follower, message) do |> Enum.each(fn user -> since_thirty_days_ago = NaiveDateTime.utc_now() |> NaiveDateTime.add(-(86_400 * 30)) - with false <- User.following?(follower, user), + with false <- user.local, + false <- User.following?(follower, user), false <- User.locked?(user), false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot"), false <- From c252ac71d4ea4f3b08bd3524f32ee3fe9308be06 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 18:34:52 -0600 Subject: [PATCH 113/174] Revert --- lib/pleroma/activity.ex | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/lib/pleroma/activity.ex b/lib/pleroma/activity.ex index b0f1a900d..d59403884 100644 --- a/lib/pleroma/activity.ex +++ b/lib/pleroma/activity.ex @@ -345,20 +345,6 @@ def following_requests_for_actor(%User{ap_id: ap_id}) do |> Repo.all() end - def follow_requests_outstanding_since?( - %User{ap_id: follower_id}, - %User{ap_id: followee_id}, - since_datetime - ) do - followee_id - |> Queries.by_object_id() - |> Queries.by_type("Follow") - |> where([a], a.inserted_at > ^since_datetime) - |> where([a], fragment("? ->> 'state' != 'accept'", a.data)) - |> where([a], a.actor == ^follower_id) - |> Repo.exists?() - end - def restrict_deactivated_users(query) do deactivated_users = from(u in User.Query.build(%{deactivated: true}), select: u.ap_id) From f73d1667854fc4c6c721bf49a7deeefde1f569e3 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 18:36:21 -0600 Subject: [PATCH 114/174] Only need to validate a follow request is generated for now --- .../mrf/followbot_policy_test.exs | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs diff --git a/test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs b/test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs new file mode 100644 index 000000000..283e9b12c --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs @@ -0,0 +1,42 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.FollowbotPolicyTest do + use Pleroma.DataCase, async: true + + alias Pleroma.User + alias Pleroma.Web.ActivityPub.MRF.FollowbotPolicy + + import Pleroma.Factory + + describe "FollowBotPolicy" do + test "follows remote users" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, local: false) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "Test post", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + 
FollowbotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 1 + end + end +end From 4796df0bc39a57b2581168cb8d8fde7779068f2d Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 19 Feb 2021 18:36:35 -0600 Subject: [PATCH 115/174] Remove Task.async as it is broken here and probably a premature optimization anyway --- .../web/activity_pub/mrf/followbot_policy.ex | 41 ++++++++----------- 1 file changed, 17 insertions(+), 24 deletions(-) diff --git a/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex b/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex index 5c8834536..ca99e429c 100644 --- a/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex @@ -1,6 +1,5 @@ defmodule Pleroma.Web.ActivityPub.MRF.FollowbotPolicy do @behaviour Pleroma.Web.ActivityPub.MRF - alias Pleroma.Activity alias Pleroma.Config alias Pleroma.User alias Pleroma.Web.CommonAPI @@ -29,31 +28,25 @@ def filter(message) do end defp try_follow(follower, message) do - Task.start(fn -> - to = Map.get(message, "to", []) - cc = Map.get(message, "cc", []) - actor = [message["actor"]] + to = Map.get(message, "to", []) + cc = Map.get(message, "cc", []) + actor = [message["actor"]] - Enum.concat([to, cc, actor]) - |> List.flatten() - |> Enum.uniq() - |> User.get_all_by_ap_id() - |> Enum.each(fn user -> - since_thirty_days_ago = NaiveDateTime.utc_now() |> NaiveDateTime.add(-(86_400 * 30)) + Enum.concat([to, cc, actor]) + |> List.flatten() + |> Enum.uniq() + |> User.get_all_by_ap_id() + |> Enum.each(fn user -> + with false <- user.local, + false <- User.following?(follower, user), + false <- User.locked?(user), + false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot") do + Logger.debug( + "#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}" + ) - with false <- user.local, - false <- User.following?(follower, user), - false <- User.locked?(user), - false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot"), - false <- - Activity.follow_requests_outstanding_since?(follower, user, since_thirty_days_ago) do - Logger.info( - "#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}" - ) - - CommonAPI.follow(follower, user) - end - end) + CommonAPI.follow(follower, user) + end end) {:ok, message} From fef4f3772cf035cefb939bdfaaa4b12d6444b553 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Wed, 24 Feb 2021 11:52:03 -0600 Subject: [PATCH 116/174] More tests to validate Followbot is behaving --- .../mrf/followbot_policy_test.exs | 84 +++++++++++++++++++ 1 file changed, 84 insertions(+) diff --git a/test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs b/test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs index 283e9b12c..4c39e04e8 100644 --- a/test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs @@ -38,5 +38,89 @@ test "follows remote users" do assert User.get_follow_requests(remote_user) |> length == 1 end + + test "does not follow users with #nobot in bio" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, %{local: false, bio: "go away bots! 
#nobot"}) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "I don't like follow bots", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowbotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 0 + end + + test "does not follow local users" do + bot = insert(:user, actor_type: "Service") + local_user = insert(:user, local: true) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [local_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "Hi I'm a local user", + "type" => "Note", + "attributedTo" => local_user.ap_id, + "inReplyTo" => nil + }, + "actor" => local_user.ap_id + } + + refute User.following?(bot, local_user) + + assert User.get_follow_requests(local_user) |> length == 0 + + FollowbotPolicy.filter(message) + + assert User.get_follow_requests(local_user) |> length == 0 + end + + test "does not follow users requiring follower approval" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, %{local: false, is_locked: true}) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "I don't like randos following me", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowbotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 0 + end end end From 7eab98d5c856097c0cfe09a02adfd4c05fb5d240 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Wed, 24 Feb 2021 11:58:09 -0600 Subject: [PATCH 117/174] Document new FollowBot MRF --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fb26c7a73..43f2bb638 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -75,6 +75,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Ability to define custom HTTP headers per each frontend - MRF (`NoEmptyPolicy`): New MRF Policy which will deny empty statuses or statuses of only mentions from being created by local users - New users will receive a simple email confirming their registration if no other emails will be dispatched. (e.g., Welcome, Confirmation, or Approval Required) +- MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. 
<details> <summary>API Changes</summary> From 03f38ac4ebd97e792b0ff2a6ac804adefed85a41 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Wed, 24 Feb 2021 11:59:11 -0600 Subject: [PATCH 118/174] Prefer FollowBot naming convention vs Followbot --- ...llowbot_policy.ex => follow_bot_policy.ex} | 2 +- .../mrf/follow_bot_policy_test.exs | 126 ++++++++++++++++++ ...y_test.exs => follow_bot_policy_test.exs~} | 0 3 files changed, 127 insertions(+), 1 deletion(-) rename lib/pleroma/web/activity_pub/mrf/{followbot_policy.ex => follow_bot_policy.ex} (96%) create mode 100644 test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs rename test/pleroma/web/activity_pub/mrf/{followbot_policy_test.exs => follow_bot_policy_test.exs~} (100%) diff --git a/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex similarity index 96% rename from lib/pleroma/web/activity_pub/mrf/followbot_policy.ex rename to lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex index ca99e429c..7307c9c14 100644 --- a/lib/pleroma/web/activity_pub/mrf/followbot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -1,4 +1,4 @@ -defmodule Pleroma.Web.ActivityPub.MRF.FollowbotPolicy do +defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do @behaviour Pleroma.Web.ActivityPub.MRF alias Pleroma.Config alias Pleroma.User diff --git a/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs new file mode 100644 index 000000000..3f63f11ef --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs @@ -0,0 +1,126 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicyTest do + use Pleroma.DataCase, async: true + + alias Pleroma.User + alias Pleroma.Web.ActivityPub.MRF.FollowBotPolicy + + import Pleroma.Factory + + describe "FollowBotPolicy" do + test "follows remote users" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, local: false) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "Test post", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowbotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 1 + end + + test "does not follow users with #nobot in bio" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, %{local: false, bio: "go away bots! 
#nobot"}) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "I don't like follow bots", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowbotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 0 + end + + test "does not follow local users" do + bot = insert(:user, actor_type: "Service") + local_user = insert(:user, local: true) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [local_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "Hi I'm a local user", + "type" => "Note", + "attributedTo" => local_user.ap_id, + "inReplyTo" => nil + }, + "actor" => local_user.ap_id + } + + refute User.following?(bot, local_user) + + assert User.get_follow_requests(local_user) |> length == 0 + + FollowbotPolicy.filter(message) + + assert User.get_follow_requests(local_user) |> length == 0 + end + + test "does not follow users requiring follower approval" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, %{local: false, is_locked: true}) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "I don't like randos following me", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowbotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 0 + end + end +end diff --git a/test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs~ similarity index 100% rename from test/pleroma/web/activity_pub/mrf/followbot_policy_test.exs rename to test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs~ From d29f6d6b6ef896d0fa47b4f5136fc6714e3425f3 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Wed, 24 Feb 2021 12:02:33 -0600 Subject: [PATCH 119/174] Add more details to the cheatsheat for FollowBot MRF --- docs/configuration/cheatsheet.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index d30f4cbdd..069421722 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -124,7 +124,7 @@ To add configuration to your config file, you can copy it from the base config. * `Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy`: Rejects or delists posts based on their age when received. (See [`:mrf_object_age`](#mrf_object_age)). * `Pleroma.Web.ActivityPub.MRF.ActivityExpirationPolicy`: Sets a default expiration on all posts made by users of the local instance. 
Requires `Pleroma.Workers.PurgeExpiredActivity` to be enabled for processing the scheduled delections. * `Pleroma.Web.ActivityPub.MRF.ForceBotUnlistedPolicy`: Makes all bot posts to disappear from public timelines. - * `Pleroma.Web.ActivityPub.MRF.FollowBotPolicy`: Automatically follows newly discovered users from the specified bot account. + * `Pleroma.Web.ActivityPub.MRF.FollowBotPolicy`: Automatically follows newly discovered users from the specified bot account. Local accounts, locked accounts, and users with "#nobot" in their bio are respected and excluded from being followed. * `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo). * `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value. From bfcdcd4f6937bfc0b123a6ec0bbf1d3e6968f0fb Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Wed, 24 Feb 2021 12:07:40 -0600 Subject: [PATCH 120/174] Temp file leaked, oops --- .../mrf/follow_bot_policy_test.exs~ | 126 ------------------ 1 file changed, 126 deletions(-) delete mode 100644 test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs~ diff --git a/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs~ b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs~ deleted file mode 100644 index 4c39e04e8..000000000 --- a/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs~ +++ /dev/null @@ -1,126 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.ActivityPub.MRF.FollowbotPolicyTest do - use Pleroma.DataCase, async: true - - alias Pleroma.User - alias Pleroma.Web.ActivityPub.MRF.FollowbotPolicy - - import Pleroma.Factory - - describe "FollowBotPolicy" do - test "follows remote users" do - bot = insert(:user, actor_type: "Service") - remote_user = insert(:user, local: false) - clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) - - message = %{ - "@context" => "https://www.w3.org/ns/activitystreams", - "to" => [remote_user.follower_address], - "cc" => ["https://www.w3.org/ns/activitystreams#Public"], - "type" => "Create", - "object" => %{ - "content" => "Test post", - "type" => "Note", - "attributedTo" => remote_user.ap_id, - "inReplyTo" => nil - }, - "actor" => remote_user.ap_id - } - - refute User.following?(bot, remote_user) - - assert User.get_follow_requests(remote_user) |> length == 0 - - FollowbotPolicy.filter(message) - - assert User.get_follow_requests(remote_user) |> length == 1 - end - - test "does not follow users with #nobot in bio" do - bot = insert(:user, actor_type: "Service") - remote_user = insert(:user, %{local: false, bio: "go away bots! 
#nobot"}) - clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) - - message = %{ - "@context" => "https://www.w3.org/ns/activitystreams", - "to" => [remote_user.follower_address], - "cc" => ["https://www.w3.org/ns/activitystreams#Public"], - "type" => "Create", - "object" => %{ - "content" => "I don't like follow bots", - "type" => "Note", - "attributedTo" => remote_user.ap_id, - "inReplyTo" => nil - }, - "actor" => remote_user.ap_id - } - - refute User.following?(bot, remote_user) - - assert User.get_follow_requests(remote_user) |> length == 0 - - FollowbotPolicy.filter(message) - - assert User.get_follow_requests(remote_user) |> length == 0 - end - - test "does not follow local users" do - bot = insert(:user, actor_type: "Service") - local_user = insert(:user, local: true) - clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) - - message = %{ - "@context" => "https://www.w3.org/ns/activitystreams", - "to" => [local_user.follower_address], - "cc" => ["https://www.w3.org/ns/activitystreams#Public"], - "type" => "Create", - "object" => %{ - "content" => "Hi I'm a local user", - "type" => "Note", - "attributedTo" => local_user.ap_id, - "inReplyTo" => nil - }, - "actor" => local_user.ap_id - } - - refute User.following?(bot, local_user) - - assert User.get_follow_requests(local_user) |> length == 0 - - FollowbotPolicy.filter(message) - - assert User.get_follow_requests(local_user) |> length == 0 - end - - test "does not follow users requiring follower approval" do - bot = insert(:user, actor_type: "Service") - remote_user = insert(:user, %{local: false, is_locked: true}) - clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) - - message = %{ - "@context" => "https://www.w3.org/ns/activitystreams", - "to" => [remote_user.follower_address], - "cc" => ["https://www.w3.org/ns/activitystreams#Public"], - "type" => "Create", - "object" => %{ - "content" => "I don't like randos following me", - "type" => "Note", - "attributedTo" => remote_user.ap_id, - "inReplyTo" => nil - }, - "actor" => remote_user.ap_id - } - - refute User.following?(bot, remote_user) - - assert User.get_follow_requests(remote_user) |> length == 0 - - FollowbotPolicy.filter(message) - - assert User.get_follow_requests(remote_user) |> length == 0 - end - end -end From 16a7ffb1ea9dc8e2c7a70d9243424b40d594bd63 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 26 Feb 2021 11:04:27 -0600 Subject: [PATCH 121/174] Fix function calls due to module name change --- .../web/activity_pub/mrf/follow_bot_policy_test.exs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs index 3f63f11ef..a61562558 100644 --- a/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs @@ -34,7 +34,7 @@ test "follows remote users" do assert User.get_follow_requests(remote_user) |> length == 0 - FollowbotPolicy.filter(message) + FollowBotPolicy.filter(message) assert User.get_follow_requests(remote_user) |> length == 1 end @@ -62,7 +62,7 @@ test "does not follow users with #nobot in bio" do assert User.get_follow_requests(remote_user) |> length == 0 - FollowbotPolicy.filter(message) + FollowBotPolicy.filter(message) assert User.get_follow_requests(remote_user) |> length == 0 end @@ -90,7 +90,7 @@ test "does not follow local users" do assert User.get_follow_requests(local_user) |> length == 0 
- FollowbotPolicy.filter(message) + FollowBotPolicy.filter(message) assert User.get_follow_requests(local_user) |> length == 0 end @@ -118,7 +118,7 @@ test "does not follow users requiring follower approval" do assert User.get_follow_requests(remote_user) |> length == 0 - FollowbotPolicy.filter(message) + FollowBotPolicy.filter(message) assert User.get_follow_requests(remote_user) |> length == 0 end From 863010ea637d6670076dba3f6da54daa144cce67 Mon Sep 17 00:00:00 2001 From: Miss Pasture <atsmdq@gmail.com> Date: Wed, 31 Mar 2021 06:51:22 +0000 Subject: [PATCH 122/174] date-times are always strings --- lib/pleroma/web/api_spec/operations/account_operation.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/web/api_spec/operations/account_operation.ex b/lib/pleroma/web/api_spec/operations/account_operation.ex index 54e5ebc76..08d68893a 100644 --- a/lib/pleroma/web/api_spec/operations/account_operation.ex +++ b/lib/pleroma/web/api_spec/operations/account_operation.ex @@ -482,7 +482,7 @@ defp create_response do access_token: %Schema{type: :string}, refresh_token: %Schema{type: :string}, scope: %Schema{type: :string}, - created_at: %Schema{type: :integer, format: :"date-time"}, + created_at: %Schema{type: :string, format: :"date-time"}, me: %Schema{type: :string}, expires_in: %Schema{type: :integer}, # From af1cd28f9b8005dff563c0df7f80db47df4e8488 Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" <contact@hacktivis.me> Date: Thu, 1 Apr 2021 11:50:45 +0200 Subject: [PATCH 123/174] object_validator: Refactor most of validate/2 to a generic block --- .../web/activity_pub/object_validator.ex | 135 +++--------------- 1 file changed, 22 insertions(+), 113 deletions(-) diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index 297c19cc0..f75744203 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -37,37 +37,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do @impl true def validate(object, meta) - def validate(%{"type" => type} = object, meta) - when type in ~w[Accept Reject] do - with {:ok, object} <- - object - |> AcceptRejectValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Event"} = object, meta) do - with {:ok, object} <- - object - |> EventValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Follow"} = object, meta) do - with {:ok, object} <- - object - |> FollowValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - def validate(%{"type" => "Block"} = block_activity, meta) do with {:ok, block_activity} <- block_activity @@ -87,16 +56,6 @@ def validate(%{"type" => "Block"} = block_activity, meta) do end end - def validate(%{"type" => "Update"} = update_activity, meta) do - with {:ok, update_activity} <- - update_activity - |> UpdateValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - update_activity = stringify_keys(update_activity) - {:ok, update_activity, meta} - end - end - def validate(%{"type" => "Undo"} = object, meta) do with {:ok, object} <- object @@ -123,76 +82,6 @@ def validate(%{"type" => "Delete"} = object, meta) do end end - def validate(%{"type" => "Like"} = 
object, meta) do - with {:ok, object} <- - object - |> LikeValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "ChatMessage"} = object, meta) do - with {:ok, object} <- - object - |> ChatMessageValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Question"} = object, meta) do - with {:ok, object} <- - object - |> QuestionValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => type} = object, meta) when type in ~w[Audio Video] do - with {:ok, object} <- - object - |> AudioVideoValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Article"} = object, meta) do - with {:ok, object} <- - object - |> ArticleNoteValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Answer"} = object, meta) do - with {:ok, object} <- - object - |> AnswerValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "EmojiReact"} = object, meta) do - with {:ok, object} <- - object - |> EmojiReactValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - def validate( %{"type" => "Create", "object" => %{"type" => "ChatMessage"} = object} = create_activity, meta @@ -224,10 +113,30 @@ def validate( end end - def validate(%{"type" => "Announce"} = object, meta) do + def validate(%{"type" => type} = object, meta) + when type in ~w[Accept Reject Follow Update Like EmojiReact Announce + Event ChatMessage Question Audio Video Article Answer] do + validator = + case type do + "Accept" -> AcceptRejectValidator + "Reject" -> AcceptRejectValidator + "Follow" -> FollowValidator + "Update" -> UpdateValidator + "Like" -> LikeValidator + "EmojiReact" -> EmojiReactValidator + "Announce" -> AnnounceValidator + "Event" -> EventValidator + "ChatMessage" -> ChatMessageValidator + "Question" -> QuestionValidator + "Audio" -> AudioVideoValidator + "Video" -> AudioVideoValidator + "Article" -> ArticleNoteValidator + "Answer" -> AnswerValidator + end + with {:ok, object} <- object - |> AnnounceValidator.cast_and_validate() + |> validator.cast_and_validate() |> Ecto.Changeset.apply_action(:insert) do object = stringify_keys(object) {:ok, object, meta} From 1e3db075861198417bca36f4f2b0f29c2367c77e Mon Sep 17 00:00:00 2001 From: Haelwenn <contact+git.pleroma.social@hacktivis.me> Date: Thu, 1 Apr 2021 12:00:58 +0000 Subject: [PATCH 124/174] Revert "Merge branch 'patch-fix-open-api-spec' into 'develop'" This reverts merge request !3382 --- lib/pleroma/web/api_spec/operations/account_operation.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/web/api_spec/operations/account_operation.ex b/lib/pleroma/web/api_spec/operations/account_operation.ex index 08d68893a..54e5ebc76 100644 --- a/lib/pleroma/web/api_spec/operations/account_operation.ex +++ b/lib/pleroma/web/api_spec/operations/account_operation.ex @@ -482,7 +482,7 @@ defp create_response 
do access_token: %Schema{type: :string}, refresh_token: %Schema{type: :string}, scope: %Schema{type: :string}, - created_at: %Schema{type: :string, format: :"date-time"}, + created_at: %Schema{type: :integer, format: :"date-time"}, me: %Schema{type: :string}, expires_in: %Schema{type: :integer}, # From 9015df22291ab60c0efad328557936fd14eab2e6 Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" <contact@hacktivis.me> Date: Thu, 7 Jan 2021 18:23:01 +0100 Subject: [PATCH 125/174] TagValidator: New --- .../article_note_validator.ex | 7 +- .../object_validators/attachment_validator.ex | 1 - .../audio_video_validator.ex | 7 +- .../object_validators/event_validator.ex | 7 +- .../object_validators/question_validator.ex | 7 +- .../object_validators/tag_validator.ex | 77 +++++++++++++++++++ 6 files changed, 93 insertions(+), 13 deletions(-) create mode 100644 lib/pleroma/web/activity_pub/object_validators/tag_validator.ex diff --git a/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex b/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex index b0388ef3b..5910f4060 100644 --- a/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex @@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNoteValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations + alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator alias Pleroma.Web.ActivityPub.Transmogrifier import Ecto.Changeset @@ -22,8 +23,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNoteValidator do field(:cc, ObjectValidators.Recipients, default: []) field(:bto, ObjectValidators.Recipients, default: []) field(:bcc, ObjectValidators.Recipients, default: []) - # TODO: Write type - field(:tag, {:array, :map}, default: []) + embeds_many(:tag, TagValidator) field(:type, :string) field(:name, :string) @@ -90,8 +90,9 @@ def changeset(struct, data) do data = fix(data) struct - |> cast(data, __schema__(:fields) -- [:attachment]) + |> cast(data, __schema__(:fields) -- [:attachment, :tag]) |> cast_embed(:attachment) + |> cast_embed(:tag) end def validate_data(data_cng) do diff --git a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex index 3175427ad..e7b3a3922 100644 --- a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex @@ -6,7 +6,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do use Ecto.Schema alias Pleroma.EctoType.ActivityPub.ObjectValidators - alias Pleroma.Web.ActivityPub.ObjectValidators.UrlObjectValidator import Ecto.Changeset diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex index 4a96fef52..a04c95f4b 100644 --- a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias 
Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations + alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator alias Pleroma.Web.ActivityPub.Transmogrifier import Ecto.Changeset @@ -23,8 +24,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator do field(:cc, ObjectValidators.Recipients, default: []) field(:bto, ObjectValidators.Recipients, default: []) field(:bcc, ObjectValidators.Recipients, default: []) - # TODO: Write type - field(:tag, {:array, :map}, default: []) + embeds_many(:tag, TagValidator) field(:type, :string) field(:name, :string) @@ -132,8 +132,9 @@ def changeset(struct, data) do data = fix(data) struct - |> cast(data, __schema__(:fields) -- [:attachment]) + |> cast(data, __schema__(:fields) -- [:attachment, :tag]) |> cast_embed(:attachment) + |> cast_embed(:tag) end def validate_data(data_cng) do diff --git a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex index 2e26726f8..0112a074d 100644 --- a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex @@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations + alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator alias Pleroma.Web.ActivityPub.Transmogrifier import Ecto.Changeset @@ -23,8 +24,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do field(:cc, ObjectValidators.Recipients, default: []) field(:bto, ObjectValidators.Recipients, default: []) field(:bcc, ObjectValidators.Recipients, default: []) - # TODO: Write type - field(:tag, {:array, :map}, default: []) + embeds_many(:tag, TagValidator) field(:type, :string) field(:name, :string) @@ -81,8 +81,9 @@ def changeset(struct, data) do data = fix(data) struct - |> cast(data, __schema__(:fields) -- [:attachment]) + |> cast(data, __schema__(:fields) -- [:attachment, :tag]) |> cast_embed(:attachment) + |> cast_embed(:tag) end def validate_data(data_cng) do diff --git a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex index 6b746c997..7acb1e928 100644 --- a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations alias Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator + alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator alias Pleroma.Web.ActivityPub.Transmogrifier import Ecto.Changeset @@ -24,8 +25,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do field(:cc, ObjectValidators.Recipients, default: []) field(:bto, ObjectValidators.Recipients, default: []) field(:bcc, ObjectValidators.Recipients, default: []) - # TODO: Write type - field(:tag, {:array, :map}, default: []) + embeds_many(:tag, TagValidator) field(:type, :string) field(:content, :string) field(:context, :string) @@ -93,10 +93,11 @@ def changeset(struct, data) do data = fix(data) struct - |> cast(data, __schema__(:fields) -- [:anyOf, :oneOf, 
:attachment]) + |> cast(data, __schema__(:fields) -- [:anyOf, :oneOf, :attachment, :tag]) |> cast_embed(:attachment) |> cast_embed(:anyOf) |> cast_embed(:oneOf) + |> cast_embed(:tag) end def validate_data(data_cng) do diff --git a/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex b/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex new file mode 100644 index 000000000..751021585 --- /dev/null +++ b/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex @@ -0,0 +1,77 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.ObjectValidators.TagValidator do + use Ecto.Schema + + alias Pleroma.EctoType.ActivityPub.ObjectValidators + + import Ecto.Changeset + + @primary_key false + embedded_schema do + # Common + field(:type, :string) + field(:name, :string) + + # Mention, Hashtag + field(:href, ObjectValidators.Uri) + + # Emoji + embeds_one :icon, IconObjectValidator, primary_key: false do + field(:type, :string) + field(:url, ObjectValidators.Uri) + end + + field(:updated, ObjectValidators.DateTime) + field(:id, ObjectValidators.Uri) + end + + def cast_and_validate(data) do + data + |> cast_data() + end + + def cast_data(data) do + %__MODULE__{} + |> changeset(data) + end + + def changeset(struct, %{"type" => "Mention"} = data) do + struct + |> cast(data, [:type, :name, :href]) + |> validate_required([:type, :href]) + end + + def changeset(struct, %{"type" => "Hashtag", "name" => name} = data) do + name = + cond do + "#" <> name -> name + name -> name + end + |> String.downcase() + + data = Map.put(data, "name", name) + + struct + |> cast(data, [:type, :name, :href]) + |> validate_required([:type, :name]) + end + + def changeset(struct, %{"type" => "Emoji"} = data) do + data = Map.put(data, "name", String.trim(data["name"], ":")) + + struct + |> cast(data, [:type, :name, :updated, :id]) + |> cast_embed(:icon, with: &icon_changeset/2) + |> validate_required([:type, :name, :icon]) + end + + def icon_changeset(struct, data) do + struct + |> cast(data, [:type, :url]) + |> validate_inclusion(:type, ~w[Image]) + |> validate_required([:type, :url]) + end +end From 5ae27c8451a7012b43ef9113713132158701364b Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" <contact@hacktivis.me> Date: Tue, 12 Jan 2021 14:11:29 +0100 Subject: [PATCH 126/174] pipeline_test: Fix usage of %Activity{} --- .../web/activity_pub/object_validator.ex | 2 +- lib/pleroma/web/activity_pub/pipeline.ex | 2 ++ .../web/activity_pub/pipeline_test.exs | 23 +++++++++++++------ 3 files changed, 19 insertions(+), 8 deletions(-) diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index f75744203..15784b28c 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -169,7 +169,7 @@ def cast_and_apply(%{"type" => "Article"} = object) do def cast_and_apply(o), do: {:error, {:validator_not_set, o}} - # is_struct/1 isn't present in Elixir 1.8.x + # is_struct/1 appears in Elixir 1.11 def stringify_keys(%{__struct__: _} = object) do object |> Map.from_struct() diff --git a/lib/pleroma/web/activity_pub/pipeline.ex b/lib/pleroma/web/activity_pub/pipeline.ex index 195596f94..0aa504e72 100644 --- a/lib/pleroma/web/activity_pub/pipeline.ex +++ b/lib/pleroma/web/activity_pub/pipeline.ex @@ -40,6 +40,8 @@ def common_pipeline(object, meta) do end end 
+ def do_common_pipeline(%{__struct__: _}, _meta), do: {:error, :is_struct} + def do_common_pipeline(object, meta) do with {_, {:ok, validated_object, meta}} <- {:validate_object, @object_validator.validate(object, meta)}, diff --git a/test/pleroma/web/activity_pub/pipeline_test.exs b/test/pleroma/web/activity_pub/pipeline_test.exs index 52fa933ee..e606fa3d1 100644 --- a/test/pleroma/web/activity_pub/pipeline_test.exs +++ b/test/pleroma/web/activity_pub/pipeline_test.exs @@ -25,9 +25,6 @@ defmodule Pleroma.Web.ActivityPub.PipelineTest do MRFMock |> expect(:pipeline_filter, fn o, m -> {:ok, o, m} end) - ActivityPubMock - |> expect(:persist, fn o, m -> {:ok, o, m} end) - SideEffectsMock |> expect(:handle, fn o, m -> {:ok, o, m} end) |> expect(:handle_after_transaction, fn m -> m end) @@ -42,6 +39,9 @@ test "when given an `object_data` in meta, Federation will receive a the origina activity_with_object = %{activity | data: Map.put(activity.data, "object", object)} + ActivityPubMock + |> expect(:persist, fn _, m -> {:ok, activity, m} end) + FederatorMock |> expect(:publish, fn ^activity_with_object -> :ok end) @@ -50,7 +50,7 @@ test "when given an `object_data` in meta, Federation will receive a the origina assert {:ok, ^activity, ^meta} = Pleroma.Web.ActivityPub.Pipeline.common_pipeline( - activity, + activity.data, meta ) end @@ -59,6 +59,9 @@ test "it goes through validation, filtering, persisting, side effects and federa activity = insert(:note_activity) meta = [local: true] + ActivityPubMock + |> expect(:persist, fn _, m -> {:ok, activity, m} end) + FederatorMock |> expect(:publish, fn ^activity -> :ok end) @@ -66,29 +69,35 @@ test "it goes through validation, filtering, persisting, side effects and federa |> expect(:get, fn [:instance, :federating] -> true end) assert {:ok, ^activity, ^meta} = - Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity, meta) + Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity.data, meta) end test "it goes through validation, filtering, persisting, side effects without federation for remote activities" do activity = insert(:note_activity) meta = [local: false] + ActivityPubMock + |> expect(:persist, fn _, m -> {:ok, activity, m} end) + ConfigMock |> expect(:get, fn [:instance, :federating] -> true end) assert {:ok, ^activity, ^meta} = - Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity, meta) + Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity.data, meta) end test "it goes through validation, filtering, persisting, side effects without federation for local activities if federation is deactivated" do activity = insert(:note_activity) meta = [local: true] + ActivityPubMock + |> expect(:persist, fn _, m -> {:ok, activity, m} end) + ConfigMock |> expect(:get, fn [:instance, :federating] -> false end) assert {:ok, ^activity, ^meta} = - Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity, meta) + Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity.data, meta) end end end From 37a7f521fd4778cde48f1b003ad9695e6ea45d1f Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" <contact@hacktivis.me> Date: Tue, 12 Jan 2021 09:30:22 +0100 Subject: [PATCH 127/174] Insert string-hashtags in Pipeline Cannot be done in Ecto schemas because only one type is allowed in arrays, and needs to be done before the MRFs. 
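[Editor's illustration, not part of the original change set: a minimal sketch of what the pipeline-level fixup below does, assuming a validated Note whose tag list holds a single normalized Hashtag map; the tag name and href are placeholder values.]

    # before the fixup
    %{"tag" => [%{"type" => "Hashtag", "name" => "cats", "href" => "https://example.com/tags/cats"}]}

    # after Object.hashtags/1 prepends the plain-string copies
    %{"tag" => ["cats", %{"type" => "Hashtag", "name" => "cats", "href" => "https://example.com/tags/cats"}]}

The string copies keep the existing string-based tag indexing working, while the structured Hashtag maps survive Ecto validation unchanged.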
--- lib/pleroma/web/activity_pub/pipeline.ex | 34 ++++++++++++------- .../web/activity_pub/transmogrifier.ex | 2 +- lib/pleroma/web/common_api.ex | 12 +------ 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/lib/pleroma/web/activity_pub/pipeline.ex b/lib/pleroma/web/activity_pub/pipeline.ex index 0aa504e72..e184a9376 100644 --- a/lib/pleroma/web/activity_pub/pipeline.ex +++ b/lib/pleroma/web/activity_pub/pipeline.ex @@ -42,23 +42,33 @@ def common_pipeline(object, meta) do def do_common_pipeline(%{__struct__: _}, _meta), do: {:error, :is_struct} - def do_common_pipeline(object, meta) do - with {_, {:ok, validated_object, meta}} <- - {:validate_object, @object_validator.validate(object, meta)}, - {_, {:ok, mrfd_object, meta}} <- - {:mrf_object, @mrf.pipeline_filter(validated_object, meta)}, - {_, {:ok, activity, meta}} <- - {:persist_object, @activity_pub.persist(mrfd_object, meta)}, - {_, {:ok, activity, meta}} <- - {:execute_side_effects, @side_effects.handle(activity, meta)}, - {_, {:ok, _}} <- {:federation, maybe_federate(activity, meta)} do - {:ok, activity, meta} + def do_common_pipeline(message, meta) do + with {_, {:ok, message, meta}} <- {:validate, @object_validator.validate(message, meta)}, + {_, {:ok, message, meta}} <- {:fixup, validation_fixups(message, meta)}, + {_, {:ok, message, meta}} <- {:mrf, @mrf.pipeline_filter(message, meta)}, + {_, {:ok, message, meta}} <- {:persist, @activity_pub.persist(message, meta)}, + {_, {:ok, message, meta}} <- {:side_effects, @side_effects.handle(message, meta)}, + {_, {:ok, _}} <- {:federation, maybe_federate(message, meta)} do + {:ok, message, meta} else - {:mrf_object, {:reject, message, _}} -> {:reject, message} + {:mrf, {:reject, message, _}} -> {:reject, message} e -> {:error, e} end end + defp validation_fixups(message, meta) do + # Insert copy of hashtags as strings for the non-hashtag table indexing + message = + if message["tag"] do + tag = Object.hashtags(%Object{data: message}) ++ (message["tag"] || []) + Map.put(message, "tag", tag) + else + message + end + + {:ok, message, meta} + end + defp maybe_federate(%Object{}, _), do: {:ok, :not_federated} defp maybe_federate(%Activity{} = activity, meta) do diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 8c7d6a747..4070ed14d 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -564,7 +564,7 @@ def handle_incoming( Pipeline.common_pipeline(data, local: false) do {:ok, activity} else - {:error, {:validate_object, _}} = e -> + {:error, {:validate, _}} = e -> # Check if we have a create activity for this with {:ok, object_id} <- ObjectValidators.ObjectID.cast(data["object"]), %Activity{data: %{"actor" => actor}} <- diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index b003e30c7..895baebc9 100644 --- a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -228,17 +228,7 @@ def favorite_helper(user, id) do {:find_object, _} -> {:error, :not_found} - {:common_pipeline, - { - :error, - { - :validate_object, - { - :error, - changeset - } - } - }} = e -> + {:common_pipeline, {:error, {:validate, {:error, changeset}}}} = e -> if {:object, {"already liked by this actor", []}} in changeset.errors do {:ok, :already_liked} else From 7ebfe899007002f5bbf8744a8f0b582e0e13342e Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" <contact@hacktivis.me> Date: Tue, 12 Jan 2021 11:14:09 +0100 Subject: [PATCH 128/174] 
object_validators: Mark validate_data as private --- .../activity_pub/object_validators/accept_reject_validator.ex | 2 +- .../web/activity_pub/object_validators/announce_validator.ex | 2 +- .../web/activity_pub/object_validators/answer_validator.ex | 2 +- .../activity_pub/object_validators/article_note_validator.ex | 2 +- .../web/activity_pub/object_validators/attachment_validator.ex | 2 +- .../web/activity_pub/object_validators/audio_video_validator.ex | 2 +- .../web/activity_pub/object_validators/block_validator.ex | 2 +- .../activity_pub/object_validators/chat_message_validator.ex | 2 +- .../object_validators/create_chat_message_validator.ex | 2 +- .../activity_pub/object_validators/create_generic_validator.ex | 2 +- .../web/activity_pub/object_validators/delete_validator.ex | 2 +- .../web/activity_pub/object_validators/emoji_react_validator.ex | 2 +- .../web/activity_pub/object_validators/event_validator.ex | 2 +- .../web/activity_pub/object_validators/follow_validator.ex | 2 +- .../web/activity_pub/object_validators/like_validator.ex | 2 +- .../web/activity_pub/object_validators/question_validator.ex | 2 +- .../web/activity_pub/object_validators/undo_validator.ex | 2 +- .../web/activity_pub/object_validators/update_validator.ex | 2 +- 18 files changed, 18 insertions(+), 18 deletions(-) diff --git a/lib/pleroma/web/activity_pub/object_validators/accept_reject_validator.ex b/lib/pleroma/web/activity_pub/object_validators/accept_reject_validator.ex index d31e780c3..b577a1044 100644 --- a/lib/pleroma/web/activity_pub/object_validators/accept_reject_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/accept_reject_validator.ex @@ -27,7 +27,7 @@ def cast_data(data) do |> cast(data, __schema__(:fields)) end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Accept", "Reject"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex b/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex index b08a33e68..576341790 100644 --- a/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex @@ -50,7 +50,7 @@ def fix_after_cast(cng) do cng end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Announce"]) |> validate_required([:id, :type, :object, :actor, :to, :cc]) diff --git a/lib/pleroma/web/activity_pub/object_validators/answer_validator.ex b/lib/pleroma/web/activity_pub/object_validators/answer_validator.ex index 15e4413cd..c9bd9e42d 100644 --- a/lib/pleroma/web/activity_pub/object_validators/answer_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/answer_validator.ex @@ -50,7 +50,7 @@ def changeset(struct, data) do |> cast(data, __schema__(:fields)) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Answer"]) |> validate_required([:id, :inReplyTo, :name, :attributedTo, :actor]) diff --git a/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex b/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex index 5910f4060..39ef6dc29 100644 --- a/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex @@ -95,7 +95,7 @@ def changeset(struct, data) do |> cast_embed(:tag) end - def validate_data(data_cng) 
do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Article", "Note"]) |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id]) diff --git a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex index e7b3a3922..4a0d1473d 100644 --- a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex @@ -89,7 +89,7 @@ defp fix_url(data) do end end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_inclusion(:type, ~w[Document Audio Image Video]) |> validate_required([:mediaType, :url, :type]) diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex index a04c95f4b..8a5a60526 100644 --- a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex @@ -137,7 +137,7 @@ def changeset(struct, data) do |> cast_embed(:tag) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Audio", "Video"]) |> validate_required([:id, :actor, :attributedTo, :type, :context, :attachment]) diff --git a/lib/pleroma/web/activity_pub/object_validators/block_validator.ex b/lib/pleroma/web/activity_pub/object_validators/block_validator.ex index c5f77bb76..88948135f 100644 --- a/lib/pleroma/web/activity_pub/object_validators/block_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/block_validator.ex @@ -26,7 +26,7 @@ def cast_data(data) do |> cast(data, __schema__(:fields)) end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Block"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex b/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex index 1189778f2..b153156b0 100644 --- a/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex @@ -67,7 +67,7 @@ def changeset(struct, data) do |> cast_embed(:attachment) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["ChatMessage"]) |> validate_required([:id, :actor, :to, :type, :published]) diff --git a/lib/pleroma/web/activity_pub/object_validators/create_chat_message_validator.ex b/lib/pleroma/web/activity_pub/object_validators/create_chat_message_validator.ex index 8384c16a7..7a31a99bf 100644 --- a/lib/pleroma/web/activity_pub/object_validators/create_chat_message_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/create_chat_message_validator.ex @@ -39,7 +39,7 @@ def cast_and_validate(data, meta \\ []) do |> validate_data(meta) end - def validate_data(cng, meta \\ []) do + defp validate_data(cng, meta) do cng |> validate_required([:id, :actor, :to, :type, :object]) |> validate_inclusion(:type, ["Create"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex b/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex index bf56a918c..e06e442f4 100644 --- a/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex +++ 
b/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex @@ -79,7 +79,7 @@ defp fix(data, meta) do |> CommonFixes.fix_actor() end - def validate_data(cng, meta \\ []) do + defp validate_data(cng, meta) do cng |> validate_required([:actor, :type, :object]) |> validate_inclusion(:type, ["Create"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex b/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex index fc1a79a72..7da67bf16 100644 --- a/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex @@ -53,7 +53,7 @@ def add_deleted_activity_id(cng) do Tombstone Video } - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Delete"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex b/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex index 1906e597e..ec7566515 100644 --- a/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex @@ -70,7 +70,7 @@ def validate_emoji(cng) do end end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["EmojiReact"]) |> validate_required([:id, :type, :object, :actor, :context, :to, :cc, :content]) diff --git a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex index 0112a074d..d42458ef5 100644 --- a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex @@ -86,7 +86,7 @@ def changeset(struct, data) do |> cast_embed(:tag) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Event"]) |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id]) diff --git a/lib/pleroma/web/activity_pub/object_validators/follow_validator.ex b/lib/pleroma/web/activity_pub/object_validators/follow_validator.ex index 6e428bacc..239cee5e7 100644 --- a/lib/pleroma/web/activity_pub/object_validators/follow_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/follow_validator.ex @@ -27,7 +27,7 @@ def cast_data(data) do |> cast(data, __schema__(:fields)) end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Follow"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/like_validator.ex b/lib/pleroma/web/activity_pub/object_validators/like_validator.ex index 30c40b238..509da507b 100644 --- a/lib/pleroma/web/activity_pub/object_validators/like_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/like_validator.ex @@ -76,7 +76,7 @@ def fix_recipients(cng) do end end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Like"]) |> validate_required([:id, :type, :object, :actor, :context, :to, :cc]) diff --git a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex index 7acb1e928..7012e2e1d 100644 --- a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex 
@@ -100,7 +100,7 @@ def changeset(struct, data) do |> cast_embed(:tag) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Question"]) |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id]) diff --git a/lib/pleroma/web/activity_pub/object_validators/undo_validator.ex b/lib/pleroma/web/activity_pub/object_validators/undo_validator.ex index 783a79ddb..e8af60ffa 100644 --- a/lib/pleroma/web/activity_pub/object_validators/undo_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/undo_validator.ex @@ -38,7 +38,7 @@ def changeset(struct, data) do |> cast(data, __schema__(:fields)) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Undo"]) |> validate_required([:id, :type, :object, :actor, :to, :cc]) diff --git a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex index a66d41400..6bb1dc7fa 100644 --- a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex @@ -28,7 +28,7 @@ def cast_data(data) do |> cast(data, __schema__(:fields)) end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Update"]) From 4ecf6ceea6062d68c382918010dc577151d0131c Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Thu, 1 Apr 2021 10:01:31 -0500 Subject: [PATCH 129/174] Enforce user.notification_settings is NOT NULL --- ...401143153_user_notification_settings_fix.exs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 priv/repo/migrations/20210401143153_user_notification_settings_fix.exs diff --git a/priv/repo/migrations/20210401143153_user_notification_settings_fix.exs b/priv/repo/migrations/20210401143153_user_notification_settings_fix.exs new file mode 100644 index 000000000..cf68f1be6 --- /dev/null +++ b/priv/repo/migrations/20210401143153_user_notification_settings_fix.exs @@ -0,0 +1,17 @@ +defmodule Pleroma.Repo.Migrations.UserNotificationSettingsFix do + use Ecto.Migration + + def up do + execute(~s(UPDATE users + SET + notification_settings = '{"followers": true, "follows": true, "non_follows": true, "non_followers": true}'::jsonb WHERE notification_settings IS NULL +)) + + execute("ALTER TABLE users + ALTER COLUMN notification_settings SET NOT NULL") + end + + def down do + :ok + end +end From 765f0907dfa9371038188ee35fc3b241be796d26 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Thu, 1 Apr 2021 10:07:57 -0500 Subject: [PATCH 130/174] Document user login failure fix for NULL notification_settings --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 43f2bb638..31a22bb31 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Fixed - Try to save exported ConfigDB settings (migrate_from_db) in the system temp directory if default location is not writable. +- User login failures if their `notification_settings` were in a NULL state. 
## [2.3.0] - 2020-03-01 From 31ce8a37304e24381b26d678dfbbc7b7a6b1ba35 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Thu, 1 Apr 2021 10:09:32 -0500 Subject: [PATCH 131/174] Fix CHANGELOG entry meant for next release --- CHANGELOG.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 43f2bb638..6c45cad85 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,8 +6,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ## Unreleased +### Changed + - The `application` metadata returned with statuses is no longer hardcoded. Apps that want to display these details will now have valid data for new posts after this change. +### Added + +- MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. + ## Unreleased (Patch) ### Fixed @@ -75,7 +81,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Ability to define custom HTTP headers per each frontend - MRF (`NoEmptyPolicy`): New MRF Policy which will deny empty statuses or statuses of only mentions from being created by local users - New users will receive a simple email confirming their registration if no other emails will be dispatched. (e.g., Welcome, Confirmation, or Approval Required) -- MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. <details> <summary>API Changes</summary> From ef36f7fa5cff0a0d364aff192954556b0d2b0d2a Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" <contact@hacktivis.me> Date: Thu, 1 Apr 2021 13:49:04 +0200 Subject: [PATCH 132/174] Move tag fixup to object_validator --- .../web/activity_pub/object_validator.ex | 32 +++++++++++++++---- lib/pleroma/web/activity_pub/pipeline.ex | 14 -------- 2 files changed, 26 insertions(+), 20 deletions(-) diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index 15784b28c..70d9a35a9 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -113,9 +113,34 @@ def validate( end end + def validate(%{"type" => type} = object, meta) + when type in ~w[Event Question Audio Video Article] do + validator = + case type do + "Event" -> EventValidator + "Question" -> QuestionValidator + "Audio" -> AudioVideoValidator + "Video" -> AudioVideoValidator + "Article" -> ArticleNoteValidator + end + + with {:ok, object} <- + object + |> validator.cast_and_validate() + |> Ecto.Changeset.apply_action(:insert) do + object = stringify_keys(object) + + # Insert copy of hashtags as strings for the non-hashtag table indexing + tag = (object["tag"] || []) ++ Object.hashtags(%Object{data: object}) + object = Map.put(object, "tag", tag) + + {:ok, object, meta} + end + end + def validate(%{"type" => type} = object, meta) when type in ~w[Accept Reject Follow Update Like EmojiReact Announce - Event ChatMessage Question Audio Video Article Answer] do + ChatMessage Answer] do validator = case type do "Accept" -> AcceptRejectValidator @@ -125,12 +150,7 @@ def validate(%{"type" => type} = object, meta) "Like" -> LikeValidator "EmojiReact" -> EmojiReactValidator "Announce" -> 
AnnounceValidator - "Event" -> EventValidator "ChatMessage" -> ChatMessageValidator - "Question" -> QuestionValidator - "Audio" -> AudioVideoValidator - "Video" -> AudioVideoValidator - "Article" -> ArticleNoteValidator "Answer" -> AnswerValidator end diff --git a/lib/pleroma/web/activity_pub/pipeline.ex b/lib/pleroma/web/activity_pub/pipeline.ex index e184a9376..377eccb92 100644 --- a/lib/pleroma/web/activity_pub/pipeline.ex +++ b/lib/pleroma/web/activity_pub/pipeline.ex @@ -44,7 +44,6 @@ def do_common_pipeline(%{__struct__: _}, _meta), do: {:error, :is_struct} def do_common_pipeline(message, meta) do with {_, {:ok, message, meta}} <- {:validate, @object_validator.validate(message, meta)}, - {_, {:ok, message, meta}} <- {:fixup, validation_fixups(message, meta)}, {_, {:ok, message, meta}} <- {:mrf, @mrf.pipeline_filter(message, meta)}, {_, {:ok, message, meta}} <- {:persist, @activity_pub.persist(message, meta)}, {_, {:ok, message, meta}} <- {:side_effects, @side_effects.handle(message, meta)}, @@ -56,19 +55,6 @@ def do_common_pipeline(message, meta) do end end - defp validation_fixups(message, meta) do - # Insert copy of hashtags as strings for the non-hashtag table indexing - message = - if message["tag"] do - tag = Object.hashtags(%Object{data: message}) ++ (message["tag"] || []) - Map.put(message, "tag", tag) - else - message - end - - {:ok, message, meta} - end - defp maybe_federate(%Object{}, _), do: {:ok, :not_federated} defp maybe_federate(%Activity{} = activity, meta) do From 681a42c359b4fbae74285363c670dff18aac5918 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov <alex.strizhakov@gmail.com> Date: Thu, 8 Apr 2021 15:45:31 +0300 Subject: [PATCH 133/174] release runtime provider fix for paths --- lib/pleroma/config/release_runtime_provider.ex | 13 +++++++++---- mix.exs | 13 +------------ .../config/release_runtime_provider_test.exs | 1 - 3 files changed, 10 insertions(+), 17 deletions(-) diff --git a/lib/pleroma/config/release_runtime_provider.ex b/lib/pleroma/config/release_runtime_provider.ex index 46fa35559..e5e9d3dcd 100644 --- a/lib/pleroma/config/release_runtime_provider.ex +++ b/lib/pleroma/config/release_runtime_provider.ex @@ -11,10 +11,11 @@ def init(opts), do: opts def load(config, opts) do with_defaults = Config.Reader.merge(config, Pleroma.Config.Holder.release_defaults()) - config_path = opts[:config_path] + config_path = + opts[:config_path] || System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs" with_runtime_config = - if config_path && File.exists?(config_path) do + if File.exists?(config_path) do runtime_config = Config.Reader.read!(config_path) with_defaults @@ -32,10 +33,14 @@ def load(config, opts) do with_defaults end - exported_config_path = opts[:exported_config_path] + exported_config_path = + opts[:exported_config_path] || + config_path + |> Path.dirname() + |> Path.join("#{Pleroma.Config.get(:env)}.exported_from_db.secret.exs") with_exported = - if exported_config_path && File.exists?(exported_config_path) do + if File.exists?(exported_config_path) do exported_config = Config.Reader.read!(exported_config_path) Config.Reader.merge(with_runtime_config, exported_config) else diff --git a/mix.exs b/mix.exs index 7328b533b..fe5d9d963 100644 --- a/mix.exs +++ b/mix.exs @@ -38,7 +38,7 @@ def project do include_executables_for: [:unix], applications: [ex_syslogger: :load, syslog: :load, eldap: :transient], steps: [:assemble, &put_otp_version/1, ©_files/1, ©_nginx_config/1], - config_providers: [{Pleroma.Config.ReleaseRuntimeProvider, 
release_config_paths()}] + config_providers: [{Pleroma.Config.ReleaseRuntimeProvider, []}] ] ] ] @@ -67,17 +67,6 @@ def copy_nginx_config(%{path: target_path} = release) do release end - defp release_config_paths do - config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs" - - exported_config_path = - config_path - |> Path.dirname() - |> Path.join("#{Mix.env()}.exported_from_db.secret.exs") - - [config_path: config_path, exported_config_path: exported_config_path] - end - # Configuration for the OTP application. # # Type `mix help compile.app` for more information. diff --git a/test/pleroma/config/release_runtime_provider_test.exs b/test/pleroma/config/release_runtime_provider_test.exs index 1921698c5..6578d3268 100644 --- a/test/pleroma/config/release_runtime_provider_test.exs +++ b/test/pleroma/config/release_runtime_provider_test.exs @@ -8,7 +8,6 @@ test "loads release defaults config and warns about non-existent runtime config" ExUnit.CaptureIO.capture_io(fn -> merged = ReleaseRuntimeProvider.load([], []) assert merged == Pleroma.Config.Holder.release_defaults() - IO.inspect(merged) end) =~ "!!! Config path is not declared! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file" end From 0feafcc20cec168258f592b9d509c1e6ccc8efba Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Fri, 9 Apr 2021 10:30:27 -0500 Subject: [PATCH 134/174] Use URI.merge to prevent concatenating two canonical URLs when a custom instance thumbnail was uploaded via AdminFE --- lib/pleroma/web/mastodon_api/views/instance_view.ex | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/web/mastodon_api/views/instance_view.ex b/lib/pleroma/web/mastodon_api/views/instance_view.ex index 73205fb6d..dac68d8e6 100644 --- a/lib/pleroma/web/mastodon_api/views/instance_view.ex +++ b/lib/pleroma/web/mastodon_api/views/instance_view.ex @@ -23,7 +23,8 @@ def render("show.json", _) do streaming_api: Pleroma.Web.Endpoint.websocket_url() }, stats: Pleroma.Stats.get_stats(), - thumbnail: Pleroma.Web.base_url() <> Keyword.get(instance, :instance_thumbnail), + thumbnail: + URI.merge(Pleroma.Web.base_url(), Keyword.get(instance, :instance_thumbnail)) |> to_string, languages: ["en"], registrations: Keyword.get(instance, :registrations_open), approval_required: Keyword.get(instance, :account_approval_required), From 9fbcdc15b11dedf27bc5c78d09048ba354906c16 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 13 Apr 2021 10:52:53 -0500 Subject: [PATCH 135/174] Validate custom instance thumbnail set via AdminAPI produces correct URL --- CHANGELOG.md | 1 + .../controllers/config_controller_test.exs | 42 +++++++++++++++++++ 2 files changed, 43 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6c45cad85..1553245e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Fixed - Try to save exported ConfigDB settings (migrate_from_db) in the system temp directory if default location is not writable. 
+- Uploading custom instance thumbnail via AdminAPI/AdminFE generated invalid URL to the image ## [2.3.0] - 2020-03-01 diff --git a/test/pleroma/web/admin_api/controllers/config_controller_test.exs b/test/pleroma/web/admin_api/controllers/config_controller_test.exs index 578a4c914..71151712e 100644 --- a/test/pleroma/web/admin_api/controllers/config_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/config_controller_test.exs @@ -1410,6 +1410,48 @@ test "enables the welcome messages", %{conn: conn} do "need_reboot" => false } end + + test "custom instance thumbnail", %{conn: conn} do + clear_config([:instance]) + + params = %{ + "group" => ":pleroma", + "key" => ":instance", + "value" => [ + %{ + "tuple" => [ + ":instance_thumbnail", + "https://example.com/media/new_thumbnail.jpg" + ] + } + ] + } + + res = + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/pleroma/admin/config", %{"configs" => [params]}) + |> json_response_and_validate_schema(200) + + assert res == %{ + "configs" => [ + %{ + "db" => [":instance_thumbnail"], + "group" => ":pleroma", + "key" => ":instance", + "value" => params["value"] + } + ], + "need_reboot" => false + } + + assert res = + conn + |> get("/api/v1/instance") + |> json_response_and_validate_schema(200) + + assert res = %{"thumbnail" => "https://example.com/media/new_thumbnail.jpg"} + end end describe "GET /api/pleroma/admin/config/descriptions" do From cdd271b0655799e65bb9a13016dc82441ec34f87 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 13 Apr 2021 10:55:54 -0500 Subject: [PATCH 136/174] Fix assignment / assertion --- .../web/admin_api/controllers/config_controller_test.exs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/pleroma/web/admin_api/controllers/config_controller_test.exs b/test/pleroma/web/admin_api/controllers/config_controller_test.exs index 71151712e..c4d07d61c 100644 --- a/test/pleroma/web/admin_api/controllers/config_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/config_controller_test.exs @@ -1445,8 +1445,8 @@ test "custom instance thumbnail", %{conn: conn} do "need_reboot" => false } - assert res = - conn + _res = + assert conn |> get("/api/v1/instance") |> json_response_and_validate_schema(200) From 905efc57e9f2a96519bf1ac84b56f88d1818cca3 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 13 Apr 2021 11:15:52 -0500 Subject: [PATCH 137/174] Initial test validating the AdminAPI issue --- .../controllers/config_controller_test.exs | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/test/pleroma/web/admin_api/controllers/config_controller_test.exs b/test/pleroma/web/admin_api/controllers/config_controller_test.exs index c4d07d61c..d26fd3150 100644 --- a/test/pleroma/web/admin_api/controllers/config_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/config_controller_test.exs @@ -1452,6 +1452,41 @@ test "custom instance thumbnail", %{conn: conn} do assert res = %{"thumbnail" => "https://example.com/media/new_thumbnail.jpg"} end + + test "Concurrent Limiter", %{conn: conn} do + clear_config([ConcurrentLimiter]) + + params = %{ + "group" => ":pleroma", + "key" => "ConcurrentLimiter", + "value" => [ + %{ + "tuple" => [ + "Pleroma.Web.RichMedia.Helpers", + [ + %{"tuple" => [":max_running", 6]}, + %{"tuple" => [":max_waiting", 6]} + ] + ] + }, + %{ + "tuple" => [ + "Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy", + [ + %{"tuple" => [":max_running", 7]}, + %{"tuple" => 
[":max_waiting", 7]} + ] + ] + } + ] + } + + _res = + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/pleroma/admin/config", %{"configs" => [params]}) + |> json_response_and_validate_schema(200) + end end describe "GET /api/pleroma/admin/config/descriptions" do From ee53ad4d7705328a5a583680c6f551c4c3bf2302 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 13 Apr 2021 12:09:18 -0500 Subject: [PATCH 138/174] Add ConcurrentLimiter to module_name?/1 and apply string_to_elixir_types/1 to search_opts keys during update_or_create/1 --- lib/pleroma/config_db.ex | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/config_db.ex b/lib/pleroma/config_db.ex index b874e0e37..03905c06b 100644 --- a/lib/pleroma/config_db.ex +++ b/lib/pleroma/config_db.ex @@ -141,7 +141,9 @@ defp deep_merge(_key, value1, value2) do @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} def update_or_create(params) do params = Map.put(params, :value, to_elixir_types(params[:value])) - search_opts = Map.take(params, [:group, :key]) + + search_opts = + Map.take(params, [:group, :key]) |> Map.update!(:key, &string_to_elixir_types(&1)) with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config}, @@ -387,6 +389,6 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do @spec module_name?(String.t()) :: boolean() def module_name?(string) do Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or - string in ["Oban", "Ueberauth", "ExSyslogger"] + string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"] end end From 861f1928526930eeb78f79c4840c69cee5c2f215 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 13 Apr 2021 14:39:44 -0500 Subject: [PATCH 139/174] Document fixed ability to save ConcurrentLimiter settings in ConfigDB --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1553245e5..6e13b3875 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Try to save exported ConfigDB settings (migrate_from_db) in the system temp directory if default location is not writable. 
- Uploading custom instance thumbnail via AdminAPI/AdminFE generated invalid URL to the image +- Applying ConcurrentLimiter settings via AdminAPI ## [2.3.0] - 2020-03-01 From c3b8c77967b0c42f93286f864236b7d6f1471c13 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 13 Apr 2021 14:25:15 -0500 Subject: [PATCH 140/174] Improve string_to_elixir_types/1 with guards --- lib/pleroma/config_db.ex | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/pleroma/config_db.ex b/lib/pleroma/config_db.ex index 03905c06b..eeeb026c1 100644 --- a/lib/pleroma/config_db.ex +++ b/lib/pleroma/config_db.ex @@ -327,7 +327,7 @@ def to_elixir_types(entity), do: entity @spec string_to_elixir_types(String.t()) :: atom() | Regex.t() | module() | String.t() | no_return() - def string_to_elixir_types("~r" <> _pattern = regex) do + def string_to_elixir_types("~r" <> _pattern = regex) when is_binary(regex) do pattern = ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u @@ -341,9 +341,9 @@ def string_to_elixir_types("~r" <> _pattern = regex) do end end - def string_to_elixir_types(":" <> atom), do: String.to_atom(atom) + def string_to_elixir_types(":" <> atom) when is_binary(atom), do: String.to_atom(atom) - def string_to_elixir_types(value) do + def string_to_elixir_types(value) when is_binary(value) do if module_name?(value) do String.to_existing_atom("Elixir." <> value) else @@ -351,6 +351,8 @@ def string_to_elixir_types(value) do end end + def string_to_elixir_types(value) when is_atom(value), do: value + defp parse_host("localhost"), do: :localhost defp parse_host(host) do From f95b52255b2d7373a3e0bf4adff81f83c080b2ef Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Wed, 14 Apr 2021 09:39:57 -0500 Subject: [PATCH 141/174] Revert guards on string_to_elixir_types/1, remove unnecessary assignment in test --- lib/pleroma/config_db.ex | 12 ++++-------- .../admin_api/controllers/config_controller_test.exs | 9 ++++----- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/lib/pleroma/config_db.ex b/lib/pleroma/config_db.ex index eeeb026c1..cb57673e3 100644 --- a/lib/pleroma/config_db.ex +++ b/lib/pleroma/config_db.ex @@ -141,9 +141,7 @@ defp deep_merge(_key, value1, value2) do @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} def update_or_create(params) do params = Map.put(params, :value, to_elixir_types(params[:value])) - - search_opts = - Map.take(params, [:group, :key]) |> Map.update!(:key, &string_to_elixir_types(&1)) + search_opts = Map.take(params, [:group, :key]) with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config}, @@ -327,7 +325,7 @@ def to_elixir_types(entity), do: entity @spec string_to_elixir_types(String.t()) :: atom() | Regex.t() | module() | String.t() | no_return() - def string_to_elixir_types("~r" <> _pattern = regex) when is_binary(regex) do + def string_to_elixir_types("~r" <> _pattern = regex) do pattern = ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u @@ -341,9 +339,9 @@ def string_to_elixir_types("~r" <> _pattern = regex) when is_binary(regex) do end end - def string_to_elixir_types(":" <> atom) when is_binary(atom), do: String.to_atom(atom) + def string_to_elixir_types(":" <> atom), do: String.to_atom(atom) - def string_to_elixir_types(value) when is_binary(value) do + def string_to_elixir_types(value) do if module_name?(value) do 
String.to_existing_atom("Elixir." <> value) else @@ -351,8 +349,6 @@ def string_to_elixir_types(value) when is_binary(value) do end end - def string_to_elixir_types(value) when is_atom(value), do: value - defp parse_host("localhost"), do: :localhost defp parse_host(host) do diff --git a/test/pleroma/web/admin_api/controllers/config_controller_test.exs b/test/pleroma/web/admin_api/controllers/config_controller_test.exs index d26fd3150..c39c1b1e1 100644 --- a/test/pleroma/web/admin_api/controllers/config_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/config_controller_test.exs @@ -1481,11 +1481,10 @@ test "Concurrent Limiter", %{conn: conn} do ] } - _res = - assert conn - |> put_req_header("content-type", "application/json") - |> post("/api/pleroma/admin/config", %{"configs" => [params]}) - |> json_response_and_validate_schema(200) + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/pleroma/admin/config", %{"configs" => [params]}) + |> json_response_and_validate_schema(200) end end From d9fce0133ef3444ef7d09ae7e2760583540d1cd2 Mon Sep 17 00:00:00 2001 From: Sean King <seanking2919@protonmail.com> Date: Wed, 14 Apr 2021 14:01:33 -0600 Subject: [PATCH 142/174] Fix Mastodon interface link --- docs/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.md b/docs/index.md index 1a90d0a8d..80c5d2631 100644 --- a/docs/index.md +++ b/docs/index.md @@ -20,7 +20,7 @@ The default front-end used by Pleroma is Pleroma-FE. You can find more informati ### Mastodon interface If the Pleroma interface isn't your thing, or you're just trying something new but you want to keep using the familiar Mastodon interface, we got that too! -Just add a "/web" after your instance url (e.g. <https://pleroma.soycaf.com/web>) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC! +Just add a "/web" after your instance url (e.g. <https://pleroma.soykaf.com/web>) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC! The Mastodon interface is from the Glitch-soc fork. For more information on the Mastodon interface you can check the [Mastodon](https://docs.joinmastodon.org/) and [Glitch-soc](https://glitch-soc.github.io/docs/) documentation. Remember, what you see is only the frontend part of Mastodon, the backend is still Pleroma. 
From 2b4f958b2ad653ee8e294ade18aa4482e4d372e1 Mon Sep 17 00:00:00 2001 From: Sean King <seanking2919@protonmail.com> Date: Sun, 18 Apr 2021 14:00:18 -0600 Subject: [PATCH 143/174] Add opting out of Google FLoC to HTTPSecurityPlug headers --- lib/pleroma/web/plugs/http_security_plug.ex | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/web/plugs/http_security_plug.ex b/lib/pleroma/web/plugs/http_security_plug.ex index 0025b042a..d1e6cc9d3 100644 --- a/lib/pleroma/web/plugs/http_security_plug.ex +++ b/lib/pleroma/web/plugs/http_security_plug.ex @@ -48,7 +48,8 @@ def headers do {"x-content-type-options", "nosniff"}, {"referrer-policy", referrer_policy}, {"x-download-options", "noopen"}, - {"content-security-policy", csp_string()} + {"content-security-policy", csp_string()}, + {"permissions-policy", "interest-cohort=()"} ] headers = From efed94a23e30260bcf1b297910906b11d6e4d895 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Mon, 19 Apr 2021 16:23:57 -0500 Subject: [PATCH 144/174] Fix error response which was breaking tests related to pinned posts --- lib/pleroma/web/common_api.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index 3970c19a8..1b5f8491e 100644 --- a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -415,7 +415,7 @@ def pin(id, %User{} = user) do ) do {:ok, activity} else - {:error, {:execute_side_effects, error}} -> error + {:error, {:side_effects, error}} -> error error -> error end end From 2780cdd4e7acde0f4bf4719b7c82bc7e2d1bf3b5 Mon Sep 17 00:00:00 2001 From: Sean King <seanking2919@protonmail.com> Date: Mon, 19 Apr 2021 16:06:19 -0600 Subject: [PATCH 145/174] Add CHANGELOG entry --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9b0678023..bfa76a89a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Changed - The `application` metadata returned with statuses is no longer hardcoded. Apps that want to display these details will now have valid data for new posts after this change. +- HTTPSecurityPlug now sends a response header to opt out of Google's FLoC (Federated Learning of Cohorts) targeted advertising. ### Added From 7eded7218922b46c5cc085e715b6031ffff9b6ce Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 20 Apr 2021 12:31:14 -0500 Subject: [PATCH 146/174] Fix incorrect shell command Can't be in /opt/pleroma/bin and then call ./bin/pleroma_ctl :) --- docs/installation/otp_en.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/installation/otp_en.md b/docs/installation/otp_en.md index 42e264e65..13f9636f3 100644 --- a/docs/installation/otp_en.md +++ b/docs/installation/otp_en.md @@ -290,7 +290,7 @@ nginx -t ## Create your first user and set as admin ```sh -cd /opt/pleroma/bin +cd /opt/pleroma su pleroma -s $SHELL -lc "./bin/pleroma_ctl user new joeuser joeuser@sld.tld --admin" ``` This will create an account withe the username of 'joeuser' with the email address of joeuser@sld.tld, and set that user's account as an admin. This will result in a link that you can paste into the browser, which logs you in and enables you to set the password. 
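Patch 143 above is easy to overlook in the middle of the series: it adds a `permissions-policy: interest-cohort=()` response header so browsers exclude the instance's visitors from Google's FLoC (Federated Learning of Cohorts) cohort computation. As a rough illustration of the mechanism only — this is not Pleroma's actual HTTPSecurityPlug, and it assumes the `plug` library is available — a minimal plug doing just that could look like:

```elixir
defmodule FlocOptOutPlug do
  @moduledoc "Sketch only: attach the FLoC opt-out header to every response."

  import Plug.Conn

  def init(opts), do: opts

  def call(conn, _opts) do
    # "interest-cohort=()" tells the browser not to include this site's
    # visitors when computing FLoC cohorts.
    put_resp_header(conn, "permissions-policy", "interest-cohort=()")
  end
end

# Wired up as `plug FlocOptOutPlug` in an endpoint or router pipeline.
```

In the real patch the header is simply one more entry in the list returned by `headers/0`, so it travels with the security headers the plug already sets.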
From 0effcd2cfed36baec1d960b64c901da7e56710a8 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Mon, 19 Apr 2021 15:43:17 -0500 Subject: [PATCH 147/174] Set Repo.transaction/2 timeout to infinity. Fixes pleroma.user delete_activities mix task. --- lib/pleroma/web/activity_pub/pipeline.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/pipeline.ex b/lib/pleroma/web/activity_pub/pipeline.ex index 377eccb92..400823094 100644 --- a/lib/pleroma/web/activity_pub/pipeline.ex +++ b/lib/pleroma/web/activity_pub/pipeline.ex @@ -24,7 +24,7 @@ defmodule Pleroma.Web.ActivityPub.Pipeline do @spec common_pipeline(map(), keyword()) :: {:ok, Activity.t() | Object.t(), keyword()} | {:error, any()} def common_pipeline(object, meta) do - case Repo.transaction(fn -> do_common_pipeline(object, meta) end) do + case Repo.transaction(fn -> do_common_pipeline(object, meta) end, timeout: :infinity) do {:ok, {:ok, activity, meta}} -> @side_effects.handle_after_transaction(meta) {:ok, activity, meta} From 9bc69196d5dfbd3fb37c0e62da19ce08fb9bf28d Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 20 Apr 2021 11:10:39 -0500 Subject: [PATCH 148/174] Add utility function to return infinite timeout for SQL transactions if we detect it was called from a Mix Task --- lib/pleroma/utils.ex | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/lib/pleroma/utils.ex b/lib/pleroma/utils.ex index bc0c95332..5e2fa8bf7 100644 --- a/lib/pleroma/utils.ex +++ b/lib/pleroma/utils.ex @@ -63,4 +63,13 @@ def posix_error_message(code) when code in @posix_error_codes do end def posix_error_message(_), do: "" + + def query_timeout do + {parent, _, _, _} = Process.info(self(), :current_stacktrace) |> elem(1) |> Enum.fetch!(2) + + cond do + parent |> to_string |> String.starts_with?("Elixir.Mix.Task") -> [timeout: :infinity] + true -> [timeout: 15_000] + end + end end From 9f711ddcf84bdb5a5680e1b55afa83768014906d Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 20 Apr 2021 11:16:24 -0500 Subject: [PATCH 149/174] Try to set query timeout intelligently --- lib/pleroma/web/activity_pub/pipeline.ex | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/pipeline.ex b/lib/pleroma/web/activity_pub/pipeline.ex index 400823094..a0f2e0312 100644 --- a/lib/pleroma/web/activity_pub/pipeline.ex +++ b/lib/pleroma/web/activity_pub/pipeline.ex @@ -7,6 +7,7 @@ defmodule Pleroma.Web.ActivityPub.Pipeline do alias Pleroma.Config alias Pleroma.Object alias Pleroma.Repo + alias Pleroma.Utils alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.MRF alias Pleroma.Web.ActivityPub.ObjectValidator @@ -24,7 +25,7 @@ defmodule Pleroma.Web.ActivityPub.Pipeline do @spec common_pipeline(map(), keyword()) :: {:ok, Activity.t() | Object.t(), keyword()} | {:error, any()} def common_pipeline(object, meta) do - case Repo.transaction(fn -> do_common_pipeline(object, meta) end, timeout: :infinity) do + case Repo.transaction(fn -> do_common_pipeline(object, meta) end, Utils.query_timeout()) do {:ok, {:ok, activity, meta}} -> @side_effects.handle_after_transaction(meta) {:ok, activity, meta} From 99fd9c5e38ad08973f435f1a67d6af60d004c578 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 20 Apr 2021 12:00:02 -0500 Subject: [PATCH 150/174] OTP releases executing commands via pleroma_ctl show the parent of the process is :erl_eval --- lib/pleroma/utils.ex | 1 + 1 file changed, 1 insertion(+) diff --git 
a/lib/pleroma/utils.ex b/lib/pleroma/utils.ex index 5e2fa8bf7..55aecc509 100644 --- a/lib/pleroma/utils.ex +++ b/lib/pleroma/utils.ex @@ -69,6 +69,7 @@ def query_timeout do cond do parent |> to_string |> String.starts_with?("Elixir.Mix.Task") -> [timeout: :infinity] + parent == :erl_eval -> [timeout: :infinity] true -> [timeout: 15_000] end end From 959dc6e6fc95b33700fb7e08689afb701b2877f2 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Thu, 22 Apr 2021 10:11:08 -0500 Subject: [PATCH 151/174] Cleanup and ensure we obey custom Repo timeout --- lib/pleroma/utils.ex | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/utils.ex b/lib/pleroma/utils.ex index 55aecc509..a446d3ae6 100644 --- a/lib/pleroma/utils.ex +++ b/lib/pleroma/utils.ex @@ -11,6 +11,8 @@ defmodule Pleroma.Utils do eperm epipe erange erofs espipe esrch estale etxtbsy exdev )a + @repo_timeout Pleroma.Config.get([Pleroma.Repo, :timeout], 15_000) + def compile_dir(dir) when is_binary(dir) do dir |> File.ls!() @@ -64,13 +66,20 @@ def posix_error_message(code) when code in @posix_error_codes do def posix_error_message(_), do: "" + @doc """ + Returns [timeout: integer] suitable for passing as an option to Repo functions. + + This function detects if the execution was triggered from IEx shell, Mix task, or + ./bin/pleroma_ctl and sets the timeout to :infinity, else returns the default timeout value. + """ + @spec query_timeout() :: [timeout: integer] def query_timeout do {parent, _, _, _} = Process.info(self(), :current_stacktrace) |> elem(1) |> Enum.fetch!(2) cond do parent |> to_string |> String.starts_with?("Elixir.Mix.Task") -> [timeout: :infinity] parent == :erl_eval -> [timeout: :infinity] - true -> [timeout: 15_000] + true -> [timeout: @repo_timeout] end end end From d7a71a275abea6286ee116d092ddc9440a9419a5 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Thu, 22 Apr 2021 10:15:05 -0500 Subject: [PATCH 152/174] Fixed pleroma.user delete_activities mix task. --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index bfa76a89a..a1173414d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Uploading custom instance thumbnail via AdminAPI/AdminFE generated invalid URL to the image - Applying ConcurrentLimiter settings via AdminAPI - User login failures if their `notification_settings` were in a NULL state. 
+- Mix task `pleroma.user delete_activities` query transaction timeout is now :infinity ## [2.3.0] - 2020-03-01 From b9a99ac0d4b245ff3df6a9aa1b4db46ee75e9d22 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Tue, 27 Apr 2021 11:54:28 -0500 Subject: [PATCH 153/174] Cache gitlab-ci based on mix.lock --- .gitlab-ci.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2bc571971..2651ff9e6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -8,7 +8,9 @@ variables: &global_variables MIX_ENV: test cache: &global_cache_policy - key: ${CI_COMMIT_REF_SLUG} + key: + files: + - mix.lock paths: - deps - _build @@ -171,8 +173,8 @@ spec-deploy: - apk add curl script: - curl -X POST -F"token=$API_DOCS_PIPELINE_TRIGGER" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" -F"variables[JOB_REF]=$CI_JOB_ID" https://git.pleroma.social/api/v4/projects/1130/trigger/pipeline - - + + stop_review_app: image: alpine:3.9 stage: deploy @@ -231,7 +233,7 @@ amd64-musl: stage: release artifacts: *release-artifacts only: *release-only - image: elixir:1.10.3-alpine + image: elixir:1.10.3-alpine cache: *release-cache variables: *release-variables before_script: &before-release-musl From 8c1d6e88395e1d7ada9d86236a7fa2339d9097e9 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Thu, 29 Apr 2021 12:20:46 -0500 Subject: [PATCH 154/174] CHANGELOG: Return OAuth token `id` --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1173414d..9a0171763 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Added - MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. +- Return OAuth token `id` (primary key) in POST `/oauth/token`. 
## Unreleased (Patch) From b5ae8268982524a0a4fd295ddef64e4983832489 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Thu, 29 Apr 2021 13:03:41 -0500 Subject: [PATCH 155/174] CI: Purge pleroma build directory between runs --- .gitlab-ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2651ff9e6..78e715d47 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -24,6 +24,7 @@ stages: - docker before_script: + - rm -rf _build/*/lib/pleroma - apt-get update && apt-get install -y cmake - mix local.hex --force - mix local.rebar --force @@ -31,6 +32,9 @@ before_script: - apt-get -qq update - apt-get install -y libmagic-dev +after_script: + - rm -rf _build/*/lib/pleroma + build: stage: build script: From 004bcedb074d50bc42803e4c0a884239bd504b3d Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Fri, 30 Apr 2021 12:23:11 -0500 Subject: [PATCH 156/174] Upgrade Earmark 1.4.15 --- mix.exs | 2 +- mix.lock | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mix.exs b/mix.exs index 06d77edb7..8ba2d8fbc 100644 --- a/mix.exs +++ b/mix.exs @@ -144,7 +144,7 @@ defp deps do {:ex_aws, "~> 2.1.6"}, {:ex_aws_s3, "~> 2.0"}, {:sweet_xml, "~> 0.6.6"}, - {:earmark, "1.4.13"}, + {:earmark, "1.4.15"}, {:bbcode_pleroma, "~> 0.2.0"}, {:crypt, git: "https://github.com/msantos/crypt.git", diff --git a/mix.lock b/mix.lock index e4dd32c83..06542f18d 100644 --- a/mix.lock +++ b/mix.lock @@ -27,8 +27,8 @@ "db_connection": {:hex, :db_connection, "2.2.2", "3bbca41b199e1598245b716248964926303b5d4609ff065125ce98bcd368939e", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm", "642af240d8a8affb93b4ba5a6fcd2bbcbdc327e1a524b825d383711536f8070c"}, "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"}, "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, - "earmark": {:hex, :earmark, "1.4.13", "2c6ce9768fc9fdbf4046f457e207df6360ee6c91ee1ecb8e9a139f96a4289d91", [:mix], [{:earmark_parser, ">= 1.4.12", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "a0cf3ed88ef2b1964df408889b5ecb886d1a048edde53497fc935ccd15af3403"}, - "earmark_parser": {:hex, :earmark_parser, "1.4.12", "b245e875ec0a311a342320da0551da407d9d2b65d98f7a9597ae078615af3449", [:mix], [], "hexpm", "711e2cc4d64abb7d566d43f54b78f7dc129308a63bc103fbd88550d2174b3160"}, + "earmark": {:hex, :earmark, "1.4.15", "2c7f924bf495ec1f65bd144b355d0949a05a254d0ec561740308a54946a67888", [:mix], [{:earmark_parser, ">= 1.4.13", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "3b1209b85bc9f3586f370f7c363f6533788fb4e51db23aa79565875e7f9999ee"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.13", "0c98163e7d04a15feb62000e1a891489feb29f3d10cb57d4f845c405852bbef8", [:mix], [], "hexpm", "d602c26af3a0af43d2f2645613f65841657ad6efc9f0e361c3b6c06b578214ba"}, "ecto": {:hex, :ecto, "3.4.6", "08f7afad3257d6eb8613309af31037e16c36808dfda5a3cd0cb4e9738db030e4", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"6f13a9e2a62e75c2dcfc7207bfc65645ab387af8360db4c89fee8b5a4bf3f70b"}, "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"}, "ecto_sql": {:hex, :ecto_sql, "3.4.5", "30161f81b167d561a9a2df4329c10ae05ff36eca7ccc84628f2c8b9fa1e43323", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "31990c6a3579b36a3c0841d34a94c275e727de8b84f58509da5f1b2032c98ac2"}, From 6727a3659f60c0e09fa6375b6c0843c01f5be3dc Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Fri, 30 Apr 2021 12:27:06 -0500 Subject: [PATCH 157/174] Remove Pleroma.Formatter.minify/2 --- lib/pleroma/formatter.ex | 11 ----------- .../object_validators/audio_video_validator.ex | 1 - lib/pleroma/web/common_api/utils.ex | 1 - test/pleroma/formatter_test.exs | 7 ------- 4 files changed, 20 deletions(-) diff --git a/lib/pleroma/formatter.ex b/lib/pleroma/formatter.ex index 2aa236ca9..baf652a5a 100644 --- a/lib/pleroma/formatter.ex +++ b/lib/pleroma/formatter.ex @@ -142,17 +142,6 @@ def html_escape(text, "text/plain") do |> Enum.join("") end - def minify({text, mentions, hashtags}, type) do - {minify(text, type), mentions, hashtags} - end - - def minify(text, "text/html") do - text - |> String.replace(">\n", ">") - |> String.replace("> ", ">") - |> String.replace(" <", "<") - end - def truncate(text, max_length \\ 200, omission \\ "...") do # Remove trailing whitespace text = Regex.replace(~r/([^ \t\r\n])([ \t]+$)/u, text, "\\g{1}") diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex index fa3e2c026..9b38aa4c2 100644 --- a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex @@ -96,7 +96,6 @@ defp fix_content(%{"mediaType" => "text/markdown", "content" => content} = data) content = content |> Pleroma.Formatter.markdown_to_html() - |> Pleroma.Formatter.minify("text/html") |> Pleroma.HTML.filter_tags() Map.put(data, "content", content) diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex index be86009af..4731e79be 100644 --- a/lib/pleroma/web/common_api/utils.ex +++ b/lib/pleroma/web/common_api/utils.ex @@ -296,7 +296,6 @@ def format_input(text, "text/markdown", options) do |> Formatter.mentions_escape(options) |> Formatter.markdown_to_html() |> Formatter.linkify(options) - |> Formatter.minify("text/html") |> Formatter.html_escape("text/html") end diff --git a/test/pleroma/formatter_test.exs b/test/pleroma/formatter_test.exs index ceedd1b6d..5781a3f01 100644 --- a/test/pleroma/formatter_test.exs +++ b/test/pleroma/formatter_test.exs @@ -307,11 +307,4 @@ test "it 
escapes HTML in plain text" do assert Formatter.html_escape(text, "text/plain") == expected end - - test "it minifies html" do - text = "<p>\nhello</p>\n<p>\nworld</p>\n" - expected = "<p>hello</p><p>world</p>" - - assert Formatter.minify(text, "text/html") == expected - end end From 53760d2cda9b9f241355365b3fff9852bcb1a8a2 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Fri, 30 Apr 2021 12:51:18 -0500 Subject: [PATCH 158/174] Delete obsolete EarmarkRendereTests (moved to UtilsTest) --- test/pleroma/earmark_renderer_test.exs | 79 -------------------------- 1 file changed, 79 deletions(-) delete mode 100644 test/pleroma/earmark_renderer_test.exs diff --git a/test/pleroma/earmark_renderer_test.exs b/test/pleroma/earmark_renderer_test.exs deleted file mode 100644 index 3adbefc1e..000000000 --- a/test/pleroma/earmark_renderer_test.exs +++ /dev/null @@ -1,79 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2020 Pleroma Authors <https://pleroma.social/> -# SPDX-License-Identifier: AGPL-3.0-only -defmodule Pleroma.EarmarkRendererTest do - use ExUnit.Case - - test "Paragraph" do - code = ~s[Hello\n\nWorld!] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == "<p>Hello</p><p>World!</p>" - end - - test "raw HTML" do - code = ~s[<a href="http://example.org/">OwO</a><!-- what's this?-->] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == "<p>#{code}</p>" - end - - test "rulers" do - code = ~s[before\n\n-----\n\nafter] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == "<p>before</p><hr /><p>after</p>" - end - - test "headings" do - code = ~s[# h1\n## h2\n### h3\n] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == ~s[<h1>h1</h1><h2>h2</h2><h3>h3</h3>] - end - - test "blockquote" do - code = ~s[> whoms't are you quoting?] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == "<blockquote><p>whoms’t are you quoting?</p></blockquote>" - end - - test "code" do - code = ~s[`mix`] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == ~s[<p><code class="inline">mix</code></p>] - - code = ~s[``mix``] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == ~s[<p><code class="inline">mix</code></p>] - - code = ~s[```\nputs "Hello World"\n```] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == ~s[<pre><code class="">puts "Hello World"</code></pre>] - end - - test "lists" do - code = ~s[- one\n- two\n- three\n- four] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == "<ul><li>one</li><li>two</li><li>three</li><li>four</li></ul>" - - code = ~s[1. one\n2. two\n3. three\n4. 
four\n] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == "<ol><li>one</li><li>two</li><li>three</li><li>four</li></ol>" - end - - test "delegated renderers" do - code = ~s[a<br/>b] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == "<p>#{code}</p>" - - code = ~s[*aaaa~*] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == ~s[<p><em>aaaa~</em></p>] - - code = ~s[**aaaa~**] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == ~s[<p><strong>aaaa~</strong></p>] - - # strikethrought - code = ~s[<del>aaaa~</del>] - result = Pleroma.Formatter.markdown_to_html(code) - assert result == ~s[<p><del>aaaa~</del></p>] - end -end From a8fa00ef666f574aec8048626aed78a7d62e6915 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Fri, 30 Apr 2021 12:55:43 -0500 Subject: [PATCH 159/174] Fix failing remote mentions test, valid TLDs --- test/pleroma/web/common_api/utils_test.exs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/pleroma/web/common_api/utils_test.exs b/test/pleroma/web/common_api/utils_test.exs index 28b05ed91..8c79a9a83 100644 --- a/test/pleroma/web/common_api/utils_test.exs +++ b/test/pleroma/web/common_api/utils_test.exs @@ -209,10 +209,10 @@ test "local mentions" do end test "remote mentions" do - mario = insert(:user, %{nickname: "mario@mushroom.kingdom", local: false}) - luigi = insert(:user, %{nickname: "luigi@mushroom.kingdom", local: false}) + mario = insert(:user, %{nickname: "mario@mushroom.world", local: false}) + luigi = insert(:user, %{nickname: "luigi@mushroom.world", local: false}) - code = "@mario@mushroom.kingdom @luigi@mushroom.kingdom yo what's up?" + code = "@mario@mushroom.world @luigi@mushroom.world yo what's up?" {result, _, []} = Utils.format_input(code, "text/markdown") assert result == From dca87c5e7b4b12918cf59a83a77be389a7e0df01 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Sat, 1 May 2021 11:28:06 -0500 Subject: [PATCH 160/174] CHANGELOG: markdown --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a0171763..ed6e548dd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Applying ConcurrentLimiter settings via AdminAPI - User login failures if their `notification_settings` were in a NULL state. - Mix task `pleroma.user delete_activities` query transaction timeout is now :infinity +- Fixed some Markdown issues, including trailing slash in links. 
## [2.3.0] - 2020-03-01 From c80b1aaf514dec6b538a9833d48df027708b6b4d Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Mon, 3 May 2021 14:27:03 -0500 Subject: [PATCH 161/174] Don't crash when email settings are invalid Fixes: https://git.pleroma.social/pleroma/pleroma/-/issues/2606 Fixes: https://gitlab.com/soapbox-pub/soapbox/-/issues/4 --- lib/pleroma/application_requirements.ex | 38 ++++++++++--------- .../pleroma/application_requirements_test.exs | 18 ++++----- test/pleroma/user_test.exs | 18 +++++++++ 3 files changed, 47 insertions(+), 27 deletions(-) diff --git a/lib/pleroma/application_requirements.ex b/lib/pleroma/application_requirements.ex index 6ef65b263..c412dec5e 100644 --- a/lib/pleroma/application_requirements.ex +++ b/lib/pleroma/application_requirements.ex @@ -34,15 +34,16 @@ defp handle_result({:error, message}), do: raise(VerifyError, message: message) defp check_welcome_message_config!(:ok) do if Pleroma.Config.get([:welcome, :email, :enabled], false) and not Pleroma.Emails.Mailer.enabled?() do - Logger.error(""" - To send welcome email do you need to enable mail. - \nconfig :pleroma, Pleroma.Emails.Mailer, enabled: true - """) + Logger.warn(""" + To send welcome emails, you need to enable the mailer. + Welcome emails will NOT be sent with the current config. - {:error, "The mail disabled."} - else - :ok + Enable the mailer: + config :pleroma, Pleroma.Emails.Mailer, enabled: true + """) end + + :ok end defp check_welcome_message_config!(result), do: result @@ -51,18 +52,21 @@ defp check_welcome_message_config!(result), do: result # def check_confirmation_accounts!(:ok) do if Pleroma.Config.get([:instance, :account_activation_required]) && - not Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled]) do - Logger.error( - "Account activation enabled, but no Mailer settings enabled.\n" <> - "Please set config :pleroma, :instance, account_activation_required: false\n" <> - "Otherwise setup and enable Mailer." - ) + not Pleroma.Emails.Mailer.enabled?() do + Logger.warn(""" + Account activation is required, but the mailer is disabled. + Users will NOT be able to confirm their accounts with this config. + Either disable account activation or enable the mailer. - {:error, - "Account activation enabled, but Mailer is disabled. 
Cannot send confirmation emails."} - else - :ok + Disable account activation: + config :pleroma, :instance, account_activation_required: false + + Enable the mailer: + config :pleroma, Pleroma.Emails.Mailer, enabled: true + """) end + + :ok end def check_confirmation_accounts!(result), do: result diff --git a/test/pleroma/application_requirements_test.exs b/test/pleroma/application_requirements_test.exs index 683ac8c96..a54c37968 100644 --- a/test/pleroma/application_requirements_test.exs +++ b/test/pleroma/application_requirements_test.exs @@ -35,13 +35,13 @@ test "doesn't raise if the pool size is unexpected but the respective flag is se setup do: clear_config([:welcome]) setup do: clear_config([Pleroma.Emails.Mailer]) - test "raises if welcome email enabled but mail disabled" do + test "warns if welcome email enabled but mail disabled" do clear_config([:welcome, :email, :enabled], true) clear_config([Pleroma.Emails.Mailer, :enabled], false) - assert_raise Pleroma.ApplicationRequirements.VerifyError, "The mail disabled.", fn -> - capture_log(&Pleroma.ApplicationRequirements.verify!/0) - end + assert capture_log(fn -> + assert Pleroma.ApplicationRequirements.verify!() == :ok + end) =~ "Welcome emails will NOT be sent" end end @@ -57,15 +57,13 @@ test "raises if welcome email enabled but mail disabled" do setup do: clear_config([:instance, :account_activation_required]) - test "raises if account confirmation is required but mailer isn't enable" do + test "warns if account confirmation is required but mailer isn't enabled" do clear_config([:instance, :account_activation_required], true) clear_config([Pleroma.Emails.Mailer, :enabled], false) - assert_raise Pleroma.ApplicationRequirements.VerifyError, - "Account activation enabled, but Mailer is disabled. 
Cannot send confirmation emails.", - fn -> - capture_log(&Pleroma.ApplicationRequirements.verify!/0) - end + assert capture_log(fn -> + assert Pleroma.ApplicationRequirements.verify!() == :ok + end) =~ "Users will NOT be able to confirm their accounts" end test "doesn't do anything if account confirmation is disabled" do diff --git a/test/pleroma/user_test.exs b/test/pleroma/user_test.exs index 6f5bcab57..f89ea458a 100644 --- a/test/pleroma/user_test.exs +++ b/test/pleroma/user_test.exs @@ -572,6 +572,24 @@ test "it sends a registration confirmed email if no others will be sent" do ) end + test "it fails gracefully with invalid email config" do + cng = User.register_changeset(%User{}, @full_user_data) + + # Disable the mailer but enable all the things that want to send emails + clear_config([Pleroma.Emails.Mailer, :enabled], false) + clear_config([:instance, :account_activation_required], true) + clear_config([:instance, :account_approval_required], true) + clear_config([:welcome, :email, :enabled], true) + clear_config([:welcome, :email, :sender], "lain@lain.com") + + # The user is still created + assert {:ok, %User{nickname: "nick"}} = User.register(cng) + + # No emails are sent + ObanHelpers.perform_all() + refute_email_sent() + end + test "it requires an email, name, nickname and password, bio is optional when account_activation_required is enabled" do clear_config([:instance, :account_activation_required], true) From 90770e0841d3ffea87627b35627bfe38cad52f07 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Mon, 3 May 2021 14:30:21 -0500 Subject: [PATCH 162/174] CHANGELOG: don't crash so hard when email settings are invalid --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a0171763..74086a54b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. - Return OAuth token `id` (primary key) in POST `/oauth/token`. +### Fixed +- Don't crash so hard when email settings are invalid. + ## Unreleased (Patch) ### Fixed From ab9eabdf20180f2dd8539cf5d3dc0fdc6412496b Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Wed, 12 May 2021 13:38:11 -0500 Subject: [PATCH 163/174] Add SetMeta filter to store uploaded image sizes --- lib/pleroma/upload.ex | 9 ++++- lib/pleroma/upload/filter/set_meta.ex | 36 +++++++++++++++++++ .../web/mastodon_api/views/status_view.ex | 16 +++++++++ test/pleroma/upload/filter/set_meta_test.exs | 19 ++++++++++ .../mastodon_api/views/status_view_test.exs | 5 ++- 5 files changed, 83 insertions(+), 2 deletions(-) create mode 100644 lib/pleroma/upload/filter/set_meta.ex create mode 100644 test/pleroma/upload/filter/set_meta_test.exs diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex index 654711351..4d58abd48 100644 --- a/lib/pleroma/upload.ex +++ b/lib/pleroma/upload.ex @@ -23,6 +23,8 @@ defmodule Pleroma.Upload do is once created permanent and changing it (especially in uploaders) is probably a bad idea! * `:tempfile` - path to the temporary file. Prefer in-place changes on the file rather than changing the path as the temporary file is also tracked by `Plug.Upload{}` and automatically deleted once the request is over. 
+ * `:width` - width of the media in pixels + * `:height` - height of the media in pixels Related behaviors: @@ -32,6 +34,7 @@ defmodule Pleroma.Upload do """ alias Ecto.UUID alias Pleroma.Config + alias Pleroma.Maps require Logger @type source :: @@ -53,9 +56,11 @@ defmodule Pleroma.Upload do name: String.t(), tempfile: String.t(), content_type: String.t(), + width: integer(), + height: integer(), path: String.t() } - defstruct [:id, :name, :tempfile, :content_type, :path] + defstruct [:id, :name, :tempfile, :content_type, :width, :height, :path] defp get_description(opts, upload) do case {opts[:description], Pleroma.Config.get([Pleroma.Upload, :default_description])} do @@ -89,6 +94,8 @@ def store(upload, opts \\ []) do "mediaType" => upload.content_type, "href" => url_from_spec(upload, opts.base_url, url_spec) } + |> Maps.put_if_present("width", upload.width) + |> Maps.put_if_present("height", upload.height) ], "name" => description }} diff --git a/lib/pleroma/upload/filter/set_meta.ex b/lib/pleroma/upload/filter/set_meta.ex new file mode 100644 index 000000000..cccb6c371 --- /dev/null +++ b/lib/pleroma/upload/filter/set_meta.ex @@ -0,0 +1,36 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Upload.Filter.SetMeta do + @moduledoc """ + Extracts metadata about the upload, such as width/height + """ + require Logger + + @behaviour Pleroma.Upload.Filter + + @spec filter(Pleroma.Upload.t()) :: + {:ok, :filtered, Pleroma.Upload.t()} | {:ok, :noop} | {:error, String.t()} + def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _} = upload) do + try do + image = + file + |> Mogrify.open() + |> Mogrify.verbose() + + upload = + upload + |> Map.put(:width, image.width) + |> Map.put(:height, image.height) + + {:ok, :filtered, upload} + rescue + e in ErlangError -> + Logger.warn("#{__MODULE__}: #{inspect(e)}") + {:ok, :noop} + end + end + + def filter(_), do: {:ok, :noop} +end diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index bac897a57..5dbdc309e 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -426,10 +426,26 @@ def render("attachment.json", %{attachment: attachment}) do type: type, description: attachment["name"], pleroma: %{mime_type: media_type}, + meta: render("attachment_meta.json", %{attachment: attachment}), blurhash: attachment["blurhash"] } end + def render("attachment_meta.json", %{ + attachment: %{"url" => [%{"width" => width, "height" => height} | _]} + }) + when is_integer(width) and is_integer(height) do + %{ + original: %{ + width: width, + height: height, + aspect: width / height + } + } + end + + def render("attachment_meta.json", _), do: %{} + def render("context.json", %{activity: activity, activities: activities, user: user}) do %{ancestors: ancestors, descendants: descendants} = activities diff --git a/test/pleroma/upload/filter/set_meta_test.exs b/test/pleroma/upload/filter/set_meta_test.exs new file mode 100644 index 000000000..650e527b4 --- /dev/null +++ b/test/pleroma/upload/filter/set_meta_test.exs @@ -0,0 +1,19 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Upload.Filter.SetMetaTest do + use Pleroma.DataCase, async: true + alias 
Pleroma.Upload.Filter.SetMeta + + test "adds the image dimensions" do + upload = %Pleroma.Upload{ + name: "an… image.jpg", + content_type: "image/jpeg", + path: Path.absname("test/fixtures/image.jpg"), + tempfile: Path.absname("test/fixtures/image.jpg") + } + + assert {:ok, :filtered, %{width: 1024, height: 768}} = SetMeta.filter(upload) + end +end diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs b/test/pleroma/web/mastodon_api/views/status_view_test.exs index 2de3afc4f..e6c37e782 100644 --- a/test/pleroma/web/mastodon_api/views/status_view_test.exs +++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs @@ -458,7 +458,9 @@ test "attachments" do "url" => [ %{ "mediaType" => "image/png", - "href" => "someurl" + "href" => "someurl", + "width" => 200, + "height" => 100 } ], "blurhash" => "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn", @@ -474,6 +476,7 @@ test "attachments" do text_url: "someurl", description: nil, pleroma: %{mime_type: "image/png"}, + meta: %{original: %{width: 200, height: 100, aspect: 2}}, blurhash: "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn" } From 4c060ae73371a8567468186e5d1333ec00fbdf41 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Wed, 12 May 2021 15:38:49 -0500 Subject: [PATCH 164/174] Ingest remote attachment width/height --- .../object_validators/attachment_validator.ex | 4 ++- .../web/activity_pub/transmogrifier.ex | 2 ++ .../attachment_validator_test.exs | 33 +++++++++++++++++++ .../transmogrifier/audio_handling_test.exs | 4 ++- .../transmogrifier/video_handling_test.exs | 12 +++++-- 5 files changed, 50 insertions(+), 5 deletions(-) diff --git a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex index 3175427ad..a99b40adc 100644 --- a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex @@ -21,6 +21,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do field(:type, :string) field(:href, ObjectValidators.Uri) field(:mediaType, :string, default: "application/octet-stream") + field(:width, :integer) + field(:height, :integer) end end @@ -52,7 +54,7 @@ def url_changeset(struct, data) do data = fix_media_type(data) struct - |> cast(data, [:type, :href, :mediaType]) + |> cast(data, [:type, :href, :mediaType, :width, :height]) |> validate_inclusion(:type, ["Link"]) |> validate_required([:type, :href, :mediaType]) end diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 4d9a5617e..b5767863c 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -245,6 +245,8 @@ def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachm "type" => Map.get(url || %{}, "type", "Link") } |> Maps.put_if_present("mediaType", media_type) + |> Maps.put_if_present("width", (url || %{})["width"]) + |> Maps.put_if_present("height", (url || %{})["height"]) %{ "url" => [attachment_url], diff --git a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs index b775515e0..0e49fda99 100644 --- a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs +++ b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs @@ -72,5 +72,38 @@ test "it handles our own uploads" do 
assert attachment.mediaType == "image/jpeg" end + + test "it handles image dimensions" do + attachment = %{ + "url" => [ + %{ + "type" => "Link", + "mediaType" => "image/jpeg", + "href" => "https://example.com/images/1.jpg", + "width" => 200, + "height" => 100 + } + ], + "type" => "Document", + "name" => nil, + "mediaType" => "image/jpeg" + } + + {:ok, attachment} = + AttachmentValidator.cast_and_validate(attachment) + |> Ecto.Changeset.apply_action(:insert) + + assert [ + %{ + href: "https://example.com/images/1.jpg", + type: "Link", + mediaType: "image/jpeg", + width: 200, + height: 100 + } + ] = attachment.url + + assert attachment.mediaType == "image/jpeg" + end end end diff --git a/test/pleroma/web/activity_pub/transmogrifier/audio_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/audio_handling_test.exs index e733f167d..a21e9e3d3 100644 --- a/test/pleroma/web/activity_pub/transmogrifier/audio_handling_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier/audio_handling_test.exs @@ -76,7 +76,9 @@ test "Funkwhale Audio object" do "href" => "https://channels.tests.funkwhale.audio/api/v1/listen/3901e5d8-0445-49d5-9711-e096cf32e515/?upload=42342395-0208-4fee-a38d-259a6dae0871&download=false", "mediaType" => "audio/ogg", - "type" => "Link" + "type" => "Link", + "width" => nil, + "height" => nil } ] } diff --git a/test/pleroma/web/activity_pub/transmogrifier/video_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/video_handling_test.exs index 6ddf7c172..62b4a2cb3 100644 --- a/test/pleroma/web/activity_pub/transmogrifier/video_handling_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier/video_handling_test.exs @@ -60,7 +60,9 @@ test "it remaps video URLs as attachments if necessary" do "href" => "https://peertube.moe/static/webseed/df5f464b-be8d-46fb-ad81-2d4c2d1630e3-480.mp4", "mediaType" => "video/mp4", - "type" => "Link" + "type" => "Link", + "width" => nil, + "height" => nil } ] } @@ -83,7 +85,9 @@ test "it remaps video URLs as attachments if necessary" do "href" => "https://framatube.org/static/webseed/6050732a-8a7a-43d4-a6cd-809525a1d206-1080.mp4", "mediaType" => "video/mp4", - "type" => "Link" + "type" => "Link", + "width" => nil, + "height" => nil } ] } @@ -113,7 +117,9 @@ test "it works for peertube videos with only their mpegURL map" do "href" => "https://peertube.stream/static/streaming-playlists/hls/abece3c3-b9c6-47f4-8040-f3eed8c602e6/abece3c3-b9c6-47f4-8040-f3eed8c602e6-1080-fragmented.mp4", "mediaType" => "video/mp4", - "type" => "Link" + "type" => "Link", + "width" => nil, + "height" => nil } ] } From 02b9436494998e441fe2119b78c0e4f68c45a9e1 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Wed, 12 May 2021 16:16:10 -0500 Subject: [PATCH 165/174] Don't render media `meta` if nil --- lib/pleroma/web/mastodon_api/views/status_view.ex | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 5dbdc309e..7f318e81b 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -9,6 +9,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do alias Pleroma.Activity alias Pleroma.HTML + alias Pleroma.Maps alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User @@ -406,6 +407,7 @@ def render("attachment.json", %{attachment: attachment}) do media_type = attachment_url["mediaType"] || attachment_url["mimeType"] || "image" href = 
attachment_url["href"] |> MediaProxy.url() href_preview = attachment_url["href"] |> MediaProxy.preview_url() + meta = render("attachment_meta.json", %{attachment: attachment}) type = cond do @@ -426,9 +428,9 @@ def render("attachment.json", %{attachment: attachment}) do type: type, description: attachment["name"], pleroma: %{mime_type: media_type}, - meta: render("attachment_meta.json", %{attachment: attachment}), blurhash: attachment["blurhash"] } + |> Maps.put_if_present(:meta, meta) end def render("attachment_meta.json", %{ @@ -444,7 +446,7 @@ def render("attachment_meta.json", %{ } end - def render("attachment_meta.json", _), do: %{} + def render("attachment_meta.json", _), do: nil def render("context.json", %{activity: activity, activities: activities, user: user}) do %{ancestors: ancestors, descendants: descendants} = From 6f0b42656dcce9cd7e4c833be42b6544954ca93b Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Wed, 12 May 2021 19:03:10 -0500 Subject: [PATCH 166/174] Federate attachments as Links instead of Documents --- lib/pleroma/web/activity_pub/transmogrifier.ex | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index b5767863c..acb4f4b3e 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -245,8 +245,8 @@ def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachm "type" => Map.get(url || %{}, "type", "Link") } |> Maps.put_if_present("mediaType", media_type) - |> Maps.put_if_present("width", (url || %{})["width"]) - |> Maps.put_if_present("height", (url || %{})["height"]) + |> Maps.put_if_present("width", (url || %{})["width"] || data["width"]) + |> Maps.put_if_present("height", (url || %{})["height"] || data["height"]) %{ "url" => [attachment_url], @@ -963,7 +963,7 @@ def prepare_attachments(object) do object |> Map.get("attachment", []) |> Enum.map(fn data -> - [%{"mediaType" => media_type, "href" => href} | _] = data["url"] + [%{"mediaType" => media_type, "href" => href} = url | _] = data["url"] %{ "url" => href, @@ -971,6 +971,8 @@ def prepare_attachments(object) do "name" => data["name"], "type" => "Document" } + |> Maps.put_if_present("width", url["width"]) + |> Maps.put_if_present("height", url["height"]) end) Map.put(object, "attachment", attachments) From 5a57b025c7745ebdc7ecf8c7d6b75bcc6770562a Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Wed, 12 May 2021 20:15:33 -0500 Subject: [PATCH 167/174] Changelog: attachment meta --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5bb4b1e73..22eaa0b94 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. - Return OAuth token `id` (primary key) in POST `/oauth/token`. +- `SetMeta` upload filter for extracting attachment dimensions. +- Attachment dimensions are federated when available. ### Fixed - Don't crash so hard when email settings are invalid. 
From 543e9402d64bce556f85294f91dc690c9acec51f Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Fri, 14 May 2021 08:38:23 -0500 Subject: [PATCH 168/174] Support blurhash --- lib/pleroma/upload.ex | 7 +++++-- lib/pleroma/upload/filter/set_meta.ex | 9 +++++++++ lib/pleroma/web/activity_pub/transmogrifier.ex | 1 + mix.exs | 3 +++ mix.lock | 1 + 5 files changed, 19 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex index 4d58abd48..5570ed104 100644 --- a/lib/pleroma/upload.ex +++ b/lib/pleroma/upload.ex @@ -25,6 +25,7 @@ defmodule Pleroma.Upload do path as the temporary file is also tracked by `Plug.Upload{}` and automatically deleted once the request is over. * `:width` - width of the media in pixels * `:height` - height of the media in pixels + * `:blurhash` - string hash of the image encoded with the blurhash algorithm (https://blurha.sh/) Related behaviors: @@ -58,9 +59,10 @@ defmodule Pleroma.Upload do content_type: String.t(), width: integer(), height: integer(), + blurhash: String.t(), path: String.t() } - defstruct [:id, :name, :tempfile, :content_type, :width, :height, :path] + defstruct [:id, :name, :tempfile, :content_type, :width, :height, :blurhash, :path] defp get_description(opts, upload) do case {opts[:description], Pleroma.Config.get([Pleroma.Upload, :default_description])} do @@ -98,7 +100,8 @@ def store(upload, opts \\ []) do |> Maps.put_if_present("height", upload.height) ], "name" => description - }} + } + |> Maps.put_if_present("blurhash", upload.blurhash)} else {:description_limit, _} -> {:error, :description_too_long} diff --git a/lib/pleroma/upload/filter/set_meta.ex b/lib/pleroma/upload/filter/set_meta.ex index cccb6c371..81c48228a 100644 --- a/lib/pleroma/upload/filter/set_meta.ex +++ b/lib/pleroma/upload/filter/set_meta.ex @@ -23,6 +23,7 @@ def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _} = upload) upload |> Map.put(:width, image.width) |> Map.put(:height, image.height) + |> Map.put(:blurhash, get_blurhash(file)) {:ok, :filtered, upload} rescue @@ -33,4 +34,12 @@ def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _} = upload) end def filter(_), do: {:ok, :noop} + + defp get_blurhash(file) do + with {:ok, blurhash} <- :eblurhash.magick(file) do + blurhash + else + _ -> nil + end + end end diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index acb4f4b3e..f601d6111 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -973,6 +973,7 @@ def prepare_attachments(object) do } |> Maps.put_if_present("width", url["width"]) |> Maps.put_if_present("height", url["height"]) + |> Maps.put_if_present("blurhash", data["blurhash"]) end) Map.put(object, "attachment", attachments) diff --git a/mix.exs b/mix.exs index 436381f32..08581824a 100644 --- a/mix.exs +++ b/mix.exs @@ -198,6 +198,9 @@ defp deps do {:open_api_spex, git: "https://git.pleroma.social/pleroma/elixir-libraries/open_api_spex.git", ref: "f296ac0924ba3cf79c7a588c4c252889df4c2edd"}, + {:eblurhash, + git: "https://github.com/zotonic/eblurhash.git", + ref: "04a0b76eadf4de1be17726f39b6313b88708fd12"}, ## dev & test {:ex_doc, "~> 0.22", only: :dev, runtime: false}, diff --git a/mix.lock b/mix.lock index 99be81826..d24f9c699 100644 --- a/mix.lock +++ b/mix.lock @@ -29,6 +29,7 @@ "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", 
"ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, "earmark": {:hex, :earmark, "1.4.3", "364ca2e9710f6bff494117dbbd53880d84bebb692dafc3a78eb50aa3183f2bfd", [:mix], [], "hexpm", "8cf8a291ebf1c7b9539e3cddb19e9cef066c2441b1640f13c34c1d3cfc825fec"}, "earmark_parser": {:hex, :earmark_parser, "1.4.10", "6603d7a603b9c18d3d20db69921527f82ef09990885ed7525003c7fe7dc86c56", [:mix], [], "hexpm", "8e2d5370b732385db2c9b22215c3f59c84ac7dda7ed7e544d7c459496ae519c0"}, + "eblurhash": {:git, "https://github.com/zotonic/eblurhash.git", "04a0b76eadf4de1be17726f39b6313b88708fd12", [ref: "04a0b76eadf4de1be17726f39b6313b88708fd12"]}, "ecto": {:hex, :ecto, "3.4.6", "08f7afad3257d6eb8613309af31037e16c36808dfda5a3cd0cb4e9738db030e4", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6f13a9e2a62e75c2dcfc7207bfc65645ab387af8360db4c89fee8b5a4bf3f70b"}, "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"}, "ecto_sql": {:hex, :ecto_sql, "3.4.5", "30161f81b167d561a9a2df4329c10ae05ff36eca7ccc84628f2c8b9fa1e43323", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "31990c6a3579b36a3c0841d34a94c275e727de8b84f58509da5f1b2032c98ac2"}, From b22f54eb29237b4c34a26b497f88770dbebf5578 Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Sun, 16 May 2021 12:26:32 -0500 Subject: [PATCH 169/174] Make prod.secret.exs optional (with warning) --- config/prod.exs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/config/prod.exs b/config/prod.exs index adbce5606..0e151000b 100644 --- a/config/prod.exs +++ b/config/prod.exs @@ -63,7 +63,12 @@ # Finally import the config/prod.secret.exs # which should be versioned separately. -import_config "prod.secret.exs" +if File.exists?("./config/prod.secret.exs") do + import_config "prod.secret.exs" +else + "`config/prod.secret.exs` not found. 
You may want to create one by running `mix pleroma.instance gen`" + |> IO.warn([]) +end if File.exists?("./config/prod.exported_from_db.secret.exs"), do: import_config("prod.exported_from_db.secret.exs") From b540fff9081765feeadcc880af43f5d5d49d1e9c Mon Sep 17 00:00:00 2001 From: Alex Gleason <alex@alexgleason.me> Date: Sun, 16 May 2021 12:20:20 -0500 Subject: [PATCH 170/174] Docs: use `MIX_ENV=prod mix pleroma.instance gen` --- docs/installation/alpine_linux_en.md | 2 +- docs/installation/arch_linux_en.md | 2 +- docs/installation/debian_based_en.md | 2 +- docs/installation/debian_based_jp.md | 4 ++-- docs/installation/freebsd_en.md | 6 +++--- docs/installation/gentoo_en.md | 10 +++++----- docs/installation/netbsd_en.md | 4 ++-- docs/installation/openbsd_en.md | 2 +- 8 files changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/installation/alpine_linux_en.md b/docs/installation/alpine_linux_en.md index 7eb1718f2..c2dbd836d 100644 --- a/docs/installation/alpine_linux_en.md +++ b/docs/installation/alpine_linux_en.md @@ -117,7 +117,7 @@ cd /opt/pleroma sudo -Hu pleroma mix deps.get ``` -* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen` +* Generate the configuration: `sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen` * Answer with `yes` if it asks you to install `rebar3`. * This may take some time, because parts of pleroma get compiled first. * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`. diff --git a/docs/installation/arch_linux_en.md b/docs/installation/arch_linux_en.md index da78c3205..53afccc0f 100644 --- a/docs/installation/arch_linux_en.md +++ b/docs/installation/arch_linux_en.md @@ -92,7 +92,7 @@ cd /opt/pleroma sudo -Hu pleroma mix deps.get ``` -* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen` +* Generate the configuration: `sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen` * Answer with `yes` if it asks you to install `rebar3`. * This may take some time, because parts of pleroma get compiled first. * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`. diff --git a/docs/installation/debian_based_en.md b/docs/installation/debian_based_en.md index c5687a01e..a9cf86ab3 100644 --- a/docs/installation/debian_based_en.md +++ b/docs/installation/debian_based_en.md @@ -90,7 +90,7 @@ cd /opt/pleroma sudo -Hu pleroma mix deps.get ``` -* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen` +* Generate the configuration: `sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen` * Answer with `yes` if it asks you to install `rebar3`. * This may take some time, because parts of pleroma get compiled first. * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`. 
diff --git a/docs/installation/debian_based_jp.md b/docs/installation/debian_based_jp.md index c4bbd4780..e076e2308 100644 --- a/docs/installation/debian_based_jp.md +++ b/docs/installation/debian_based_jp.md @@ -89,7 +89,7 @@ sudo -Hu pleroma mix deps.get * コンフィギュレーションを生成します。 ``` -sudo -Hu pleroma mix pleroma.instance gen +sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen ``` * rebar3をインストールしてもよいか聞かれたら、yesを入力してください。 * このときにpleromaの一部がコンパイルされるため、この処理には時間がかかります。 @@ -103,7 +103,7 @@ sudo -Hu pleroma mv config/{generated_config.exs,prod.secret.exs} * 先程のコマンドで、すでに `config/setup_db.psql` というファイルが作られています。このファイルをもとに、データベースを作成します。 ``` -sudo -Hu pleroma mix pleroma.instance gen +sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen ``` * そして、データベースのマイグレーションを実行します。 diff --git a/docs/installation/freebsd_en.md b/docs/installation/freebsd_en.md index 2dc466eb8..f4f4d0db9 100644 --- a/docs/installation/freebsd_en.md +++ b/docs/installation/freebsd_en.md @@ -1,8 +1,8 @@ -# Installing on FreeBSD +# Installing on FreeBSD This document was written for FreeBSD 12.1, but should be work on future releases. -## Required software +## Required software This assumes the target system has `pkg(8)`. @@ -54,7 +54,7 @@ Configure Pleroma. Note that you need a domain name at this point: ``` $ cd /home/pleroma/pleroma $ mix deps.get # Enter "y" when asked to install Hex -$ mix pleroma.instance gen # You will be asked a few questions here. +$ MIX_ENV=prod mix pleroma.instance gen # You will be asked a few questions here. $ cp config/generated_config.exs config/prod.secret.exs ``` diff --git a/docs/installation/gentoo_en.md b/docs/installation/gentoo_en.md index f2380ab72..af68db70d 100644 --- a/docs/installation/gentoo_en.md +++ b/docs/installation/gentoo_en.md @@ -54,7 +54,7 @@ Gentoo quite pointedly does not come with a cron daemon installed, and as such i # emerge --ask dev-db/postgresql dev-lang/elixir dev-vcs/git www-servers/nginx app-crypt/certbot app-crypt/certbot-nginx dev-util/cmake sys-apps/file ``` -If you would not like to install the optional packages, remove them from this line. +If you would not like to install the optional packages, remove them from this line. If you're running this from a low-powered virtual machine, it should work though it will take some time. There were no issues on a VPS with a single core and 1GB of RAM; if you are using an even more limited device and run into issues, you can try creating a swapfile or use a more powerful machine running Gentoo to [cross build](https://wiki.gentoo.org/wiki/Cross_build_environment). If you have a wait ahead of you, now would be a good time to take a break, strech a bit, refresh your beverage of choice and/or get a snack, and reply to Arch users' posts with "I use Gentoo btw" as we do. @@ -79,12 +79,12 @@ The output from emerging postgresql should give you a command for initializing t ``` * Start postgres and enable the system service - + ```shell # /etc/init.d/postgresql-11 start # rc-update add postgresql-11 default ``` - + ### A note on licenses, the AGPL, and deployment procedures If you do not plan to make any modifications to your Pleroma instance, cloning directly from the main repo will get you what you need. 
However, if you plan on doing any contributions to upstream development, making changes or modifications to your instance, making custom themes, or want to play around--and let's be honest here, if you're using Gentoo that is most likely you--you will save yourself a lot of headache later if you take the time right now to fork the Pleroma repo and use that in the following section. @@ -135,7 +135,7 @@ pleroma$ mix deps.get * Generate the configuration: ```shell -pleroma$ mix pleroma.instance gen +pleroma$ MIX_ENV=prod mix pleroma.instance gen ``` * Answer with `yes` if it asks you to install `rebar3`. @@ -241,7 +241,7 @@ First, ensure that the command you will be installing into your crontab works. # /usr/bin/certbot renew --nginx ``` -Assuming not much time has passed since you got certbot working a few steps ago, you should get a message for all domains you installed certificates for saying `Cert not yet due for renewal`. +Assuming not much time has passed since you got certbot working a few steps ago, you should get a message for all domains you installed certificates for saying `Cert not yet due for renewal`. Now, run crontab as a superuser with `crontab -e` or `sudo crontab -e` as appropriate, and add the following line to your cron: diff --git a/docs/installation/netbsd_en.md b/docs/installation/netbsd_en.md index 233cf28b7..22cdd5691 100644 --- a/docs/installation/netbsd_en.md +++ b/docs/installation/netbsd_en.md @@ -1,6 +1,6 @@ # Installing on NetBSD -## Required software +## Required software pkgin should have been installed by the NetBSD installer if you selected the right options. If it isn't installed, install it using pkg_add. @@ -71,7 +71,7 @@ Configure Pleroma. Note that you need a domain name at this point: ``` $ cd /home/pleroma/pleroma $ mix deps.get -$ mix pleroma.instance gen # You will be asked a few questions here. +$ MIX_ENV=prod mix pleroma.instance gen # You will be asked a few questions here. ``` Since Postgres is configured, we can now initialize the database. There should diff --git a/docs/installation/openbsd_en.md b/docs/installation/openbsd_en.md index 0e1269ca5..017b37519 100644 --- a/docs/installation/openbsd_en.md +++ b/docs/installation/openbsd_en.md @@ -239,7 +239,7 @@ Enter a shell as \_pleroma (as root `su _pleroma -`) and enter pleroma's install Then follow the main installation guide: * run `mix deps.get` - * run `mix pleroma.instance gen` and enter your instance's information when asked + * run `MIX_ENV=prod mix pleroma.instance gen` and enter your instance's information when asked * copy config/generated\_config.exs to config/prod.secret.exs. The default values should be sufficient but you should edit it and check that everything seems OK. * exit your current shell back to a root one and run `psql -U postgres -f /home/_pleroma/pleroma/config/setup_db.psql` to setup the database. 
* return to a \_pleroma shell into pleroma's installation directory (`su _pleroma -;cd ~/pleroma`) and run `MIX_ENV=prod mix ecto.migrate` From 9b6b5ac196d9a2defb74902bffad67505b0de5c5 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 18 May 2021 15:33:33 -0500 Subject: [PATCH 171/174] Rename upload filter to AnalyzeMetadata --- CHANGELOG.md | 2 +- .../upload/filter/{set_meta.ex => analyze_metadata.ex} | 2 +- .../filter/{set_meta_test.exs => analyze_metadata_test.exs} | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) rename lib/pleroma/upload/filter/{set_meta.ex => analyze_metadata.ex} (95%) rename test/pleroma/upload/filter/{set_meta_test.exs => analyze_metadata_test.exs} (70%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 22eaa0b94..1a69414a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,7 +15,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. - Return OAuth token `id` (primary key) in POST `/oauth/token`. -- `SetMeta` upload filter for extracting attachment dimensions. +- `AnalyzeMetadata` upload filter for extracting attachment dimensions. - Attachment dimensions are federated when available. ### Fixed diff --git a/lib/pleroma/upload/filter/set_meta.ex b/lib/pleroma/upload/filter/analyze_metadata.ex similarity index 95% rename from lib/pleroma/upload/filter/set_meta.ex rename to lib/pleroma/upload/filter/analyze_metadata.ex index 81c48228a..8c23076d4 100644 --- a/lib/pleroma/upload/filter/set_meta.ex +++ b/lib/pleroma/upload/filter/analyze_metadata.ex @@ -2,7 +2,7 @@ # Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> # SPDX-License-Identifier: AGPL-3.0-only -defmodule Pleroma.Upload.Filter.SetMeta do +defmodule Pleroma.Upload.Filter.AnalyzeMetadata do @moduledoc """ Extracts metadata about the upload, such as width/height """ diff --git a/test/pleroma/upload/filter/set_meta_test.exs b/test/pleroma/upload/filter/analyze_metadata_test.exs similarity index 70% rename from test/pleroma/upload/filter/set_meta_test.exs rename to test/pleroma/upload/filter/analyze_metadata_test.exs index 650e527b4..6f0e432ef 100644 --- a/test/pleroma/upload/filter/set_meta_test.exs +++ b/test/pleroma/upload/filter/analyze_metadata_test.exs @@ -2,9 +2,9 @@ # Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/> # SPDX-License-Identifier: AGPL-3.0-only -defmodule Pleroma.Upload.Filter.SetMetaTest do +defmodule Pleroma.Upload.Filter.AnalyzeMetadataTest do use Pleroma.DataCase, async: true - alias Pleroma.Upload.Filter.SetMeta + alias Pleroma.Upload.Filter.AnalyzeMetadata test "adds the image dimensions" do upload = %Pleroma.Upload{ @@ -14,6 +14,6 @@ test "adds the image dimensions" do tempfile: Path.absname("test/fixtures/image.jpg") } - assert {:ok, :filtered, %{width: 1024, height: 768}} = SetMeta.filter(upload) + assert {:ok, :filtered, %{width: 1024, height: 768}} = AnalyzeMetadata.filter(upload) end end From 4ab3ef07d0f10815e7a91ba3143b7f97cd2a6058 Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 18 May 2021 15:51:11 -0500 Subject: [PATCH 172/174] Check AnalyzeMetadata filter's required commands eblurhash:magick uses "convert" Fetching image metadata uses "mogrify" --- lib/pleroma/application_requirements.ex | 4 +++- 1 file changed, 3 insertions(+), 
1 deletion(-) diff --git a/lib/pleroma/application_requirements.ex b/lib/pleroma/application_requirements.ex index c412dec5e..294eb3b6b 100644 --- a/lib/pleroma/application_requirements.ex +++ b/lib/pleroma/application_requirements.ex @@ -166,7 +166,9 @@ defp check_system_commands!(:ok) do filter_commands_statuses = [ check_filter(Pleroma.Upload.Filters.Exiftool, "exiftool"), check_filter(Pleroma.Upload.Filters.Mogrify, "mogrify"), - check_filter(Pleroma.Upload.Filters.Mogrifun, "mogrify") + check_filter(Pleroma.Upload.Filters.Mogrifun, "mogrify"), + check_filter(Pleroma.Upload.Filters.AnalyzeMetadata, "mogrify"), + check_filter(Pleroma.Upload.Filters.AnalyzeMetadata, "convert") ] preview_proxy_commands_status = From c64cbee26c7b78f9743b668724d4797faa6a942a Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 18 May 2021 16:28:21 -0500 Subject: [PATCH 173/174] Fixed checking for Upload Filter required commands --- CHANGELOG.md | 1 + lib/pleroma/application_requirements.ex | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1a69414a5..768405dd6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Fixed - Don't crash so hard when email settings are invalid. +- Checking activated Upload Filters for required commands. ## Unreleased (Patch) diff --git a/lib/pleroma/application_requirements.ex b/lib/pleroma/application_requirements.ex index 294eb3b6b..ee6ee9516 100644 --- a/lib/pleroma/application_requirements.ex +++ b/lib/pleroma/application_requirements.ex @@ -164,11 +164,11 @@ defp do_check_rum!(setting, migrate) do defp check_system_commands!(:ok) do filter_commands_statuses = [ - check_filter(Pleroma.Upload.Filters.Exiftool, "exiftool"), - check_filter(Pleroma.Upload.Filters.Mogrify, "mogrify"), - check_filter(Pleroma.Upload.Filters.Mogrifun, "mogrify"), - check_filter(Pleroma.Upload.Filters.AnalyzeMetadata, "mogrify"), - check_filter(Pleroma.Upload.Filters.AnalyzeMetadata, "convert") + check_filter(Pleroma.Upload.Filter.Exiftool, "exiftool"), + check_filter(Pleroma.Upload.Filter.Mogrify, "mogrify"), + check_filter(Pleroma.Upload.Filter.Mogrifun, "mogrify"), + check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "mogrify"), + check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "convert") ] preview_proxy_commands_status = From 2d7f6ce6fb047872083c2db6ad8b75a9032211fd Mon Sep 17 00:00:00 2001 From: Mark Felder <feld@feld.me> Date: Tue, 18 May 2021 16:46:51 -0500 Subject: [PATCH 174/174] Clarify AttachmentMetadata changes --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 768405dd6..898f8adb5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,8 +15,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. - Return OAuth token `id` (primary key) in POST `/oauth/token`. -- `AnalyzeMetadata` upload filter for extracting attachment dimensions. -- Attachment dimensions are federated when available. +- `AnalyzeMetadata` upload filter for extracting attachment dimensions and generating blurhashes. +- Attachment dimensions and blurhashes are federated when available. 
### Fixed - Don't crash so hard when email settings are invalid.
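
For reference, a minimal configuration sketch (not part of the patches themselves; the file location and surrounding filter list are illustrative) showing how an instance might enable the renamed `AnalyzeMetadata` filter so that dimensions and blurhashes are recorded at upload time:

```elixir
# Hypothetical config/prod.secret.exs fragment.
# AnalyzeMetadata shells out to ImageMagick: `mogrify` for image dimensions and,
# via eblurhash, `convert` for the blurhash (see PATCH 172/173), so ImageMagick
# must be installed for the startup requirement check to pass.
config :pleroma, Pleroma.Upload,
  filters: [
    Pleroma.Upload.Filter.Dedupe,
    Pleroma.Upload.Filter.AnalyzeMetadata
  ]
```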