2019-04-18 20:34:01 +00:00
|
|
|
# Pleroma: A lightweight social networking server
|
2020-03-03 22:44:49 +00:00
|
|
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
2019-04-18 20:34:01 +00:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
|
|
|
defmodule Mix.Tasks.Pleroma.Database do
|
2019-05-16 18:14:48 +00:00
|
|
|
alias Pleroma.Conversation
|
2020-12-26 19:20:55 +00:00
|
|
|
alias Pleroma.Hashtag
|
2020-05-27 21:40:51 +00:00
|
|
|
alias Pleroma.Maintenance
|
2020-05-27 22:17:06 +00:00
|
|
|
alias Pleroma.Object
|
2019-05-16 20:04:08 +00:00
|
|
|
alias Pleroma.Repo
|
|
|
|
alias Pleroma.User
|
2020-12-26 19:20:55 +00:00
|
|
|
|
2019-04-18 21:17:37 +00:00
|
|
|
require Logger
|
2019-07-29 02:43:19 +00:00
|
|
|
require Pleroma.Constants
|
2020-12-26 19:20:55 +00:00
|
|
|
|
2020-08-08 12:29:40 +00:00
|
|
|
import Ecto.Query
|
2019-06-19 23:05:19 +00:00
|
|
|
import Mix.Pleroma
|
2020-12-26 19:20:55 +00:00
|
|
|
|
2019-04-18 20:34:01 +00:00
|
|
|
use Mix.Task
|
|
|
|
|
|
|
|
@shortdoc "A collection of database related tasks"
|
2019-10-03 10:59:49 +00:00
|
|
|
@moduledoc File.read!("docs/administration/CLI_tasks/database.md")
|
2019-04-18 20:34:01 +00:00
|
|
|
|
|
|
|
def run(["remove_embedded_objects" | args]) do
  # Only `--vacuum` is recognized; any other flag or positional argument
  # fails the match on `{opts, [], []}` and aborts the task loudly.
  {opts, [], []} = OptionParser.parse(args, strict: [vacuum: :boolean])

  start_pleroma()
  Logger.info("Removing embedded objects")

  # Collapse each activity's embedded object map down to its bare AP id.
  # `safe_jsonb_set` is a SQL helper defined by a Pleroma migration.
  Repo.query!(
    "update activities set data = safe_jsonb_set(data, '{object}'::text[], data->'object'->'id') where data->'object'->>'id' is not null;",
    [],
    timeout: :infinity
  )

  # Optionally reclaim the disk space freed by the rewrite above.
  if opts[:vacuum] do
    Maintenance.vacuum("full")
  end
end
|
2019-05-16 18:14:48 +00:00
|
|
|
|
|
|
|
def run(["bump_all_conversations"]) do
  start_pleroma()

  # Delegates entirely to the Conversation context, which walks every
  # activity and refreshes the associated conversation records.
  Conversation.bump_for_all_activities()
end
|
2019-05-16 20:04:08 +00:00
|
|
|
|
|
|
|
def run(["update_users_following_followers_counts"]) do
  start_pleroma()

  # Recount follow relationships for every user. `Repo.stream/1` must run
  # inside a transaction, and the full scan can take a while, hence the
  # infinite timeout.
  #
  # Fix: the task name promises both counters, but the previous version
  # only refreshed `follower_count`, leaving `following_count` stale —
  # now both are recomputed per user.
  Repo.transaction(
    fn ->
      from(u in User, select: u)
      |> Repo.stream()
      |> Stream.each(fn user ->
        User.update_follower_count(user)
        User.update_following_count(user)
      end)
      |> Stream.run()
    end,
    timeout: :infinity
  )
end
|
2019-05-21 01:21:28 +00:00
|
|
|
|
|
|
|
def run(["prune_objects" | args]) do
  # Only `--vacuum` is accepted; anything else fails the match and aborts.
  {opts, [], []} = OptionParser.parse(args, strict: [vacuum: :boolean])

  start_pleroma()

  deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])

  Logger.info("Pruning objects older than #{deadline} days")

  # Cutoff timestamp: `deadline` days (86_400 s each) before now.
  time_deadline = NaiveDateTime.add(NaiveDateTime.utc_now(), -(deadline * 86_400))

  # Delete objects that are (1) publicly addressed, (2) older than the
  # cutoff, and (3) not authored on this instance (actor host differs
  # from our endpoint host).
  Object
  |> where(
    [o],
    fragment(
      "?->'to' \\? ? OR ?->'cc' \\? ?",
      o.data,
      ^Pleroma.Constants.as_public(),
      o.data,
      ^Pleroma.Constants.as_public()
    )
  )
  |> where([o], o.inserted_at < ^time_deadline)
  |> where(
    [o],
    fragment("split_part(?->>'actor', '/', 3) != ?", o.data, ^Pleroma.Web.Endpoint.host())
  )
  |> Repo.delete_all(timeout: :infinity)

  # Optionally reclaim the space freed by the mass delete.
  if opts[:vacuum] do
    Maintenance.vacuum("full")
  end
end
|
2019-08-10 18:47:40 +00:00
|
|
|
|
|
|
|
def run(["fix_likes_collections"]) do
  start_pleroma()

  # Select every object that carries a "likes" key; the broken ones store
  # a JSON map (an AS2 collection) there instead of a plain array.
  from(object in Object,
    where: fragment("(?)->>'likes' is not null", object.data),
    select: %{id: object.id, likes: fragment("(?)->>'likes'", object.data)}
  )
  |> Pleroma.Repo.chunk_stream(100, :batches)
  |> Stream.each(fn batch ->
    # Keep only the ids whose decoded "likes" value is a map — those are
    # the objects that need their likes reset.
    broken_ids =
      for record <- batch,
          record.likes |> Jason.decode!() |> is_map(),
          do: record.id

    # Overwrite the bad "likes" values with an empty JSON array in bulk.
    Object
    |> where([object], object.id in ^broken_ids)
    |> update([object],
      set: [
        data:
          fragment(
            "safe_jsonb_set(?, '{likes}', '[]'::jsonb, true)",
            object.data
          )
      ]
    )
    |> Repo.update_all([], timeout: :infinity)
  end)
  |> Stream.run()
end
|
2020-05-27 21:27:07 +00:00
|
|
|
|
2020-12-26 19:20:55 +00:00
|
|
|
# Migrates hashtags embedded in `object.data["tag"]` into the normalized
# `hashtags` / `hashtags_objects` tables, in batches of 100.
#
# Each object is processed in its own transaction; any failure rolls back
# with the object's id, and the ids of all failed objects in a batch are
# reported at the end of that batch. Safe to re-run: already-linked objects
# are skipped by the `left_join`/`is_nil` filter.
def run(["transfer_hashtags"]) do
  import Ecto.Query

  start_pleroma()

  Logger.info("Starting transferring object embedded hashtags to `hashtags` table...")

  # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out)
  # Select only objects that have a non-empty "tag" and no hashtag rows yet.
  from(
    object in Object,
    left_join: hashtag in assoc(object, :hashtags),
    where: is_nil(hashtag.id),
    where: fragment("(?)->>'tag' != '[]'", object.data),
    select: %{
      id: object.id,
      # "tag" is fetched as raw JSON text and decoded in Elixir below.
      tag: fragment("(?)->>'tag'", object.data)
    }
  )
  |> Repo.chunk_stream(100, :batches, timeout: :infinity)
  |> Stream.each(fn objects ->
    Logger.info("Processing #{length(objects)} objects starting from id #{hd(objects).id}...")

    # Collect the ids of objects whose transaction rolled back.
    failed_ids =
      objects
      |> Enum.map(fn object ->
        # Extract hashtag names from the decoded AS2 "tag" list.
        hashtags = Object.object_data_hashtags(%{"tag" => Jason.decode!(object.tag)})

        # One transaction per object: either all of its hashtag links are
        # created, or the object is reported as failed via rollback.
        Repo.transaction(fn ->
          with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do
            for hashtag_record <- hashtag_records do
              with {:ok, _} <-
                     Repo.query(
                       "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);",
                       [hashtag_record.id, object.id]
                     ) do
                nil
              else
                {:error, e} ->
                  error =
                    "ERROR: could not link object #{object.id} and hashtag " <>
                      "#{hashtag_record.id}: #{inspect(e)}"

                  Logger.error(error)
                  # Rollback aborts this object only; the batch continues.
                  Repo.rollback(object.id)
              end
            end

            # Transaction result on success: {:ok, object.id}.
            object.id
          else
            e ->
              error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}"
              Logger.error(error)
              Repo.rollback(object.id)
          end
        end)
      end)
      # Rolled-back transactions return {:error, object_id} — keep those ids.
      |> Enum.filter(&(elem(&1, 0) == :error))
      |> Enum.map(&elem(&1, 1))

    if Enum.any?(failed_ids) do
      Logger.error("ERROR: transfer_hashtags iteration failed for ids: #{inspect(failed_ids)}")
    end
  end)
  |> Stream.run()

  Logger.info("Done transferring hashtags. Please check logs to ensure no errors.")
end
|
|
|
|
|
2020-05-27 21:27:07 +00:00
|
|
|
def run(["vacuum", args]) do
  start_pleroma()

  # Forward the vacuum mode string (e.g. "full", as used by the other
  # tasks in this module) straight to the maintenance helper.
  Maintenance.vacuum(args)
end
|
2020-08-08 12:29:40 +00:00
|
|
|
|
|
|
|
def run(["ensure_expiration"]) do
  start_pleroma()

  # Retention window in days; defaults to one year when unset.
  days = Pleroma.Config.get([:mrf_activity_expiration, :days], 365)

  # Local Create-of-Note activities, joined to their object by AP id
  # (the activity stores the object either embedded or as a bare id string).
  Pleroma.Activity
  |> join(:inner, [a], o in Object,
    on:
      fragment(
        "(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
        o.data,
        a.data,
        a.data
      )
  )
  |> where(local: true)
  |> where([a], fragment("(? ->> 'type'::text) = 'Create'", a.data))
  |> where([_a, o], fragment("?->>'type' = 'Note'", o.data))
  |> Pleroma.Repo.chunk_stream(100, :batches)
  |> Stream.each(fn batch ->
    Enum.each(batch, fn activity ->
      # Expiry is anchored to the activity's insertion time, not "now".
      inserted_utc = DateTime.from_naive!(activity.inserted_at, "Etc/UTC")
      expires_at = Timex.shift(inserted_utc, days: days)

      # Schedule a background purge job for when the window elapses.
      Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
        activity_id: activity.id,
        expires_at: expires_at
      })
    end)
  end)
  |> Stream.run()
end
|
2019-04-18 20:34:01 +00:00
|
|
|
end
|