# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Database do
  alias Pleroma.Conversation
  alias Pleroma.Hashtag
  alias Pleroma.Maintenance
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User

  require Logger
  require Pleroma.Constants

  import Ecto.Query
  import Mix.Pleroma

  use Mix.Task

  @shortdoc "A collection of database related tasks"
  @moduledoc File.read!("docs/administration/CLI_tasks/database.md")

  # Rewrites activities that carry a full embedded `object` map so they
  # only keep the object's id, shrinking the activities table. Accepts
  # `--vacuum` to run a `VACUUM FULL` afterwards and reclaim disk space.
  def run(["remove_embedded_objects" | args]) do
    {options, [], []} =
      OptionParser.parse(
        args,
        strict: [
          vacuum: :boolean
        ]
      )

    start_pleroma()

    Logger.info("Removing embedded objects")

    Repo.query!(
      "update activities set data = safe_jsonb_set(data, '{object}'::text[], data->'object'->'id') where data->'object'->>'id' is not null;",
      [],
      timeout: :infinity
    )

    if Keyword.get(options, :vacuum) do
      Maintenance.vacuum("full")
    end
  end

  # Recomputes the `updated_at` ordering of every conversation based on
  # its activities. Delegates entirely to Conversation.
  def run(["bump_all_conversations"]) do
    start_pleroma()
    Conversation.bump_for_all_activities()
  end

  # Recounts followers/following for every user. Streams users inside a
  # transaction (Repo.stream/1 requires one) with no timeout so large
  # instances can finish.
  def run(["update_users_following_followers_counts"]) do
    start_pleroma()

    Repo.transaction(
      fn ->
        from(u in User, select: u)
        |> Repo.stream()
        |> Stream.each(&User.update_follower_count/1)
        |> Stream.run()
      end,
      timeout: :infinity
    )
  end

  # Deletes public remote objects older than the configured retention
  # window (`:instance, :remote_post_retention_days`). Accepts `--vacuum`
  # to run a `VACUUM FULL` afterwards.
  def run(["prune_objects" | args]) do
    {options, [], []} =
      OptionParser.parse(
        args,
        strict: [
          vacuum: :boolean
        ]
      )

    start_pleroma()

    deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])

    Logger.info("Pruning objects older than #{deadline} days")

    time_deadline =
      NaiveDateTime.utc_now()
      |> NaiveDateTime.add(-(deadline * 86_400))

    from(o in Object,
      where:
        fragment(
          # `?` is the jsonb "key exists" operator, escaped for Ecto.
          # Matches objects addressed to the public collection in `to` or `cc`.
          "?->'to' \\? ? OR ?->'cc' \\? ?",
          o.data,
          ^Pleroma.Constants.as_public(),
          o.data,
          ^Pleroma.Constants.as_public()
        ),
      where: o.inserted_at < ^time_deadline,
      # Keep local objects: compare the actor URL's host against ours.
      where:
        fragment("split_part(?->>'actor', '/', 3) != ?", o.data, ^Pleroma.Web.Endpoint.host())
    )
    |> Repo.delete_all(timeout: :infinity)

    if Keyword.get(options, :vacuum) do
      Maintenance.vacuum("full")
    end
  end

  # Historically some objects stored their `likes` collection as a map
  # instead of a list. This resets any map-shaped `likes` to `[]`,
  # processing objects in batches of 100.
  def run(["fix_likes_collections"]) do
    start_pleroma()

    from(object in Object,
      where: fragment("(?)->>'likes' is not null", object.data),
      select: %{id: object.id, likes: fragment("(?)->>'likes'", object.data)}
    )
    |> Repo.chunk_stream(100, :batches)
    |> Stream.each(fn objects ->
      # Only objects whose decoded `likes` is a map are broken.
      ids =
        objects
        |> Enum.filter(fn object -> object.likes |> Jason.decode!() |> is_map() end)
        |> Enum.map(& &1.id)

      Object
      |> where([object], object.id in ^ids)
      |> update([object],
        set: [
          data:
            fragment(
              "safe_jsonb_set(?, '{likes}', '[]'::jsonb, true)",
              object.data
            )
        ]
      )
      |> Repo.update_all([], timeout: :infinity)
    end)
    |> Stream.run()
  end

  # Backfills the hashtags/hashtags_objects tables from the embedded
  # `tag` arrays of objects that have no hashtag rows yet. Processes
  # objects in descending-id batches of 100 and logs each chunk's range.
  def run(["transfer_hashtags"]) do
    start_pleroma()

    from(
      object in Object,
      left_join: hashtag in assoc(object, :hashtags),
      # Only objects not yet linked to any hashtag record.
      where: is_nil(hashtag.id),
      where: fragment("(?)->>'tag' != '[]'", object.data),
      select: %{
        id: object.id,
        inserted_at: object.inserted_at,
        tag: fragment("(?)->>'tag'", object.data)
      },
      order_by: [desc: object.id]
    )
    |> Repo.chunk_stream(100, :batches)
    |> Stream.each(fn objects ->
      chunk_start = List.first(objects)
      chunk_end = List.last(objects)

      Logger.info(
        "transfer_hashtags: " <>
          "#{chunk_start.id} (#{chunk_start.inserted_at}) -- " <>
          "#{chunk_end.id} (#{chunk_end.inserted_at})"
      )

      Enum.each(objects, fn object ->
        # `tag` may mix hashtag strings with emoji/mention maps;
        # only the strings are hashtags.
        hashtags =
          object.tag
          |> Jason.decode!()
          |> Enum.filter(&is_bitstring/1)

        with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do
          Repo.transaction(fn ->
            for hashtag_record <- hashtag_records do
              # Parameterized insert ($1/$2) instead of string
              # interpolation, so values are never spliced into SQL.
              with {:error, _} <-
                     Ecto.Adapters.SQL.query(
                       Repo,
                       "insert into hashtags_objects(hashtag_id, object_id) values ($1, $2);",
                       [hashtag_record.id, object.id]
                     ) do
                Logger.warn(
                  "ERROR: could not link object #{object.id} and hashtag #{hashtag_record.id}"
                )
              end
            end
          end)
        else
          e -> Logger.warn("ERROR: could not process object #{object.id}: #{inspect(e)}")
        end
      end)
    end)
    |> Stream.run()
  end

  # Runs `VACUUM` with the given mode (e.g. "full" or "analyze").
  def run(["vacuum", args]) do
    start_pleroma()

    Maintenance.vacuum(args)
  end

  # Schedules expiration jobs for every local Create/Note activity so
  # posts expire after the configured number of days (default 365).
  def run(["ensure_expiration"]) do
    start_pleroma()

    days = Pleroma.Config.get([:mrf_activity_expiration, :days], 365)

    Pleroma.Activity
    |> join(:inner, [a], o in Object,
      on:
        fragment(
          # Match the activity's object reference (embedded map or bare id)
          # against the object's id.
          "(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
          o.data,
          a.data,
          a.data
        )
    )
    |> where(local: true)
    |> where([a], fragment("(? ->> 'type'::text) = 'Create'", a.data))
    |> where([_a, o], fragment("?->>'type' = 'Note'", o.data))
    |> Repo.chunk_stream(100, :batches)
    |> Stream.each(fn activities ->
      Enum.each(activities, fn activity ->
        # Expiry is counted from the activity's insertion time.
        expires_at =
          activity.inserted_at
          |> DateTime.from_naive!("Etc/UTC")
          |> Timex.shift(days: days)

        Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
          activity_id: activity.id,
          expires_at: expires_at
        })
      end)
    end)
    |> Stream.run()
  end
end