# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
  require Pleroma.Constants

  import Mix.Pleroma
  import Ecto.Query

  import Pleroma.Search.Meilisearch,
    only: [meili_put: 2, meili_get: 1, meili_delete!: 1]
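
  # Usage (standard Mix task invocation derived from this module's name):
  #
  #   mix pleroma.search.meilisearch index                  # build the objects index
  #   mix pleroma.search.meilisearch clear                  # delete all indexed documents
  #   mix pleroma.search.meilisearch show-keys <master_key> # list the API keys
  #   mix pleroma.search.meilisearch stats                  # show index statistics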

  def run(["index"]) do
    start_pleroma()
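
    # Ask the running Meilisearch instance for its version so we can refuse to
    # index against releases that predate the ranking-rule syntax change.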
    meili_version =
      (
        {:ok, result} = meili_get("/version")

        result["pkgVersion"]
      )

    # The ranking-rule syntax changed in 0.25.0, but the changelog does not mention it
    if not Version.match?(meili_version, ">= 0.25.0") do
      raise "Meilisearch <0.25.0 is not supported"
    end
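
    # Configure the ranking rules for the objects index. "published:desc" is a
    # custom rule (field:direction syntax) so that, relevance being equal,
    # newer posts rank first; the rest are Meilisearch's built-in rules.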
    {:ok, _} =
      meili_put(
        "/indexes/objects/settings/ranking-rules",
        [
          "words",
          "proximity",
          "typo",
          "exactness",
          "attribute",
          "published:desc",
          "sort"
        ]
      )
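
    # Restrict full-text matching to the "content" attribute of each document.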
    {:ok, _} =
      meili_put(
        "/indexes/objects/settings/searchable-attributes",
        [
          "content"
        ]
      )

    shell_info("Created indices. Starting to insert posts.")
    chunk_size = Pleroma.Config.get([Pleroma.Search.Meilisearch, :initial_indexing_chunk_size])
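
    # Ecto's Repo.stream/2 only works inside a transaction, so the whole
    # indexing pass is wrapped in one, with the timeout disabled because a
    # full reindex can take a long time on large instances.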
    Pleroma.Repo.transaction(
      fn ->
        query =
          from(Pleroma.Object,
            # Only index public and unlisted posts which are notes and have some text
            where:
              fragment("data->>'type' = 'Note'") and
                (fragment("data->'to' \\? ?", ^Pleroma.Constants.as_public()) or
                   fragment("data->'cc' \\? ?", ^Pleroma.Constants.as_public())),
            order_by: [desc: fragment("data->'published'")]
          )

        count = query |> Pleroma.Repo.aggregate(:count, :data)
        shell_info("Entries to index: #{count}")
        Pleroma.Repo.stream(
          query,
          timeout: :infinity
        )
        |> Stream.map(&Pleroma.Search.Meilisearch.object_to_search_data/1)
        |> Stream.filter(fn o -> not is_nil(o) end)
        |> Stream.chunk_every(chunk_size)
        |> Stream.transform(0, fn objects, acc ->
          new_acc = acc + Enum.count(objects)

          # Reset to the beginning of the line and rewrite it
          IO.write("\r")
          IO.write("Indexed #{new_acc} entries")

          {[objects], new_acc}
        end)
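        # A successfully enqueued batch comes back as a map containing
        # "indexUid"; any other response body is reported as an indexing
        # failure, and transport errors are reported separately.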
        |> Stream.each(fn objects ->
          result =
            meili_put(
              "/indexes/objects/documents",
              objects
            )

          with {:ok, res} <- result do
            if not Map.has_key?(res, "indexUid") do
              shell_info("\nFailed to index: #{inspect(result)}")
            end
          else
            e -> shell_error("\nFailed to index due to network error: #{inspect(e)}")
          end
        end)
        |> Stream.run()
      end,
      timeout: :infinity
    )
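
    # Terminate the progress line that the indexing loop kept rewriting in place.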
    IO.write("\n")
  end
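
  # Delete every document from the objects index; the index itself and its
  # settings are left in place.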
  def run(["clear"]) do
    start_pleroma()

    meili_delete!("/indexes/objects/documents")
  end
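
  # Fetch the API keys from the /keys endpoint using the given master key and
  # print them; depending on the Meilisearch version a key carries either a
  # "name" or only a "description".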
  def run(["show-keys", master_key]) do
    start_pleroma()

    endpoint = Pleroma.Config.get([Pleroma.Search.Meilisearch, :url])

    {:ok, result} =
      Pleroma.HTTP.get(
        Path.join(endpoint, "/keys"),
        [{"Authorization", "Bearer #{master_key}"}]
      )

    decoded = Jason.decode!(result.body)

    if decoded["results"] do
      Enum.each(decoded["results"], fn
        %{"name" => name, "key" => key} ->
          shell_info("#{name}: #{key}")

        %{"description" => desc, "key" => key} ->
          shell_info("#{desc}: #{key}")
      end)
    else
      shell_error(
        "Error fetching the keys; check that the master key is correct: #{inspect(decoded)}"
      )
    end
  end
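
  # Print basic statistics for the objects index: how many documents are stored
  # and whether Meilisearch is still processing enqueued documents.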
  def run(["stats"]) do
    start_pleroma()

    {:ok, result} = meili_get("/indexes/objects/stats")
    shell_info("Number of entries: #{result["numberOfDocuments"]}")
    shell_info("Indexing? #{result["isIndexing"]}")
  end
end