pleroma

My custom branch(es) on git.pleroma.social/pleroma/pleroma
git clone https://hacktivis.me/git/pleroma.git

meilisearch.ex (3937B)


# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
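  @moduledoc """
  Mix task for managing the Meilisearch index of posts.

  Subcommands (matching the `run/1` clauses below):

    * `index`: configure the `objects` index and insert all public/unlisted notes
    * `clear`: delete all documents from the `objects` index
    * `show-keys <master_key>`: list the API keys known to the Meilisearch instance
    * `stats`: print the number of indexed documents and whether indexing is in progress

  Intended to be invoked as `mix pleroma.search.meilisearch <subcommand>`.
  """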
  require Pleroma.Constants

  import Mix.Pleroma
  import Ecto.Query

  import Pleroma.Search.Meilisearch,
    only: [meili_post: 2, meili_put: 2, meili_get: 1, meili_delete: 1]
  def run(["index"]) do
    start_pleroma()
    Pleroma.HTML.compile_scrubbers()

    meili_version =
      (
        {:ok, result} = meili_get("/version")

        result["pkgVersion"]
      )

    # The ranking rule syntax was changed but nothing about that is mentioned in the changelog
    if not Version.match?(meili_version, ">= 0.25.0") do
      raise "Meilisearch <0.25.0 not supported"
    end
    {:ok, _} =
      meili_post(
        "/indexes/objects/settings/ranking-rules",
        [
          "published:desc",
          "words",
          "exactness",
          "proximity",
          "typo",
          "attribute",
          "sort"
        ]
      )

    {:ok, _} =
      meili_post(
        "/indexes/objects/settings/searchable-attributes",
        [
          "content"
        ]
      )

    IO.puts("Created indices. Starting to insert posts.")
    chunk_size = Pleroma.Config.get([Pleroma.Search.Meilisearch, :initial_indexing_chunk_size])

    Pleroma.Repo.transaction(
      fn ->
        query =
          from(Pleroma.Object,
            # Only index public and unlisted posts which are notes and have some text
            where:
              fragment("data->>'type' = 'Note'") and
                (fragment("data->'to' \\? ?", ^Pleroma.Constants.as_public()) or
                   fragment("data->'cc' \\? ?", ^Pleroma.Constants.as_public())),
            order_by: [desc: fragment("data->'published'")]
          )

        count = query |> Pleroma.Repo.aggregate(:count, :data)
        IO.puts("Entries to index: #{count}")

        Pleroma.Repo.stream(
          query,
          timeout: :infinity
        )
        |> Stream.map(&Pleroma.Search.Meilisearch.object_to_search_data/1)
        |> Stream.filter(fn o -> not is_nil(o) end)
        |> Stream.chunk_every(chunk_size)
        |> Stream.transform(0, fn objects, acc ->
          new_acc = acc + Enum.count(objects)

          # Reset to the beginning of the line and rewrite it
          IO.write("\r")
          IO.write("Indexed #{new_acc} entries")

          {[objects], new_acc}
        end)
        |> Stream.each(fn objects ->
          result =
            meili_put(
              "/indexes/objects/documents",
              objects
            )

          with {:ok, res} <- result do
            if not Map.has_key?(res, "uid") do
              IO.puts("\nFailed to index: #{inspect(result)}")
            end
          else
            e -> IO.puts("\nFailed to index due to network error: #{inspect(e)}")
          end
        end)
        |> Stream.run()
      end,
      timeout: :infinity
    )

    IO.write("\n")
  end
  def run(["clear"]) do
    start_pleroma()

    meili_delete("/indexes/objects/documents")
  end

  def run(["show-keys", master_key]) do
    start_pleroma()

    endpoint = Pleroma.Config.get([Pleroma.Search.Meilisearch, :url])

    {:ok, result} =
      Pleroma.HTTP.get(
        Path.join(endpoint, "/keys"),
        [{"Authorization", "Bearer #{master_key}"}]
      )

    decoded = Jason.decode!(result.body)

    if decoded["results"] do
      Enum.each(decoded["results"], fn %{"description" => desc, "key" => key} ->
        IO.puts("#{desc}: #{key}")
      end)
    else
      IO.puts("Error fetching the keys, check the master key is correct: #{inspect(decoded)}")
    end
  end

  def run(["stats"]) do
    start_pleroma()

    {:ok, result} = meili_get("/indexes/objects/stats")
    IO.puts("Number of entries: #{result["numberOfDocuments"]}")
    IO.puts("Indexing? #{result["isIndexing"]}")
  end
end
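
The tasks read their settings from the Pleroma.Search.Meilisearch config group (:initial_indexing_chunk_size and :url above). A minimal sketch of that configuration, assuming the usual Pleroma import Config layout; the endpoint and chunk size values here are illustrative placeholders, not defaults taken from this file:

import Config

# Illustrative values only: point :url at your Meilisearch instance and tune
# :initial_indexing_chunk_size to how many documents to send per indexing request.
config :pleroma, Pleroma.Search.Meilisearch,
  url: "http://127.0.0.1:7700/",
  initial_indexing_chunk_size: 100_000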