pleroma

My custom branch(es) on git.pleroma.social/pleroma/pleroma
commit: ca24ad2a2babed0700a38effb74910008d58f6ea
parent: e99bab8f279b419a6b54efa2f5397a78970c10ee
Author: kaniini <nenolod@gmail.com>
Date:   Fri, 30 Nov 2018 18:15:44 +0000

Merge branch 'feature/reverse-proxy' into 'develop'

reverse proxy / uploads

See merge request pleroma/pleroma!470
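
For orientation before the diff: a minimal sketch (not part of the commit itself) of how the reworked upload pipeline is called after this merge. It follows the new `Pleroma.Upload.store/2` options and the test changes included below; the fixture path is borrowed from test/upload_test.exs.

    file = %Plug.Upload{
      content_type: "image/jpg",
      path: Path.absname("test/fixtures/image_tmp.jpg"),
      filename: "image.jpg"
    }

    # Deduplication is now an upload filter instead of a boolean argument.
    {:ok, data} = Pleroma.Upload.store(file, filters: [Pleroma.Upload.Filter.Dedupe])
    %{"url" => [%{"href" => url}]} = data

    # Avatar/banner/background uploads take a :type option and derive their
    # size limit and activity type from it, instead of an explicit size_limit.
    {:ok, _object} = Pleroma.Web.ActivityPub.ActivityPub.upload(file, type: :avatar)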

Diffstat:

M  .gitignore                                               |   6 ++++--
M  config/config.exs                                        |  20 +++++++++++---------
M  config/config.md                                         |  15 ++++++++++++---
M  config/test.exs                                          |   2 +-
M  installation/pleroma.nginx                               |   4 +++-
A  lib/mix/tasks/migrate_local_uploads.ex                   |  97 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
M  lib/pleroma/application.ex                               |   5 +++++
A  lib/pleroma/mime.ex                                      | 108 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A  lib/pleroma/plugs/uploaded_media.ex                      |  78 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A  lib/pleroma/reverse_proxy.ex                             | 343 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
M  lib/pleroma/upload.ex                                    | 338 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++------------------------------------
A  lib/pleroma/upload/filter.ex                             |  35 +++++++++++++++++++++++++++++++++++
A  lib/pleroma/upload/filter/dedupe.ex                      |  10 ++++++++++
A  lib/pleroma/upload/filter/mogrifun.ex                    |  60 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A  lib/pleroma/upload/filter/mogrify.ex                     |  37 +++++++++++++++++++++++++++++++++++++
M  lib/pleroma/uploaders/local.ex                           |  55 +++++++++++++++++++------------------------------------
M  lib/pleroma/uploaders/mdii.ex                            |  17 +++++++++++------
M  lib/pleroma/uploaders/s3.ex                              |  56 +++++++++++++++++++++++++++++++-------------------------
M  lib/pleroma/uploaders/swift/swift.ex                     |   2 +-
M  lib/pleroma/uploaders/swift/uploader.ex                  |  13 +++++++++----
M  lib/pleroma/uploaders/uploader.ex                        |  42 +++++++++++++++++++++++++++++++-----------
M  lib/pleroma/web/activity_pub/activity_pub.ex             |   6 ++----
M  lib/pleroma/web/endpoint.ex                              |   2 +-
M  lib/pleroma/web/mastodon_api/mastodon_api_controller.ex  |  25 +++++--------------------
M  lib/pleroma/web/media_proxy/controller.ex                | 135 ++++++++++----------------------------------------------------------------------
M  lib/pleroma/web/media_proxy/media_proxy.ex               |  20 +++++++++++++++++---
M  lib/pleroma/web/twitter_api/twitter_api.ex               |   2 +-
M  lib/pleroma/web/twitter_api/twitter_api_controller.ex    |  18 +++---------------
M  test/media_proxy_test.exs                                |  17 +++++++++++++++++
M  test/support/httpoison_mock.ex                           |   2 ++
M  test/upload_test.exs                                     |  70 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++---------
31 files changed, 1217 insertions(+), 423 deletions(-)
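
Taken together, the config/config.exs and config/config.md hunks below leave the upload and media-proxy configuration looking roughly like this (a consolidated sketch of the new defaults, not an extra file in the commit):

    use Mix.Config

    config :pleroma, Pleroma.Upload,
      uploader: Pleroma.Uploaders.Local,
      filters: [],
      proxy_remote: false,
      proxy_opts: []

    config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads"

    config :pleroma, :media_proxy,
      enabled: false,
      # base_url: "https://cache.pleroma.social",
      proxy_opts: [
        # inline_content_types: [] | false | true,
        # http: [:insecure]
      ]

The old `strip_exif` and `dedupe_media` booleans are deprecated; the equivalent behaviour now comes from adding `Pleroma.Upload.Filter.Mogrify` and `Pleroma.Upload.Filter.Dedupe` to `filters`.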

diff --git a/.gitignore b/.gitignore @@ -6,6 +6,9 @@ /uploads /test/uploads /.elixir_ls +/test/fixtures/test_tmp.txt +/test/fixtures/image_tmp.jpg +/doc # Prevent committing custom emojis /priv/static/emoji/custom/* @@ -28,4 +31,4 @@ erl_crash.dump .env # Editor config -/.vscode- \ No newline at end of file +/.vscode diff --git a/config/config.exs b/config/config.exs @@ -10,18 +10,18 @@ config :pleroma, ecto_repos: [Pleroma.Repo] config :pleroma, Pleroma.Repo, types: Pleroma.PostgresTypes +# Upload configuration config :pleroma, Pleroma.Upload, uploader: Pleroma.Uploaders.Local, - strip_exif: false + filters: [], + proxy_remote: false, + proxy_opts: [] -config :pleroma, Pleroma.Uploaders.Local, - uploads: "uploads", - uploads_url: "{{base_url}}/media/{{file}}" +config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads" config :pleroma, Pleroma.Uploaders.S3, bucket: nil, - public_endpoint: "https://s3.amazonaws.com", - force_media_proxy: false + public_endpoint: "https://s3.amazonaws.com" config :pleroma, Pleroma.Uploaders.MDII, cgi: "https://mdii.sakura.ne.jp/mdii-post.cgi", @@ -150,9 +150,11 @@ config :pleroma, :mrf_simple, config :pleroma, :media_proxy, enabled: false, - redirect_on_failure: true - -# base_url: "https://cache.pleroma.social" + # base_url: "https://cache.pleroma.social", + proxy_opts: [ + # inline_content_types: [] | false | true, + # http: [:insecure] + ] config :pleroma, :chat, enabled: true diff --git a/config/config.md b/config/config.md @@ -5,11 +5,19 @@ If you run Pleroma with ``MIX_ENV=prod`` the file is ``prod.secret.exs``, otherw ## Pleroma.Upload * `uploader`: Select which `Pleroma.Uploaders` to use -* `strip_exif`: boolean, uses ImageMagick(!) to strip exif. +* `filters`: List of `Pleroma.Upload.Filter` to use. +* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host. +* `proxy_remote`: If you're using a remote uploader, Pleroma will proxy media requests instead of redirecting to it. +* `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation. + +Note: `strip_exif` has been replaced by `Pleroma.Upload.Filter.Mogrify`. ## Pleroma.Uploaders.Local * `uploads`: Which directory to store the user-uploads in, relative to pleroma’s working directory -* `uploads_url`: The URL to access a user-uploaded file, ``{{base_url}}`` is replaced to the instance URL and ``{{file}}`` to the filename. Useful when you want to proxy the media files via another host. + +## Pleroma.Upload.Filter.Mogrify + +* `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", {"impode", "1"}]`. ## :uri_schemes * `valid_schemes`: List of the scheme part that is considered valid to be an URL @@ -68,7 +76,8 @@ This section is used to configure Pleroma-FE, unless ``:managed_config`` in ``:i ## :media_proxy * `enabled`: Enables proxying of remote media to the instance’s proxy -* `redirect_on_failure`: Use the original URL when Media Proxy fails to get it +* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts. +* `proxy_opts`: All options defined in `Pleroma.ReverseProxy` documentation, defaults to `[max_body_length: (25*1_048_576)]`. 
## :gopher * `enabled`: Enables the gopher interface diff --git a/config/test.exs b/config/test.exs @@ -9,7 +9,7 @@ config :pleroma, Pleroma.Web.Endpoint, # Print only warnings and errors during test config :logger, level: :warn -config :pleroma, Pleroma.Upload, uploads: "test/uploads" +config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads" # Configure your database config :pleroma, Pleroma.Repo, diff --git a/installation/pleroma.nginx b/installation/pleroma.nginx @@ -70,10 +70,12 @@ server { client_max_body_size 16m; } - location /proxy { + location ~ ^/(media|proxy) { proxy_cache pleroma_media_cache; proxy_cache_lock on; proxy_ignore_client_abort on; + proxy_buffering off; + chunked_transfer_encoding on; proxy_pass http://localhost:4000; } } diff --git a/lib/mix/tasks/migrate_local_uploads.ex b/lib/mix/tasks/migrate_local_uploads.ex @@ -0,0 +1,97 @@ +defmodule Mix.Tasks.MigrateLocalUploads do + use Mix.Task + import Mix.Ecto + alias Pleroma.{Upload, Uploaders.Local, Uploaders.S3} + require Logger + + @log_every 50 + @shortdoc "Migrate uploads from local to remote storage" + + def run([target_uploader | args]) do + delete? = Enum.member?(args, "--delete") + Application.ensure_all_started(:pleroma) + + local_path = Pleroma.Config.get!([Local, :uploads]) + uploader = Module.concat(Pleroma.Uploaders, target_uploader) + + unless Code.ensure_loaded?(uploader) do + raise("The uploader #{inspect(uploader)} is not an existing/loaded module.") + end + + target_enabled? = Pleroma.Config.get([Upload, :uploader]) == uploader + + unless target_enabled? do + Pleroma.Config.put([Upload, :uploader], uploader) + end + + Logger.info("Migrating files from local #{local_path} to #{to_string(uploader)}") + + if delete? do + Logger.warn( + "Attention: uploaded files will be deleted, hope you have backups! 
(--delete ; cancel with ^C)" + ) + + :timer.sleep(:timer.seconds(5)) + end + + uploads = + File.ls!(local_path) + |> Enum.map(fn id -> + root_path = Path.join(local_path, id) + + cond do + File.dir?(root_path) -> + files = for file <- File.ls!(root_path), do: {id, file, Path.join([root_path, file])} + + case List.first(files) do + {id, file, path} -> + {%Pleroma.Upload{id: id, name: file, path: id <> "/" <> file, tempfile: path}, + root_path} + + _ -> + nil + end + + File.exists?(root_path) -> + file = Path.basename(id) + [hash, ext] = String.split(id, ".") + {%Pleroma.Upload{id: hash, name: file, path: file, tempfile: root_path}, root_path} + + true -> + nil + end + end) + |> Enum.filter(& &1) + + total_count = length(uploads) + Logger.info("Found #{total_count} uploads") + + uploads + |> Task.async_stream( + fn {upload, root_path} -> + case Upload.store(upload, uploader: uploader, filters: [], size_limit: nil) do + {:ok, _} -> + if delete?, do: File.rm_rf!(root_path) + Logger.debug("uploaded: #{inspect(upload.path)} #{inspect(upload)}") + :ok + + error -> + Logger.error("failed to upload #{inspect(upload.path)}: #{inspect(error)}") + end + end, + timeout: 150_000 + ) + |> Stream.chunk_every(@log_every) + |> Enum.reduce(0, fn done, count -> + count = count + length(done) + Logger.info("Uploaded #{count}/#{total_count} files") + count + end) + + Logger.info("Done!") + end + + def run(_) do + Logger.error("Usage: migrate_local_uploads S3|Swift [--delete]") + end +end diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex @@ -8,6 +8,11 @@ defmodule Pleroma.Application do def version, do: @version def named_version(), do: @name <> " " <> @version + def user_agent() do + info = "#{Pleroma.Web.base_url()} <#{Pleroma.Config.get([:instance, :email], "")}>" + named_version() <> "; " <> info + end + # See http://elixir-lang.org/docs/stable/elixir/Application.html # for more information on OTP Applications @env Mix.env() diff --git a/lib/pleroma/mime.ex b/lib/pleroma/mime.ex @@ -0,0 +1,108 @@ +defmodule Pleroma.MIME do + @moduledoc """ + Returns the mime-type of a binary and optionally a normalized file-name. 
+ """ + @default "application/octet-stream" + @read_bytes 31 + + @spec file_mime_type(String.t()) :: + {:ok, content_type :: String.t(), filename :: String.t()} | {:error, any()} | :error + def file_mime_type(path, filename) do + with {:ok, content_type} <- file_mime_type(path), + filename <- fix_extension(filename, content_type) do + {:ok, content_type, filename} + end + end + + @spec file_mime_type(String.t()) :: {:ok, String.t()} | {:error, any()} | :error + def file_mime_type(filename) do + File.open(filename, [:read], fn f -> + check_mime_type(IO.binread(f, @read_bytes)) + end) + end + + def bin_mime_type(binary, filename) do + with {:ok, content_type} <- bin_mime_type(binary), + filename <- fix_extension(filename, content_type) do + {:ok, content_type, filename} + end + end + + @spec bin_mime_type(binary()) :: {:ok, String.t()} | :error + def bin_mime_type(<<head::binary-size(@read_bytes), _::binary>>) do + {:ok, check_mime_type(head)} + end + + def mime_type(<<_::binary>>), do: {:ok, @default} + + def bin_mime_type(_), do: :error + + defp fix_extension(filename, content_type) do + parts = String.split(filename, ".") + + new_filename = + if length(parts) > 1 do + Enum.drop(parts, -1) |> Enum.join(".") + else + Enum.join(parts) + end + + cond do + content_type == "application/octet-stream" -> + filename + + ext = List.first(MIME.extensions(content_type)) -> + new_filename <> "." <> ext + + true -> + Enum.join([new_filename, String.split(content_type, "/") |> List.last()], ".") + end + end + + defp check_mime_type(<<0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, _::binary>>) do + "image/png" + end + + defp check_mime_type(<<0x47, 0x49, 0x46, 0x38, _, 0x61, _::binary>>) do + "image/gif" + end + + defp check_mime_type(<<0xFF, 0xD8, 0xFF, _::binary>>) do + "image/jpeg" + end + + defp check_mime_type(<<0x1A, 0x45, 0xDF, 0xA3, _::binary>>) do + "video/webm" + end + + defp check_mime_type(<<0x00, 0x00, 0x00, _, 0x66, 0x74, 0x79, 0x70, _::binary>>) do + "video/mp4" + end + + defp check_mime_type(<<0x49, 0x44, 0x33, _::binary>>) do + "audio/mpeg" + end + + defp check_mime_type(<<255, 251, _, 68, 0, 0, 0, 0, _::binary>>) do + "audio/mpeg" + end + + defp check_mime_type( + <<0x4F, 0x67, 0x67, 0x53, 0x00, 0x02, 0x00, 0x00, _::size(160), 0x80, 0x74, 0x68, 0x65, + 0x6F, 0x72, 0x61, _::binary>> + ) do + "video/ogg" + end + + defp check_mime_type(<<0x4F, 0x67, 0x67, 0x53, 0x00, 0x02, 0x00, 0x00, _::binary>>) do + "audio/ogg" + end + + defp check_mime_type(<<0x52, 0x49, 0x46, 0x46, _::binary>>) do + "audio/wav" + end + + defp check_mime_type(_) do + @default + end +end diff --git a/lib/pleroma/plugs/uploaded_media.ex b/lib/pleroma/plugs/uploaded_media.ex @@ -0,0 +1,78 @@ +defmodule Pleroma.Plugs.UploadedMedia do + @moduledoc """ + """ + + import Plug.Conn + require Logger + + @behaviour Plug + # no slashes + @path "media" + @cache_control %{ + default: "public, max-age=1209600", + error: "public, must-revalidate, max-age=160" + } + + def init(_opts) do + static_plug_opts = + [] + |> Keyword.put(:from, "__unconfigured_media_plug") + |> Keyword.put(:at, "/__unconfigured_media_plug") + |> Plug.Static.init() + + %{static_plug_opts: static_plug_opts} + end + + def call(conn = %{request_path: <<"/", @path, "/", file::binary>>}, opts) do + config = Pleroma.Config.get([Pleroma.Upload]) + + with uploader <- Keyword.fetch!(config, :uploader), + proxy_remote = Keyword.get(config, :proxy_remote, false), + {:ok, get_method} <- uploader.get_file(file) do + get_media(conn, get_method, proxy_remote, opts) + else + _ 
-> + conn + |> send_resp(500, "Failed") + |> halt() + end + end + + def call(conn, _opts), do: conn + + defp get_media(conn, {:static_dir, directory}, _, opts) do + static_opts = + Map.get(opts, :static_plug_opts) + |> Map.put(:at, [@path]) + |> Map.put(:from, directory) + + conn = Plug.Static.call(conn, static_opts) + + if conn.halted do + conn + else + conn + |> send_resp(404, "Not found") + |> halt() + end + end + + defp get_media(conn, {:url, url}, true, _) do + conn + |> Pleroma.ReverseProxy.call(url, Pleroma.Config.get([Pleroma.Upload, :proxy_opts], [])) + end + + defp get_media(conn, {:url, url}, _, _) do + conn + |> Phoenix.Controller.redirect(external: url) + |> halt() + end + + defp get_media(conn, unknown, _, _) do + Logger.error("#{__MODULE__}: Unknown get startegy: #{inspect(unknown)}") + + conn + |> send_resp(500, "Internal Error") + |> halt() + end +end diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy.ex @@ -0,0 +1,343 @@ +defmodule Pleroma.ReverseProxy do + @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since if-unmodified-since if-none-match if-range range) + @resp_cache_headers ~w(etag date last-modified cache-control) + @keep_resp_headers @resp_cache_headers ++ + ~w(content-type content-disposition content-encoding content-range accept-ranges vary) + @default_cache_control_header "public, max-age=1209600" + @valid_resp_codes [200, 206, 304] + @max_read_duration :timer.seconds(30) + @max_body_length :infinity + @methods ~w(GET HEAD) + + @moduledoc """ + A reverse proxy. + + Pleroma.ReverseProxy.call(conn, url, options) + + It is not meant to be added into a plug pipeline, but to be called from another plug or controller. + + Supports `#{inspect(@methods)}` HTTP methods, and only allows `#{inspect(@valid_resp_codes)}` status codes. + + Responses are chunked to the client while downloading from the upstream. + + Some request / responses headers are preserved: + + * request: `#{inspect(@keep_req_headers)}` + * response: `#{inspect(@keep_resp_headers)}` + + If no caching headers (`#{inspect(@resp_cache_headers)}`) are returned by upstream, `cache-control` will be + set to `#{inspect(@default_cache_control_header)}`. + + Options: + + * `redirect_on_failure` (default `false`). Redirects the client to the real remote URL if there's any HTTP + errors. Any error during body processing will not be redirected as the response is chunked. This may expose + remote URL, clients IPs, …. + + * `max_body_length` (default `#{inspect(@max_body_length)}`): limits the content length to be approximately the + specified length. It is validated with the `content-length` header and also verified when proxying. + + * `max_read_duration` (default `#{inspect(@max_read_duration)}` ms): the total time the connection is allowed to + read from the remote upstream. + + * `inline_content_types`: + * `true` will not alter `content-disposition` (up to the upstream), + * `false` will add `content-disposition: attachment` to any request, + * a list of whitelisted content types + + * `keep_user_agent` will forward the client's user-agent to the upstream. This may be useful if the upstream is + doing content transformation (encoding, …) depending on the request. + + * `req_headers`, `resp_headers` additional headers. + + * `http`: options for [hackney](https://github.com/benoitc/hackney). 
+ + """ + @hackney Application.get_env(:pleroma, :hackney, :hackney) + @httpoison Application.get_env(:pleroma, :httpoison, HTTPoison) + + @default_hackney_options [{:follow_redirect, true}] + + @inline_content_types [ + "image/gif", + "image/jpeg", + "image/jpg", + "image/png", + "image/svg+xml", + "audio/mpeg", + "audio/mp3", + "video/webm", + "video/mp4", + "video/quicktime" + ] + + require Logger + import Plug.Conn + + @type option() :: + {:keep_user_agent, boolean} + | {:max_read_duration, :timer.time() | :infinity} + | {:max_body_length, non_neg_integer() | :infinity} + | {:http, []} + | {:req_headers, [{String.t(), String.t()}]} + | {:resp_headers, [{String.t(), String.t()}]} + | {:inline_content_types, boolean() | [String.t()]} + | {:redirect_on_failure, boolean()} + + @spec call(Plug.Conn.t(), url :: String.t(), [option()]) :: Plug.Conn.t() + def call(conn = %{method: method}, url, opts \\ []) when method in @methods do + hackney_opts = + @default_hackney_options + |> Keyword.merge(Keyword.get(opts, :http, [])) + |> @httpoison.process_request_options() + + req_headers = build_req_headers(conn.req_headers, opts) + + opts = + if filename = Pleroma.Web.MediaProxy.filename(url) do + Keyword.put_new(opts, :attachment_name, filename) + else + opts + end + + with {:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts), + :ok <- header_length_constraint(headers, Keyword.get(opts, :max_body_length)) do + response(conn, client, url, code, headers, opts) + else + {:ok, code, headers} -> + head_response(conn, url, code, headers, opts) + |> halt() + + {:error, {:invalid_http_response, code}} -> + Logger.error("#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{code}") + + conn + |> error_or_redirect( + url, + code, + "Request failed: " <> Plug.Conn.Status.reason_phrase(code), + opts + ) + |> halt() + + {:error, error} -> + Logger.error("#{__MODULE__}: request to #{inspect(url)} failed: #{inspect(error)}") + + conn + |> error_or_redirect(url, 500, "Request failed", opts) + |> halt() + end + end + + def call(conn, _, _) do + conn + |> send_resp(400, Plug.Conn.Status.reason_phrase(400)) + |> halt() + end + + defp request(method, url, headers, hackney_opts) do + Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}") + method = method |> String.downcase() |> String.to_existing_atom() + + case @hackney.request(method, url, headers, "", hackney_opts) do + {:ok, code, headers, client} when code in @valid_resp_codes -> + {:ok, code, downcase_headers(headers), client} + + {:ok, code, headers} when code in @valid_resp_codes -> + {:ok, code, downcase_headers(headers)} + + {:ok, code, _, _} -> + {:error, {:invalid_http_response, code}} + + {:error, error} -> + {:error, error} + end + end + + defp response(conn, client, url, status, headers, opts) do + result = + conn + |> put_resp_headers(build_resp_headers(headers, opts)) + |> send_chunked(status) + |> chunk_reply(client, opts) + + case result do + {:ok, conn} -> + halt(conn) + + {:error, :closed, conn} -> + :hackney.close(client) + halt(conn) + + {:error, error, conn} -> + Logger.warn( + "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}" + ) + + :hackney.close(client) + halt(conn) + end + end + + defp chunk_reply(conn, client, opts) do + chunk_reply(conn, client, opts, 0, 0) + end + + defp chunk_reply(conn, client, opts, sent_so_far, duration) do + with {:ok, duration} <- + check_read_duration( + duration, + Keyword.get(opts, :max_read_duration, @max_read_duration) + ), 
+ {:ok, data} <- @hackney.stream_body(client), + {:ok, duration} <- increase_read_duration(duration), + sent_so_far = sent_so_far + byte_size(data), + :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)), + {:ok, conn} <- chunk(conn, data) do + chunk_reply(conn, client, opts, sent_so_far, duration) + else + :done -> {:ok, conn} + {:error, error} -> {:error, error, conn} + end + end + + defp head_response(conn, _url, code, headers, opts) do + conn + |> put_resp_headers(build_resp_headers(headers, opts)) + |> send_resp(code, "") + end + + defp error_or_redirect(conn, url, code, body, opts) do + if Keyword.get(opts, :redirect_on_failure, false) do + conn + |> Phoenix.Controller.redirect(external: url) + |> halt() + else + conn + |> send_resp(code, body) + |> halt + end + end + + defp downcase_headers(headers) do + Enum.map(headers, fn {k, v} -> + {String.downcase(k), v} + end) + end + + defp get_content_type(headers) do + {_, content_type} = + List.keyfind(headers, "content-type", 0, {"content-type", "application/octet-stream"}) + + [content_type | _] = String.split(content_type, ";") + content_type + end + + defp put_resp_headers(conn, headers) do + Enum.reduce(headers, conn, fn {k, v}, conn -> + put_resp_header(conn, k, v) + end) + end + + defp build_req_headers(headers, opts) do + headers = + headers + |> downcase_headers() + |> Enum.filter(fn {k, _} -> k in @keep_req_headers end) + |> (fn headers -> + headers = headers ++ Keyword.get(opts, :req_headers, []) + + if Keyword.get(opts, :keep_user_agent, false) do + List.keystore( + headers, + "user-agent", + 0, + {"user-agent", Pleroma.Application.user_agent()} + ) + else + headers + end + end).() + end + + defp build_resp_headers(headers, opts) do + headers + |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end) + |> build_resp_cache_headers(opts) + |> build_resp_content_disposition_header(opts) + |> (fn headers -> headers ++ Keyword.get(opts, :resp_headers, []) end).() + end + + defp build_resp_cache_headers(headers, opts) do + has_cache? = Enum.any?(headers, fn {k, _} -> k in @resp_cache_headers end) + + if has_cache? do + headers + else + List.keystore(headers, "cache-control", 0, {"cache-control", @default_cache_control_header}) + end + end + + defp build_resp_content_disposition_header(headers, opts) do + opt = Keyword.get(opts, :inline_content_types, @inline_content_types) + + content_type = get_content_type(headers) + + attachment? = + cond do + is_list(opt) && !Enum.member?(opt, content_type) -> true + opt == false -> true + true -> false + end + + if attachment? 
do + disposition = "attachment; filename=" <> Keyword.get(opts, :attachment_name, "attachment") + List.keystore(headers, "content-disposition", 0, {"content-disposition", disposition}) + else + headers + end + end + + defp header_length_constraint(headers, limit) when is_integer(limit) and limit > 0 do + with {_, size} <- List.keyfind(headers, "content-length", 0), + {size, _} <- Integer.parse(size), + true <- size <= limit do + :ok + else + false -> + {:error, :body_too_large} + + _ -> + :ok + end + end + + defp header_length_constraint(_, _), do: :ok + + defp body_size_constraint(size, limit) when is_integer(limit) and limit > 0 and size >= limit do + {:error, :body_too_large} + end + + defp body_size_constraint(_, _), do: :ok + + defp check_read_duration(duration, max) + when is_integer(duration) and is_integer(max) and max > 0 do + if duration > max do + {:error, :read_duration_exceeded} + else + {:ok, {duration, :erlang.system_time(:millisecond)}} + end + end + + defp check_read_duration(_, _), do: {:ok, :no_duration_limit, :no_duration_limit} + + defp increase_read_duration({previous_duration, started}) + when is_integer(previous_duration) and is_integer(started) do + duration = :erlang.system_time(:millisecond) - started + {:ok, previous_duration + duration} + end + + defp increase_read_duration(_) do + {:ok, :no_duration_limit, :no_duration_limit} + end +end diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex @@ -1,190 +1,222 @@ defmodule Pleroma.Upload do - alias Ecto.UUID + @moduledoc """ + # Upload - def check_file_size(path, nil), do: true + Options: + * `:type`: presets for activity type (defaults to Document) and size limits from app configuration + * `:description`: upload alternative text + * `:base_url`: override base url + * `:uploader`: override uploader + * `:filters`: override filters + * `:size_limit`: override size limit + * `:activity_type`: override activity type - def check_file_size(path, size_limit) do - {:ok, %{size: size}} = File.stat(path) - size <= size_limit - end + The `%Pleroma.Upload{}` struct: all documented fields are meant to be overwritten in filters: + + * `:id` - the upload id. + * `:name` - the upload file name. + * `:path` - the upload path: set at first to `id/name` but can be changed. Keep in mind that the path + is once created permanent and changing it (especially in uploaders) is probably a bad idea! + * `:tempfile` - path to the temporary file. Prefer in-place changes on the file rather than changing the + path as the temporary file is also tracked by `Plug.Upload{}` and automatically deleted once the request is over. 
+ + Related behaviors: - def store(file, should_dedupe, size_limit \\ nil) - - def store(%Plug.Upload{} = file, should_dedupe, size_limit) do - content_type = get_content_type(file.path) - - with uuid <- get_uuid(file, should_dedupe), - name <- get_name(file, uuid, content_type, should_dedupe), - true <- check_file_size(file.path, size_limit) do - strip_exif_data(content_type, file.path) - - {:ok, url_path} = uploader().put_file(name, uuid, file.path, content_type, should_dedupe) - - %{ - "type" => "Document", - "url" => [ - %{ - "type" => "Link", - "mediaType" => content_type, - "href" => url_path - } - ], - "name" => name - } + * `Pleroma.Uploaders.Uploader` + * `Pleroma.Upload.Filter` + + """ + alias Ecto.UUID + require Logger + + @type source :: + Plug.Upload.t() | data_uri_string :: + String.t() | {:from_local, name :: String.t(), id :: String.t(), path :: String.t()} + + @type option :: + {:type, :avatar | :banner | :background} + | {:description, String.t()} + | {:activity_type, String.t()} + | {:size_limit, nil | non_neg_integer()} + | {:uploader, module()} + | {:filters, [module()]} + + @type t :: %__MODULE__{ + id: String.t(), + name: String.t(), + tempfile: String.t(), + content_type: String.t(), + path: String.t() + } + defstruct [:id, :name, :tempfile, :content_type, :path] + + @spec store(source, options :: [option()]) :: {:ok, Map.t()} | {:error, any()} + def store(upload, opts \\ []) do + opts = get_opts(opts) + + with {:ok, upload} <- prepare_upload(upload, opts), + upload = %__MODULE__{upload | path: upload.path || "#{upload.id}/#{upload.name}"}, + {:ok, upload} <- Pleroma.Upload.Filter.filter(opts.filters, upload), + {:ok, url_spec} <- Pleroma.Uploaders.Uploader.put_file(opts.uploader, upload) do + {:ok, + %{ + "type" => opts.activity_type, + "url" => [ + %{ + "type" => "Link", + "mediaType" => upload.content_type, + "href" => url_from_spec(opts.base_url, url_spec) + } + ], + "name" => Map.get(opts, :description) || upload.name + }} else - _e -> nil + {:error, error} -> + Logger.error( + "#{__MODULE__} store (using #{inspect(opts.uploader)}) failed: #{inspect(error)}" + ) + + {:error, error} end end - def store(%{"img" => "data:image/" <> image_data}, should_dedupe, size_limit) do - parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data) - data = Base.decode64!(parsed["data"], ignore: :whitespace) + defp get_opts(opts) do + {size_limit, activity_type} = + case Keyword.get(opts, :type) do + :banner -> + {Pleroma.Config.get!([:instance, :banner_upload_limit]), "Image"} + + :avatar -> + {Pleroma.Config.get!([:instance, :avatar_upload_limit]), "Image"} + + :background -> + {Pleroma.Config.get!([:instance, :background_upload_limit]), "Image"} - with tmp_path <- tempfile_for_image(data), - uuid <- UUID.generate(), - true <- check_file_size(tmp_path, size_limit) do - content_type = get_content_type(tmp_path) - strip_exif_data(content_type, tmp_path) - - name = - create_name( - String.downcase(Base.encode16(:crypto.hash(:sha256, data))), - parsed["filetype"], - content_type + _ -> + {Pleroma.Config.get!([:instance, :upload_limit]), "Document"} + end + + opts = %{ + activity_type: Keyword.get(opts, :activity_type, activity_type), + size_limit: Keyword.get(opts, :size_limit, size_limit), + uploader: Keyword.get(opts, :uploader, Pleroma.Config.get([__MODULE__, :uploader])), + filters: Keyword.get(opts, :filters, Pleroma.Config.get([__MODULE__, :filters])), + description: Keyword.get(opts, :description), + base_url: + Keyword.get( + opts, + 
:base_url, + Pleroma.Config.get([__MODULE__, :base_url], Pleroma.Web.base_url()) ) + } - {:ok, url_path} = uploader().put_file(name, uuid, tmp_path, content_type, should_dedupe) - - %{ - "type" => "Image", - "url" => [ - %{ - "type" => "Link", - "mediaType" => content_type, - "href" => url_path - } - ], - "name" => name - } - else - _e -> nil - end - end + # TODO: 1.0+ : remove old config compatibility + opts = + if Pleroma.Config.get([__MODULE__, :strip_exif]) == true && + !Enum.member?(opts.filters, Pleroma.Upload.Filter.Mogrify) do + Logger.warn(""" + Pleroma: configuration `:instance, :strip_exif` is deprecated, please instead set: - @doc """ - Creates a tempfile using the Plug.Upload Genserver which cleans them up - automatically. - """ - def tempfile_for_image(data) do - {:ok, tmp_path} = Plug.Upload.random_file("profile_pics") - {:ok, tmp_file} = File.open(tmp_path, [:write, :raw, :binary]) - IO.binwrite(tmp_file, data) + :pleroma, Pleroma.Upload, [filters: [Pleroma.Upload.Filter.Mogrify]] - tmp_path - end + :pleroma, Pleroma.Upload.Filter.Mogrify, args: "strip" + """) - def strip_exif_data(content_type, file) do - settings = Application.get_env(:pleroma, Pleroma.Upload) - do_strip = Keyword.fetch!(settings, :strip_exif) - [filetype, _ext] = String.split(content_type, "/") + Pleroma.Config.put([Pleroma.Upload.Filter.Mogrify], args: "strip") + Map.put(opts, :filters, opts.filters ++ [Pleroma.Upload.Filter.Mogrify]) + else + opts + end - if filetype == "image" and do_strip == true do - Mogrify.open(file) |> Mogrify.custom("strip") |> Mogrify.save(in_place: true) - end - end + opts = + if Pleroma.Config.get([:instance, :dedupe_media]) == true && + !Enum.member?(opts.filters, Pleroma.Upload.Filter.Dedupe) do + Logger.warn(""" + Pleroma: configuration `:instance, :dedupe_media` is deprecated, please instead set: - defp create_name(uuid, ext, type) do - case type do - "application/octet-stream" -> - String.downcase(Enum.join([uuid, ext], ".")) + :pleroma, Pleroma.Upload, [filters: [Pleroma.Upload.Filter.Dedupe]] + """) - "audio/mpeg" -> - String.downcase(Enum.join([uuid, "mp3"], ".")) + Map.put(opts, :filters, opts.filters ++ [Pleroma.Upload.Filter.Dedupe]) + else + opts + end + end - _ -> - String.downcase(Enum.join([uuid, List.last(String.split(type, "/"))], ".")) + defp prepare_upload(%Plug.Upload{} = file, opts) do + with :ok <- check_file_size(file.path, opts.size_limit), + {:ok, content_type, name} <- Pleroma.MIME.file_mime_type(file.path, file.filename) do + {:ok, + %__MODULE__{ + id: UUID.generate(), + name: name, + tempfile: file.path, + content_type: content_type + }} end end - defp get_uuid(file, should_dedupe) do - if should_dedupe do - Base.encode16(:crypto.hash(:sha256, File.read!(file.path))) - else - UUID.generate() + defp prepare_upload(%{"img" => "data:image/" <> image_data}, opts) do + parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data) + data = Base.decode64!(parsed["data"], ignore: :whitespace) + hash = String.downcase(Base.encode16(:crypto.hash(:sha256, data))) + + with :ok <- check_binary_size(data, opts.size_limit), + tmp_path <- tempfile_for_image(data), + {:ok, content_type, name} <- + Pleroma.MIME.bin_mime_type(data, hash <> "." 
<> parsed["filetype"]) do + {:ok, + %__MODULE__{ + id: UUID.generate(), + name: name, + tempfile: tmp_path, + content_type: content_type + }} end end - defp get_name(file, uuid, type, should_dedupe) do - if should_dedupe do - create_name(uuid, List.last(String.split(file.filename, ".")), type) - else - parts = String.split(file.filename, ".") - - new_filename = - if length(parts) > 1 do - Enum.drop(parts, -1) |> Enum.join(".") - else - Enum.join(parts) - end - - case type do - "application/octet-stream" -> file.filename - "audio/mpeg" -> new_filename <> ".mp3" - "image/jpeg" -> new_filename <> ".jpg" - _ -> Enum.join([new_filename, String.split(type, "/") |> List.last()], ".") - end + # For Mix.Tasks.MigrateLocalUploads + defp prepare_upload(upload = %__MODULE__{tempfile: path}, _opts) do + with {:ok, content_type} <- Pleroma.MIME.file_mime_type(path) do + {:ok, %__MODULE__{upload | content_type: content_type}} end end - def get_content_type(file) do - match = - File.open(file, [:read], fn f -> - case IO.binread(f, 8) do - <<0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A>> -> - "image/png" - - <<0x47, 0x49, 0x46, 0x38, _, 0x61, _, _>> -> - "image/gif" - - <<0xFF, 0xD8, 0xFF, _, _, _, _, _>> -> - "image/jpeg" - - <<0x1A, 0x45, 0xDF, 0xA3, _, _, _, _>> -> - "video/webm" - - <<0x00, 0x00, 0x00, _, 0x66, 0x74, 0x79, 0x70>> -> - "video/mp4" - - <<0x49, 0x44, 0x33, _, _, _, _, _>> -> - "audio/mpeg" + defp check_binary_size(binary, size_limit) + when is_integer(size_limit) and size_limit > 0 and byte_size(binary) >= size_limit do + {:error, :file_too_large} + end - <<255, 251, _, 68, 0, 0, 0, 0>> -> - "audio/mpeg" + defp check_binary_size(_, _), do: :ok - <<0x4F, 0x67, 0x67, 0x53, 0x00, 0x02, 0x00, 0x00>> -> - case IO.binread(f, 27) do - <<_::size(160), 0x80, 0x74, 0x68, 0x65, 0x6F, 0x72, 0x61>> -> - "video/ogg" + defp check_file_size(path, size_limit) when is_integer(size_limit) and size_limit > 0 do + with {:ok, %{size: size}} <- File.stat(path), + true <- size <= size_limit do + :ok + else + false -> {:error, :file_too_large} + error -> error + end + end - _ -> - "audio/ogg" - end + defp check_file_size(_, _), do: :ok - <<0x52, 0x49, 0x46, 0x46, _, _, _, _>> -> - "audio/wav" + # Creates a tempfile using the Plug.Upload Genserver which cleans them up + # automatically. + defp tempfile_for_image(data) do + {:ok, tmp_path} = Plug.Upload.random_file("profile_pics") + {:ok, tmp_file} = File.open(tmp_path, [:write, :raw, :binary]) + IO.binwrite(tmp_file, data) - _ -> - "application/octet-stream" - end - end) + tmp_path + end - case match do - {:ok, type} -> type - _e -> "application/octet-stream" - end + defp url_from_spec(base_url, {:file, path}) do + [base_url, "media", path] + |> Path.join() end - defp uploader() do - Pleroma.Config.get!([Pleroma.Upload, :uploader]) + defp url_from_spec({:url, url}) do + url end end diff --git a/lib/pleroma/upload/filter.ex b/lib/pleroma/upload/filter.ex @@ -0,0 +1,35 @@ +defmodule Pleroma.Upload.Filter do + @moduledoc """ + Upload Filter behaviour + + This behaviour allows to run filtering actions just before a file is uploaded. 
This allows to: + + * morph in place the temporary file + * change any field of a `Pleroma.Upload` struct + * cancel/stop the upload + """ + + require Logger + + @callback filter(Pleroma.Upload.t()) :: :ok | {:ok, Pleroma.Upload.t()} | {:error, any()} + + @spec filter([module()], Pleroma.Upload.t()) :: {:ok, Pleroma.Upload.t()} | {:error, any()} + + def filter([], upload) do + {:ok, upload} + end + + def filter([filter | rest], upload) do + case filter.filter(upload) do + :ok -> + filter(rest, upload) + + {:ok, upload} -> + filter(rest, upload) + + error -> + Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(error)}") + error + end + end +end diff --git a/lib/pleroma/upload/filter/dedupe.ex b/lib/pleroma/upload/filter/dedupe.ex @@ -0,0 +1,10 @@ +defmodule Pleroma.Upload.Filter.Dedupe do + @behaviour Pleroma.Upload.Filter + + def filter(upload = %Pleroma.Upload{name: name, tempfile: path}) do + extension = String.split(name, ".") |> List.last() + shasum = :crypto.hash(:sha256, File.read!(upload.tempfile)) |> Base.encode16(case: :lower) + filename = shasum <> "." <> extension + {:ok, %Pleroma.Upload{upload | id: shasum, path: filename}} + end +end diff --git a/lib/pleroma/upload/filter/mogrifun.ex b/lib/pleroma/upload/filter/mogrifun.ex @@ -0,0 +1,60 @@ +defmodule Pleroma.Upload.Filter.Mogrifun do + @behaviour Pleroma.Upload.Filter + + @filters [ + {"implode", "1"}, + {"-raise", "20"}, + {"+raise", "20"}, + [{"-interpolate", "nearest"}, {"-virtual-pixel", "mirror"}, {"-spread", "5"}], + "+polaroid", + {"-statistic", "Mode 10"}, + {"-emboss", "0x1.1"}, + {"-emboss", "0x2"}, + {"-colorspace", "Gray"}, + "-negate", + [{"-channel", "green"}, "-negate"], + [{"-channel", "red"}, "-negate"], + [{"-channel", "blue"}, "-negate"], + {"+level-colors", "green,gold"}, + {"+level-colors", ",DodgerBlue"}, + {"+level-colors", ",Gold"}, + {"+level-colors", ",Lime"}, + {"+level-colors", ",Red"}, + {"+level-colors", ",DarkGreen"}, + {"+level-colors", "firebrick,yellow"}, + {"+level-colors", "'rgb(102,75,25)',lemonchiffon"}, + [{"fill", "red"}, {"tint", "40"}], + [{"fill", "green"}, {"tint", "40"}], + [{"fill", "blue"}, {"tint", "40"}], + [{"fill", "yellow"}, {"tint", "40"}] + ] + + def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do + filter = Enum.random(@filters) + + file + |> Mogrify.open() + |> mogrify_filter(filter) + |> Mogrify.save(in_place: true) + + :ok + end + + def filter(_), do: :ok + + defp mogrify_filter(mogrify, [filter | rest]) do + mogrify + |> mogrify_filter(filter) + |> mogrify_filter(rest) + end + + defp mogrify_filter(mogrify, []), do: mogrify + + defp mogrify_filter(mogrify, {action, options}) do + Mogrify.custom(mogrify, action, options) + end + + defp mogrify_filter(mogrify, string) when is_binary(string) do + Mogrify.custom(mogrify, string) + end +end diff --git a/lib/pleroma/upload/filter/mogrify.ex b/lib/pleroma/upload/filter/mogrify.ex @@ -0,0 +1,37 @@ +defmodule Pleroma.Upload.Filter.Mogrify do + @behaviour Pleroma.Uploader.Filter + + @type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()} + @type conversions :: conversion() | [conversion()] + + def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do + filters = Pleroma.Config.get!([__MODULE__, :args]) + + file + |> Mogrify.open() + |> mogrify_filter(filters) + |> Mogrify.save(in_place: true) + + :ok + end + + def filter(_), do: :ok + + defp mogrify_filter(mogrify, nil), do: mogrify + + defp mogrify_filter(mogrify, [filter | rest]) do + mogrify + 
|> mogrify_filter(filter) + |> mogrify_filter(rest) + end + + defp mogrify_filter(mogrify, []), do: mogrify + + defp mogrify_filter(mogrify, {action, options}) do + Mogrify.custom(mogrify, action, options) + end + + defp mogrify_filter(mogrify, action) when is_binary(action) do + Mogrify.custom(mogrify, action) + end +end diff --git a/lib/pleroma/uploaders/local.ex b/lib/pleroma/uploaders/local.ex @@ -3,49 +3,32 @@ defmodule Pleroma.Uploaders.Local do alias Pleroma.Web - def put_file(name, uuid, tmpfile, _content_type, should_dedupe) do - upload_folder = get_upload_path(uuid, should_dedupe) - url_path = get_url(name, uuid, should_dedupe) - - File.mkdir_p!(upload_folder) + def get_file(_) do + {:ok, {:static_dir, upload_path()}} + end - result_file = Path.join(upload_folder, name) + def put_file(upload) do + {local_path, file} = + case Enum.reverse(String.split(upload.path, "/", trim: true)) do + [file] -> + {upload_path(), file} - if File.exists?(result_file) do - File.rm!(tmpfile) - else - File.cp!(tmpfile, result_file) - end + [file | folders] -> + path = Path.join([upload_path()] ++ Enum.reverse(folders)) + File.mkdir_p!(path) + {path, file} + end - {:ok, url_path} - end + result_file = Path.join(local_path, file) - def upload_path do - settings = Application.get_env(:pleroma, Pleroma.Uploaders.Local) - Keyword.fetch!(settings, :uploads) - end - - defp get_upload_path(uuid, should_dedupe) do - if should_dedupe do - upload_path() - else - Path.join(upload_path(), uuid) + unless File.exists?(result_file) do + File.cp!(upload.tempfile, result_file) end - end - defp get_url(name, uuid, should_dedupe) do - if should_dedupe do - url_for(:cow_uri.urlencode(name)) - else - url_for(Path.join(uuid, :cow_uri.urlencode(name))) - end + :ok end - defp url_for(file) do - settings = Application.get_env(:pleroma, Pleroma.Uploaders.Local) - - Keyword.get(settings, :uploads_url) - |> String.replace("{{file}}", file) - |> String.replace("{{base_url}}", Web.base_url()) + def upload_path do + Pleroma.Config.get!([__MODULE__, :uploads]) end end diff --git a/lib/pleroma/uploaders/mdii.ex b/lib/pleroma/uploaders/mdii.ex @@ -5,22 +5,27 @@ defmodule Pleroma.Uploaders.MDII do @httpoison Application.get_env(:pleroma, :httpoison) - def put_file(name, uuid, path, content_type, should_dedupe) do + # MDII-hosted images are never passed through the MediaPlug; only local media. 
+ # Delegate to Pleroma.Uploaders.Local + def get_file(file) do + Pleroma.Uploaders.Local.get_file(file) + end + + def put_file(upload) do cgi = Pleroma.Config.get([Pleroma.Uploaders.MDII, :cgi]) files = Pleroma.Config.get([Pleroma.Uploaders.MDII, :files]) - {:ok, file_data} = File.read(path) + {:ok, file_data} = File.read(upload.tempfile) - extension = String.split(name, ".") |> List.last() + extension = String.split(upload.name, ".") |> List.last() query = "#{cgi}?#{extension}" with {:ok, %{status_code: 200, body: body}} <- @httpoison.post(query, file_data) do - File.rm!(path) remote_file_name = String.split(body) |> List.first() public_url = "#{files}/#{remote_file_name}.#{extension}" - {:ok, public_url} + {:ok, {:url, public_url}} else - _ -> Pleroma.Uploaders.Local.put_file(name, uuid, path, content_type, should_dedupe) + _ -> Pleroma.Uploaders.Local.put_file(upload) end end end diff --git a/lib/pleroma/uploaders/s3.ex b/lib/pleroma/uploaders/s3.ex @@ -1,40 +1,46 @@ defmodule Pleroma.Uploaders.S3 do - alias Pleroma.Web.MediaProxy - @behaviour Pleroma.Uploaders.Uploader + require Logger + + # The file name is re-encoded with S3's constraints here to comply with previous links with less strict filenames + def get_file(file) do + config = Pleroma.Config.get([__MODULE__]) + + {:ok, + {:url, + Path.join([ + Keyword.fetch!(config, :public_endpoint), + Keyword.fetch!(config, :bucket), + strict_encode(URI.decode(file)) + ])}} + end - def put_file(name, uuid, path, content_type, _should_dedupe) do - settings = Application.get_env(:pleroma, Pleroma.Uploaders.S3) - bucket = Keyword.fetch!(settings, :bucket) - public_endpoint = Keyword.fetch!(settings, :public_endpoint) - force_media_proxy = Keyword.fetch!(settings, :force_media_proxy) - - {:ok, file_data} = File.read(path) + def put_file(upload = %Pleroma.Upload{}) do + config = Pleroma.Config.get([__MODULE__]) + bucket = Keyword.get(config, :bucket) - File.rm!(path) + {:ok, file_data} = File.read(upload.tempfile) - s3_name = "#{uuid}/#{encode(name)}" + s3_name = strict_encode(upload.path) - {:ok, _} = + op = ExAws.S3.put_object(bucket, s3_name, file_data, [ {:acl, :public_read}, - {:content_type, content_type} + {:content_type, upload.content_type} ]) - |> ExAws.request() - - url_base = "#{public_endpoint}/#{bucket}/#{s3_name}" - public_url = - if force_media_proxy do - MediaProxy.url(url_base) - else - url_base - end + case ExAws.request(op) do + {:ok, _} -> + {:ok, {:file, s3_name}} - {:ok, public_url} + error -> + Logger.error("#{__MODULE__}: #{inspect(error)}") + {:error, "S3 Upload failed"} + end end - defp encode(name) do - String.replace(name, ~r/[^0-9a-zA-Z!.*'()_-]/, "-") + @regex Regex.compile!("[^0-9a-zA-Z!.*/'()_-]") + def strict_encode(name) do + String.replace(name, @regex, "-") end end diff --git a/lib/pleroma/uploaders/swift/swift.ex b/lib/pleroma/uploaders/swift/swift.ex @@ -14,7 +14,7 @@ defmodule Pleroma.Uploaders.Swift.Client do case put("#{filename}", body, "X-Auth-Token": token, "Content-Type": content_type) do {:ok, %HTTPoison.Response{status_code: 201}} -> - {:ok, "#{object_url}/#{filename}"} + {:ok, {:file, filename}} {:ok, %HTTPoison.Response{status_code: 401}} -> {:error, "Unauthorized, Bad Token"} diff --git a/lib/pleroma/uploaders/swift/uploader.ex b/lib/pleroma/uploaders/swift/uploader.ex @@ -1,10 +1,15 @@ defmodule Pleroma.Uploaders.Swift do @behaviour Pleroma.Uploaders.Uploader - def put_file(name, uuid, tmp_path, content_type, _should_dedupe) do - {:ok, file_data} = File.read(tmp_path) - remote_name = 
"#{uuid}/#{name}" + def get_file(name) do + {:ok, {:url, Path.join([Pleroma.Config.get!([__MODULE__, :object_url]), name])}} + end - Pleroma.Uploaders.Swift.Client.upload_file(remote_name, file_data, content_type) + def put_file(upload) do + Pleroma.Uploaders.Swift.Client.upload_file( + upload.path, + File.read!(upload.tmpfile), + upload.content_type + ) end end diff --git a/lib/pleroma/uploaders/uploader.ex b/lib/pleroma/uploaders/uploader.ex @@ -1,20 +1,40 @@ defmodule Pleroma.Uploaders.Uploader do @moduledoc """ - Defines the contract to put an uploaded file to any backend. + Defines the contract to put and get an uploaded file to any backend. """ @doc """ + Instructs how to get the file from the backend. + + Used by `Pleroma.Plugs.UploadedMedia`. + """ + @type get_method :: {:static_dir, directory :: String.t()} | {:url, url :: String.t()} + @callback get_file(file :: String.t()) :: {:ok, get_method()} + + @doc """ Put a file to the backend. - Returns `{:ok, String.t } | {:error, String.t} containing the path of the - uploaded file, or error information if the file failed to be saved to the - respective backend. + Returns: + + * `:ok` which assumes `{:ok, upload.path}` + * `{:ok, spec}` where spec is: + * `{:file, filename :: String.t}` to handle reads with `get_file/1` (recommended) + + This allows to correctly proxy or redirect requests to the backend, while allowing to migrate backends without breaking any URL. + * `{url, url :: String.t}` to bypass `get_file/2` and use the `url` directly in the activity. + * `{:error, String.t}` error information if the file failed to be saved to the backend. + + """ - @callback put_file( - name :: String.t(), - uuid :: String.t(), - file :: File.t(), - content_type :: String.t(), - should_dedupe :: Boolean.t() - ) :: {:ok, String.t()} | {:error, String.t()} + @callback put_file(Pleroma.Upload.t()) :: + :ok | {:ok, {:file | :url, String.t()}} | {:error, String.t()} + + @spec put_file(module(), Pleroma.Upload.t()) :: + {:ok, {:file | :url, String.t()}} | {:error, String.t()} + def put_file(uploader, upload) do + case uploader.put_file(upload) do + :ok -> {:ok, {:file, upload.path}} + other -> other + end + end end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -572,10 +572,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do |> Enum.reverse() end - def upload(file, size_limit \\ nil) do - with data <- - Upload.store(file, Application.get_env(:pleroma, :instance)[:dedupe_media], size_limit), - false <- is_nil(data) do + def upload(file, opts \\ []) do + with {:ok, data} <- Upload.store(file, opts) do Repo.insert(%Object{data: data}) end end diff --git a/lib/pleroma/web/endpoint.ex b/lib/pleroma/web/endpoint.ex @@ -12,7 +12,7 @@ defmodule Pleroma.Web.Endpoint do plug(CORSPlug) plug(Pleroma.Plugs.HTTPSecurityPlug) - plug(Plug.Static, at: "/media", from: Pleroma.Uploaders.Local.upload_path(), gzip: false) + plug(Pleroma.Plugs.UploadedMedia) plug( Plug.Static, diff --git a/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex b/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex @@ -35,14 +35,6 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do def update_credentials(%{assigns: %{user: user}} = conn, params) do original_user = user - avatar_upload_limit = - Application.get_env(:pleroma, :instance) - |> Keyword.fetch(:avatar_upload_limit) - - banner_upload_limit = - Application.get_env(:pleroma, :instance) - |> Keyword.fetch(:banner_upload_limit) - params = if bio = 
params["note"] do Map.put(params, "bio", bio) @@ -60,7 +52,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do user = if avatar = params["avatar"] do with %Plug.Upload{} <- avatar, - {:ok, object} <- ActivityPub.upload(avatar, avatar_upload_limit), + {:ok, object} <- ActivityPub.upload(avatar, type: :avatar), change = Ecto.Changeset.change(user, %{avatar: object.data}), {:ok, user} = User.update_and_set_cache(change) do user @@ -74,7 +66,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do user = if banner = params["header"] do with %Plug.Upload{} <- banner, - {:ok, object} <- ActivityPub.upload(banner, banner_upload_limit), + {:ok, object} <- ActivityPub.upload(banner, type: :banner), new_info <- Map.put(user.info, "banner", object.data), change <- User.info_changeset(user, %{info: new_info}), {:ok, user} <- User.update_and_set_cache(change) do @@ -471,19 +463,12 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do end def upload(%{assigns: %{user: _}} = conn, %{"file" => file} = data) do - with {:ok, object} <- ActivityPub.upload(file) do - objdata = - if Map.has_key?(data, "description") do - Map.put(object.data, "name", data["description"]) - else - object.data - end - - change = Object.change(object, %{data: objdata}) + with {:ok, object} <- ActivityPub.upload(file, description: Map.get(data, "description")) do + change = Object.change(object, %{data: object.data}) {:ok, object} = Repo.update(change) objdata = - objdata + object.data |> Map.put("id", object.id) render(conn, StatusView, "attachment.json", %{attachment: objdata}) diff --git a/lib/pleroma/web/media_proxy/controller.ex b/lib/pleroma/web/media_proxy/controller.ex @@ -1,135 +1,34 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do use Pleroma.Web, :controller - require Logger + alias Pleroma.{Web.MediaProxy, ReverseProxy} - @httpoison Application.get_env(:pleroma, :httpoison) + @default_proxy_opts [max_body_length: 25 * 1_048_576] - @max_body_length 25 * 1_048_576 - - @cache_control %{ - default: "public, max-age=1209600", - error: "public, must-revalidate, max-age=160" - } - - # Content-types that will not be returned as content-disposition attachments - # Override with :media_proxy, :safe_content_types in the configuration - @safe_content_types [ - "image/gif", - "image/jpeg", - "image/jpg", - "image/png", - "image/svg+xml", - "audio/mpeg", - "audio/mp3", - "video/webm", - "video/mp4" - ] - - def remote(conn, params = %{"sig" => sig, "url" => url}) do - config = Application.get_env(:pleroma, :media_proxy, []) - - with true <- Keyword.get(config, :enabled, false), - {:ok, url} <- Pleroma.Web.MediaProxy.decode_url(sig, url), + def remote(conn, params = %{"sig" => sig64, "url" => url64}) do + with config <- Pleroma.Config.get([:media_proxy]), + true <- Keyword.get(config, :enabled, false), + {:ok, url} <- MediaProxy.decode_url(sig64, url64), filename <- Path.basename(URI.parse(url).path), - true <- - if(Map.get(params, "filename"), - do: filename == Path.basename(conn.request_path), - else: true - ), - {:ok, content_type, body} <- proxy_request(url), - safe_content_type <- - Enum.member?( - Keyword.get(config, :safe_content_types, @safe_content_types), - content_type - ) do - conn - |> put_resp_content_type(content_type) - |> set_cache_header(:default) - |> put_resp_header( - "content-security-policy", - "default-src 'none'; style-src 'unsafe-inline'; media-src data:; img-src 'self' data:" - ) - |> put_resp_header("x-xss-protection", "1; mode=block") - |> 
put_resp_header("x-content-type-options", "nosniff") - |> put_attachement_header(safe_content_type, filename) - |> send_resp(200, body) + :ok <- filename_matches(Map.has_key?(params, "filename"), conn.request_path, url) do + ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_length)) else false -> - send_error(conn, 404) + send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404)) {:error, :invalid_signature} -> - send_error(conn, 403) + send_resp(conn, 403, Plug.Conn.Status.reason_phrase(403)) - {:error, {:http, _, url}} -> - redirect_or_error(conn, url, Keyword.get(config, :redirect_on_failure, true)) + {:wrong_filename, filename} -> + redirect(conn, external: MediaProxy.build_url(sig64, url64, filename)) end end - defp proxy_request(link) do - headers = [ - {"user-agent", - "Pleroma/MediaProxy; #{Pleroma.Web.base_url()} <#{ - Application.get_env(:pleroma, :instance)[:email] - }>"} - ] - - options = - @httpoison.process_request_options([:insecure, {:follow_redirect, true}]) ++ - [{:pool, :default}] - - with {:ok, 200, headers, client} <- :hackney.request(:get, link, headers, "", options), - headers = Enum.into(headers, Map.new()), - {:ok, body} <- proxy_request_body(client), - content_type <- proxy_request_content_type(headers, body) do - {:ok, content_type, body} - else - {:ok, status, _, _} -> - Logger.warn("MediaProxy: request failed, status #{status}, link: #{link}") - {:error, {:http, :bad_status, link}} + def filename_matches(has_filename, path, url) do + filename = MediaProxy.filename(url) - {:error, error} -> - Logger.warn("MediaProxy: request failed, error #{inspect(error)}, link: #{link}") - {:error, {:http, error, link}} + cond do + has_filename && filename && Path.basename(path) != filename -> {:wrong_filename, filename} + true -> :ok end end - - defp set_cache_header(conn, key) do - Plug.Conn.put_resp_header(conn, "cache-control", @cache_control[key]) - end - - defp redirect_or_error(conn, url, true), do: redirect(conn, external: url) - defp redirect_or_error(conn, url, _), do: send_error(conn, 502, "Media proxy error: " <> url) - - defp send_error(conn, code, body \\ "") do - conn - |> set_cache_header(:error) - |> send_resp(code, body) - end - - defp proxy_request_body(client), do: proxy_request_body(client, <<>>) - - defp proxy_request_body(client, body) when byte_size(body) < @max_body_length do - case :hackney.stream_body(client) do - {:ok, data} -> proxy_request_body(client, <<body::binary, data::binary>>) - :done -> {:ok, body} - {:error, reason} -> {:error, reason} - end - end - - defp proxy_request_body(client, _) do - :hackney.close(client) - {:error, :body_too_large} - end - - # TODO: the body is passed here as well because some hosts do not provide a content-type. - # At some point we may want to use magic numbers to discover the content-type and reply a proper one. 
- defp proxy_request_content_type(headers, _body) do - headers["Content-Type"] || headers["content-type"] || "application/octet-stream" - end - - defp put_attachement_header(conn, true, _), do: conn - - defp put_attachement_header(conn, false, filename) do - put_resp_header(conn, "content-disposition", "attachment; filename='#{filename}'") - end end diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex @@ -17,10 +17,8 @@ defmodule Pleroma.Web.MediaProxy do base64 = Base.url_encode64(url, @base64_opts) sig = :crypto.hmac(:sha, secret, base64) sig64 = sig |> Base.url_encode64(@base64_opts) - filename = if path = URI.parse(url).path, do: "/" <> Path.basename(path), else: "" - Keyword.get(config, :base_url, Pleroma.Web.base_url()) <> - "/proxy/#{sig64}/#{base64}#{filename}" + build_url(sig64, base64, filename(url)) end end @@ -35,4 +33,20 @@ defmodule Pleroma.Web.MediaProxy do {:error, :invalid_signature} end end + + def filename(url_or_path) do + if path = URI.parse(url_or_path).path, do: Path.basename(path) + end + + def build_url(sig_base64, url_base64, filename \\ nil) do + [ + Pleroma.Config.get([:media_proxy, :base_url], Pleroma.Web.base_url()), + "proxy", + sig_base64, + url_base64, + filename + ] + |> Enum.filter(fn value -> value end) + |> Path.join() + end end diff --git a/lib/pleroma/web/twitter_api/twitter_api.ex b/lib/pleroma/web/twitter_api/twitter_api.ex @@ -97,7 +97,7 @@ defmodule Pleroma.Web.TwitterAPI.TwitterAPI do {:ok, object} = ActivityPub.upload(file) url = List.first(object.data["url"]) - href = url["href"] |> MediaProxy.url() + href = url["href"] type = url["mediaType"] case format do diff --git a/lib/pleroma/web/twitter_api/twitter_api_controller.ex b/lib/pleroma/web/twitter_api/twitter_api_controller.ex @@ -290,11 +290,7 @@ defmodule Pleroma.Web.TwitterAPI.Controller do end def update_avatar(%{assigns: %{user: user}} = conn, params) do - upload_limit = - Application.get_env(:pleroma, :instance) - |> Keyword.fetch(:avatar_upload_limit) - - {:ok, object} = ActivityPub.upload(params, upload_limit) + {:ok, object} = ActivityPub.upload(params, type: :avatar) change = Changeset.change(user, %{avatar: object.data}) {:ok, user} = User.update_and_set_cache(change) CommonAPI.update(user) @@ -303,11 +299,7 @@ defmodule Pleroma.Web.TwitterAPI.Controller do end def update_banner(%{assigns: %{user: user}} = conn, params) do - upload_limit = - Application.get_env(:pleroma, :instance) - |> Keyword.fetch(:banner_upload_limit) - - with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, upload_limit), + with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, type: :banner), new_info <- Map.put(user.info, "banner", object.data), change <- User.info_changeset(user, %{info: new_info}), {:ok, user} <- User.update_and_set_cache(change) do @@ -321,11 +313,7 @@ defmodule Pleroma.Web.TwitterAPI.Controller do end def update_background(%{assigns: %{user: user}} = conn, params) do - upload_limit = - Application.get_env(:pleroma, :instance) - |> Keyword.fetch(:background_upload_limit) - - with {:ok, object} <- ActivityPub.upload(params, upload_limit), + with {:ok, object} <- ActivityPub.upload(params, type: :background), new_info <- Map.put(user.info, "background", object.data), change <- User.info_changeset(user, %{info: new_info}), {:ok, _user} <- User.update_and_set_cache(change) do diff --git a/test/media_proxy_test.exs b/test/media_proxy_test.exs @@ -82,6 +82,23 @@ defmodule Pleroma.MediaProxyTest do [_, "proxy", sig, 
base64 | _] = URI.parse(encoded).path |> String.split("/") assert decode_url(sig, base64) == {:error, :invalid_signature} end + + test "uses the configured base_url" do + base_url = Pleroma.Config.get([:media_proxy, :base_url]) + + if base_url do + on_exit(fn -> + Pleroma.Config.put([:media_proxy, :base_url], base_url) + end) + end + + Pleroma.Config.put([:media_proxy, :base_url], "https://cache.pleroma.social") + + url = "https://pleroma.soykaf.com/static/logo.png" + encoded = url(url) + + assert String.starts_with?(encoded, Pleroma.Config.get([:media_proxy, :base_url])) + end end describe "when disabled" do diff --git a/test/support/httpoison_mock.ex b/test/support/httpoison_mock.ex @@ -1,6 +1,8 @@ defmodule HTTPoisonMock do alias HTTPoison.Response + def process_request_options(options), do: options + def get(url, body \\ [], headers \\ []) def get("https://prismo.news/@mxb", _, _) do diff --git a/test/upload_test.exs b/test/upload_test.exs @@ -2,7 +2,58 @@ defmodule Pleroma.UploadTest do alias Pleroma.Upload use Pleroma.DataCase - describe "Storing a file" do + describe "Storing a file with the Local uploader" do + setup do + uploader = Pleroma.Config.get([Pleroma.Upload, :uploader]) + filters = Pleroma.Config.get([Pleroma.Upload, :filters]) + + unless uploader == Pleroma.Uploaders.Local || filters != [] do + Pleroma.Config.put([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local) + Pleroma.Config.put([Pleroma.Upload, :filters], []) + + on_exit(fn -> + Pleroma.Config.put([Pleroma.Upload, :uploader], uploader) + Pleroma.Config.put([Pleroma.Upload, :filters], filters) + end) + end + + :ok + end + + test "returns a media url" do + File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg") + + file = %Plug.Upload{ + content_type: "image/jpg", + path: Path.absname("test/fixtures/image_tmp.jpg"), + filename: "image.jpg" + } + + {:ok, data} = Upload.store(file) + + assert %{"url" => [%{"href" => url}]} = data + + assert String.starts_with?(url, Pleroma.Web.base_url() <> "/media/") + end + + test "returns a media url with configured base_url" do + base_url = "https://cache.pleroma.social" + + File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg") + + file = %Plug.Upload{ + content_type: "image/jpg", + path: Path.absname("test/fixtures/image_tmp.jpg"), + filename: "image.jpg" + } + + {:ok, data} = Upload.store(file, base_url: base_url) + + assert %{"url" => [%{"href" => url}]} = data + + assert String.starts_with?(url, base_url <> "/media/") + end + test "copies the file to the configured folder with deduping" do File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg") @@ -12,10 +63,11 @@ defmodule Pleroma.UploadTest do filename: "an [image.jpg" } - data = Upload.store(file, true) + {:ok, data} = Upload.store(file, filters: [Pleroma.Upload.Filter.Dedupe]) - assert data["name"] == - "e7a6d0cf595bff76f14c9a98b6c199539559e8b844e02e51e5efcfd1f614a2df.jpeg" + assert List.first(data["url"])["href"] == + Pleroma.Web.base_url() <> + "/media/e7a6d0cf595bff76f14c9a98b6c199539559e8b844e02e51e5efcfd1f614a2df.jpg" end test "copies the file to the configured folder without deduping" do @@ -27,7 +79,7 @@ defmodule Pleroma.UploadTest do filename: "an [image.jpg" } - data = Upload.store(file, false) + {:ok, data} = Upload.store(file) assert data["name"] == "an [image.jpg" end @@ -40,7 +92,7 @@ defmodule Pleroma.UploadTest do filename: "an [image.jpg" } - data = Upload.store(file, true) + {:ok, data} = Upload.store(file, filters: [Pleroma.Upload.Filter.Dedupe]) assert 
hd(data["url"])["mediaType"] == "image/jpeg" end @@ -53,7 +105,7 @@ defmodule Pleroma.UploadTest do filename: "an [image" } - data = Upload.store(file, false) + {:ok, data} = Upload.store(file) assert data["name"] == "an [image.jpg" end @@ -66,7 +118,7 @@ defmodule Pleroma.UploadTest do filename: "an [image.blah" } - data = Upload.store(file, false) + {:ok, data} = Upload.store(file) assert data["name"] == "an [image.jpg" end @@ -79,7 +131,7 @@ defmodule Pleroma.UploadTest do filename: "test.txt" } - data = Upload.store(file, false) + {:ok, data} = Upload.store(file) assert data["name"] == "test.txt" end end