From b19597f602e70121a1762476873377c782549817 Mon Sep 17 00:00:00 2001 From: href Date: Fri, 23 Nov 2018 17:40:45 +0100 Subject: [PATCH] reverse proxy / uploads --- .gitignore | 5 +- config/config.exs | 19 +- config/test.exs | 2 +- lib/mix/tasks/migrate_local_uploads.ex | 80 +++++ lib/pleroma/application.ex | 5 + lib/pleroma/plugs/uploaded_media.ex | 78 ++++ lib/pleroma/reverse_proxy.ex | 338 ++++++++++++++++++ lib/pleroma/upload.ex | 200 ++++++----- lib/pleroma/uploaders/local.ex | 26 +- lib/pleroma/uploaders/mdii.ex | 12 +- lib/pleroma/uploaders/s3.ex | 50 +-- lib/pleroma/uploaders/swift/swift.ex | 2 +- lib/pleroma/uploaders/swift/uploader.ex | 6 +- lib/pleroma/uploaders/uploader.ex | 27 +- lib/pleroma/web/activity_pub/activity_pub.ex | 6 +- lib/pleroma/web/endpoint.ex | 2 +- .../mastodon_api/mastodon_api_controller.ex | 4 +- lib/pleroma/web/media_proxy/controller.ex | 135 +------ lib/pleroma/web/media_proxy/media_proxy.ex | 20 +- lib/pleroma/web/twitter_api/twitter_api.ex | 2 +- .../web/twitter_api/twitter_api_controller.ex | 7 +- test/support/httpoison_mock.ex | 2 + test/upload_test.exs | 42 ++- 23 files changed, 790 insertions(+), 280 deletions(-) create mode 100644 lib/mix/tasks/migrate_local_uploads.ex create mode 100644 lib/pleroma/plugs/uploaded_media.ex create mode 100644 lib/pleroma/reverse_proxy.ex diff --git a/.gitignore b/.gitignore index 9aad700ee..b71dfa9fa 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,9 @@ /uploads /test/uploads /.elixir_ls +/test/fixtures/test_tmp.txt +/test/fixtures/image_tmp.jpg +/doc # Prevent committing custom emojis /priv/static/emoji/custom/* @@ -28,4 +31,4 @@ erl_crash.dump .env # Editor config -/.vscode \ No newline at end of file +/.vscode diff --git a/config/config.exs b/config/config.exs index 848c9d6cf..ee43071ea 100644 --- a/config/config.exs +++ b/config/config.exs @@ -12,16 +12,15 @@ config :pleroma, Pleroma.Repo, types: Pleroma.PostgresTypes config :pleroma, Pleroma.Upload, uploader: Pleroma.Uploaders.Local, - strip_exif: false + strip_exif: false, + proxy_remote: false, + proxy_opts: [inline_content_types: true, keep_user_agent: true] -config :pleroma, Pleroma.Uploaders.Local, - uploads: "uploads", - uploads_url: "{{base_url}}/media/{{file}}" +config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads" config :pleroma, Pleroma.Uploaders.S3, bucket: nil, - public_endpoint: "https://s3.amazonaws.com", - force_media_proxy: false + public_endpoint: "https://s3.amazonaws.com" config :pleroma, Pleroma.Uploaders.MDII, cgi: "https://mdii.sakura.ne.jp/mdii-post.cgi", @@ -150,9 +149,11 @@ config :pleroma, :mrf_simple, config :pleroma, :media_proxy, enabled: false, - redirect_on_failure: true - -# base_url: "https://cache.pleroma.social" + # base_url: "https://cache.pleroma.social", + proxy_opts: [ + # inline_content_types: [] | false | true, + # http: [:insecure] + ] config :pleroma, :chat, enabled: true diff --git a/config/test.exs b/config/test.exs index 1bd11dee4..3aaed1b2c 100644 --- a/config/test.exs +++ b/config/test.exs @@ -9,7 +9,7 @@ config :pleroma, Pleroma.Web.Endpoint, # Print only warnings and errors during test config :logger, level: :warn -config :pleroma, Pleroma.Upload, uploads: "test/uploads" +config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads" # Configure your database config :pleroma, Pleroma.Repo, diff --git a/lib/mix/tasks/migrate_local_uploads.ex b/lib/mix/tasks/migrate_local_uploads.ex new file mode 100644 index 000000000..40117350c --- /dev/null +++ b/lib/mix/tasks/migrate_local_uploads.ex @@ -0,0 +1,80 
@@ +defmodule Mix.Tasks.MigrateLocalUploads do + use Mix.Task + import Mix.Ecto + alias Pleroma.{Upload, Uploaders.Local, Uploaders.S3} + require Logger + + @log_every 50 + @shortdoc "Migrate uploads from local to remote storage" + + def run([target_uploader | args]) do + delete? = Enum.member?(args, "--delete") + Application.ensure_all_started(:pleroma) + + local_path = Pleroma.Config.get!([Local, :uploads]) + uploader = Module.concat(Pleroma.Uploaders, target_uploader) + + unless Code.ensure_loaded?(uploader) do + raise("The uploader #{inspect(uploader)} is not an existing/loaded module.") + end + + target_enabled? = Pleroma.Config.get([Upload, :uploader]) == uploader + + unless target_enabled? do + Pleroma.Config.put([Upload, :uploader], uploader) + end + + Logger.info("Migrating files from local #{local_path} to #{to_string(uploader)}") + + if delete? do + Logger.warn( + "Attention: uploaded files will be deleted, hope you have backups! (--delete ; cancel with ^C)" + ) + + :timer.sleep(:timer.seconds(5)) + end + + uploads = File.ls!(local_path) + total_count = length(uploads) + + uploads + |> Task.async_stream( + fn uuid -> + u_path = Path.join(local_path, uuid) + + {name, path} = + cond do + File.dir?(u_path) -> + files = for file <- File.ls!(u_path), do: {{file, uuid}, Path.join([u_path, file])} + List.first(files) + + File.exists?(u_path) -> + # {uuid, u_path} + raise "should_dedupe local storage not supported yet sorry" + end + + {:ok, _} = + Upload.store({:from_local, name, path}, should_dedupe: false, uploader: uploader) + + if delete? do + File.rm_rf!(u_path) + end + + Logger.debug("uploaded: #{inspect(name)}") + end, + timeout: 150_000 + ) + |> Stream.chunk_every(@log_every) + |> Enum.reduce(0, fn done, count -> + count = count + length(done) + Logger.info("Uploaded #{count}/#{total_count} files") + count + end) + + Logger.info("Done!") + end + + def run(_) do + Logger.error("Usage: migrate_local_uploads UploaderName [--delete]") + end +end diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index 2d86efae5..ca5b9fe65 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -7,6 +7,11 @@ defmodule Pleroma.Application do def version, do: @version def named_version(), do: @name <> " " <> @version + def user_agent() do + info = "#{Pleroma.Web.base_url()} <#{Pleroma.Config.get([:instance, :email], "")}>" + named_version() <> "; " <> info + end + # See http://elixir-lang.org/docs/stable/elixir/Application.html # for more information on OTP Applications @env Mix.env() diff --git a/lib/pleroma/plugs/uploaded_media.ex b/lib/pleroma/plugs/uploaded_media.ex new file mode 100644 index 000000000..994cc8bf6 --- /dev/null +++ b/lib/pleroma/plugs/uploaded_media.ex @@ -0,0 +1,78 @@ +defmodule Pleroma.Plugs.UploadedMedia do + @moduledoc """ + """ + + import Plug.Conn + require Logger + + @behaviour Plug + # no slashes + @path "media" + @cache_control %{ + default: "public, max-age=1209600", + error: "public, must-revalidate, max-age=160" + } + + def init(_opts) do + static_plug_opts = + [] + |> Keyword.put(:from, "__unconfigured_media_plug") + |> Keyword.put(:at, "/__unconfigured_media_plug") + |> Plug.Static.init() + + %{static_plug_opts: static_plug_opts} + end + + def call(conn = %{request_path: <<"/", @path, "/", file::binary>>}, opts) do + config = Pleroma.Config.get([Pleroma.Upload]) + + with uploader <- Keyword.fetch!(config, :uploader), + proxy_remote = Keyword.get(config, :proxy_remote, false), + {:ok, get_method} <- uploader.get_file(file) do + 
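+      # get_method comes from the configured uploader's get_file/1 callback and is
+      # either {:static_dir, directory} or {:url, url} (see Pleroma.Uploaders.Uploader).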
get_media(conn, get_method, proxy_remote, opts) + else + _ -> + conn + |> send_resp(500, "Failed") + |> halt() + end + end + + def call(conn, _opts), do: conn + + defp get_media(conn, {:static_dir, directory}, _, opts) do + static_opts = + Map.get(opts, :static_plug_opts) + |> Map.put(:at, [@path]) + |> Map.put(:from, directory) + + conn = Plug.Static.call(conn, static_opts) + + if conn.halted do + conn + else + conn + |> send_resp(404, "Not found") + |> halt() + end + end + + defp get_media(conn, {:url, url}, true, _) do + conn + |> Pleroma.ReverseProxy.call(url, Pleroma.Config.get([Pleroma.Upload, :proxy_opts], [])) + end + + defp get_media(conn, {:url, url}, _, _) do + conn + |> Phoenix.Controller.redirect(external: url) + |> halt() + end + + defp get_media(conn, unknown, _, _) do + Logger.error("#{__MODULE__}: Unknown get startegy: #{inspect(unknown)}") + + conn + |> send_resp(500, "Internal Error") + |> halt() + end +end diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy.ex new file mode 100644 index 000000000..64c3c3a19 --- /dev/null +++ b/lib/pleroma/reverse_proxy.ex @@ -0,0 +1,338 @@ +defmodule Pleroma.ReverseProxy do + @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since if-none-match range) + @resp_cache_headers ~w(etag date last-modified cache-control) + @keep_resp_headers @resp_cache_headers ++ + ~w(content-type content-disposition content-length accept-ranges vary) + @default_cache_control_header "public, max-age=1209600" + @valid_resp_codes [200, 206, 304] + @max_read_duration :timer.minutes(2) + @max_body_length :infinity + @methods ~w(GET HEAD) + + @moduledoc """ + A reverse proxy. + + Pleroma.ReverseProxy.call(conn, url, options) + + It is not meant to be added into a plug pipeline, but to be called from another plug or controller. + + Supports `#{inspect(@methods)}` HTTP methods, and only allows `#{inspect(@valid_resp_codes)}` status codes. + + Responses are chunked to the client while downloading from the upstream. + + Some request / responses headers are preserved: + + * request: `#{inspect(@keep_req_headers)}` + * response: `#{inspect(@keep_resp_headers)}` + + If no caching headers (`#{inspect(@resp_cache_headers)}`) are returned by upstream, `cache-control` will be + set to `#{inspect(@default_cache_control_header)}`. + + Options: + + * `redirect_on_failure` (default `false`). Redirects the client to the real remote URL if there's any HTTP + errors. Any error during body processing will not be redirected as the response is chunked. This may expose + remote URL, clients IPs, …. + + * `max_body_length` (default `#{inspect(@max_body_length)}`): limits the content length to be approximately the + specified length. It is validated with the `content-length` header and also verified when proxying. + + * `max_read_duration` (default `#{inspect(@max_read_duration)}` ms): the total time the connection is allowed to + read from the remote upstream. + + * `inline_content_types`: + * `true` will not alter `content-disposition` (up to the upstream), + * `false` will add `content-disposition: attachment` to any request, + * a list of whitelisted content types + + * `keep_user_agent` will forward the client's user-agent to the upstream. This may be useful if the upstream is + doing content transformation (encoding, …) depending on the request. + + * `req_headers`, `resp_headers` additional headers. + + * `http`: options for [hackney](https://github.com/benoitc/hackney). 
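+
+  Example call (hypothetical URL and option values, shown only for illustration;
+  the option names are the ones documented above):
+
+      Pleroma.ReverseProxy.call(conn, "https://example.com/remote/media.png",
+        max_body_length: 25 * 1_048_576,
+        inline_content_types: ["image/png", "image/jpeg"],
+        http: [:insecure]
+      )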
+ + """ + @hackney Application.get_env(:pleroma, :hackney, :hackney) + @httpoison Application.get_env(:pleroma, :httpoison, HTTPoison) + + @default_hackney_options [{:follow_redirect, true}] + + @inline_content_types [ + "image/gif", + "image/jpeg", + "image/jpg", + "image/png", + "image/svg+xml", + "audio/mpeg", + "audio/mp3", + "video/webm", + "video/mp4", + "video/quicktime" + ] + + require Logger + import Plug.Conn + + @type option() :: + {:keep_user_agent, boolean} + | {:max_read_duration, :timer.time() | :infinity} + | {:max_body_length, non_neg_integer() | :infinity} + | {:http, []} + | {:req_headers, [{String.t(), String.t()}]} + | {:resp_headers, [{String.t(), String.t()}]} + | {:inline_content_types, boolean() | [String.t()]} + | {:redirect_on_failure, boolean()} + + @spec call(Plug.Conn.t(), url :: String.t(), [option()]) :: Plug.Conn.t() + def call(conn = %{method: method}, url, opts \\ []) when method in @methods do + hackney_opts = + @default_hackney_options + |> Keyword.merge(Keyword.get(opts, :http, [])) + |> @httpoison.process_request_options() + + req_headers = build_req_headers(conn.req_headers, opts) + + opts = + if filename = Pleroma.Web.MediaProxy.filename(url) do + Keyword.put_new(opts, :attachment_name, filename) + else + opts + end + + with {:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts), + :ok <- header_lenght_constraint(headers, Keyword.get(opts, :max_body_length)) do + response(conn, client, url, code, headers, opts) + else + {:ok, code, headers} -> + head_response(conn, url, code, headers, opts) + |> halt() + + {:error, {:invalid_http_response, code}} -> + Logger.error("#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{code}") + + conn + |> error_or_redirect( + url, + code, + "Request failed: " <> Plug.Conn.Status.reason_phrase(code), + opts + ) + |> halt() + + {:error, error} -> + Logger.error("#{__MODULE__}: request to #{inspect(url)} failed: #{inspect(error)}") + + conn + |> error_or_redirect(url, 500, "Request failed", opts) + |> halt() + end + end + + def call(conn, _, _) do + conn + |> send_resp(400, Plug.Conn.Status.reason_phrase(400)) + |> halt() + end + + defp request(method, url, headers, hackney_opts) do + Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}") + method = method |> String.downcase() |> String.to_existing_atom() + + case @hackney.request(method, url, headers, "", hackney_opts) do + {:ok, code, headers, client} when code in @valid_resp_codes -> + {:ok, code, downcase_headers(headers), client} + + {:ok, code, headers} when code in @valid_resp_codes -> + {:ok, code, downcase_headers(headers)} + + {:ok, code, _, _} -> + {:error, {:invalid_http_response, code}} + + {:error, error} -> + {:error, error} + end + end + + defp response(conn, client, url, status, headers, opts) do + result = + conn + |> put_resp_headers(build_resp_headers(headers, opts)) + |> send_chunked(status) + |> chunk_reply(client, opts) + + case result do + {:ok, conn} -> + halt(conn) + + {:error, :closed, conn} -> + :hackney.close(client) + halt(conn) + + {:error, error, conn} -> + Logger.warn( + "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}" + ) + + :hackney.close(client) + halt(conn) + end + end + + defp chunk_reply(conn, client, opts) do + chunk_reply(conn, client, opts, 0, 0) + end + + defp chunk_reply(conn, client, opts, sent_so_far, duration) do + with {:ok, duration} <- + check_read_duration( + duration, + Keyword.get(opts, :max_read_duration, @max_read_duration) + ), 
+         {:ok, data} <- @hackney.stream_body(client),
+         {:ok, duration} <- increase_read_duration(duration),
+         sent_so_far = sent_so_far + byte_size(data),
+         :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_length)),
+         {:ok, conn} <- chunk(conn, data) do
+      chunk_reply(conn, client, opts, sent_so_far, duration)
+    else
+      :done -> {:ok, conn}
+      {:error, error} -> {:error, error, conn}
+    end
+  end
+
+  defp head_response(conn, _url, code, headers, opts) do
+    conn
+    |> put_resp_headers(build_resp_headers(headers, opts))
+    |> send_resp(code, "")
+  end
+
+  defp error_or_redirect(conn, url, code, body, opts) do
+    if Keyword.get(opts, :redirect_on_failure, false) do
+      conn
+      |> Phoenix.Controller.redirect(external: url)
+      |> halt()
+    else
+      conn
+      |> send_resp(code, body)
+      |> halt
+    end
+  end
+
+  defp downcase_headers(headers) do
+    Enum.map(headers, fn {k, v} ->
+      {String.downcase(k), v}
+    end)
+  end
+
+  defp put_resp_headers(conn, headers) do
+    Enum.reduce(headers, conn, fn {k, v}, conn ->
+      put_resp_header(conn, k, v)
+    end)
+  end
+
+  defp build_req_headers(headers, opts) do
+    headers =
+      headers
+      |> downcase_headers()
+      |> Enum.filter(fn {k, _} -> k in @keep_req_headers end)
+      |> (fn headers ->
+            headers = headers ++ Keyword.get(opts, :req_headers, [])
+
+            if Keyword.get(opts, :keep_user_agent, false) do
+              List.keystore(
+                headers,
+                "user-agent",
+                0,
+                {"user-agent", Pleroma.Application.user_agent()}
+              )
+            else
+              headers
+            end
+          end).()
+  end
+
+  defp build_resp_headers(headers, opts) do
+    headers =
+      headers
+      |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end)
+      |> build_resp_cache_headers(opts)
+      |> build_resp_content_disposition_header(opts)
+      |> (fn headers -> headers ++ Keyword.get(opts, :resp_headers, []) end).()
+  end
+
+  defp build_resp_cache_headers(headers, opts) do
+    has_cache? = Enum.any?(headers, fn {k, _} -> k in @resp_cache_headers end)
+
+    if has_cache? do
+      headers
+    else
+      List.keystore(headers, "cache-control", 0, {"cache-control", @default_cache_control_header})
+    end
+  end
+
+  defp build_resp_content_disposition_header(headers, opts) do
+    opt = Keyword.get(opts, :inline_content_types, @inline_content_types)
+
+    {_, content_type} =
+      List.keyfind(headers, "content-type", 0, {"content-type", "application/octet-stream"})
+
+    attachment? =
+      cond do
+        is_list(opt) && !Enum.member?(opt, content_type) -> true
+        opt == false -> true
+        true -> false
+      end
+
+    if attachment?
do + disposition = "attachment; filename=" <> Keyword.get(opts, :attachment_name, "attachment") + List.keystore(headers, "content-disposition", 0, {"content-disposition", disposition}) + else + headers + end + end + + defp header_lenght_constraint(headers, limit) when is_integer(limit) and limit > 0 do + with {_, size} <- List.keyfind(headers, "content-length", 0), + {size, _} <- Integer.parse(size), + true <- size <= limit do + :ok + else + false -> + {:error, :body_too_large} + + _ -> + :ok + end + end + + defp header_lenght_constraint(_, _), do: :ok + + defp body_size_constraint(size, limit) when is_integer(limit) and limit > 0 and size >= limit do + {:error, :body_too_large} + end + + defp body_size_constraint(_, _), do: :ok + + defp check_read_duration(duration, max) + when is_integer(duration) and is_integer(max) and max > 0 do + if duration > max do + {:error, :read_duration_exceeded} + else + Logger.debug("Duration #{inspect(duration)}") + {:ok, {duration, :erlang.system_time(:millisecond)}} + end + end + + defp check_read_duration(_, _), do: {:ok, :no_duration_limit, :no_duration_limit} + + defp increase_read_duration({previous_duration, started}) + when is_integer(previous_duration) and is_integer(started) do + duration = :erlang.system_time(:millisecond) - started + {:ok, previous_duration + duration} + end + + defp increase_read_duration(_) do + {:ok, :no_duration_limit, :no_duration_limit} + end +end diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex index 238630bf3..16043a264 100644 --- a/lib/pleroma/upload.ex +++ b/lib/pleroma/upload.ex @@ -1,81 +1,102 @@ defmodule Pleroma.Upload do alias Ecto.UUID + require Logger - def check_file_size(path, nil), do: true + @type upload_option :: + {:dedupe, boolean()} | {:size_limit, non_neg_integer()} | {:uploader, module()} + @type upload_source :: + Plug.Upload.t() | data_uri_string() :: + String.t() | {:from_local, name :: String.t(), uuid :: String.t(), path :: String.t()} - def check_file_size(path, size_limit) do - {:ok, %{size: size}} = File.stat(path) - size <= size_limit - end + @spec store(upload_source, options :: [upload_option()]) :: {:ok, Map.t()} | {:error, any()} + def store(upload, opts \\ []) do + opts = get_opts(opts) - def store(file, should_dedupe, size_limit \\ nil) - - def store(%Plug.Upload{} = file, should_dedupe, size_limit) do - content_type = get_content_type(file.path) - - with uuid <- get_uuid(file, should_dedupe), - name <- get_name(file, uuid, content_type, should_dedupe), - true <- check_file_size(file.path, size_limit) do - strip_exif_data(content_type, file.path) - - {:ok, url_path} = uploader().put_file(name, uuid, file.path, content_type, should_dedupe) - - %{ - "type" => "Document", - "url" => [ - %{ - "type" => "Link", - "mediaType" => content_type, - "href" => url_path - } - ], - "name" => name - } + with {:ok, name, uuid, path, content_type} <- process_upload(upload, opts), + _ <- strip_exif_data(content_type, path), + {:ok, url_spec} <- opts.uploader.put_file(name, uuid, path, content_type, opts) do + {:ok, + %{ + "type" => "Image", + "url" => [ + %{ + "type" => "Link", + "mediaType" => content_type, + "href" => url_from_spec(url_spec) + } + ], + "name" => name + }} else - _e -> nil - end - end - - def store(%{"img" => "data:image/" <> image_data}, should_dedupe, size_limit) do - parsed = Regex.named_captures(~r/(?jpeg|png|gif);base64,(?.*)/, image_data) - data = Base.decode64!(parsed["data"], ignore: :whitespace) - - with tmp_path <- tempfile_for_image(data), - uuid <- UUID.generate(), - 
true <- check_file_size(tmp_path, size_limit) do - content_type = get_content_type(tmp_path) - strip_exif_data(content_type, tmp_path) - - name = - create_name( - String.downcase(Base.encode16(:crypto.hash(:sha256, data))), - parsed["filetype"], - content_type + {:error, error} -> + Logger.error( + "#{__MODULE__} store (using #{inspect(opts.uploader)}) failed: #{inspect(error)}" ) - {:ok, url_path} = uploader().put_file(name, uuid, tmp_path, content_type, should_dedupe) - - %{ - "type" => "Image", - "url" => [ - %{ - "type" => "Link", - "mediaType" => content_type, - "href" => url_path - } - ], - "name" => name - } - else - _e -> nil + {:error, error} end end - @doc """ - Creates a tempfile using the Plug.Upload Genserver which cleans them up - automatically. - """ - def tempfile_for_image(data) do + defp get_opts(opts) do + %{ + dedupe: Keyword.get(opts, :dedupe, Pleroma.Config.get([:instance, :dedupe_media])), + size_limit: Keyword.get(opts, :size_limit, Pleroma.Config.get([:instance, :upload_limit])), + uploader: Keyword.get(opts, :uploader, Pleroma.Config.get([__MODULE__, :uploader])) + } + end + + defp process_upload(%Plug.Upload{} = file, opts) do + with :ok <- check_file_size(file.path, opts.size_limit), + uuid <- get_uuid(file, opts.dedupe), + content_type <- get_content_type(file.path), + name <- get_name(file, uuid, content_type, opts.dedupe) do + {:ok, name, uuid, file.path, content_type} + end + end + + defp process_upload(%{"img" => "data:image/" <> image_data}, opts) do + parsed = Regex.named_captures(~r/(?jpeg|png|gif);base64,(?.*)/, image_data) + data = Base.decode64!(parsed["data"], ignore: :whitespace) + hash = String.downcase(Base.encode16(:crypto.hash(:sha256, data))) + + with :ok <- check_binary_size(data, opts.size_limit), + tmp_path <- tempfile_for_image(data), + content_type <- get_content_type(tmp_path), + uuid <- UUID.generate(), + name <- create_name(hash, parsed["filetype"], content_type) do + {:ok, name, uuid, tmp_path, content_type} + end + end + + # For Mix.Tasks.MigrateLocalUploads + defp process_upload({:from_local, name, uuid, path}, _opts) do + with content_type <- get_content_type(path) do + {:ok, name, uuid, path, content_type} + end + end + + defp check_binary_size(binary, size_limit) + when is_integer(size_limit) and size_limit > 0 and byte_size(binary) >= size_limit do + {:error, :file_too_large} + end + + defp check_binary_size(_, _), do: :ok + + defp check_file_size(path, size_limit) when is_integer(size_limit) and size_limit > 0 do + with {:ok, %{size: size}} <- File.stat(path), + true <- size <= size_limit do + :ok + else + false -> {:error, :file_too_large} + error -> error + end + end + + defp check_file_size(_, _), do: :ok + + # Creates a tempfile using the Plug.Upload Genserver which cleans them up + # automatically. 
+ defp tempfile_for_image(data) do {:ok, tmp_path} = Plug.Upload.random_file("profile_pics") {:ok, tmp_file} = File.open(tmp_path, [:write, :raw, :binary]) IO.binwrite(tmp_file, data) @@ -83,7 +104,7 @@ defmodule Pleroma.Upload do tmp_path end - def strip_exif_data(content_type, file) do + defp strip_exif_data(content_type, file) do settings = Application.get_env(:pleroma, Pleroma.Upload) do_strip = Keyword.fetch!(settings, :strip_exif) [filetype, _ext] = String.split(content_type, "/") @@ -94,16 +115,20 @@ defmodule Pleroma.Upload do end defp create_name(uuid, ext, type) do - case type do - "application/octet-stream" -> - String.downcase(Enum.join([uuid, ext], ".")) + extension = + cond do + type == "application/octect-stream" -> ext + ext = mime_extension(ext) -> ext + true -> String.split(type, "/") |> List.last() + end - "audio/mpeg" -> - String.downcase(Enum.join([uuid, "mp3"], ".")) + [uuid, extension] + |> Enum.join(".") + |> String.downcase() + end - _ -> - String.downcase(Enum.join([uuid, List.last(String.split(type, "/"))], ".")) - end + defp mime_extension(type) do + List.first(MIME.extensions(type)) end defp get_uuid(file, should_dedupe) do @@ -127,11 +152,15 @@ defmodule Pleroma.Upload do Enum.join(parts) end - case type do - "application/octet-stream" -> file.filename - "audio/mpeg" -> new_filename <> ".mp3" - "image/jpeg" -> new_filename <> ".jpg" - _ -> Enum.join([new_filename, String.split(type, "/") |> List.last()], ".") + cond do + type == "application/octet-stream" -> + file.filename + + ext = mime_extension(type) -> + new_filename <> "." <> ext + + true -> + Enum.join([new_filename, String.split(type, "/") |> List.last()], ".") end end end @@ -187,4 +216,13 @@ defmodule Pleroma.Upload do defp uploader() do Pleroma.Config.get!([Pleroma.Upload, :uploader]) end + + defp url_from_spec({:file, path}) do + [Pleroma.Web.base_url(), "media", path] + |> Path.join() + end + + defp url_from_spec({:url, url}) do + url + end end diff --git a/lib/pleroma/uploaders/local.ex b/lib/pleroma/uploaders/local.ex index d96481c8d..7ca1ba07d 100644 --- a/lib/pleroma/uploaders/local.ex +++ b/lib/pleroma/uploaders/local.ex @@ -3,9 +3,12 @@ defmodule Pleroma.Uploaders.Local do alias Pleroma.Web - def put_file(name, uuid, tmpfile, _content_type, should_dedupe) do - upload_folder = get_upload_path(uuid, should_dedupe) - url_path = get_url(name, uuid, should_dedupe) + def get_file(_) do + {:ok, {:static_dir, upload_path()}} + end + + def put_file(name, uuid, tmpfile, _content_type, opts) do + upload_folder = get_upload_path(uuid, opts.dedupe) File.mkdir_p!(upload_folder) @@ -17,12 +20,11 @@ defmodule Pleroma.Uploaders.Local do File.cp!(tmpfile, result_file) end - {:ok, url_path} + {:ok, {:file, get_url(name, uuid, opts.dedupe)}} end def upload_path do - settings = Application.get_env(:pleroma, Pleroma.Uploaders.Local) - Keyword.fetch!(settings, :uploads) + Pleroma.Config.get!([__MODULE__, :uploads]) end defp get_upload_path(uuid, should_dedupe) do @@ -35,17 +37,9 @@ defmodule Pleroma.Uploaders.Local do defp get_url(name, uuid, should_dedupe) do if should_dedupe do - url_for(:cow_uri.urlencode(name)) + :cow_uri.urlencode(name) else - url_for(Path.join(uuid, :cow_uri.urlencode(name))) + Path.join(uuid, :cow_uri.urlencode(name)) end end - - defp url_for(file) do - settings = Application.get_env(:pleroma, Pleroma.Uploaders.Local) - - Keyword.get(settings, :uploads_url) - |> String.replace("{{file}}", file) - |> String.replace("{{base_url}}", Web.base_url()) - end end diff --git 
a/lib/pleroma/uploaders/mdii.ex b/lib/pleroma/uploaders/mdii.ex index a9d52b0dc..1d93c8154 100644 --- a/lib/pleroma/uploaders/mdii.ex +++ b/lib/pleroma/uploaders/mdii.ex @@ -5,7 +5,13 @@ defmodule Pleroma.Uploaders.MDII do @httpoison Application.get_env(:pleroma, :httpoison) - def put_file(name, uuid, path, content_type, should_dedupe) do + # MDII-hosted images are never passed through the MediaPlug; only local media. + # Delegate to Pleroma.Uploaders.Local + def get_file(file) do + Pleroma.Uploaders.Local.get_file(file) + end + + def put_file(name, uuid, path, content_type, opts) do cgi = Pleroma.Config.get([Pleroma.Uploaders.MDII, :cgi]) files = Pleroma.Config.get([Pleroma.Uploaders.MDII, :files]) @@ -18,9 +24,9 @@ defmodule Pleroma.Uploaders.MDII do File.rm!(path) remote_file_name = String.split(body) |> List.first() public_url = "#{files}/#{remote_file_name}.#{extension}" - {:ok, public_url} + {:ok, {:url, public_url}} else - _ -> Pleroma.Uploaders.Local.put_file(name, uuid, path, content_type, should_dedupe) + _ -> Pleroma.Uploaders.Local.put_file(name, uuid, path, content_type, opts) end end end diff --git a/lib/pleroma/uploaders/s3.ex b/lib/pleroma/uploaders/s3.ex index 40a836460..2d1ddef75 100644 --- a/lib/pleroma/uploaders/s3.ex +++ b/lib/pleroma/uploaders/s3.ex @@ -1,40 +1,48 @@ defmodule Pleroma.Uploaders.S3 do - alias Pleroma.Web.MediaProxy - @behaviour Pleroma.Uploaders.Uploader + require Logger - def put_file(name, uuid, path, content_type, _should_dedupe) do - settings = Application.get_env(:pleroma, Pleroma.Uploaders.S3) - bucket = Keyword.fetch!(settings, :bucket) - public_endpoint = Keyword.fetch!(settings, :public_endpoint) - force_media_proxy = Keyword.fetch!(settings, :force_media_proxy) + # The file name is re-encoded with S3's constraints here to comply with previous links with less strict filenames + def get_file(file) do + config = Pleroma.Config.get([__MODULE__]) + + {:ok, + {:url, + Path.join([ + Keyword.fetch!(config, :public_endpoint), + Keyword.fetch!(config, :bucket), + strict_encode(URI.decode(file)) + ])}} + end + + def put_file(name, uuid, path, content_type, _opts) do + config = Pleroma.Config.get([__MODULE__]) + bucket = Keyword.get(config, :bucket) {:ok, file_data} = File.read(path) File.rm!(path) - s3_name = "#{uuid}/#{encode(name)}" + s3_name = "#{uuid}/#{strict_encode(name)}" - {:ok, _} = + op = ExAws.S3.put_object(bucket, s3_name, file_data, [ {:acl, :public_read}, {:content_type, content_type} ]) - |> ExAws.request() - url_base = "#{public_endpoint}/#{bucket}/#{s3_name}" + case ExAws.request(op) do + {:ok, _} -> + {:ok, {:file, s3_name}} - public_url = - if force_media_proxy do - MediaProxy.url(url_base) - else - url_base - end - - {:ok, public_url} + error -> + Logger.error("#{__MODULE__}: #{inspect(error)}") + {:error, "S3 Upload failed"} + end end - defp encode(name) do - String.replace(name, ~r/[^0-9a-zA-Z!.*'()_-]/, "-") + @regex Regex.compile!("[^0-9a-zA-Z!.*/'()_-]") + def strict_encode(name) do + String.replace(name, @regex, "-") end end diff --git a/lib/pleroma/uploaders/swift/swift.ex b/lib/pleroma/uploaders/swift/swift.ex index fa08ca966..1e865f101 100644 --- a/lib/pleroma/uploaders/swift/swift.ex +++ b/lib/pleroma/uploaders/swift/swift.ex @@ -14,7 +14,7 @@ defmodule Pleroma.Uploaders.Swift.Client do case put("#{filename}", body, "X-Auth-Token": token, "Content-Type": content_type) do {:ok, %HTTPoison.Response{status_code: 201}} -> - {:ok, "#{object_url}/#{filename}"} + {:ok, {:file, filename}} {:ok, %HTTPoison.Response{status_code: 
401}} -> {:error, "Unauthorized, Bad Token"}
diff --git a/lib/pleroma/uploaders/swift/uploader.ex b/lib/pleroma/uploaders/swift/uploader.ex
index 794f76cb0..5db35fe50 100644
--- a/lib/pleroma/uploaders/swift/uploader.ex
+++ b/lib/pleroma/uploaders/swift/uploader.ex
@@ -1,7 +1,11 @@
 defmodule Pleroma.Uploaders.Swift do
   @behaviour Pleroma.Uploaders.Uploader
-  def put_file(name, uuid, tmp_path, content_type, _should_dedupe) do
+  def get_file(name) do
+    {:ok, {:url, Path.join([Pleroma.Config.get!([__MODULE__, :object_url]), name])}}
+  end
+
+  def put_file(name, uuid, tmp_path, content_type, _opts) do
     {:ok, file_data} = File.read(tmp_path)
     remote_name = "#{uuid}/#{name}"
diff --git a/lib/pleroma/uploaders/uploader.ex b/lib/pleroma/uploaders/uploader.ex
index b58fc6d71..8ef82b4ef 100644
--- a/lib/pleroma/uploaders/uploader.ex
+++ b/lib/pleroma/uploaders/uploader.ex
@@ -1,20 +1,35 @@
 defmodule Pleroma.Uploaders.Uploader do
   @moduledoc """
-  Defines the contract to put an uploaded file to any backend.
+  Defines the contract to put and get an uploaded file to any backend.
   """
+  @doc """
+  Instructs how to get the file from the backend.
+
+  Used by `Pleroma.Plugs.UploadedMedia`.
+  """
+  @type get_method :: {:static_dir, directory :: String.t()} | {:url, url :: String.t()}
+  @callback get_file(file :: String.t()) :: {:ok, get_method()}
+
   @doc """
   Put a file to the backend.
-  Returns `{:ok, String.t } | {:error, String.t} containing the path of the
-  uploaded file, or error information if the file failed to be saved to the
-  respective backend.
+  Returns:
+
+  * `{:ok, spec}` where spec is:
+    * `{:file, filename :: String.t}` to handle reads with `get_file/1` (recommended)
+
+      This allows requests to be correctly proxied or redirected to the backend, and makes it possible to migrate backends without breaking any URL.
+
+    * `{:url, url :: String.t}` to bypass `get_file/1` and use the `url` directly in the activity.
+  * `{:error, String.t}` error information if the file failed to be saved to the backend.
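+
+  A minimal sketch of a conforming callback (`MyBackend` is a hypothetical module,
+  shown only for illustration):
+
+      def put_file(name, uuid, tmp_path, _content_type, _opts) do
+        path = Path.join(uuid, name)
+
+        case MyBackend.copy(tmp_path, path) do
+          :ok -> {:ok, {:file, path}}
+          {:error, _reason} -> {:error, "upload failed"}
+        end
+      end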
+ """ @callback put_file( name :: String.t(), uuid :: String.t(), file :: File.t(), content_type :: String.t(), - should_dedupe :: Boolean.t() - ) :: {:ok, String.t()} | {:error, String.t()} + options :: Map.t() + ) :: {:ok, {:file, String.t()} | {:url, String.t()}} | {:error, String.t()} end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index ed579e336..76c15cf21 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -572,10 +572,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do |> Enum.reverse() end - def upload(file, size_limit \\ nil) do - with data <- - Upload.store(file, Application.get_env(:pleroma, :instance)[:dedupe_media], size_limit), - false <- is_nil(data) do + def upload(file, opts \\ []) do + with {:ok, data} <- Upload.store(file, opts) do Repo.insert(%Object{data: data}) end end diff --git a/lib/pleroma/web/endpoint.ex b/lib/pleroma/web/endpoint.ex index 8728c908b..c5f9d51d9 100644 --- a/lib/pleroma/web/endpoint.ex +++ b/lib/pleroma/web/endpoint.ex @@ -12,7 +12,7 @@ defmodule Pleroma.Web.Endpoint do plug(CORSPlug) plug(Pleroma.Plugs.HTTPSecurityPlug) - plug(Plug.Static, at: "/media", from: Pleroma.Uploaders.Local.upload_path(), gzip: false) + plug(Pleroma.Plugs.UploadedMedia) plug( Plug.Static, diff --git a/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex b/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex index aa7e9418e..9d50d906d 100644 --- a/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex +++ b/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex @@ -60,7 +60,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do user = if avatar = params["avatar"] do with %Plug.Upload{} <- avatar, - {:ok, object} <- ActivityPub.upload(avatar, avatar_upload_limit), + {:ok, object} <- ActivityPub.upload(avatar, size_limit: avatar_upload_limit), change = Ecto.Changeset.change(user, %{avatar: object.data}), {:ok, user} = User.update_and_set_cache(change) do user @@ -74,7 +74,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do user = if banner = params["header"] do with %Plug.Upload{} <- banner, - {:ok, object} <- ActivityPub.upload(banner, banner_upload_limit), + {:ok, object} <- ActivityPub.upload(banner, size_limit: banner_upload_limit), new_info <- Map.put(user.info, "banner", object.data), change <- User.info_changeset(user, %{info: new_info}), {:ok, user} <- User.update_and_set_cache(change) do diff --git a/lib/pleroma/web/media_proxy/controller.ex b/lib/pleroma/web/media_proxy/controller.ex index bb257c262..324f75263 100644 --- a/lib/pleroma/web/media_proxy/controller.ex +++ b/lib/pleroma/web/media_proxy/controller.ex @@ -1,135 +1,32 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do use Pleroma.Web, :controller - require Logger + alias Pleroma.{Web.MediaProxy, ReverseProxy} - @httpoison Application.get_env(:pleroma, :httpoison) - - @max_body_length 25 * 1_048_576 - - @cache_control %{ - default: "public, max-age=1209600", - error: "public, must-revalidate, max-age=160" - } - - # Content-types that will not be returned as content-disposition attachments - # Override with :media_proxy, :safe_content_types in the configuration - @safe_content_types [ - "image/gif", - "image/jpeg", - "image/jpg", - "image/png", - "image/svg+xml", - "audio/mpeg", - "audio/mp3", - "video/webm", - "video/mp4" - ] - - def remote(conn, params = %{"sig" => sig, "url" => url}) do - config = Application.get_env(:pleroma, :media_proxy, []) - 
- with true <- Keyword.get(config, :enabled, false), - {:ok, url} <- Pleroma.Web.MediaProxy.decode_url(sig, url), + def remote(conn, params = %{"sig" => sig64, "url" => url64}) do + with config <- Pleroma.Config.get([:media_proxy]), + true <- Keyword.get(config, :enabled, false), + {:ok, url} <- MediaProxy.decode_url(sig64, url64), filename <- Path.basename(URI.parse(url).path), - true <- - if(Map.get(params, "filename"), - do: filename == Path.basename(conn.request_path), - else: true - ), - {:ok, content_type, body} <- proxy_request(url), - safe_content_type <- - Enum.member?( - Keyword.get(config, :safe_content_types, @safe_content_types), - content_type - ) do - conn - |> put_resp_content_type(content_type) - |> set_cache_header(:default) - |> put_resp_header( - "content-security-policy", - "default-src 'none'; style-src 'unsafe-inline'; media-src data:; img-src 'self' data:" - ) - |> put_resp_header("x-xss-protection", "1; mode=block") - |> put_resp_header("x-content-type-options", "nosniff") - |> put_attachement_header(safe_content_type, filename) - |> send_resp(200, body) + :ok <- filename_matches(Map.has_key?(params, "filename"), conn.request_path, url) do + ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, [])) else false -> - send_error(conn, 404) + send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404)) {:error, :invalid_signature} -> - send_error(conn, 403) + send_resp(conn, 403, Plug.Conn.Status.reason_phrase(403)) - {:error, {:http, _, url}} -> - redirect_or_error(conn, url, Keyword.get(config, :redirect_on_failure, true)) + {:wrong_filename, filename} -> + redirect(conn, external: MediaProxy.build_url(sig64, url64, filename)) end end - defp proxy_request(link) do - headers = [ - {"user-agent", - "Pleroma/MediaProxy; #{Pleroma.Web.base_url()} <#{ - Application.get_env(:pleroma, :instance)[:email] - }>"} - ] + def filename_matches(has_filename, path, url) do + filename = MediaProxy.filename(url) - options = - @httpoison.process_request_options([:insecure, {:follow_redirect, true}]) ++ - [{:pool, :default}] - - with {:ok, 200, headers, client} <- :hackney.request(:get, link, headers, "", options), - headers = Enum.into(headers, Map.new()), - {:ok, body} <- proxy_request_body(client), - content_type <- proxy_request_content_type(headers, body) do - {:ok, content_type, body} - else - {:ok, status, _, _} -> - Logger.warn("MediaProxy: request failed, status #{status}, link: #{link}") - {:error, {:http, :bad_status, link}} - - {:error, error} -> - Logger.warn("MediaProxy: request failed, error #{inspect(error)}, link: #{link}") - {:error, {:http, error, link}} + cond do + has_filename && filename && Path.basename(path) != filename -> {:wrong_filename, filename} + true -> :ok end end - - defp set_cache_header(conn, key) do - Plug.Conn.put_resp_header(conn, "cache-control", @cache_control[key]) - end - - defp redirect_or_error(conn, url, true), do: redirect(conn, external: url) - defp redirect_or_error(conn, url, _), do: send_error(conn, 502, "Media proxy error: " <> url) - - defp send_error(conn, code, body \\ "") do - conn - |> set_cache_header(:error) - |> send_resp(code, body) - end - - defp proxy_request_body(client), do: proxy_request_body(client, <<>>) - - defp proxy_request_body(client, body) when byte_size(body) < @max_body_length do - case :hackney.stream_body(client) do - {:ok, data} -> proxy_request_body(client, <>) - :done -> {:ok, body} - {:error, reason} -> {:error, reason} - end - end - - defp proxy_request_body(client, _) do - :hackney.close(client) - 
{:error, :body_too_large} - end - - # TODO: the body is passed here as well because some hosts do not provide a content-type. - # At some point we may want to use magic numbers to discover the content-type and reply a proper one. - defp proxy_request_content_type(headers, _body) do - headers["Content-Type"] || headers["content-type"] || "application/octet-stream" - end - - defp put_attachement_header(conn, true, _), do: conn - - defp put_attachement_header(conn, false, filename) do - put_resp_header(conn, "content-disposition", "attachment; filename='#{filename}'") - end end diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex index 0fc0a07b2..28aacb0b1 100644 --- a/lib/pleroma/web/media_proxy/media_proxy.ex +++ b/lib/pleroma/web/media_proxy/media_proxy.ex @@ -17,10 +17,8 @@ defmodule Pleroma.Web.MediaProxy do base64 = Base.url_encode64(url, @base64_opts) sig = :crypto.hmac(:sha, secret, base64) sig64 = sig |> Base.url_encode64(@base64_opts) - filename = if path = URI.parse(url).path, do: "/" <> Path.basename(path), else: "" - Keyword.get(config, :base_url, Pleroma.Web.base_url()) <> - "/proxy/#{sig64}/#{base64}#{filename}" + build_url(sig64, base64, filename(url)) end end @@ -35,4 +33,20 @@ defmodule Pleroma.Web.MediaProxy do {:error, :invalid_signature} end end + + def filename(url_or_path) do + if path = URI.parse(url_or_path).path, do: Path.basename(path) + end + + def build_url(sig_base64, url_base64, filename \\ nil) do + [ + Pleroma.Config.get([:media_proxy, :base_url], Pleroma.Web.base_url()), + "proxy", + sig_base64, + url_base64, + filename + ] + |> Enum.filter(fn value -> value end) + |> Path.join() + end end diff --git a/lib/pleroma/web/twitter_api/twitter_api.ex b/lib/pleroma/web/twitter_api/twitter_api.ex index 5bfb83b1e..6223580e1 100644 --- a/lib/pleroma/web/twitter_api/twitter_api.ex +++ b/lib/pleroma/web/twitter_api/twitter_api.ex @@ -97,7 +97,7 @@ defmodule Pleroma.Web.TwitterAPI.TwitterAPI do {:ok, object} = ActivityPub.upload(file) url = List.first(object.data["url"]) - href = url["href"] |> MediaProxy.url() + href = url["href"] type = url["mediaType"] case format do diff --git a/lib/pleroma/web/twitter_api/twitter_api_controller.ex b/lib/pleroma/web/twitter_api/twitter_api_controller.ex index cd0e2121c..fa9ee9f99 100644 --- a/lib/pleroma/web/twitter_api/twitter_api_controller.ex +++ b/lib/pleroma/web/twitter_api/twitter_api_controller.ex @@ -294,7 +294,7 @@ defmodule Pleroma.Web.TwitterAPI.Controller do Application.get_env(:pleroma, :instance) |> Keyword.fetch(:avatar_upload_limit) - {:ok, object} = ActivityPub.upload(params, upload_limit) + {:ok, object} = ActivityPub.upload(params, size_limit: upload_limit) change = Changeset.change(user, %{avatar: object.data}) {:ok, user} = User.update_and_set_cache(change) CommonAPI.update(user) @@ -307,7 +307,8 @@ defmodule Pleroma.Web.TwitterAPI.Controller do Application.get_env(:pleroma, :instance) |> Keyword.fetch(:banner_upload_limit) - with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, upload_limit), + with {:ok, object} <- + ActivityPub.upload(%{"img" => params["banner"]}, size_limit: upload_limit), new_info <- Map.put(user.info, "banner", object.data), change <- User.info_changeset(user, %{info: new_info}), {:ok, user} <- User.update_and_set_cache(change) do @@ -325,7 +326,7 @@ defmodule Pleroma.Web.TwitterAPI.Controller do Application.get_env(:pleroma, :instance) |> Keyword.fetch(:background_upload_limit) - with {:ok, object} <- 
ActivityPub.upload(params, upload_limit), + with {:ok, object} <- ActivityPub.upload(params, size_limit: upload_limit), new_info <- Map.put(user.info, "background", object.data), change <- User.info_changeset(user, %{info: new_info}), {:ok, _user} <- User.update_and_set_cache(change) do diff --git a/test/support/httpoison_mock.ex b/test/support/httpoison_mock.ex index 0be09b6ce..e7344500f 100644 --- a/test/support/httpoison_mock.ex +++ b/test/support/httpoison_mock.ex @@ -1,6 +1,8 @@ defmodule HTTPoisonMock do alias HTTPoison.Response + def process_request_options(options), do: options + def get(url, body \\ [], headers \\ []) def get("https://prismo.news/@mxb", _, _) do diff --git a/test/upload_test.exs b/test/upload_test.exs index d273ea5f6..998245b29 100644 --- a/test/upload_test.exs +++ b/test/upload_test.exs @@ -2,7 +2,35 @@ defmodule Pleroma.UploadTest do alias Pleroma.Upload use Pleroma.DataCase - describe "Storing a file" do + describe "Storing a file with the Local uploader" do + setup do + uploader = Pleroma.Config.get([Pleroma.Upload, :uploader]) + + unless uploader == Pleroma.Uploaders.Local do + on_exit(fn -> + Pleroma.Config.put([Pleroma.Upload, :uploader], uploader) + end) + end + + :ok + end + + test "returns a media url" do + File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg") + + file = %Plug.Upload{ + content_type: "image/jpg", + path: Path.absname("test/fixtures/image_tmp.jpg"), + filename: "image.jpg" + } + + {:ok, data} = Upload.store(file) + + assert %{"url" => [%{"href" => url}]} = data + + assert String.starts_with?(url, Pleroma.Web.base_url() <> "/media/") + end + test "copies the file to the configured folder with deduping" do File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg") @@ -12,7 +40,7 @@ defmodule Pleroma.UploadTest do filename: "an [image.jpg" } - data = Upload.store(file, true) + {:ok, data} = Upload.store(file, dedupe: true) assert data["name"] == "e7a6d0cf595bff76f14c9a98b6c199539559e8b844e02e51e5efcfd1f614a2df.jpeg" @@ -27,7 +55,7 @@ defmodule Pleroma.UploadTest do filename: "an [image.jpg" } - data = Upload.store(file, false) + {:ok, data} = Upload.store(file, dedupe: false) assert data["name"] == "an [image.jpg" end @@ -40,7 +68,7 @@ defmodule Pleroma.UploadTest do filename: "an [image.jpg" } - data = Upload.store(file, true) + {:ok, data} = Upload.store(file, dedupe: true) assert hd(data["url"])["mediaType"] == "image/jpeg" end @@ -53,7 +81,7 @@ defmodule Pleroma.UploadTest do filename: "an [image" } - data = Upload.store(file, false) + {:ok, data} = Upload.store(file, dedupe: false) assert data["name"] == "an [image.jpg" end @@ -66,7 +94,7 @@ defmodule Pleroma.UploadTest do filename: "an [image.blah" } - data = Upload.store(file, false) + {:ok, data} = Upload.store(file, dedupe: false) assert data["name"] == "an [image.jpg" end @@ -79,7 +107,7 @@ defmodule Pleroma.UploadTest do filename: "test.txt" } - data = Upload.store(file, false) + {:ok, data} = Upload.store(file, dedupe: false) assert data["name"] == "test.txt" end end
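
Note on the resulting behaviour: `put_file/5` now returns `{:ok, {:file, path}}` or `{:ok, {:url, url}}`. A `{:file, path}` result becomes a `<base_url>/media/<path>` URL (see `url_from_spec/1` in lib/pleroma/upload.ex) and is served by `Pleroma.Plugs.UploadedMedia`, which asks the uploader's `get_file/1` whether to serve a static directory, redirect to a remote URL, or, with `proxy_remote: true`, stream the file through `Pleroma.ReverseProxy`. A minimal sketch of the configuration this enables (the bucket name is illustrative; `proxy_remote` defaults to `false`):

    config :pleroma, Pleroma.Upload,
      uploader: Pleroma.Uploaders.S3,
      strip_exif: false,
      proxy_remote: true,
      proxy_opts: [inline_content_types: true, keep_user_agent: true]

    config :pleroma, Pleroma.Uploaders.S3,
      bucket: "my-bucket",
      public_endpoint: "https://s3.amazonaws.com"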