diff --git a/.gitignore b/.gitignore
index 9aad700ee..b71dfa9fa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,9 @@
/uploads
/test/uploads
/.elixir_ls
+/test/fixtures/test_tmp.txt
+/test/fixtures/image_tmp.jpg
+/doc
# Prevent committing custom emojis
/priv/static/emoji/custom/*
@@ -28,4 +31,4 @@ erl_crash.dump
.env
# Editor config
-/.vscode
\ No newline at end of file
+/.vscode
diff --git a/config/config.exs b/config/config.exs
index 848c9d6cf..8d2fdd40d 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -10,18 +10,18 @@
config :pleroma, Pleroma.Repo, types: Pleroma.PostgresTypes
+# Upload configuration
config :pleroma, Pleroma.Upload,
uploader: Pleroma.Uploaders.Local,
- strip_exif: false
+ filters: [],
+ proxy_remote: false,
+ proxy_opts: []
-config :pleroma, Pleroma.Uploaders.Local,
- uploads: "uploads",
- uploads_url: "{{base_url}}/media/{{file}}"
+config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads"
config :pleroma, Pleroma.Uploaders.S3,
bucket: nil,
- public_endpoint: "https://s3.amazonaws.com",
- force_media_proxy: false
+ public_endpoint: "https://s3.amazonaws.com"
config :pleroma, Pleroma.Uploaders.MDII,
cgi: "https://mdii.sakura.ne.jp/mdii-post.cgi",
@@ -72,6 +72,7 @@
config :pleroma, :websub, Pleroma.Web.Websub
config :pleroma, :ostatus, Pleroma.Web.OStatus
config :pleroma, :httpoison, Pleroma.HTTP
+config :tesla, adapter: Tesla.Adapter.Hackney
# Configures http settings, upstream proxy etc.
config :pleroma, :http, proxy_url: nil
@@ -150,9 +151,11 @@
config :pleroma, :media_proxy,
enabled: false,
- redirect_on_failure: true
-
-# base_url: "https://cache.pleroma.social"
+ # base_url: "https://cache.pleroma.social",
+ proxy_opts: [
+ # inline_content_types: [] | false | true,
+ # http: [:insecure]
+ ]
config :pleroma, :chat, enabled: true
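For orientation, the new upload and media-proxy settings introduced in this hunk could be combined in a site's `prod.secret.exs` along these lines (key names come from the hunk above; the values are purely illustrative):

    config :pleroma, Pleroma.Upload,
      uploader: Pleroma.Uploaders.Local,
      filters: [Pleroma.Upload.Filter.Mogrify],
      proxy_remote: false

    config :pleroma, Pleroma.Upload.Filter.Mogrify, args: ["strip"]

    config :pleroma, :media_proxy,
      enabled: true,
      base_url: "https://cache.pleroma.social",
      proxy_opts: [max_body_length: 25 * 1_048_576]
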
diff --git a/config/config.md b/config/config.md
index c843bca5d..47e838dd6 100644
--- a/config/config.md
+++ b/config/config.md
@@ -5,11 +5,19 @@ If you run Pleroma with ``MIX_ENV=prod`` the file is ``prod.secret.exs``, otherw
## Pleroma.Upload
* `uploader`: Select which `Pleroma.Uploaders` to use
-* `strip_exif`: boolean, uses ImageMagick(!) to strip exif.
+* `filters`: List of `Pleroma.Upload.Filter` to use.
+* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host.
+* `proxy_remote`: If you're using a remote uploader, Pleroma will proxy media requests instead of redirecting to it.
+* `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.
+
+Note: `strip_exif` has been replaced by `Pleroma.Upload.Filter.Mogrify`.
## Pleroma.Uploaders.Local
* `uploads`: Which directory to store the user-uploads in, relative to pleroma’s working directory
-* `uploads_url`: The URL to access a user-uploaded file, ``{{base_url}}`` is replaced to the instance URL and ``{{file}}`` to the filename. Useful when you want to proxy the media files via another host.
+
+## Pleroma.Upload.Filter.Mogrify
+
+* `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", {"implode", "1"}]`.
## :uri_schemes
* `valid_schemes`: List of URI schemes that are considered valid for a URL
@@ -68,7 +76,8 @@ This section is used to configure Pleroma-FE, unless ``:managed_config`` in ``:i
## :media_proxy
* `enabled`: Enables proxying of remote media to the instance’s proxy
-* `redirect_on_failure`: Use the original URL when Media Proxy fails to get it
+* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host or CDN front.
+* `proxy_opts`: All options defined in `Pleroma.ReverseProxy` documentation, defaults to `[max_body_length: (25*1_048_576)]`.
## :gopher
* `enabled`: Enables the gopher interface
diff --git a/config/dev.exs b/config/dev.exs
index 7b06ad67e..166be721a 100644
--- a/config/dev.exs
+++ b/config/dev.exs
@@ -49,11 +49,10 @@
hostname: "localhost",
pool_size: 10
-try do
+if File.exists?("./config/dev.secret.exs") do
import_config "dev.secret.exs"
-rescue
- _ ->
- IO.puts(
- "!!! RUNNING IN LOCALHOST DEV MODE! !!!\nFEDERATION WON'T WORK UNTIL YOU CONFIGURE A dev.secret.exs"
- )
+else
+ IO.puts(
+ "!!! RUNNING IN LOCALHOST DEV MODE! !!!\nFEDERATION WON'T WORK UNTIL YOU CONFIGURE A dev.secret.exs"
+ )
end
diff --git a/config/test.exs b/config/test.exs
index 1bd11dee4..6f6227c20 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -9,7 +9,7 @@
# Print only warnings and errors during test
config :logger, level: :warn
-config :pleroma, Pleroma.Upload, uploads: "test/uploads"
+config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads"
# Configure your database
config :pleroma, Pleroma.Repo,
@@ -25,7 +25,7 @@
config :pleroma, :websub, Pleroma.Web.WebsubMock
config :pleroma, :ostatus, Pleroma.Web.OStatusMock
-config :pleroma, :httpoison, HTTPoisonMock
+config :tesla, adapter: Tesla.Mock
try do
import_config "test.secret.exs"
diff --git a/installation/pleroma.nginx b/installation/pleroma.nginx
index f0e684f2c..e1184fe77 100644
--- a/installation/pleroma.nginx
+++ b/installation/pleroma.nginx
@@ -70,10 +70,12 @@ server {
client_max_body_size 16m;
}
- location /proxy {
+ location ~ ^/(media|proxy) {
proxy_cache pleroma_media_cache;
proxy_cache_lock on;
proxy_ignore_client_abort on;
+ proxy_buffering off;
+ chunked_transfer_encoding on;
proxy_pass http://localhost:4000;
}
}
diff --git a/lib/mix/tasks/make_moderator.ex b/lib/mix/tasks/make_moderator.ex
index 15586dc30..8dc0a04dd 100644
--- a/lib/mix/tasks/make_moderator.ex
+++ b/lib/mix/tasks/make_moderator.ex
@@ -8,7 +8,7 @@ defmodule Mix.Tasks.SetModerator do
"""
use Mix.Task
- import Mix.Ecto
+ import Ecto.Changeset
alias Pleroma.{Repo, User}
def run([nickname | rest]) do
@@ -21,14 +21,15 @@ def run([nickname | rest]) do
end
with %User{local: true} = user <- User.get_by_nickname(nickname) do
- info =
- user.info
- |> Map.put("is_moderator", !!moderator)
+ info_cng = User.Info.admin_api_update(user.info, %{is_moderator: !!moderator})
- cng = User.info_changeset(user, %{info: info})
- {:ok, user} = User.update_and_set_cache(cng)
+ user_cng =
+ Ecto.Changeset.change(user)
+ |> put_embed(:info, info_cng)
- IO.puts("Moderator status of #{nickname}: #{user.info["is_moderator"]}")
+ {:ok, user} = User.update_and_set_cache(user_cng)
+
+ IO.puts("Moderator status of #{nickname}: #{user.info.is_moderator}")
else
_ ->
IO.puts("No local user #{nickname}")
diff --git a/lib/mix/tasks/migrate_local_uploads.ex b/lib/mix/tasks/migrate_local_uploads.ex
new file mode 100644
index 000000000..8f9e210c0
--- /dev/null
+++ b/lib/mix/tasks/migrate_local_uploads.ex
@@ -0,0 +1,97 @@
+defmodule Mix.Tasks.MigrateLocalUploads do
+ use Mix.Task
+ import Mix.Ecto
+ alias Pleroma.{Upload, Uploaders.Local, Uploaders.S3}
+ require Logger
+
+ @log_every 50
+ @shortdoc "Migrate uploads from local to remote storage"
+
+ def run([target_uploader | args]) do
+ delete? = Enum.member?(args, "--delete")
+ Application.ensure_all_started(:pleroma)
+
+ local_path = Pleroma.Config.get!([Local, :uploads])
+ uploader = Module.concat(Pleroma.Uploaders, target_uploader)
+
+ unless Code.ensure_loaded?(uploader) do
+ raise("The uploader #{inspect(uploader)} is not an existing/loaded module.")
+ end
+
+ target_enabled? = Pleroma.Config.get([Upload, :uploader]) == uploader
+
+ unless target_enabled? do
+ Pleroma.Config.put([Upload, :uploader], uploader)
+ end
+
+ Logger.info("Migrating files from local #{local_path} to #{to_string(uploader)}")
+
+ if delete? do
+ Logger.warn(
+ "Attention: uploaded files will be deleted, hope you have backups! (--delete ; cancel with ^C)"
+ )
+
+ :timer.sleep(:timer.seconds(5))
+ end
+
+ uploads =
+ File.ls!(local_path)
+ |> Enum.map(fn id ->
+ root_path = Path.join(local_path, id)
+
+ cond do
+ File.dir?(root_path) ->
+ files = for file <- File.ls!(root_path), do: {id, file, Path.join([root_path, file])}
+
+ case List.first(files) do
+ {id, file, path} ->
+ {%Pleroma.Upload{id: id, name: file, path: id <> "/" <> file, tempfile: path},
+ root_path}
+
+ _ ->
+ nil
+ end
+
+ File.exists?(root_path) ->
+ file = Path.basename(id)
+ [hash, ext] = String.split(id, ".")
+ {%Pleroma.Upload{id: hash, name: file, path: file, tempfile: root_path}, root_path}
+
+ true ->
+ nil
+ end
+ end)
+ |> Enum.filter(& &1)
+
+ total_count = length(uploads)
+ Logger.info("Found #{total_count} uploads")
+
+ uploads
+ |> Task.async_stream(
+ fn {upload, root_path} ->
+ case Upload.store(upload, uploader: uploader, filters: [], size_limit: nil) do
+ {:ok, _} ->
+ if delete?, do: File.rm_rf!(root_path)
+ Logger.debug("uploaded: #{inspect(upload.path)} #{inspect(upload)}")
+ :ok
+
+ error ->
+ Logger.error("failed to upload #{inspect(upload.path)}: #{inspect(error)}")
+ end
+ end,
+ timeout: 150_000
+ )
+ |> Stream.chunk_every(@log_every)
+ |> Enum.reduce(0, fn done, count ->
+ count = count + length(done)
+ Logger.info("Uploaded #{count}/#{total_count} files")
+ count
+ end)
+
+ Logger.info("Done!")
+ end
+
+ def run(_) do
+ Logger.error("Usage: migrate_local_uploads S3|Swift [--delete]")
+ end
+end
diff --git a/lib/mix/tasks/sample_psql.eex b/lib/mix/tasks/sample_psql.eex
index b6f57948b..c89b34ef2 100644
--- a/lib/mix/tasks/sample_psql.eex
+++ b/lib/mix/tasks/sample_psql.eex
@@ -4,3 +4,4 @@ CREATE DATABASE pleroma_dev OWNER pleroma;
--Extensions made by ecto.migrate that need superuser access
CREATE EXTENSION IF NOT EXISTS citext;
CREATE EXTENSION IF NOT EXISTS pg_trgm;
+CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
diff --git a/lib/mix/tasks/set_admin.ex b/lib/mix/tasks/set_admin.ex
index d5ccf261b..ac26516f1 100644
--- a/lib/mix/tasks/set_admin.ex
+++ b/lib/mix/tasks/set_admin.ex
@@ -1,5 +1,6 @@
defmodule Mix.Tasks.SetAdmin do
use Mix.Task
+ import Ecto.Changeset
alias Pleroma.User
@doc """
@@ -9,21 +10,22 @@ defmodule Mix.Tasks.SetAdmin do
def run([nickname | rest]) do
Application.ensure_all_started(:pleroma)
- status =
+ admin =
case rest do
- [status] -> status == "true"
+ [admin] -> admin == "true"
_ -> true
end
with %User{local: true} = user <- User.get_by_nickname(nickname) do
- info =
- user.info
- |> Map.put("is_admin", !!status)
+ info_cng = User.Info.admin_api_update(user.info, %{is_admin: !!admin})
- cng = User.info_changeset(user, %{info: info})
- {:ok, user} = User.update_and_set_cache(cng)
+ user_cng =
+ Ecto.Changeset.change(user)
+ |> put_embed(:info, info_cng)
- IO.puts("Admin status of #{nickname}: #{user.info["is_admin"]}")
+ {:ok, user} = User.update_and_set_cache(user_cng)
+
+ IO.puts("Admin status of #{nickname}: #{user.info.is_admin}")
else
_ ->
IO.puts("No local user #{nickname}")
diff --git a/lib/mix/tasks/set_locked.ex b/lib/mix/tasks/set_locked.ex
index a154595ca..e93a63505 100644
--- a/lib/mix/tasks/set_locked.ex
+++ b/lib/mix/tasks/set_locked.ex
@@ -10,11 +10,11 @@ defmodule Mix.Tasks.SetLocked do
"""
use Mix.Task
- import Mix.Ecto
+ import Ecto.Changeset
alias Pleroma.{Repo, User}
def run([nickname | rest]) do
- ensure_started(Repo, [])
+ Application.ensure_all_started(:pleroma)
locked =
case rest do
@@ -23,14 +23,15 @@ def run([nickname | rest]) do
end
with %User{local: true} = user <- User.get_by_nickname(nickname) do
- info =
- user.info
- |> Map.put("locked", !!locked)
+ info_cng = User.Info.profile_update(user.info, %{locked: !!locked})
- cng = User.info_changeset(user, %{info: info})
- user = Repo.update!(cng)
+ user_cng =
+ Ecto.Changeset.change(user)
+ |> put_embed(:info, info_cng)
- IO.puts("locked status of #{nickname}: #{user.info["locked"]}")
+ {:ok, user} = User.update_and_set_cache(user_cng)
+
+ IO.puts("Locked status of #{nickname}: #{user.info.locked}")
else
_ ->
IO.puts("No local user #{nickname}")
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index d0f23527f..0b0ec0197 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -1,5 +1,6 @@
defmodule Pleroma.Application do
use Application
+ import Supervisor.Spec
@name "Pleroma"
@version Mix.Project.config()[:version]
@@ -7,11 +8,15 @@ def name, do: @name
def version, do: @version
def named_version(), do: @name <> " " <> @version
+ def user_agent() do
+ info = "#{Pleroma.Web.base_url()} <#{Pleroma.Config.get([:instance, :email], "")}>"
+ named_version() <> "; " <> info
+ end
+
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
@env Mix.env()
def start(_type, _args) do
- import Supervisor.Spec
import Cachex.Spec
# Define workers and child supervisors to be supervised
@@ -20,10 +25,6 @@ def start(_type, _args) do
# Start the Ecto repository
supervisor(Pleroma.Repo, []),
worker(Pleroma.Emoji, []),
- # Start the endpoint when the application starts
- supervisor(Pleroma.Web.Endpoint, []),
- # Start your own worker by calling: Pleroma.Worker.start_link(arg1, arg2, arg3)
- # worker(Pleroma.Worker, [arg1, arg2, arg3]),
worker(
Cachex,
[
@@ -63,21 +64,18 @@ def start(_type, _args) do
],
id: :cachex_idem
),
- worker(Pleroma.Web.Federator, []),
worker(Pleroma.Web.Federator.RetryQueue, []),
- worker(Pleroma.Gopher.Server, []),
+ worker(Pleroma.Web.Federator, []),
worker(Pleroma.Stats, []),
worker(Pleroma.Web.Push, [])
] ++
- if @env == :test,
- do: [],
- else:
- [worker(Pleroma.Web.Streamer, [])] ++
- if(
- !chat_enabled(),
- do: [],
- else: [worker(Pleroma.Web.ChatChannel.ChatChannelState, [])]
- )
+ streamer_child() ++
+ chat_child() ++
+ [
+ # Start the endpoint when the application starts
+ supervisor(Pleroma.Web.Endpoint, []),
+ worker(Pleroma.Gopher.Server, [])
+ ]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
@@ -85,7 +83,20 @@ def start(_type, _args) do
Supervisor.start_link(children, opts)
end
- defp chat_enabled do
- Application.get_env(:pleroma, :chat, []) |> Keyword.get(:enabled)
+ if Mix.env() == :test do
+ defp streamer_child(), do: []
+ defp chat_child(), do: []
+ else
+ defp streamer_child() do
+ [worker(Pleroma.Web.Streamer, [])]
+ end
+
+ defp chat_child() do
+ if Pleroma.Config.get([:chat, :enabled]) do
+ [worker(Pleroma.Web.ChatChannel.ChatChannelState, [])]
+ else
+ []
+ end
+ end
end
end
diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex
index 15f771b6e..3876ddf1f 100644
--- a/lib/pleroma/config.ex
+++ b/lib/pleroma/config.ex
@@ -39,4 +39,18 @@ def put([parent_key | keys], value) do
def put(key, value) do
Application.put_env(:pleroma, key, value)
end
+
+ def delete([key]), do: delete(key)
+
+ def delete([parent_key | keys]) do
+ {_, parent} =
+ Application.get_env(:pleroma, parent_key)
+ |> get_and_update_in(keys, fn _ -> :pop end)
+
+ Application.put_env(:pleroma, parent_key, parent)
+ end
+
+ def delete(key) do
+ Application.delete_env(:pleroma, key)
+ end
end
diff --git a/lib/pleroma/formatter.ex b/lib/pleroma/formatter.ex
index 26bb17377..5b03e9aeb 100644
--- a/lib/pleroma/formatter.ex
+++ b/lib/pleroma/formatter.ex
@@ -114,10 +114,10 @@ def add_user_links({subs, text}, mentions) do
subs =
subs ++
- Enum.map(mentions, fn {match, %User{ap_id: ap_id, info: info}, uuid} ->
+ Enum.map(mentions, fn {match, %User{id: id, ap_id: ap_id, info: info}, uuid} ->
ap_id =
- if is_binary(info["source_data"]["url"]) do
- info["source_data"]["url"]
+ if is_binary(info.source_data["url"]) do
+ info.source_data["url"]
else
ap_id
end
@@ -125,7 +125,7 @@ def add_user_links({subs, text}, mentions) do
short_match = String.split(match, "@") |> tl() |> hd()
{uuid,
-             "<span><a href='#{ap_id}'>@<span>#{short_match}</span></a></span>"}
+             "<span class='h-card'><a data-user='#{id}' class='u-url mention' href='#{ap_id}'>@<span>#{short_match}</span></a></span>"}
end)
{subs, uuid_text}
@@ -147,7 +147,11 @@ def add_hashtag_links({subs, text}, tags) do
subs =
subs ++
Enum.map(tags, fn {tag_text, tag, uuid} ->
-          url = "<a href='#{Pleroma.Web.base_url()}/tag/#{tag}' rel='tag'>#{tag_text}</a>"
+          url =
+            "<a class='hashtag' data-tag='#{tag}' href='#{Pleroma.Web.base_url()}/tag/#{tag}' rel='tag'>#{
+              tag_text
+            }</a>"
+
{uuid, url}
end)
diff --git a/lib/pleroma/gopher/server.ex b/lib/pleroma/gopher/server.ex
index e6361a82c..3b0569a99 100644
--- a/lib/pleroma/gopher/server.ex
+++ b/lib/pleroma/gopher/server.ex
@@ -6,27 +6,28 @@ def start_link() do
config = Pleroma.Config.get(:gopher, [])
ip = Keyword.get(config, :ip, {0, 0, 0, 0})
port = Keyword.get(config, :port, 1234)
- GenServer.start_link(__MODULE__, [ip, port], [])
+
+ if Keyword.get(config, :enabled, false) do
+ GenServer.start_link(__MODULE__, [ip, port], [])
+ else
+ Logger.info("Gopher server disabled")
+ :ignore
+ end
end
def init([ip, port]) do
- if Pleroma.Config.get([:gopher, :enabled], false) do
- Logger.info("Starting gopher server on #{port}")
+ Logger.info("Starting gopher server on #{port}")
- :ranch.start_listener(
- :gopher,
- 100,
- :ranch_tcp,
- [port: port],
- __MODULE__.ProtocolHandler,
- []
- )
+ :ranch.start_listener(
+ :gopher,
+ 100,
+ :ranch_tcp,
+ [port: port],
+ __MODULE__.ProtocolHandler,
+ []
+ )
- {:ok, %{ip: ip, port: port}}
- else
- Logger.info("Gopher server disabled")
- {:ok, nil}
- end
+ {:ok, %{ip: ip, port: port}}
end
end
diff --git a/lib/pleroma/html.ex b/lib/pleroma/html.ex
index 1b920d7fd..5daaa5e69 100644
--- a/lib/pleroma/html.ex
+++ b/lib/pleroma/html.ex
@@ -45,7 +45,7 @@ defmodule Pleroma.HTML.Scrubber.TwitterText do
Meta.strip_comments()
# links
- Meta.allow_tag_with_uri_attributes("a", ["href"], @valid_schemes)
+ Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes)
Meta.allow_tag_with_these_attributes("a", ["name", "title"])
# paragraphs and linebreaks
@@ -86,7 +86,7 @@ defmodule Pleroma.HTML.Scrubber.Default do
Meta.remove_cdata_sections_before_scrub()
Meta.strip_comments()
- Meta.allow_tag_with_uri_attributes("a", ["href"], @valid_schemes)
+ Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes)
Meta.allow_tag_with_these_attributes("a", ["name", "title"])
Meta.allow_tag_with_these_attributes("abbr", ["title"])
diff --git a/lib/pleroma/http/connection.ex b/lib/pleroma/http/connection.ex
new file mode 100644
index 000000000..5e8f2aabd
--- /dev/null
+++ b/lib/pleroma/http/connection.ex
@@ -0,0 +1,27 @@
+defmodule Pleroma.HTTP.Connection do
+ @moduledoc """
+  Connection for HTTP requests.
+ """
+
+ @hackney_options [pool: :default]
+ @adapter Application.get_env(:tesla, :adapter)
+
+ @doc """
+ Configure a client connection
+
+ # Returns
+
+ Tesla.Env.client
+ """
+ @spec new(Keyword.t()) :: Tesla.Env.client()
+ def new(opts \\ []) do
+ Tesla.client([], {@adapter, hackney_options(opts)})
+ end
+
+ # fetch Hackney options
+ #
+ defp hackney_options(opts \\ []) do
+ options = Keyword.get(opts, :adapter, [])
+ @hackney_options ++ options
+ end
+end
diff --git a/lib/pleroma/http/http.ex b/lib/pleroma/http/http.ex
index e64266ae7..3c0256575 100644
--- a/lib/pleroma/http/http.ex
+++ b/lib/pleroma/http/http.ex
@@ -1,14 +1,42 @@
defmodule Pleroma.HTTP do
- require HTTPoison
+  @moduledoc """
+  Wrapper for HTTP requests.
+  """
+
+ alias Pleroma.HTTP.Connection
+ alias Pleroma.HTTP.RequestBuilder, as: Builder
+
+ @doc """
+  Builds and performs an HTTP request.
+
+ # Arguments:
+ `method` - :get, :post, :put, :delete
+ `url`
+ `body`
+  `headers` - a list of header tuples, e.g. `[{"content-type", "text/plain"}]`
+ `options` - custom, per-request middleware or adapter options
+
+ # Returns:
+ `{:ok, %Tesla.Env{}}` or `{:error, error}`
+
+ """
def request(method, url, body \\ "", headers \\ [], options \\ []) do
options =
process_request_options(options)
|> process_sni_options(url)
- HTTPoison.request(method, url, body, headers, options)
+ %{}
+ |> Builder.method(method)
+ |> Builder.headers(headers)
+ |> Builder.opts(options)
+ |> Builder.url(url)
+ |> Builder.add_param(:body, :body, body)
+ |> Enum.into([])
+ |> (&Tesla.request(Connection.new(), &1)).()
end
+ defp process_sni_options(options, nil), do: options
+
defp process_sni_options(options, url) do
uri = URI.parse(url)
host = uri.host |> to_charlist()
@@ -22,7 +50,7 @@ defp process_sni_options(options, url) do
def process_request_options(options) do
config = Application.get_env(:pleroma, :http, [])
proxy = Keyword.get(config, :proxy_url, nil)
- options = options ++ [hackney: [pool: :default]]
+ options = options ++ [adapter: [pool: :default]]
case proxy do
nil -> options
@@ -30,8 +58,19 @@ def process_request_options(options) do
end
end
- def get(url, headers \\ [], options \\ []), do: request(:get, url, "", headers, options)
+ @doc """
+ Performs GET request.
+ See `Pleroma.HTTP.request/5`
+ """
+ def get(url, headers \\ [], options \\ []),
+ do: request(:get, url, "", headers, options)
+
+ @doc """
+ Performs POST request.
+
+ See `Pleroma.HTTP.request/5`
+ """
def post(url, body, headers \\ [], options \\ []),
do: request(:post, url, body, headers, options)
end
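A minimal usage sketch of the wrapper defined above (URL and headers are illustrative); as the moduledoc states, it returns `{:ok, %Tesla.Env{}}` or `{:error, error}`:

    {:ok, %Tesla.Env{status: 200, body: body}} =
      Pleroma.HTTP.get("https://example.com/.well-known/nodeinfo", [{"accept", "application/json"}])
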
diff --git a/lib/pleroma/http/request_builder.ex b/lib/pleroma/http/request_builder.ex
new file mode 100644
index 000000000..5aee2b8ae
--- /dev/null
+++ b/lib/pleroma/http/request_builder.ex
@@ -0,0 +1,126 @@
+defmodule Pleroma.HTTP.RequestBuilder do
+ @moduledoc """
+ Helper functions for building Tesla requests
+ """
+
+ @doc """
+ Specify the request method when building a request
+
+ ## Parameters
+
+ - request (Map) - Collected request options
+ - m (atom) - Request method
+
+ ## Returns
+
+ Map
+ """
+ @spec method(map(), atom) :: map()
+ def method(request, m) do
+ Map.put_new(request, :method, m)
+ end
+
+ @doc """
+  Specify the request URL when building a request
+
+ ## Parameters
+
+ - request (Map) - Collected request options
+ - u (String) - Request URL
+
+ ## Returns
+
+ Map
+ """
+ @spec url(map(), String.t()) :: map()
+ def url(request, u) do
+ Map.put_new(request, :url, u)
+ end
+
+ @doc """
+ Add headers to the request
+ """
+ @spec headers(map(), list(tuple)) :: map()
+ def headers(request, h) do
+ Map.put_new(request, :headers, h)
+ end
+
+ @doc """
+ Add custom, per-request middleware or adapter options to the request
+ """
+ @spec opts(map(), Keyword.t()) :: map()
+ def opts(request, options) do
+ Map.put_new(request, :opts, options)
+ end
+
+ @doc """
+ Add optional parameters to the request
+
+ ## Parameters
+
+ - request (Map) - Collected request options
+ - definitions (Map) - Map of parameter name to parameter location.
+ - options (KeywordList) - The provided optional parameters
+
+ ## Returns
+
+ Map
+ """
+ @spec add_optional_params(map(), %{optional(atom) => atom}, keyword()) :: map()
+ def add_optional_params(request, _, []), do: request
+
+ def add_optional_params(request, definitions, [{key, value} | tail]) do
+ case definitions do
+ %{^key => location} ->
+ request
+ |> add_param(location, key, value)
+ |> add_optional_params(definitions, tail)
+
+ _ ->
+ add_optional_params(request, definitions, tail)
+ end
+ end
+
+ @doc """
+  Add a parameter to the request
+
+ ## Parameters
+
+ - request (Map) - Collected request options
+ - location (atom) - Where to put the parameter
+ - key (atom) - The name of the parameter
+ - value (any) - The value of the parameter
+
+ ## Returns
+
+ Map
+ """
+ @spec add_param(map(), atom, atom, any()) :: map()
+ def add_param(request, :body, :body, value), do: Map.put(request, :body, value)
+
+ def add_param(request, :body, key, value) do
+ request
+ |> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
+ |> Map.update!(
+ :body,
+ &Tesla.Multipart.add_field(&1, key, Poison.encode!(value),
+ headers: [{:"Content-Type", "application/json"}]
+ )
+ )
+ end
+
+ def add_param(request, :file, name, path) do
+ request
+ |> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
+ |> Map.update!(:body, &Tesla.Multipart.add_file(&1, path, name: name))
+ end
+
+ def add_param(request, :form, name, value) do
+ request
+ |> Map.update(:body, %{name => value}, &Map.put(&1, name, value))
+ end
+
+ def add_param(request, location, key, value) do
+ Map.update(request, location, [{key, value}], &(&1 ++ [{key, value}]))
+ end
+end
diff --git a/lib/pleroma/mime.ex b/lib/pleroma/mime.ex
new file mode 100644
index 000000000..db8b7c742
--- /dev/null
+++ b/lib/pleroma/mime.ex
@@ -0,0 +1,108 @@
+defmodule Pleroma.MIME do
+ @moduledoc """
+ Returns the mime-type of a binary and optionally a normalized file-name.
+ """
+ @default "application/octet-stream"
+ @read_bytes 31
+
+  @spec file_mime_type(String.t(), String.t()) ::
+ {:ok, content_type :: String.t(), filename :: String.t()} | {:error, any()} | :error
+ def file_mime_type(path, filename) do
+ with {:ok, content_type} <- file_mime_type(path),
+ filename <- fix_extension(filename, content_type) do
+ {:ok, content_type, filename}
+ end
+ end
+
+ @spec file_mime_type(String.t()) :: {:ok, String.t()} | {:error, any()} | :error
+ def file_mime_type(filename) do
+ File.open(filename, [:read], fn f ->
+ check_mime_type(IO.binread(f, @read_bytes))
+ end)
+ end
+
+ def bin_mime_type(binary, filename) do
+ with {:ok, content_type} <- bin_mime_type(binary),
+ filename <- fix_extension(filename, content_type) do
+ {:ok, content_type, filename}
+ end
+ end
+
+ @spec bin_mime_type(binary()) :: {:ok, String.t()} | :error
+  def bin_mime_type(<<head::binary-size(@read_bytes), _::binary>>) do
+ {:ok, check_mime_type(head)}
+ end
+
+ def mime_type(<<_::binary>>), do: {:ok, @default}
+
+ def bin_mime_type(_), do: :error
+
+ defp fix_extension(filename, content_type) do
+ parts = String.split(filename, ".")
+
+ new_filename =
+ if length(parts) > 1 do
+ Enum.drop(parts, -1) |> Enum.join(".")
+ else
+ Enum.join(parts)
+ end
+
+ cond do
+ content_type == "application/octet-stream" ->
+ filename
+
+ ext = List.first(MIME.extensions(content_type)) ->
+ new_filename <> "." <> ext
+
+ true ->
+ Enum.join([new_filename, String.split(content_type, "/") |> List.last()], ".")
+ end
+ end
+
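+  # Signature detection: each clause below matches the file's leading magic bytes.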
+ defp check_mime_type(<<0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, _::binary>>) do
+ "image/png"
+ end
+
+ defp check_mime_type(<<0x47, 0x49, 0x46, 0x38, _, 0x61, _::binary>>) do
+ "image/gif"
+ end
+
+ defp check_mime_type(<<0xFF, 0xD8, 0xFF, _::binary>>) do
+ "image/jpeg"
+ end
+
+ defp check_mime_type(<<0x1A, 0x45, 0xDF, 0xA3, _::binary>>) do
+ "video/webm"
+ end
+
+ defp check_mime_type(<<0x00, 0x00, 0x00, _, 0x66, 0x74, 0x79, 0x70, _::binary>>) do
+ "video/mp4"
+ end
+
+ defp check_mime_type(<<0x49, 0x44, 0x33, _::binary>>) do
+ "audio/mpeg"
+ end
+
+ defp check_mime_type(<<255, 251, _, 68, 0, 0, 0, 0, _::binary>>) do
+ "audio/mpeg"
+ end
+
+ defp check_mime_type(
+ <<0x4F, 0x67, 0x67, 0x53, 0x00, 0x02, 0x00, 0x00, _::size(160), 0x80, 0x74, 0x68, 0x65,
+ 0x6F, 0x72, 0x61, _::binary>>
+ ) do
+ "video/ogg"
+ end
+
+ defp check_mime_type(<<0x4F, 0x67, 0x67, 0x53, 0x00, 0x02, 0x00, 0x00, _::binary>>) do
+ "audio/ogg"
+ end
+
+ defp check_mime_type(<<0x52, 0x49, 0x46, 0x46, _::binary>>) do
+ "audio/wav"
+ end
+
+ defp check_mime_type(_) do
+ @default
+ end
+end
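A quick usage sketch of the module above (paths and names are illustrative; it assumes the file actually contains PNG data):

    {:ok, "image/png", "avatar.png"} = Pleroma.MIME.file_mime_type("/tmp/plug-upload", "avatar.blob")
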
diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex
index 03a75dfbd..31c8dd5bd 100644
--- a/lib/pleroma/object.ex
+++ b/lib/pleroma/object.ex
@@ -1,6 +1,6 @@
defmodule Pleroma.Object do
use Ecto.Schema
- alias Pleroma.{Repo, Object, Activity}
+ alias Pleroma.{Repo, Object, User, Activity}
import Ecto.{Query, Changeset}
schema "objects" do
@@ -31,6 +31,13 @@ def normalize(obj) when is_map(obj), do: Object.get_by_ap_id(obj["id"])
def normalize(ap_id) when is_binary(ap_id), do: Object.get_by_ap_id(ap_id)
def normalize(_), do: nil
+ # Owned objects can only be mutated by their owner
+ def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}),
+ do: actor == ap_id
+
+ # Legacy objects can be mutated by anybody
+ def authorize_mutation(%Object{}, %User{}), do: true
+
if Mix.env() == :test do
def get_cached_by_ap_id(ap_id) do
get_by_ap_id(ap_id)
diff --git a/lib/pleroma/plugs/oauth_plug.ex b/lib/pleroma/plugs/oauth_plug.ex
index 651485e09..8b99a74d1 100644
--- a/lib/pleroma/plugs/oauth_plug.ex
+++ b/lib/pleroma/plugs/oauth_plug.ex
@@ -1,26 +1,22 @@
defmodule Pleroma.Plugs.OAuthPlug do
import Plug.Conn
- alias Pleroma.User
- alias Pleroma.Repo
- alias Pleroma.Web.OAuth.Token
+ import Ecto.Query
- def init(options) do
- options
- end
+ alias Pleroma.{
+ User,
+ Repo,
+ Web.OAuth.Token
+ }
+
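+  # Case-insensitive "Bearer <token>" pattern used to pull the token out of the Authorization header.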
+ @realm_reg Regex.compile!("Bearer\:?\s+(.*)$", "i")
+
+ def init(options), do: options
def call(%{assigns: %{user: %User{}}} = conn, _), do: conn
def call(conn, _) do
- token =
- case get_req_header(conn, "authorization") do
- ["Bearer " <> header] -> header
- _ -> get_session(conn, :oauth_token)
- end
-
- with token when not is_nil(token) <- token,
- %Token{user_id: user_id} = token <- Repo.get_by(Token, token: token),
- %User{} = user <- Repo.get(User, user_id),
- false <- !!user.info["deactivated"] do
+ with {:ok, token} <- fetch_token(conn),
+ {:ok, user} <- fetch_user(token) do
conn
|> assign(:token, token)
|> assign(:user, user)
@@ -28,4 +24,47 @@ def call(conn, _) do
_ -> conn
end
end
+
+ # Gets user by token
+ #
+ @spec fetch_user(String.t()) :: {:ok, User.t()} | nil
+ defp fetch_user(token) do
+ query = from(q in Token, where: q.token == ^token, preload: [:user])
+
+ with %Token{user: %{info: %{deactivated: false} = _} = user} <- Repo.one(query) do
+ {:ok, user}
+ end
+ end
+
+ # Gets token from session by :oauth_token key
+ #
+ @spec fetch_token_from_session(Plug.Conn.t()) :: :no_token_found | {:ok, String.t()}
+ defp fetch_token_from_session(conn) do
+ case get_session(conn, :oauth_token) do
+ nil -> :no_token_found
+ token -> {:ok, token}
+ end
+ end
+
+ # Gets token from headers
+ #
+ @spec fetch_token(Plug.Conn.t()) :: :no_token_found | {:ok, String.t()}
+ defp fetch_token(%Plug.Conn{} = conn) do
+ headers = get_req_header(conn, "authorization")
+
+ with :no_token_found <- fetch_token(headers),
+ do: fetch_token_from_session(conn)
+ end
+
+ @spec fetch_token(Keyword.t()) :: :no_token_found | {:ok, String.t()}
+ defp fetch_token([]), do: :no_token_found
+
+ defp fetch_token([token | tail]) do
+ trimmed_token = String.trim(token)
+
+ case Regex.run(@realm_reg, trimmed_token) do
+ [_, match] -> {:ok, String.trim(match)}
+ _ -> fetch_token(tail)
+ end
+ end
end
diff --git a/lib/pleroma/plugs/uploaded_media.ex b/lib/pleroma/plugs/uploaded_media.ex
new file mode 100644
index 000000000..994cc8bf6
--- /dev/null
+++ b/lib/pleroma/plugs/uploaded_media.ex
@@ -0,0 +1,78 @@
+defmodule Pleroma.Plugs.UploadedMedia do
+  @moduledoc """
+  Serves uploaded media under the `/media/` path: from the local static
+  directory, by redirecting to the remote URL, or by reverse-proxying it when
+  `proxy_remote` is enabled.
+  """
+
+ import Plug.Conn
+ require Logger
+
+ @behaviour Plug
+ # no slashes
+ @path "media"
+ @cache_control %{
+ default: "public, max-age=1209600",
+ error: "public, must-revalidate, max-age=160"
+ }
+
+ def init(_opts) do
+ static_plug_opts =
+ []
+ |> Keyword.put(:from, "__unconfigured_media_plug")
+ |> Keyword.put(:at, "/__unconfigured_media_plug")
+ |> Plug.Static.init()
+
+ %{static_plug_opts: static_plug_opts}
+ end
+
+ def call(conn = %{request_path: <<"/", @path, "/", file::binary>>}, opts) do
+ config = Pleroma.Config.get([Pleroma.Upload])
+
+ with uploader <- Keyword.fetch!(config, :uploader),
+ proxy_remote = Keyword.get(config, :proxy_remote, false),
+ {:ok, get_method} <- uploader.get_file(file) do
+ get_media(conn, get_method, proxy_remote, opts)
+ else
+ _ ->
+ conn
+ |> send_resp(500, "Failed")
+ |> halt()
+ end
+ end
+
+ def call(conn, _opts), do: conn
+
+ defp get_media(conn, {:static_dir, directory}, _, opts) do
+ static_opts =
+ Map.get(opts, :static_plug_opts)
+ |> Map.put(:at, [@path])
+ |> Map.put(:from, directory)
+
+ conn = Plug.Static.call(conn, static_opts)
+
+ if conn.halted do
+ conn
+ else
+ conn
+ |> send_resp(404, "Not found")
+ |> halt()
+ end
+ end
+
+ defp get_media(conn, {:url, url}, true, _) do
+ conn
+ |> Pleroma.ReverseProxy.call(url, Pleroma.Config.get([Pleroma.Upload, :proxy_opts], []))
+ end
+
+ defp get_media(conn, {:url, url}, _, _) do
+ conn
+ |> Phoenix.Controller.redirect(external: url)
+ |> halt()
+ end
+
+ defp get_media(conn, unknown, _, _) do
+    Logger.error("#{__MODULE__}: Unknown get strategy: #{inspect(unknown)}")
+
+ conn
+ |> send_resp(500, "Internal Error")
+ |> halt()
+ end
+end
diff --git a/lib/pleroma/plugs/user_enabled_plug.ex b/lib/pleroma/plugs/user_enabled_plug.ex
index 9c3285896..01482f47d 100644
--- a/lib/pleroma/plugs/user_enabled_plug.ex
+++ b/lib/pleroma/plugs/user_enabled_plug.ex
@@ -6,7 +6,7 @@ def init(options) do
options
end
- def call(%{assigns: %{user: %User{info: %{"deactivated" => true}}}} = conn, _) do
+ def call(%{assigns: %{user: %User{info: %{deactivated: true}}}} = conn, _) do
conn
|> assign(:user, nil)
end
diff --git a/lib/pleroma/plugs/user_is_admin_plug.ex b/lib/pleroma/plugs/user_is_admin_plug.ex
index 5312f1499..cf22ce5d0 100644
--- a/lib/pleroma/plugs/user_is_admin_plug.ex
+++ b/lib/pleroma/plugs/user_is_admin_plug.ex
@@ -6,7 +6,7 @@ def init(options) do
options
end
- def call(%{assigns: %{user: %User{info: %{"is_admin" => true}}}} = conn, _) do
+ def call(%{assigns: %{user: %User{info: %{is_admin: true}}}} = conn, _) do
conn
end
diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy.ex
new file mode 100644
index 000000000..ad9dc82fe
--- /dev/null
+++ b/lib/pleroma/reverse_proxy.ex
@@ -0,0 +1,343 @@
+defmodule Pleroma.ReverseProxy do
+ @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since if-unmodified-since if-none-match if-range range)
+ @resp_cache_headers ~w(etag date last-modified cache-control)
+ @keep_resp_headers @resp_cache_headers ++
+ ~w(content-type content-disposition content-encoding content-range accept-ranges vary)
+ @default_cache_control_header "public, max-age=1209600"
+ @valid_resp_codes [200, 206, 304]
+ @max_read_duration :timer.seconds(30)
+ @max_body_length :infinity
+ @methods ~w(GET HEAD)
+
+ @moduledoc """
+ A reverse proxy.
+
+ Pleroma.ReverseProxy.call(conn, url, options)
+
+ It is not meant to be added into a plug pipeline, but to be called from another plug or controller.
+
+ Supports `#{inspect(@methods)}` HTTP methods, and only allows `#{inspect(@valid_resp_codes)}` status codes.
+
+ Responses are chunked to the client while downloading from the upstream.
+
+ Some request / responses headers are preserved:
+
+ * request: `#{inspect(@keep_req_headers)}`
+ * response: `#{inspect(@keep_resp_headers)}`
+
+ If no caching headers (`#{inspect(@resp_cache_headers)}`) are returned by upstream, `cache-control` will be
+ set to `#{inspect(@default_cache_control_header)}`.
+
+ Options:
+
+  * `redirect_on_failure` (default `false`). Redirects the client to the real remote URL if there are any HTTP
+  errors. Any error during body processing will not be redirected as the response is chunked. This may expose
+  the remote URL and clients' IPs, ….
+
+ * `max_body_length` (default `#{inspect(@max_body_length)}`): limits the content length to be approximately the
+ specified length. It is validated with the `content-length` header and also verified when proxying.
+
+ * `max_read_duration` (default `#{inspect(@max_read_duration)}` ms): the total time the connection is allowed to
+ read from the remote upstream.
+
+ * `inline_content_types`:
+ * `true` will not alter `content-disposition` (up to the upstream),
+ * `false` will add `content-disposition: attachment` to any request,
+ * a list of whitelisted content types
+
+ * `keep_user_agent` will forward the client's user-agent to the upstream. This may be useful if the upstream is
+ doing content transformation (encoding, …) depending on the request.
+
+ * `req_headers`, `resp_headers` additional headers.
+
+ * `http`: options for [hackney](https://github.com/benoitc/hackney).
+
+ """
+ @hackney Application.get_env(:pleroma, :hackney, :hackney)
+ @httpoison Application.get_env(:pleroma, :httpoison, HTTPoison)
+
+ @default_hackney_options [{:follow_redirect, true}]
+
+ @inline_content_types [
+ "image/gif",
+ "image/jpeg",
+ "image/jpg",
+ "image/png",
+ "image/svg+xml",
+ "audio/mpeg",
+ "audio/mp3",
+ "video/webm",
+ "video/mp4",
+ "video/quicktime"
+ ]
+
+ require Logger
+ import Plug.Conn
+
+ @type option() ::
+ {:keep_user_agent, boolean}
+ | {:max_read_duration, :timer.time() | :infinity}
+ | {:max_body_length, non_neg_integer() | :infinity}
+ | {:http, []}
+ | {:req_headers, [{String.t(), String.t()}]}
+ | {:resp_headers, [{String.t(), String.t()}]}
+ | {:inline_content_types, boolean() | [String.t()]}
+ | {:redirect_on_failure, boolean()}
+
+ @spec call(Plug.Conn.t(), url :: String.t(), [option()]) :: Plug.Conn.t()
+ def call(conn = %{method: method}, url, opts \\ []) when method in @methods do
+ hackney_opts =
+ @default_hackney_options
+ |> Keyword.merge(Keyword.get(opts, :http, []))
+ |> @httpoison.process_request_options()
+
+ req_headers = build_req_headers(conn.req_headers, opts)
+
+ opts =
+ if filename = Pleroma.Web.MediaProxy.filename(url) do
+ Keyword.put_new(opts, :attachment_name, filename)
+ else
+ opts
+ end
+
+ with {:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts),
+ :ok <- header_length_constraint(headers, Keyword.get(opts, :max_body_length)) do
+ response(conn, client, url, code, headers, opts)
+ else
+ {:ok, code, headers} ->
+ head_response(conn, url, code, headers, opts)
+ |> halt()
+
+ {:error, {:invalid_http_response, code}} ->
+ Logger.error("#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{code}")
+
+ conn
+ |> error_or_redirect(
+ url,
+ code,
+ "Request failed: " <> Plug.Conn.Status.reason_phrase(code),
+ opts
+ )
+ |> halt()
+
+ {:error, error} ->
+ Logger.error("#{__MODULE__}: request to #{inspect(url)} failed: #{inspect(error)}")
+
+ conn
+ |> error_or_redirect(url, 500, "Request failed", opts)
+ |> halt()
+ end
+ end
+
+ def call(conn, _, _) do
+ conn
+ |> send_resp(400, Plug.Conn.Status.reason_phrase(400))
+ |> halt()
+ end
+
+ defp request(method, url, headers, hackney_opts) do
+ Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
+ method = method |> String.downcase() |> String.to_existing_atom()
+
+ case @hackney.request(method, url, headers, "", hackney_opts) do
+ {:ok, code, headers, client} when code in @valid_resp_codes ->
+ {:ok, code, downcase_headers(headers), client}
+
+ {:ok, code, headers} when code in @valid_resp_codes ->
+ {:ok, code, downcase_headers(headers)}
+
+ {:ok, code, _, _} ->
+ {:error, {:invalid_http_response, code}}
+
+ {:error, error} ->
+ {:error, error}
+ end
+ end
+
+ defp response(conn, client, url, status, headers, opts) do
+ result =
+ conn
+ |> put_resp_headers(build_resp_headers(headers, opts))
+ |> send_chunked(status)
+ |> chunk_reply(client, opts)
+
+ case result do
+ {:ok, conn} ->
+ halt(conn)
+
+ {:error, :closed, conn} ->
+ :hackney.close(client)
+ halt(conn)
+
+ {:error, error, conn} ->
+ Logger.warn(
+ "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
+ )
+
+ :hackney.close(client)
+ halt(conn)
+ end
+ end
+
+ defp chunk_reply(conn, client, opts) do
+ chunk_reply(conn, client, opts, 0, 0)
+ end
+
+ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
+ with {:ok, duration} <-
+ check_read_duration(
+ duration,
+ Keyword.get(opts, :max_read_duration, @max_read_duration)
+ ),
+ {:ok, data} <- @hackney.stream_body(client),
+ {:ok, duration} <- increase_read_duration(duration),
+ sent_so_far = sent_so_far + byte_size(data),
+ :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)),
+ {:ok, conn} <- chunk(conn, data) do
+ chunk_reply(conn, client, opts, sent_so_far, duration)
+ else
+ :done -> {:ok, conn}
+ {:error, error} -> {:error, error, conn}
+ end
+ end
+
+ defp head_response(conn, _url, code, headers, opts) do
+ conn
+ |> put_resp_headers(build_resp_headers(headers, opts))
+ |> send_resp(code, "")
+ end
+
+ defp error_or_redirect(conn, url, code, body, opts) do
+ if Keyword.get(opts, :redirect_on_failure, false) do
+ conn
+ |> Phoenix.Controller.redirect(external: url)
+ |> halt()
+ else
+ conn
+ |> send_resp(code, body)
+ |> halt
+ end
+ end
+
+ defp downcase_headers(headers) do
+ Enum.map(headers, fn {k, v} ->
+ {String.downcase(k), v}
+ end)
+ end
+
+ defp get_content_type(headers) do
+ {_, content_type} =
+ List.keyfind(headers, "content-type", 0, {"content-type", "application/octet-stream"})
+
+ [content_type | _] = String.split(content_type, ";")
+ content_type
+ end
+
+ defp put_resp_headers(conn, headers) do
+ Enum.reduce(headers, conn, fn {k, v}, conn ->
+ put_resp_header(conn, k, v)
+ end)
+ end
+
+ defp build_req_headers(headers, opts) do
+ headers =
+ headers
+ |> downcase_headers()
+ |> Enum.filter(fn {k, _} -> k in @keep_req_headers end)
+ |> (fn headers ->
+ headers = headers ++ Keyword.get(opts, :req_headers, [])
+
+ if Keyword.get(opts, :keep_user_agent, false) do
+ List.keystore(
+ headers,
+ "user-agent",
+ 0,
+ {"user-agent", Pleroma.Application.user_agent()}
+ )
+ else
+ headers
+ end
+ end).()
+ end
+
+ defp build_resp_headers(headers, opts) do
+ headers
+ |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end)
+ |> build_resp_cache_headers(opts)
+ |> build_resp_content_disposition_header(opts)
+ |> (fn headers -> headers ++ Keyword.get(opts, :resp_headers, []) end).()
+ end
+
+ defp build_resp_cache_headers(headers, opts) do
+ has_cache? = Enum.any?(headers, fn {k, _} -> k in @resp_cache_headers end)
+
+ if has_cache? do
+ headers
+ else
+ List.keystore(headers, "cache-control", 0, {"cache-control", @default_cache_control_header})
+ end
+ end
+
+ defp build_resp_content_disposition_header(headers, opts) do
+ opt = Keyword.get(opts, :inline_content_types, @inline_content_types)
+
+ content_type = get_content_type(headers)
+
+ attachment? =
+ cond do
+ is_list(opt) && !Enum.member?(opt, content_type) -> true
+ opt == false -> true
+ true -> false
+ end
+
+ if attachment? do
+ disposition = "attachment; filename=" <> Keyword.get(opts, :attachment_name, "attachment")
+ List.keystore(headers, "content-disposition", 0, {"content-disposition", disposition})
+ else
+ headers
+ end
+ end
+
+ defp header_length_constraint(headers, limit) when is_integer(limit) and limit > 0 do
+ with {_, size} <- List.keyfind(headers, "content-length", 0),
+ {size, _} <- Integer.parse(size),
+ true <- size <= limit do
+ :ok
+ else
+ false ->
+ {:error, :body_too_large}
+
+ _ ->
+ :ok
+ end
+ end
+
+ defp header_length_constraint(_, _), do: :ok
+
+ defp body_size_constraint(size, limit) when is_integer(limit) and limit > 0 and size >= limit do
+ {:error, :body_too_large}
+ end
+
+ defp body_size_constraint(_, _), do: :ok
+
+ defp check_read_duration(duration, max)
+ when is_integer(duration) and is_integer(max) and max > 0 do
+ if duration > max do
+ {:error, :read_duration_exceeded}
+ else
+ {:ok, {duration, :erlang.system_time(:millisecond)}}
+ end
+ end
+
+ defp check_read_duration(_, _), do: {:ok, :no_duration_limit, :no_duration_limit}
+
+ defp increase_read_duration({previous_duration, started})
+ when is_integer(previous_duration) and is_integer(started) do
+ duration = :erlang.system_time(:millisecond) - started
+ {:ok, previous_duration + duration}
+ end
+
+ defp increase_read_duration(_) do
+ {:ok, :no_duration_limit, :no_duration_limit}
+ end
+end
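A hedged sketch of calling the proxy from a plug or controller action, per the moduledoc above (the action name is illustrative; the 25 MiB limit mirrors the media-proxy default mentioned in config.md):

    def remote(conn, %{"url" => url}) do
      Pleroma.ReverseProxy.call(conn, url,
        max_body_length: 25 * 1_048_576,
        redirect_on_failure: false
      )
    end
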
diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex
index 238630bf3..bf2c60102 100644
--- a/lib/pleroma/upload.ex
+++ b/lib/pleroma/upload.ex
@@ -1,81 +1,209 @@
defmodule Pleroma.Upload do
+ @moduledoc """
+ # Upload
+
+ Options:
+ * `:type`: presets for activity type (defaults to Document) and size limits from app configuration
+ * `:description`: upload alternative text
+ * `:base_url`: override base url
+ * `:uploader`: override uploader
+ * `:filters`: override filters
+ * `:size_limit`: override size limit
+ * `:activity_type`: override activity type
+
+ The `%Pleroma.Upload{}` struct: all documented fields are meant to be overwritten in filters:
+
+ * `:id` - the upload id.
+ * `:name` - the upload file name.
+  * `:path` - the upload path: set at first to `id/name` but can be changed. Keep in mind that once created,
+    the path is permanent; changing it (especially in uploaders) is probably a bad idea!
+ * `:tempfile` - path to the temporary file. Prefer in-place changes on the file rather than changing the
+ path as the temporary file is also tracked by `Plug.Upload{}` and automatically deleted once the request is over.
+
+ Related behaviors:
+
+ * `Pleroma.Uploaders.Uploader`
+ * `Pleroma.Upload.Filter`
+
+ """
alias Ecto.UUID
+ require Logger
- def check_file_size(path, nil), do: true
+ @type source ::
+ Plug.Upload.t() | data_uri_string ::
+ String.t() | {:from_local, name :: String.t(), id :: String.t(), path :: String.t()}
- def check_file_size(path, size_limit) do
- {:ok, %{size: size}} = File.stat(path)
- size <= size_limit
- end
+ @type option ::
+ {:type, :avatar | :banner | :background}
+ | {:description, String.t()}
+ | {:activity_type, String.t()}
+ | {:size_limit, nil | non_neg_integer()}
+ | {:uploader, module()}
+ | {:filters, [module()]}
- def store(file, should_dedupe, size_limit \\ nil)
+ @type t :: %__MODULE__{
+ id: String.t(),
+ name: String.t(),
+ tempfile: String.t(),
+ content_type: String.t(),
+ path: String.t()
+ }
+ defstruct [:id, :name, :tempfile, :content_type, :path]
- def store(%Plug.Upload{} = file, should_dedupe, size_limit) do
- content_type = get_content_type(file.path)
+ @spec store(source, options :: [option()]) :: {:ok, Map.t()} | {:error, any()}
+ def store(upload, opts \\ []) do
+ opts = get_opts(opts)
- with uuid <- get_uuid(file, should_dedupe),
- name <- get_name(file, uuid, content_type, should_dedupe),
- true <- check_file_size(file.path, size_limit) do
- strip_exif_data(content_type, file.path)
-
- {:ok, url_path} = uploader().put_file(name, uuid, file.path, content_type, should_dedupe)
-
- %{
- "type" => "Document",
- "url" => [
- %{
- "type" => "Link",
- "mediaType" => content_type,
- "href" => url_path
- }
- ],
- "name" => name
- }
+ with {:ok, upload} <- prepare_upload(upload, opts),
+ upload = %__MODULE__{upload | path: upload.path || "#{upload.id}/#{upload.name}"},
+ {:ok, upload} <- Pleroma.Upload.Filter.filter(opts.filters, upload),
+ {:ok, url_spec} <- Pleroma.Uploaders.Uploader.put_file(opts.uploader, upload) do
+ {:ok,
+ %{
+ "type" => opts.activity_type,
+ "url" => [
+ %{
+ "type" => "Link",
+ "mediaType" => upload.content_type,
+ "href" => url_from_spec(opts.base_url, url_spec)
+ }
+ ],
+ "name" => Map.get(opts, :description) || upload.name
+ }}
else
- _e -> nil
- end
- end
-
- def store(%{"img" => "data:image/" <> image_data}, should_dedupe, size_limit) do
-    parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data)
- data = Base.decode64!(parsed["data"], ignore: :whitespace)
-
- with tmp_path <- tempfile_for_image(data),
- uuid <- UUID.generate(),
- true <- check_file_size(tmp_path, size_limit) do
- content_type = get_content_type(tmp_path)
- strip_exif_data(content_type, tmp_path)
-
- name =
- create_name(
- String.downcase(Base.encode16(:crypto.hash(:sha256, data))),
- parsed["filetype"],
- content_type
+ {:error, error} ->
+ Logger.error(
+ "#{__MODULE__} store (using #{inspect(opts.uploader)}) failed: #{inspect(error)}"
)
- {:ok, url_path} = uploader().put_file(name, uuid, tmp_path, content_type, should_dedupe)
-
- %{
- "type" => "Image",
- "url" => [
- %{
- "type" => "Link",
- "mediaType" => content_type,
- "href" => url_path
- }
- ],
- "name" => name
- }
- else
- _e -> nil
+ {:error, error}
end
end
- @doc """
- Creates a tempfile using the Plug.Upload Genserver which cleans them up
- automatically.
- """
- def tempfile_for_image(data) do
+ defp get_opts(opts) do
+ {size_limit, activity_type} =
+ case Keyword.get(opts, :type) do
+ :banner ->
+ {Pleroma.Config.get!([:instance, :banner_upload_limit]), "Image"}
+
+ :avatar ->
+ {Pleroma.Config.get!([:instance, :avatar_upload_limit]), "Image"}
+
+ :background ->
+ {Pleroma.Config.get!([:instance, :background_upload_limit]), "Image"}
+
+ _ ->
+ {Pleroma.Config.get!([:instance, :upload_limit]), "Document"}
+ end
+
+ opts = %{
+ activity_type: Keyword.get(opts, :activity_type, activity_type),
+ size_limit: Keyword.get(opts, :size_limit, size_limit),
+ uploader: Keyword.get(opts, :uploader, Pleroma.Config.get([__MODULE__, :uploader])),
+ filters: Keyword.get(opts, :filters, Pleroma.Config.get([__MODULE__, :filters])),
+ description: Keyword.get(opts, :description),
+ base_url:
+ Keyword.get(
+ opts,
+ :base_url,
+ Pleroma.Config.get([__MODULE__, :base_url], Pleroma.Web.base_url())
+ )
+ }
+
+ # TODO: 1.0+ : remove old config compatibility
+ opts =
+ if Pleroma.Config.get([__MODULE__, :strip_exif]) == true &&
+ !Enum.member?(opts.filters, Pleroma.Upload.Filter.Mogrify) do
+ Logger.warn("""
+ Pleroma: configuration `:instance, :strip_exif` is deprecated, please instead set:
+
+ :pleroma, Pleroma.Upload, [filters: [Pleroma.Upload.Filter.Mogrify]]
+
+ :pleroma, Pleroma.Upload.Filter.Mogrify, args: "strip"
+ """)
+
+ Pleroma.Config.put([Pleroma.Upload.Filter.Mogrify], args: "strip")
+ Map.put(opts, :filters, opts.filters ++ [Pleroma.Upload.Filter.Mogrify])
+ else
+ opts
+ end
+
+ opts =
+ if Pleroma.Config.get([:instance, :dedupe_media]) == true &&
+ !Enum.member?(opts.filters, Pleroma.Upload.Filter.Dedupe) do
+ Logger.warn("""
+ Pleroma: configuration `:instance, :dedupe_media` is deprecated, please instead set:
+
+ :pleroma, Pleroma.Upload, [filters: [Pleroma.Upload.Filter.Dedupe]]
+ """)
+
+ Map.put(opts, :filters, opts.filters ++ [Pleroma.Upload.Filter.Dedupe])
+ else
+ opts
+ end
+ end
+
+ defp prepare_upload(%Plug.Upload{} = file, opts) do
+ with :ok <- check_file_size(file.path, opts.size_limit),
+ {:ok, content_type, name} <- Pleroma.MIME.file_mime_type(file.path, file.filename) do
+ {:ok,
+ %__MODULE__{
+ id: UUID.generate(),
+ name: name,
+ tempfile: file.path,
+ content_type: content_type
+ }}
+ end
+ end
+
+ defp prepare_upload(%{"img" => "data:image/" <> image_data}, opts) do
+    parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data)
+ data = Base.decode64!(parsed["data"], ignore: :whitespace)
+ hash = String.downcase(Base.encode16(:crypto.hash(:sha256, data)))
+
+ with :ok <- check_binary_size(data, opts.size_limit),
+ tmp_path <- tempfile_for_image(data),
+ {:ok, content_type, name} <-
+ Pleroma.MIME.bin_mime_type(data, hash <> "." <> parsed["filetype"]) do
+ {:ok,
+ %__MODULE__{
+ id: UUID.generate(),
+ name: name,
+ tempfile: tmp_path,
+ content_type: content_type
+ }}
+ end
+ end
+
+ # For Mix.Tasks.MigrateLocalUploads
+ defp prepare_upload(upload = %__MODULE__{tempfile: path}, _opts) do
+ with {:ok, content_type} <- Pleroma.MIME.file_mime_type(path) do
+ {:ok, %__MODULE__{upload | content_type: content_type}}
+ end
+ end
+
+ defp check_binary_size(binary, size_limit)
+ when is_integer(size_limit) and size_limit > 0 and byte_size(binary) >= size_limit do
+ {:error, :file_too_large}
+ end
+
+ defp check_binary_size(_, _), do: :ok
+
+ defp check_file_size(path, size_limit) when is_integer(size_limit) and size_limit > 0 do
+ with {:ok, %{size: size}} <- File.stat(path),
+ true <- size <= size_limit do
+ :ok
+ else
+ false -> {:error, :file_too_large}
+ error -> error
+ end
+ end
+
+ defp check_file_size(_, _), do: :ok
+
+ # Creates a tempfile using the Plug.Upload Genserver which cleans them up
+ # automatically.
+ defp tempfile_for_image(data) do
{:ok, tmp_path} = Plug.Upload.random_file("profile_pics")
{:ok, tmp_file} = File.open(tmp_path, [:write, :raw, :binary])
IO.binwrite(tmp_file, data)
@@ -83,108 +211,12 @@ def tempfile_for_image(data) do
tmp_path
end
- def strip_exif_data(content_type, file) do
- settings = Application.get_env(:pleroma, Pleroma.Upload)
- do_strip = Keyword.fetch!(settings, :strip_exif)
- [filetype, _ext] = String.split(content_type, "/")
-
- if filetype == "image" and do_strip == true do
- Mogrify.open(file) |> Mogrify.custom("strip") |> Mogrify.save(in_place: true)
- end
+ defp url_from_spec(base_url, {:file, path}) do
+ [base_url, "media", path]
+ |> Path.join()
end
- defp create_name(uuid, ext, type) do
- case type do
- "application/octet-stream" ->
- String.downcase(Enum.join([uuid, ext], "."))
-
- "audio/mpeg" ->
- String.downcase(Enum.join([uuid, "mp3"], "."))
-
- _ ->
- String.downcase(Enum.join([uuid, List.last(String.split(type, "/"))], "."))
- end
- end
-
- defp get_uuid(file, should_dedupe) do
- if should_dedupe do
- Base.encode16(:crypto.hash(:sha256, File.read!(file.path)))
- else
- UUID.generate()
- end
- end
-
- defp get_name(file, uuid, type, should_dedupe) do
- if should_dedupe do
- create_name(uuid, List.last(String.split(file.filename, ".")), type)
- else
- parts = String.split(file.filename, ".")
-
- new_filename =
- if length(parts) > 1 do
- Enum.drop(parts, -1) |> Enum.join(".")
- else
- Enum.join(parts)
- end
-
- case type do
- "application/octet-stream" -> file.filename
- "audio/mpeg" -> new_filename <> ".mp3"
- "image/jpeg" -> new_filename <> ".jpg"
- _ -> Enum.join([new_filename, String.split(type, "/") |> List.last()], ".")
- end
- end
- end
-
- def get_content_type(file) do
- match =
- File.open(file, [:read], fn f ->
- case IO.binread(f, 8) do
- <<0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A>> ->
- "image/png"
-
- <<0x47, 0x49, 0x46, 0x38, _, 0x61, _, _>> ->
- "image/gif"
-
- <<0xFF, 0xD8, 0xFF, _, _, _, _, _>> ->
- "image/jpeg"
-
- <<0x1A, 0x45, 0xDF, 0xA3, _, _, _, _>> ->
- "video/webm"
-
- <<0x00, 0x00, 0x00, _, 0x66, 0x74, 0x79, 0x70>> ->
- "video/mp4"
-
- <<0x49, 0x44, 0x33, _, _, _, _, _>> ->
- "audio/mpeg"
-
- <<255, 251, _, 68, 0, 0, 0, 0>> ->
- "audio/mpeg"
-
- <<0x4F, 0x67, 0x67, 0x53, 0x00, 0x02, 0x00, 0x00>> ->
- case IO.binread(f, 27) do
- <<_::size(160), 0x80, 0x74, 0x68, 0x65, 0x6F, 0x72, 0x61>> ->
- "video/ogg"
-
- _ ->
- "audio/ogg"
- end
-
- <<0x52, 0x49, 0x46, 0x46, _, _, _, _>> ->
- "audio/wav"
-
- _ ->
- "application/octet-stream"
- end
- end)
-
- case match do
- {:ok, type} -> type
- _e -> "application/octet-stream"
- end
- end
-
- defp uploader() do
- Pleroma.Config.get!([Pleroma.Upload, :uploader])
+  defp url_from_spec(_base_url, {:url, url}) do
+ url
end
end
diff --git a/lib/pleroma/upload/filter.ex b/lib/pleroma/upload/filter.ex
new file mode 100644
index 000000000..d1384ddad
--- /dev/null
+++ b/lib/pleroma/upload/filter.ex
@@ -0,0 +1,35 @@
+defmodule Pleroma.Upload.Filter do
+ @moduledoc """
+ Upload Filter behaviour
+
+  This behaviour allows you to run filtering actions just before a file is uploaded. A filter can:
+
+ * morph in place the temporary file
+ * change any field of a `Pleroma.Upload` struct
+ * cancel/stop the upload
+ """
+
+ require Logger
+
+ @callback filter(Pleroma.Upload.t()) :: :ok | {:ok, Pleroma.Upload.t()} | {:error, any()}
+
+ @spec filter([module()], Pleroma.Upload.t()) :: {:ok, Pleroma.Upload.t()} | {:error, any()}
+
+ def filter([], upload) do
+ {:ok, upload}
+ end
+
+ def filter([filter | rest], upload) do
+ case filter.filter(upload) do
+ :ok ->
+ filter(rest, upload)
+
+ {:ok, upload} ->
+ filter(rest, upload)
+
+ error ->
+ Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(error)}")
+ error
+ end
+ end
+end
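To make the behaviour concrete, a hypothetical filter (not part of this change) that lowercases the stored file name might look like:

    defmodule Pleroma.Upload.Filter.DowncaseFilename do
      @behaviour Pleroma.Upload.Filter

      # Return {:ok, upload} with a modified struct, :ok to keep it unchanged,
      # or {:error, reason} to abort the upload.
      def filter(%Pleroma.Upload{name: name} = upload) do
        {:ok, %Pleroma.Upload{upload | name: String.downcase(name)}}
      end
    end
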
diff --git a/lib/pleroma/upload/filter/anonymize_filename.ex b/lib/pleroma/upload/filter/anonymize_filename.ex
new file mode 100644
index 000000000..a83e764e5
--- /dev/null
+++ b/lib/pleroma/upload/filter/anonymize_filename.ex
@@ -0,0 +1,10 @@
+defmodule Pleroma.Upload.Filter.AnonymizeFilename do
+ @moduledoc "Replaces the original filename with a randomly generated string."
+ @behaviour Pleroma.Upload.Filter
+
+ def filter(upload) do
+ extension = List.last(String.split(upload.name, "."))
+ string = Base.url_encode64(:crypto.strong_rand_bytes(10), padding: false)
+ {:ok, %Pleroma.Upload{upload | name: string <> "." <> extension}}
+ end
+end
diff --git a/lib/pleroma/upload/filter/dedupe.ex b/lib/pleroma/upload/filter/dedupe.ex
new file mode 100644
index 000000000..28091a627
--- /dev/null
+++ b/lib/pleroma/upload/filter/dedupe.ex
@@ -0,0 +1,10 @@
+defmodule Pleroma.Upload.Filter.Dedupe do
+ @behaviour Pleroma.Upload.Filter
+
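+  # Content-addressing: the id and path become the SHA-256 of the file, so identical uploads share one stored file.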
+ def filter(upload = %Pleroma.Upload{name: name, tempfile: path}) do
+ extension = String.split(name, ".") |> List.last()
+ shasum = :crypto.hash(:sha256, File.read!(upload.tempfile)) |> Base.encode16(case: :lower)
+ filename = shasum <> "." <> extension
+ {:ok, %Pleroma.Upload{upload | id: shasum, path: filename}}
+ end
+end
diff --git a/lib/pleroma/upload/filter/mogrifun.ex b/lib/pleroma/upload/filter/mogrifun.ex
new file mode 100644
index 000000000..4d4f0b401
--- /dev/null
+++ b/lib/pleroma/upload/filter/mogrifun.ex
@@ -0,0 +1,60 @@
+defmodule Pleroma.Upload.Filter.Mogrifun do
+ @behaviour Pleroma.Upload.Filter
+
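+  # One of these ImageMagick operations is picked at random and applied to every uploaded image (see filter/1 below).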
+ @filters [
+ {"implode", "1"},
+ {"-raise", "20"},
+ {"+raise", "20"},
+ [{"-interpolate", "nearest"}, {"-virtual-pixel", "mirror"}, {"-spread", "5"}],
+ "+polaroid",
+ {"-statistic", "Mode 10"},
+ {"-emboss", "0x1.1"},
+ {"-emboss", "0x2"},
+ {"-colorspace", "Gray"},
+ "-negate",
+ [{"-channel", "green"}, "-negate"],
+ [{"-channel", "red"}, "-negate"],
+ [{"-channel", "blue"}, "-negate"],
+ {"+level-colors", "green,gold"},
+ {"+level-colors", ",DodgerBlue"},
+ {"+level-colors", ",Gold"},
+ {"+level-colors", ",Lime"},
+ {"+level-colors", ",Red"},
+ {"+level-colors", ",DarkGreen"},
+ {"+level-colors", "firebrick,yellow"},
+ {"+level-colors", "'rgb(102,75,25)',lemonchiffon"},
+ [{"fill", "red"}, {"tint", "40"}],
+ [{"fill", "green"}, {"tint", "40"}],
+ [{"fill", "blue"}, {"tint", "40"}],
+ [{"fill", "yellow"}, {"tint", "40"}]
+ ]
+
+ def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
+ filter = Enum.random(@filters)
+
+ file
+ |> Mogrify.open()
+ |> mogrify_filter(filter)
+ |> Mogrify.save(in_place: true)
+
+ :ok
+ end
+
+ def filter(_), do: :ok
+
+ defp mogrify_filter(mogrify, [filter | rest]) do
+ mogrify
+ |> mogrify_filter(filter)
+ |> mogrify_filter(rest)
+ end
+
+ defp mogrify_filter(mogrify, []), do: mogrify
+
+ defp mogrify_filter(mogrify, {action, options}) do
+ Mogrify.custom(mogrify, action, options)
+ end
+
+ defp mogrify_filter(mogrify, string) when is_binary(string) do
+ Mogrify.custom(mogrify, string)
+ end
+end
diff --git a/lib/pleroma/upload/filter/mogrify.ex b/lib/pleroma/upload/filter/mogrify.ex
new file mode 100644
index 000000000..d6ed471ed
--- /dev/null
+++ b/lib/pleroma/upload/filter/mogrify.ex
@@ -0,0 +1,37 @@
+defmodule Pleroma.Upload.Filter.Mogrify do
+ @behaviour Pleroma.Upload.Filter
+
+ @type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()}
+ @type conversions :: conversion() | [conversion()]
+
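+ # Example configuration (a sketch; the :args value is illustrative):
+ #
+ # config :pleroma, Pleroma.Upload.Filter.Mogrify,
+ # args: ["strip", {"implode", "1"}]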
+ def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
+ filters = Pleroma.Config.get!([__MODULE__, :args])
+
+ file
+ |> Mogrify.open()
+ |> mogrify_filter(filters)
+ |> Mogrify.save(in_place: true)
+
+ :ok
+ end
+
+ def filter(_), do: :ok
+
+ defp mogrify_filter(mogrify, nil), do: mogrify
+
+ defp mogrify_filter(mogrify, [filter | rest]) do
+ mogrify
+ |> mogrify_filter(filter)
+ |> mogrify_filter(rest)
+ end
+
+ defp mogrify_filter(mogrify, []), do: mogrify
+
+ defp mogrify_filter(mogrify, {action, options}) do
+ Mogrify.custom(mogrify, action, options)
+ end
+
+ defp mogrify_filter(mogrify, action) when is_binary(action) do
+ Mogrify.custom(mogrify, action)
+ end
+end
diff --git a/lib/pleroma/uploaders/local.ex b/lib/pleroma/uploaders/local.ex
index d96481c8d..434a6b515 100644
--- a/lib/pleroma/uploaders/local.ex
+++ b/lib/pleroma/uploaders/local.ex
@@ -3,49 +3,32 @@ defmodule Pleroma.Uploaders.Local do
alias Pleroma.Web
- def put_file(name, uuid, tmpfile, _content_type, should_dedupe) do
- upload_folder = get_upload_path(uuid, should_dedupe)
- url_path = get_url(name, uuid, should_dedupe)
+ def get_file(_) do
+ {:ok, {:static_dir, upload_path()}}
+ end
- File.mkdir_p!(upload_folder)
+ def put_file(upload) do
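+ # upload.path may contain subfolders; create them under the uploads
+ # directory before copying the tempfile into place.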
+ {local_path, file} =
+ case Enum.reverse(String.split(upload.path, "/", trim: true)) do
+ [file] ->
+ {upload_path(), file}
- result_file = Path.join(upload_folder, name)
+ [file | folders] ->
+ path = Path.join([upload_path()] ++ Enum.reverse(folders))
+ File.mkdir_p!(path)
+ {path, file}
+ end
- if File.exists?(result_file) do
- File.rm!(tmpfile)
- else
- File.cp!(tmpfile, result_file)
+ result_file = Path.join(local_path, file)
+
+ unless File.exists?(result_file) do
+ File.cp!(upload.tempfile, result_file)
end
- {:ok, url_path}
+ :ok
end
def upload_path do
- settings = Application.get_env(:pleroma, Pleroma.Uploaders.Local)
- Keyword.fetch!(settings, :uploads)
- end
-
- defp get_upload_path(uuid, should_dedupe) do
- if should_dedupe do
- upload_path()
- else
- Path.join(upload_path(), uuid)
- end
- end
-
- defp get_url(name, uuid, should_dedupe) do
- if should_dedupe do
- url_for(:cow_uri.urlencode(name))
- else
- url_for(Path.join(uuid, :cow_uri.urlencode(name)))
- end
- end
-
- defp url_for(file) do
- settings = Application.get_env(:pleroma, Pleroma.Uploaders.Local)
-
- Keyword.get(settings, :uploads_url)
- |> String.replace("{{file}}", file)
- |> String.replace("{{base_url}}", Web.base_url())
+ Pleroma.Config.get!([__MODULE__, :uploads])
end
end
diff --git a/lib/pleroma/uploaders/mdii.ex b/lib/pleroma/uploaders/mdii.ex
index a9d52b0dc..820cf88f5 100644
--- a/lib/pleroma/uploaders/mdii.ex
+++ b/lib/pleroma/uploaders/mdii.ex
@@ -5,22 +5,27 @@ defmodule Pleroma.Uploaders.MDII do
@httpoison Application.get_env(:pleroma, :httpoison)
- def put_file(name, uuid, path, content_type, should_dedupe) do
+ # MDII-hosted images are never passed through the MediaPlug; only local media.
+ # Delegate to Pleroma.Uploaders.Local
+ def get_file(file) do
+ Pleroma.Uploaders.Local.get_file(file)
+ end
+
+ def put_file(upload) do
cgi = Pleroma.Config.get([Pleroma.Uploaders.MDII, :cgi])
files = Pleroma.Config.get([Pleroma.Uploaders.MDII, :files])
- {:ok, file_data} = File.read(path)
+ {:ok, file_data} = File.read(upload.tempfile)
- extension = String.split(name, ".") |> List.last()
+ extension = String.split(upload.name, ".") |> List.last()
query = "#{cgi}?#{extension}"
- with {:ok, %{status_code: 200, body: body}} <- @httpoison.post(query, file_data) do
- File.rm!(path)
+ with {:ok, %{status: 200, body: body}} <- @httpoison.post(query, file_data) do
remote_file_name = String.split(body) |> List.first()
public_url = "#{files}/#{remote_file_name}.#{extension}"
- {:ok, public_url}
+ {:ok, {:url, public_url}}
else
- _ -> Pleroma.Uploaders.Local.put_file(name, uuid, path, content_type, should_dedupe)
+ _ -> Pleroma.Uploaders.Local.put_file(upload)
end
end
end
diff --git a/lib/pleroma/uploaders/s3.ex b/lib/pleroma/uploaders/s3.ex
index 40a836460..19832a7ec 100644
--- a/lib/pleroma/uploaders/s3.ex
+++ b/lib/pleroma/uploaders/s3.ex
@@ -1,40 +1,46 @@
defmodule Pleroma.Uploaders.S3 do
- alias Pleroma.Web.MediaProxy
-
@behaviour Pleroma.Uploaders.Uploader
+ require Logger
- def put_file(name, uuid, path, content_type, _should_dedupe) do
- settings = Application.get_env(:pleroma, Pleroma.Uploaders.S3)
- bucket = Keyword.fetch!(settings, :bucket)
- public_endpoint = Keyword.fetch!(settings, :public_endpoint)
- force_media_proxy = Keyword.fetch!(settings, :force_media_proxy)
+ # The file name is re-encoded with S3's constraints here so that links generated under the previous, less strict filename rules keep working
+ def get_file(file) do
+ config = Pleroma.Config.get([__MODULE__])
- {:ok, file_data} = File.read(path)
+ {:ok,
+ {:url,
+ Path.join([
+ Keyword.fetch!(config, :public_endpoint),
+ Keyword.fetch!(config, :bucket),
+ strict_encode(URI.decode(file))
+ ])}}
+ end
- File.rm!(path)
+ def put_file(upload = %Pleroma.Upload{}) do
+ config = Pleroma.Config.get([__MODULE__])
+ bucket = Keyword.get(config, :bucket)
- s3_name = "#{uuid}/#{encode(name)}"
+ {:ok, file_data} = File.read(upload.tempfile)
- {:ok, _} =
+ s3_name = strict_encode(upload.path)
+
+ op =
ExAws.S3.put_object(bucket, s3_name, file_data, [
{:acl, :public_read},
- {:content_type, content_type}
+ {:content_type, upload.content_type}
])
- |> ExAws.request()
- url_base = "#{public_endpoint}/#{bucket}/#{s3_name}"
+ case ExAws.request(op) do
+ {:ok, _} ->
+ {:ok, {:file, s3_name}}
- public_url =
- if force_media_proxy do
- MediaProxy.url(url_base)
- else
- url_base
- end
-
- {:ok, public_url}
+ error ->
+ Logger.error("#{__MODULE__}: #{inspect(error)}")
+ {:error, "S3 Upload failed"}
+ end
end
- defp encode(name) do
- String.replace(name, ~r/[^0-9a-zA-Z!.*'()_-]/, "-")
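+ # Replaces anything outside the S3-safe character set with "-",
+ # e.g. strict_encode("some file@name.png") => "some-file-name.png"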
+ @regex Regex.compile!("[^0-9a-zA-Z!.*/'()_-]")
+ def strict_encode(name) do
+ String.replace(name, @regex, "-")
end
end
diff --git a/lib/pleroma/uploaders/swift/keystone.ex b/lib/pleroma/uploaders/swift/keystone.ex
index e578b3c61..4aed977b1 100644
--- a/lib/pleroma/uploaders/swift/keystone.ex
+++ b/lib/pleroma/uploaders/swift/keystone.ex
@@ -25,10 +25,10 @@ def get_token() do
["Content-Type": "application/json"],
hackney: [:insecure]
) do
- {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
+ {:ok, %Tesla.Env{status: 200, body: body}} ->
body["access"]["token"]["id"]
- {:ok, %HTTPoison.Response{status_code: _}} ->
+ {:ok, %Tesla.Env{status: _}} ->
""
end
end
diff --git a/lib/pleroma/uploaders/swift/swift.ex b/lib/pleroma/uploaders/swift/swift.ex
index fa08ca966..a5b3d2852 100644
--- a/lib/pleroma/uploaders/swift/swift.ex
+++ b/lib/pleroma/uploaders/swift/swift.ex
@@ -13,10 +13,10 @@ def upload_file(filename, body, content_type) do
token = Pleroma.Uploaders.Swift.Keystone.get_token()
case put("#{filename}", body, "X-Auth-Token": token, "Content-Type": content_type) do
- {:ok, %HTTPoison.Response{status_code: 201}} ->
- {:ok, "#{object_url}/#{filename}"}
+ {:ok, %Tesla.Env{status: 201}} ->
+ {:ok, {:file, filename}}
- {:ok, %HTTPoison.Response{status_code: 401}} ->
+ {:ok, %Tesla.Env{status: 401}} ->
{:error, "Unauthorized, Bad Token"}
{:error, _} ->
diff --git a/lib/pleroma/uploaders/swift/uploader.ex b/lib/pleroma/uploaders/swift/uploader.ex
index 794f76cb0..b35b9807b 100644
--- a/lib/pleroma/uploaders/swift/uploader.ex
+++ b/lib/pleroma/uploaders/swift/uploader.ex
@@ -1,10 +1,15 @@
defmodule Pleroma.Uploaders.Swift do
@behaviour Pleroma.Uploaders.Uploader
- def put_file(name, uuid, tmp_path, content_type, _should_dedupe) do
- {:ok, file_data} = File.read(tmp_path)
- remote_name = "#{uuid}/#{name}"
+ def get_file(name) do
+ {:ok, {:url, Path.join([Pleroma.Config.get!([__MODULE__, :object_url]), name])}}
+ end
- Pleroma.Uploaders.Swift.Client.upload_file(remote_name, file_data, content_type)
+ def put_file(upload) do
+ Pleroma.Uploaders.Swift.Client.upload_file(
+ upload.path,
+ File.read!(upload.tempfile),
+ upload.content_type
+ )
end
end
diff --git a/lib/pleroma/uploaders/uploader.ex b/lib/pleroma/uploaders/uploader.ex
index b58fc6d71..afda5609e 100644
--- a/lib/pleroma/uploaders/uploader.ex
+++ b/lib/pleroma/uploaders/uploader.ex
@@ -1,20 +1,40 @@
defmodule Pleroma.Uploaders.Uploader do
@moduledoc """
- Defines the contract to put an uploaded file to any backend.
+ Defines the contract to put an uploaded file to, and get it from, any backend.
"""
+ @doc """
+ Instructs how to get the file from the backend.
+
+ Used by `Pleroma.Plugs.UploadedMedia`.
+ """
+ @type get_method :: {:static_dir, directory :: String.t()} | {:url, url :: String.t()}
+ @callback get_file(file :: String.t()) :: {:ok, get_method()}
+
@doc """
Put a file to the backend.
- Returns `{:ok, String.t } | {:error, String.t} containing the path of the
- uploaded file, or error information if the file failed to be saved to the
- respective backend.
+ Returns:
+
+ * `:ok` which assumes `{:ok, {:file, upload.path}}`
+ * `{:ok, spec}` where spec is:
+ * `{:file, filename :: String.t}` to handle reads with `get_file/1` (recommended)
+
+ This allows requests to be correctly proxied or redirected to the backend, and makes it possible to migrate backends without breaking any URL.
+ * `{:url, url :: String.t}` to bypass `get_file/1` and use the `url` directly in the activity.
+ * `{:error, String.t}` error information if the file failed to be saved to the backend.
+
+
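+ A put_file sketch for a hypothetical backend (`MyBackend` is illustrative, not
+ part of Pleroma):
+
+ def put_file(%Pleroma.Upload{} = upload) do
+ case MyBackend.store(upload.tempfile, upload.path) do
+ :ok -> {:ok, {:file, upload.path}}
+ {:error, reason} -> {:error, inspect(reason)}
+ end
+ end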
"""
- @callback put_file(
- name :: String.t(),
- uuid :: String.t(),
- file :: File.t(),
- content_type :: String.t(),
- should_dedupe :: Boolean.t()
- ) :: {:ok, String.t()} | {:error, String.t()}
+ @callback put_file(Pleroma.Upload.t()) ::
+ :ok | {:ok, {:file | :url, String.t()}} | {:error, String.t()}
+
+ @spec put_file(module(), Pleroma.Upload.t()) ::
+ {:ok, {:file | :url, String.t()}} | {:error, String.t()}
+ def put_file(uploader, upload) do
+ case uploader.put_file(upload) do
+ :ok -> {:ok, {:file, upload.path}}
+ other -> other
+ end
+ end
end
diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex
index 6e1d5559d..74ae5ef0d 100644
--- a/lib/pleroma/user.ex
+++ b/lib/pleroma/user.ex
@@ -4,6 +4,8 @@ defmodule Pleroma.User do
import Ecto.{Changeset, Query}
alias Pleroma.{Repo, User, Object, Web, Activity, Notification}
alias Comeonin.Pbkdf2
+ alias Pleroma.Formatter
+ alias Pleroma.Web.CommonAPI.Utils, as: CommonUtils
alias Pleroma.Web.{OStatus, Websub, OAuth}
alias Pleroma.Web.ActivityPub.{Utils, ActivityPub}
@@ -19,11 +21,11 @@ defmodule Pleroma.User do
field(:ap_id, :string)
field(:avatar, :map)
field(:local, :boolean, default: true)
- field(:info, :map, default: %{})
field(:follower_address, :string)
field(:search_distance, :float, virtual: true)
field(:last_refreshed_at, :naive_datetime)
has_many(:notifications, Notification)
+ embeds_one(:info, Pleroma.User.Info)
timestamps()
end
@@ -36,13 +38,13 @@ def avatar_url(user) do
end
def banner_url(user) do
- case user.info["banner"] do
+ case user.info.banner do
%{"url" => [%{"href" => href} | _]} -> href
_ -> "#{Web.base_url()}/images/banner.png"
end
end
- def profile_url(%User{info: %{"source_data" => %{"url" => url}}}), do: url
+ def profile_url(%User{info: %{source_data: %{"url" => url}}}), do: url
def profile_url(%User{ap_id: ap_id}), do: ap_id
def profile_url(_), do: nil
@@ -61,9 +63,7 @@ def follow_changeset(struct, params \\ %{}) do
end
def info_changeset(struct, params \\ %{}) do
- struct
- |> cast(params, [:info])
- |> validate_required([:info])
+ raise "NOT VALID ANYMORE"
end
def user_info(%User{} = user) do
@@ -71,27 +71,34 @@ def user_info(%User{} = user) do
%{
following_count: length(user.following) - oneself,
- note_count: user.info["note_count"] || 0,
- follower_count: user.info["follower_count"] || 0,
- locked: user.info["locked"] || false,
- default_scope: user.info["default_scope"] || "public"
+ note_count: user.info.note_count,
+ follower_count: user.info.follower_count,
+ locked: user.info.locked,
+ default_scope: user.info.default_scope
}
end
@email_regex ~r/^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/
def remote_user_creation(params) do
+ params =
+ params
+ |> Map.put(:info, params[:info] || %{})
+
+ info_cng = User.Info.remote_user_creation(%User.Info{}, params[:info])
+
changes =
%User{}
- |> cast(params, [:bio, :name, :ap_id, :nickname, :info, :avatar])
+ |> cast(params, [:bio, :name, :ap_id, :nickname, :avatar])
|> validate_required([:name, :ap_id])
|> unique_constraint(:nickname)
|> validate_format(:nickname, @email_regex)
|> validate_length(:bio, max: 5000)
|> validate_length(:name, max: 100)
|> put_change(:local, false)
+ |> put_embed(:info, info_cng)
if changes.valid? do
- case changes.changes[:info]["source_data"] do
+ case info_cng.changes[:source_data] do
%{"followers" => followers} ->
changes
|> put_change(:follower_address, followers)
@@ -109,7 +116,7 @@ def remote_user_creation(params) do
def update_changeset(struct, params \\ %{}) do
struct
- |> cast(params, [:bio, :name])
+ |> cast(params, [:bio, :name, :avatar])
|> unique_constraint(:nickname)
|> validate_format(:nickname, ~r/^[a-zA-Z\d]+$/)
|> validate_length(:bio, max: 5000)
@@ -121,12 +128,17 @@ def upgrade_changeset(struct, params \\ %{}) do
params
|> Map.put(:last_refreshed_at, NaiveDateTime.utc_now())
+ info_cng =
+ struct.info
+ |> User.Info.user_upgrade(params[:info])
+
struct
- |> cast(params, [:bio, :name, :info, :follower_address, :avatar, :last_refreshed_at])
+ |> cast(params, [:bio, :name, :follower_address, :avatar, :last_refreshed_at])
|> unique_constraint(:nickname)
|> validate_format(:nickname, ~r/^[a-zA-Z\d]+$/)
|> validate_length(:bio, max: 5000)
|> validate_length(:name, max: 100)
+ |> put_embed(:info, info_cng)
end
def password_update_changeset(struct, params) do
@@ -165,6 +177,7 @@ def register_changeset(struct, params \\ %{}) do
|> validate_format(:email, @email_regex)
|> validate_length(:bio, max: 1000)
|> validate_length(:name, min: 1, max: 100)
+ |> put_change(:info, %Pleroma.User.Info{})
if changeset.valid? do
hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
@@ -191,7 +204,7 @@ def needs_update?(%User{local: false} = user) do
def needs_update?(_), do: true
- def maybe_direct_follow(%User{} = follower, %User{local: true, info: %{"locked" => true}}) do
+ def maybe_direct_follow(%User{} = follower, %User{local: true, info: %{locked: true}}) do
{:ok, follower}
end
@@ -222,7 +235,7 @@ def follow(%User{} = follower, %User{info: info} = followed) do
ap_followers = followed.follower_address
cond do
- following?(follower, followed) or info["deactivated"] ->
+ following?(follower, followed) or info.deactivated ->
{:error, "Could not follow user: #{followed.nickname} is already on your list."}
deny_follow_blocked and blocks?(followed, follower) ->
@@ -274,7 +287,7 @@ def following?(%User{} = follower, %User{} = followed) do
end
def locked?(%User{} = user) do
- user.info["locked"] || false
+ user.info.locked || false
end
def get_by_ap_id(ap_id) do
@@ -411,22 +424,23 @@ def get_follow_requests(%User{} = user) do
end
def increase_note_count(%User{} = user) do
- note_count = (user.info["note_count"] || 0) + 1
- new_info = Map.put(user.info, "note_count", note_count)
+ info_cng = User.Info.add_to_note_count(user.info, 1)
- cs = info_changeset(user, %{info: new_info})
+ cng =
+ change(user)
+ |> put_embed(:info, info_cng)
- update_and_set_cache(cs)
+ update_and_set_cache(cng)
end
def decrease_note_count(%User{} = user) do
- note_count = user.info["note_count"] || 0
- note_count = if note_count <= 0, do: 0, else: note_count - 1
- new_info = Map.put(user.info, "note_count", note_count)
+ info_cng = User.Info.add_to_note_count(user.info, -1)
- cs = info_changeset(user, %{info: new_info})
+ cng =
+ change(user)
+ |> put_embed(:info, info_cng)
- update_and_set_cache(cs)
+ update_and_set_cache(cng)
end
def update_note_count(%User{} = user) do
@@ -439,11 +453,13 @@ def update_note_count(%User{} = user) do
note_count = Repo.one(note_count_query)
- new_info = Map.put(user.info, "note_count", note_count)
+ info_cng = User.Info.set_note_count(user.info, note_count)
- cs = info_changeset(user, %{info: new_info})
+ cng =
+ change(user)
+ |> put_embed(:info, info_cng)
- update_and_set_cache(cs)
+ update_and_set_cache(cng)
end
def update_follower_count(%User{} = user) do
@@ -457,11 +473,15 @@ def update_follower_count(%User{} = user) do
follower_count = Repo.one(follower_count_query)
- new_info = Map.put(user.info, "follower_count", follower_count)
+ info_cng =
+ user.info
+ |> User.Info.set_follower_count(follower_count)
- cs = info_changeset(user, %{info: new_info})
+ cng =
+ change(user)
+ |> put_embed(:info, info_cng)
- update_and_set_cache(cs)
+ update_and_set_cache(cng)
end
def get_users_from_set_query(ap_ids, false) do
@@ -545,12 +565,15 @@ def block(blocker, %User{ap_id: ap_id} = blocked) do
unfollow(blocked, blocker)
end
- blocks = blocker.info["blocks"] || []
- new_blocks = Enum.uniq([ap_id | blocks])
- new_info = Map.put(blocker.info, "blocks", new_blocks)
+ info_cng =
+ blocker.info
+ |> User.Info.add_to_block(ap_id)
- cs = User.info_changeset(blocker, %{info: new_info})
- update_and_set_cache(cs)
+ cng =
+ change(blocker)
+ |> put_embed(:info, info_cng)
+
+ update_and_set_cache(cng)
end
# helper to handle the block given only an actor's AP id
@@ -558,18 +581,21 @@ def block(blocker, %{ap_id: ap_id}) do
block(blocker, User.get_by_ap_id(ap_id))
end
- def unblock(user, %{ap_id: ap_id}) do
- blocks = user.info["blocks"] || []
- new_blocks = List.delete(blocks, ap_id)
- new_info = Map.put(user.info, "blocks", new_blocks)
+ def unblock(blocker, %{ap_id: ap_id}) do
+ info_cng =
+ blocker.info
+ |> User.Info.remove_from_block(ap_id)
- cs = User.info_changeset(user, %{info: new_info})
- update_and_set_cache(cs)
+ cng =
+ change(blocker)
+ |> put_embed(:info, info_cng)
+
+ update_and_set_cache(cng)
end
def blocks?(user, %{ap_id: ap_id}) do
- blocks = user.info["blocks"] || []
- domain_blocks = user.info["domain_blocks"] || []
+ blocks = user.info.blocks
+ domain_blocks = user.info.domain_blocks
%{host: host} = URI.parse(ap_id)
Enum.member?(blocks, ap_id) ||
@@ -579,21 +605,27 @@ def blocks?(user, %{ap_id: ap_id}) do
end
def block_domain(user, domain) do
- domain_blocks = user.info["domain_blocks"] || []
- new_blocks = Enum.uniq([domain | domain_blocks])
- new_info = Map.put(user.info, "domain_blocks", new_blocks)
+ info_cng =
+ user.info
+ |> User.Info.add_to_domain_block(domain)
- cs = User.info_changeset(user, %{info: new_info})
- update_and_set_cache(cs)
+ cng =
+ change(user)
+ |> put_embed(:info, info_cng)
+
+ update_and_set_cache(cng)
end
def unblock_domain(user, domain) do
- blocks = user.info["domain_blocks"] || []
- new_blocks = List.delete(blocks, domain)
- new_info = Map.put(user.info, "domain_blocks", new_blocks)
+ info_cng =
+ user.info
+ |> User.Info.remove_from_domain_block(domain)
- cs = User.info_changeset(user, %{info: new_info})
- update_and_set_cache(cs)
+ cng =
+ change(user)
+ |> put_embed(:info, info_cng)
+
+ update_and_set_cache(cng)
end
def local_user_query() do
@@ -613,9 +645,13 @@ def moderator_user_query() do
end
def deactivate(%User{} = user, status \\ true) do
- new_info = Map.put(user.info, "deactivated", status)
- cs = User.info_changeset(user, %{info: new_info})
- update_and_set_cache(cs)
+ info_cng = User.Info.set_activation_status(user.info, status)
+
+ cng =
+ change(user)
+ |> put_embed(:info, info_cng)
+
+ update_and_set_cache(cng)
end
def delete(%User{} = user) do
@@ -649,7 +685,7 @@ def delete(%User{} = user) do
{:ok, user}
end
- def html_filter_policy(%User{info: %{"no_rich_text" => true}}) do
+ def html_filter_policy(%User{info: %{no_rich_text: true}}) do
Pleroma.HTML.Scrubber.TwitterText
end
@@ -683,7 +719,7 @@ def get_or_create_instance_user do
user
else
changes =
- %User{}
+ %User{info: %User.Info{}}
|> cast(%{}, [:ap_id, :nickname, :local])
|> put_change(:ap_id, relay_uri)
|> put_change(:nickname, nil)
@@ -697,7 +733,7 @@ def get_or_create_instance_user do
# AP style
def public_key_from_info(%{
- "source_data" => %{"publicKey" => %{"publicKeyPem" => public_key_pem}}
+ source_data: %{"publicKey" => %{"publicKeyPem" => public_key_pem}}
}) do
key =
:public_key.pem_decode(public_key_pem)
@@ -708,7 +744,7 @@ def public_key_from_info(%{
end
# OStatus Magic Key
- def public_key_from_info(%{"magic_key" => magic_key}) do
+ def public_key_from_info(%{magic_key: magic_key}) do
{:ok, Pleroma.Web.Salmon.decode_key(magic_key)}
end
@@ -730,11 +766,12 @@ def insert_or_update_user(data) do
|> Map.put(:name, blank?(data[:name]) || data[:nickname])
cs = User.remote_user_creation(data)
+
Repo.insert(cs, on_conflict: :replace_all, conflict_target: :nickname)
end
def ap_enabled?(%User{local: true}), do: true
- def ap_enabled?(%User{info: info}), do: info["ap_enabled"]
+ def ap_enabled?(%User{info: info}), do: info.ap_enabled
def ap_enabled?(_), do: false
def get_or_fetch(uri_or_nickname) do
@@ -768,4 +805,18 @@ def wait_and_refresh(timeout, %User{} = a, %User{} = b) do
:error
end
end
+
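+ # Renders a user bio to HTML, linkifying mentions and tags and substituting
+ # any custom emoji found in the user's source data.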
+ def parse_bio(bio, user \\ %User{info: %{source_data: %{}}}) do
+ mentions = Formatter.parse_mentions(bio)
+ tags = Formatter.parse_tags(bio)
+
+ emoji =
+ (user.info.source_data["tag"] || [])
+ |> Enum.filter(fn %{"type" => t} -> t == "Emoji" end)
+ |> Enum.map(fn %{"icon" => %{"url" => url}, "name" => name} ->
+ {String.trim(name, ":"), url}
+ end)
+
+ CommonUtils.format_input(bio, mentions, tags, "text/plain") |> Formatter.emojify(emoji)
+ end
end
diff --git a/lib/pleroma/user/info.ex b/lib/pleroma/user/info.ex
new file mode 100644
index 000000000..7a99787f8
--- /dev/null
+++ b/lib/pleroma/user/info.ex
@@ -0,0 +1,167 @@
+defmodule Pleroma.User.Info do
+ use Ecto.Schema
+ import Ecto.Changeset
+
+ embedded_schema do
+ field(:banner, :map, default: %{})
+ field(:background, :map, default: %{})
+ field(:source_data, :map, default: %{})
+ field(:note_count, :integer, default: 0)
+ field(:follower_count, :integer, default: 0)
+ field(:locked, :boolean, default: false)
+ field(:default_scope, :string, default: "public")
+ field(:blocks, {:array, :string}, default: [])
+ field(:domain_blocks, {:array, :string}, default: [])
+ field(:deactivated, :boolean, default: false)
+ field(:no_rich_text, :boolean, default: false)
+ field(:ap_enabled, :boolean, default: false)
+ field(:is_moderator, :boolean, default: false)
+ field(:is_admin, :boolean, default: false)
+ field(:keys, :string, default: nil)
+ field(:settings, :map, default: nil)
+ field(:magic_key, :string, default: nil)
+ field(:uri, :string, default: nil)
+ field(:topic, :string, default: nil)
+ field(:hub, :string, default: nil)
+ field(:salmon, :string, default: nil)
+ field(:hide_network, :boolean, default: false)
+
+ # Found in the wild
+ # ap_id -> Where is this used?
+ # bio -> Where is this used?
+ # avatar -> Where is this used?
+ # fqn -> Where is this used?
+ # host -> Where is this used?
+ # subject -> Where is this used?
+ end
+
+ def set_activation_status(info, deactivated) do
+ params = %{deactivated: deactivated}
+
+ info
+ |> cast(params, [:deactivated])
+ |> validate_required([:deactivated])
+ end
+
+ def add_to_note_count(info, number) do
+ set_note_count(info, info.note_count + number)
+ end
+
+ def set_note_count(info, number) do
+ params = %{note_count: Enum.max([0, number])}
+
+ info
+ |> cast(params, [:note_count])
+ |> validate_required([:note_count])
+ end
+
+ def set_follower_count(info, number) do
+ params = %{follower_count: Enum.max([0, number])}
+
+ info
+ |> cast(params, [:follower_count])
+ |> validate_required([:follower_count])
+ end
+
+ def set_blocks(info, blocks) do
+ params = %{blocks: blocks}
+
+ info
+ |> cast(params, [:blocks])
+ |> validate_required([:blocks])
+ end
+
+ def add_to_block(info, blocked) do
+ set_blocks(info, Enum.uniq([blocked | info.blocks]))
+ end
+
+ def remove_from_block(info, blocked) do
+ set_blocks(info, List.delete(info.blocks, blocked))
+ end
+
+ def set_domain_blocks(info, domain_blocks) do
+ params = %{domain_blocks: domain_blocks}
+
+ info
+ |> cast(params, [:domain_blocks])
+ |> validate_required([:domain_blocks])
+ end
+
+ def add_to_domain_block(info, domain_blocked) do
+ set_domain_blocks(info, Enum.uniq([domain_blocked | info.domain_blocks]))
+ end
+
+ def remove_from_domain_block(info, domain_blocked) do
+ set_domain_blocks(info, List.delete(info.domain_blocks, domain_blocked))
+ end
+
+ def set_keys(info, keys) do
+ params = %{keys: keys}
+
+ info
+ |> cast(params, [:keys])
+ |> validate_required([:keys])
+ end
+
+ def remote_user_creation(info, params) do
+ info
+ |> cast(params, [
+ :ap_enabled,
+ :source_data,
+ :banner,
+ :locked,
+ :magic_key,
+ :uri,
+ :hub,
+ :topic,
+ :salmon
+ ])
+ end
+
+ def user_upgrade(info, params) do
+ info
+ |> cast(params, [
+ :ap_enabled,
+ :source_data,
+ :banner,
+ :locked,
+ :magic_key
+ ])
+ end
+
+ def profile_update(info, params) do
+ info
+ |> cast(params, [
+ :locked,
+ :no_rich_text,
+ :default_scope,
+ :banner,
+ :hide_network,
+ :background
+ ])
+ end
+
+ def mastodon_profile_update(info, params) do
+ info
+ |> cast(params, [
+ :locked,
+ :banner
+ ])
+ end
+
+ def set_source_data(info, source_data) do
+ params = %{source_data: source_data}
+
+ info
+ |> cast(params, [:source_data])
+ |> validate_required([:source_data])
+ end
+
+ def admin_api_update(info, params) do
+ info
+ |> cast(params, [
+ :is_moderator,
+ :is_admin
+ ])
+ end
+end
diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex
index ed579e336..28da57a10 100644
--- a/lib/pleroma/web/activity_pub/activity_pub.ex
+++ b/lib/pleroma/web/activity_pub/activity_pub.ex
@@ -42,7 +42,7 @@ defp get_recipients(data) do
defp check_actor_is_active(actor) do
if not is_nil(actor) do
with user <- User.get_cached_by_ap_id(actor),
- false <- !!user.info["deactivated"] do
+ false <- user.info.deactivated do
:ok
else
_e -> :reject
@@ -509,8 +509,8 @@ defp restrict_recent(query, _) do
end
defp restrict_blocked(query, %{"blocking_user" => %User{info: info}}) do
- blocks = info["blocks"] || []
- domain_blocks = info["domain_blocks"] || []
+ blocks = info.blocks || []
+ domain_blocks = info.domain_blocks || []
from(
activity in query,
@@ -572,11 +572,16 @@ def fetch_activities_bounded(recipients_to, recipients_cc, opts \\ %{}) do
|> Enum.reverse()
end
- def upload(file, size_limit \\ nil) do
- with data <-
- Upload.store(file, Application.get_env(:pleroma, :instance)[:dedupe_media], size_limit),
- false <- is_nil(data) do
- Repo.insert(%Object{data: data})
+ def upload(file, opts \\ []) do
+ with {:ok, data} <- Upload.store(file, opts) do
+ obj_data =
+ if opts[:actor] do
+ Map.put(data, "actor", opts[:actor])
+ else
+ data
+ end
+
+ Repo.insert(%Object{data: obj_data})
end
end
@@ -678,7 +683,7 @@ def publish(actor, activity) do
remote_inboxes =
(Pleroma.Web.Salmon.remote_users(activity) ++ followers)
|> Enum.filter(fn user -> User.ap_enabled?(user) end)
- |> Enum.map(fn %{info: %{"source_data" => data}} ->
+ |> Enum.map(fn %{info: %{source_data: data}} ->
(is_map(data["endpoints"]) && Map.get(data["endpoints"], "sharedInbox")) || data["inbox"]
end)
|> Enum.uniq()
@@ -764,7 +769,7 @@ def fetch_and_contain_remote_object_from_id(id) do
Logger.info("Fetching #{id} via AP")
with true <- String.starts_with?(id, "http"),
- {:ok, %{body: body, status_code: code}} when code in 200..299 <-
+ {:ok, %{body: body, status: code}} when code in 200..299 <-
@httpoison.get(
id,
[Accept: "application/activity+json"],
diff --git a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex
index 86dcf5080..12fc3b181 100644
--- a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex
@@ -23,7 +23,7 @@ defp check_reject(%{host: actor_host} = _actor_info, object) do
defp check_media_removal(
%{host: actor_host} = _actor_info,
- %{"type" => "Create", "object" => %{"attachement" => child_attachment}} = object
+ %{"type" => "Create", "object" => %{"attachment" => child_attachment}} = object
)
when length(child_attachment) > 0 do
object =
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index 5864855b0..17b063609 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -447,7 +447,7 @@ def handle_incoming(
update_data =
new_user_data
|> Map.take([:name, :bio, :avatar])
- |> Map.put(:info, Map.merge(actor.info, %{"banner" => banner, "locked" => locked}))
+ |> Map.put(:info, %{"banner" => banner, "locked" => locked})
actor
|> User.upgrade_changeset(update_data)
@@ -850,10 +850,6 @@ defp user_upgrade_task(user) do
def upgrade_user_from_ap_id(ap_id, async \\ true) do
with %User{local: false} = user <- User.get_by_ap_id(ap_id),
{:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id) do
- data =
- data
- |> Map.put(:info, Map.merge(user.info, data[:info]))
-
already_ap = User.ap_enabled?(user)
{:ok, user} =
diff --git a/lib/pleroma/web/activity_pub/views/user_view.ex b/lib/pleroma/web/activity_pub/views/user_view.ex
index eb335813d..869934172 100644
--- a/lib/pleroma/web/activity_pub/views/user_view.ex
+++ b/lib/pleroma/web/activity_pub/views/user_view.ex
@@ -12,7 +12,7 @@ defmodule Pleroma.Web.ActivityPub.UserView do
# the instance itself is not a Person, but instead an Application
def render("user.json", %{user: %{nickname: nil} = user}) do
{:ok, user} = WebFinger.ensure_keys_present(user)
- {:ok, _, public_key} = Salmon.keys_from_pem(user.info["keys"])
+ {:ok, _, public_key} = Salmon.keys_from_pem(user.info.keys)
public_key = :public_key.pem_entry_encode(:SubjectPublicKeyInfo, public_key)
public_key = :public_key.pem_encode([public_key])
@@ -40,7 +40,7 @@ def render("user.json", %{user: %{nickname: nil} = user}) do
def render("user.json", %{user: user}) do
{:ok, user} = WebFinger.ensure_keys_present(user)
- {:ok, _, public_key} = Salmon.keys_from_pem(user.info["keys"])
+ {:ok, _, public_key} = Salmon.keys_from_pem(user.info.keys)
public_key = :public_key.pem_entry_encode(:SubjectPublicKeyInfo, public_key)
public_key = :public_key.pem_encode([public_key])
@@ -55,7 +55,7 @@ def render("user.json", %{user: user}) do
"name" => user.name,
"summary" => user.bio,
"url" => user.ap_id,
- "manuallyApprovesFollowers" => user.info["locked"] || false,
+ "manuallyApprovesFollowers" => user.info.locked,
"publicKey" => %{
"id" => "#{user.ap_id}#main-key",
"owner" => user.ap_id,
@@ -72,7 +72,7 @@ def render("user.json", %{user: user}) do
"type" => "Image",
"url" => User.banner_url(user)
},
- "tag" => user.info["source_data"]["tag"] || []
+ "tag" => user.info.source_data["tag"] || []
}
|> Map.merge(Utils.make_json_ld_header())
end
@@ -82,7 +82,7 @@ def render("following.json", %{user: user, page: page}) do
query = from(user in query, select: [:ap_id])
following = Repo.all(query)
- collection(following, "#{user.ap_id}/following", page)
+ collection(following, "#{user.ap_id}/following", page, !user.info.hide_network)
|> Map.merge(Utils.make_json_ld_header())
end
@@ -95,7 +95,7 @@ def render("following.json", %{user: user}) do
"id" => "#{user.ap_id}/following",
"type" => "OrderedCollection",
"totalItems" => length(following),
- "first" => collection(following, "#{user.ap_id}/following", 1)
+ "first" => collection(following, "#{user.ap_id}/following", 1, !user.info.hide_network)
}
|> Map.merge(Utils.make_json_ld_header())
end
@@ -105,7 +105,7 @@ def render("followers.json", %{user: user, page: page}) do
query = from(user in query, select: [:ap_id])
followers = Repo.all(query)
- collection(followers, "#{user.ap_id}/followers", page)
+ collection(followers, "#{user.ap_id}/followers", page, !user.info.hide_network)
|> Map.merge(Utils.make_json_ld_header())
end
@@ -118,7 +118,7 @@ def render("followers.json", %{user: user}) do
"id" => "#{user.ap_id}/followers",
"type" => "OrderedCollection",
"totalItems" => length(followers),
- "first" => collection(followers, "#{user.ap_id}/followers", 1)
+ "first" => collection(followers, "#{user.ap_id}/followers", 1, !user.info.hide_network)
}
|> Map.merge(Utils.make_json_ld_header())
end
@@ -172,7 +172,7 @@ def render("outbox.json", %{user: user, max_id: max_qid}) do
end
end
- def collection(collection, iri, page, total \\ nil) do
+ def collection(collection, iri, page, show_items \\ true, total \\ nil) do
offset = (page - 1) * 10
items = Enum.slice(collection, offset, 10)
items = Enum.map(items, fn user -> user.ap_id end)
@@ -183,7 +183,7 @@ def collection(collection, iri, page, total \\ nil) do
"type" => "OrderedCollectionPage",
"partOf" => iri,
"totalItems" => total,
- "orderedItems" => items
+ "orderedItems" => if(show_items, do: items, else: [])
}
if offset < total do
diff --git a/lib/pleroma/web/admin_api/admin_api_controller.ex b/lib/pleroma/web/admin_api/admin_api_controller.ex
index bcdb4ba37..2c67d9cda 100644
--- a/lib/pleroma/web/admin_api/admin_api_controller.ex
+++ b/lib/pleroma/web/admin_api/admin_api_controller.ex
@@ -45,21 +45,29 @@ def right_add(conn, %{"permission_group" => permission_group, "nickname" => nick
user = User.get_by_nickname(nickname)
info =
- user.info
+ %{}
|> Map.put("is_" <> permission_group, true)
- cng = User.info_changeset(user, %{info: info})
+ info_cng = User.Info.admin_api_update(user.info, info)
+
+ cng =
+ Ecto.Changeset.change(user)
+ |> Ecto.Changeset.put_embed(:info, info_cng)
+
{:ok, user} = User.update_and_set_cache(cng)
conn
- |> json(user.info)
+ |> json(info)
end
def right_get(conn, %{"nickname" => nickname}) do
user = User.get_by_nickname(nickname)
conn
- |> json(user.info)
+ |> json(%{
+ is_moderator: user.info.is_moderator,
+ is_admin: user.info.is_admin
+ })
end
def right_add(conn, _) do
@@ -84,14 +92,19 @@ def right_delete(
user = User.get_by_nickname(nickname)
info =
- user.info
+ %{}
|> Map.put("is_" <> permission_group, false)
- cng = User.info_changeset(user, %{info: info})
+ info_cng = User.Info.admin_api_update(user.info, info)
+
+ cng =
+ Ecto.Changeset.change(user)
+ |> Ecto.Changeset.put_embed(:info, info_cng)
+
{:ok, user} = User.update_and_set_cache(cng)
conn
- |> json(user.info)
+ |> json(info)
end
end
diff --git a/lib/pleroma/web/common_api/common_api.ex b/lib/pleroma/web/common_api/common_api.ex
index 77e4dbbd7..e3385310f 100644
--- a/lib/pleroma/web/common_api/common_api.ex
+++ b/lib/pleroma/web/common_api/common_api.ex
@@ -8,7 +8,7 @@ defmodule Pleroma.Web.CommonAPI do
def delete(activity_id, user) do
with %Activity{data: %{"object" => %{"id" => object_id}}} <- Repo.get(Activity, activity_id),
%Object{} = object <- Object.normalize(object_id),
- true <- user.info["is_moderator"] || user.ap_id == object.data["actor"],
+ true <- user.info.is_moderator || user.ap_id == object.data["actor"],
{:ok, delete} <- ActivityPub.delete(object) do
{:ok, delete}
end
@@ -135,12 +135,13 @@ def post(user, %{"status" => status} = data) do
end
end
+ # Updates the emojis for a user based on their profile
def update(user) do
user =
with emoji <- emoji_from_profile(user),
- source_data <- (user.info["source_data"] || %{}) |> Map.put("tag", emoji),
- new_info <- Map.put(user.info, "source_data", source_data),
- change <- User.info_changeset(user, %{info: new_info}),
+ source_data <- (user.info.source_data || %{}) |> Map.put("tag", emoji),
+ info_cng <- Pleroma.User.Info.set_source_data(user.info, source_data),
+ change <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
{:ok, user} <- User.update_and_set_cache(change) do
user
else
diff --git a/lib/pleroma/web/endpoint.ex b/lib/pleroma/web/endpoint.ex
index 8728c908b..c5f9d51d9 100644
--- a/lib/pleroma/web/endpoint.ex
+++ b/lib/pleroma/web/endpoint.ex
@@ -12,7 +12,7 @@ defmodule Pleroma.Web.Endpoint do
plug(CORSPlug)
plug(Pleroma.Plugs.HTTPSecurityPlug)
- plug(Plug.Static, at: "/media", from: Pleroma.Uploaders.Local.upload_path(), gzip: false)
+ plug(Pleroma.Plugs.UploadedMedia)
plug(
Plug.Static,
diff --git a/lib/pleroma/web/federator/retry_queue.ex b/lib/pleroma/web/federator/retry_queue.ex
index 06c094f26..13df40c80 100644
--- a/lib/pleroma/web/federator/retry_queue.ex
+++ b/lib/pleroma/web/federator/retry_queue.ex
@@ -17,7 +17,15 @@ def init(args) do
end
def start_link() do
- GenServer.start_link(__MODULE__, %{delivered: 0, dropped: 0}, name: __MODULE__)
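+ # Only start the queue when it is enabled in the configuration,
+ # e.g. (a sketch): config :pleroma, :retry_queue, enabled: true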
+ enabled = Pleroma.Config.get([:retry_queue, :enabled], false)
+
+ if enabled do
+ Logger.info("Starting retry queue")
+ GenServer.start_link(__MODULE__, %{delivered: 0, dropped: 0}, name: __MODULE__)
+ else
+ Logger.info("Retry queue disabled")
+ :ignore
+ end
end
def enqueue(data, transport, retries \\ 0) do
diff --git a/lib/pleroma/web/http_signatures/http_signatures.ex b/lib/pleroma/web/http_signatures/http_signatures.ex
index 5e42a871b..0e54debd5 100644
--- a/lib/pleroma/web/http_signatures/http_signatures.ex
+++ b/lib/pleroma/web/http_signatures/http_signatures.ex
@@ -65,7 +65,7 @@ def build_signing_string(headers, used_headers) do
end
def sign(user, headers) do
- with {:ok, %{info: %{"keys" => keys}}} <- Pleroma.Web.WebFinger.ensure_keys_present(user),
+ with {:ok, %{info: %{keys: keys}}} <- Pleroma.Web.WebFinger.ensure_keys_present(user),
{:ok, private_key, _} = Pleroma.Web.Salmon.keys_from_pem(keys) do
sigstring = build_signing_string(headers, Map.keys(headers))
diff --git a/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex b/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex
index f5b23971e..dd6b0a361 100644
--- a/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex
+++ b/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex
@@ -32,75 +32,55 @@ def create_app(conn, params) do
end
end
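+ # Copies params[params_field] into map under map_field when the key is present,
+ # optionally transforming the value; a transform returning :error leaves the
+ # map untouched.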
+ defp add_if_present(
+ map,
+ params,
+ params_field,
+ map_field,
+ value_function \\ fn x -> {:ok, x} end
+ ) do
+ if Map.has_key?(params, params_field) do
+ case value_function.(params[params_field]) do
+ {:ok, new_value} -> Map.put(map, map_field, new_value)
+ :error -> map
+ end
+ else
+ map
+ end
+ end
+
def update_credentials(%{assigns: %{user: user}} = conn, params) do
original_user = user
- avatar_upload_limit =
- Application.get_env(:pleroma, :instance)
- |> Keyword.fetch(:avatar_upload_limit)
-
- banner_upload_limit =
- Application.get_env(:pleroma, :instance)
- |> Keyword.fetch(:banner_upload_limit)
-
- params =
- if bio = params["note"] do
- Map.put(params, "bio", bio)
- else
- params
- end
-
- params =
- if name = params["display_name"] do
- Map.put(params, "name", name)
- else
- params
- end
-
- user =
- if avatar = params["avatar"] do
- with %Plug.Upload{} <- avatar,
- {:ok, object} <- ActivityPub.upload(avatar, avatar_upload_limit),
- change = Ecto.Changeset.change(user, %{avatar: object.data}),
- {:ok, user} = User.update_and_set_cache(change) do
- user
+ user_params =
+ %{}
+ |> add_if_present(params, "display_name", :name)
+ |> add_if_present(params, "note", :bio, fn value -> {:ok, User.parse_bio(value)} end)
+ |> add_if_present(params, "avatar", :avatar, fn value ->
+ with %Plug.Upload{} <- value,
+ {:ok, object} <- ActivityPub.upload(value, type: :avatar) do
+ {:ok, object.data}
else
- _e -> user
+ _ -> :error
end
- else
- user
- end
+ end)
- user =
- if banner = params["header"] do
- with %Plug.Upload{} <- banner,
- {:ok, object} <- ActivityPub.upload(banner, banner_upload_limit),
- new_info <- Map.put(user.info, "banner", object.data),
- change <- User.info_changeset(user, %{info: new_info}),
- {:ok, user} <- User.update_and_set_cache(change) do
- user
+ info_params =
+ %{}
+ |> add_if_present(params, "locked", :locked, fn value -> {:ok, value == "true"} end)
+ |> add_if_present(params, "header", :banner, fn value ->
+ with %Plug.Upload{} <- value,
+ {:ok, object} <- ActivityPub.upload(value, type: :banner) do
+ {:ok, object.data}
else
- _e -> user
+ _ -> :error
end
- else
- user
- end
+ end)
- user =
- if locked = params["locked"] do
- with locked <- locked == "true",
- new_info <- Map.put(user.info, "locked", locked),
- change <- User.info_changeset(user, %{info: new_info}),
- {:ok, user} <- User.update_and_set_cache(change) do
- user
- else
- _e -> user
- end
- else
- user
- end
+ info_cng = User.Info.mastodon_profile_update(user.info, info_params)
- with changeset <- User.update_changeset(user, params),
+ with changeset <- User.update_changeset(user, user_params),
+ changeset <- Ecto.Changeset.put_embed(changeset, :info, info_cng),
{:ok, user} <- User.update_and_set_cache(changeset) do
if original_user != user do
CommonAPI.update(user)
@@ -453,40 +433,31 @@ def relationships(%{assigns: %{user: user}} = conn, _) do
|> json([])
end
- def update_media(%{assigns: %{user: _}} = conn, data) do
+ def update_media(%{assigns: %{user: user}} = conn, data) do
with %Object{} = object <- Repo.get(Object, data["id"]),
+ true <- Object.authorize_mutation(object, user),
true <- is_binary(data["description"]),
description <- data["description"] do
new_data = %{object.data | "name" => description}
- change = Object.change(object, %{data: new_data})
- {:ok, _} = Repo.update(change)
+ {:ok, _} =
+ object
+ |> Object.change(%{data: new_data})
+ |> Repo.update()
- data =
- new_data
- |> Map.put("id", object.id)
-
- render(conn, StatusView, "attachment.json", %{attachment: data})
+ attachment_data = Map.put(new_data, "id", object.id)
+ render(conn, StatusView, "attachment.json", %{attachment: attachment_data})
end
end
- def upload(%{assigns: %{user: _}} = conn, %{"file" => file} = data) do
- with {:ok, object} <- ActivityPub.upload(file) do
- objdata =
- if Map.has_key?(data, "description") do
- Map.put(object.data, "name", data["description"])
- else
- object.data
- end
-
- change = Object.change(object, %{data: objdata})
- {:ok, object} = Repo.update(change)
-
- objdata =
- objdata
- |> Map.put("id", object.id)
-
- render(conn, StatusView, "attachment.json", %{attachment: objdata})
+ def upload(%{assigns: %{user: user}} = conn, %{"file" => file} = data) do
+ with {:ok, object} <-
+ ActivityPub.upload(file,
+ actor: User.ap_id(user),
+ description: Map.get(data, "description")
+ ) do
+ attachment_data = Map.put(object.data, "id", object.id)
+ render(conn, StatusView, "attachment.json", %{attachment: attachment_data})
end
end
@@ -529,17 +500,30 @@ def hashtag_timeline(%{assigns: %{user: user}} = conn, params) do
|> render(StatusView, "index.json", %{activities: activities, for: user, as: :activity})
end
- # TODO: Pagination
- def followers(conn, %{"id" => id}) do
+ def followers(%{assigns: %{user: for_user}} = conn, %{"id" => id}) do
with %User{} = user <- Repo.get(User, id),
{:ok, followers} <- User.get_followers(user) do
+ followers =
+ cond do
+ for_user && user.id == for_user.id -> followers
+ user.info.hide_network -> []
+ true -> followers
+ end
+
render(conn, AccountView, "accounts.json", %{users: followers, as: :user})
end
end
- def following(conn, %{"id" => id}) do
+ def following(%{assigns: %{user: for_user}} = conn, %{"id" => id}) do
with %User{} = user <- Repo.get(User, id),
{:ok, followers} <- User.get_friends(user) do
+ followers =
+ cond do
+ for_user && user.id == for_user.id -> followers
+ user.info.hide_network -> []
+ true -> followers
+ end
+
render(conn, AccountView, "accounts.json", %{users: followers, as: :user})
end
end
@@ -659,7 +643,7 @@ def unblock(%{assigns: %{user: blocker}} = conn, %{"id" => id}) do
# TODO: Use proper query
def blocks(%{assigns: %{user: user}} = conn, _) do
- with blocked_users <- user.info["blocks"] || [],
+ with blocked_users <- user.info.blocks || [],
accounts <- Enum.map(blocked_users, fn ap_id -> User.get_cached_by_ap_id(ap_id) end) do
res = AccountView.render("accounts.json", users: accounts, for: user, as: :user)
json(conn, res)
@@ -667,7 +651,7 @@ def blocks(%{assigns: %{user: user}} = conn, _) do
end
def domain_blocks(%{assigns: %{user: %{info: info}}} = conn, _) do
- json(conn, info["domain_blocks"] || [])
+ json(conn, info.domain_blocks || [])
end
def block_domain(%{assigns: %{user: blocker}} = conn, %{"domain" => domain}) do
@@ -915,11 +899,11 @@ def index(%{assigns: %{user: user}} = conn, _params) do
max_toot_chars: limit
},
rights: %{
- delete_others_notice: !!user.info["is_moderator"]
+ delete_others_notice: !!user.info.is_moderator
},
compose: %{
me: "#{user.id}",
- default_privacy: user.info["default_scope"] || "public",
+ default_privacy: user.info.default_scope,
default_sensitive: false
},
media_attachments: %{
@@ -939,7 +923,7 @@ def index(%{assigns: %{user: user}} = conn, _params) do
]
},
settings:
- Map.get(user.info, "settings") ||
+ Map.get(user.info, :settings) ||
%{
onboarded: true,
home: %{
@@ -1224,7 +1208,7 @@ def suggestions(%{assigns: %{user: user}} = conn, _) do
user = user.nickname
url = String.replace(api, "{{host}}", host) |> String.replace("{{user}}", user)
- with {:ok, %{status_code: 200, body: body}} <-
+ with {:ok, %{status: 200, body: body}} <-
@httpoison.get(url, [], timeout: timeout, recv_timeout: timeout),
{:ok, data} <- Jason.decode(body) do
data2 =
diff --git a/lib/pleroma/web/mastodon_api/views/account_view.ex b/lib/pleroma/web/mastodon_api/views/account_view.ex
index b68845e16..bcfa8836e 100644
--- a/lib/pleroma/web/mastodon_api/views/account_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/account_view.ex
@@ -14,10 +14,10 @@ def render("account.json", %{user: user} = opts) do
image = User.avatar_url(user) |> MediaProxy.url()
header = User.banner_url(user) |> MediaProxy.url()
user_info = User.user_info(user)
- bot = (user.info["source_data"]["type"] || "Person") in ["Application", "Service"]
+ bot = (user.info.source_data["type"] || "Person") in ["Application", "Service"]
emojis =
- (user.info["source_data"]["tag"] || [])
+ (user.info.source_data["tag"] || [])
|> Enum.filter(fn %{"type" => t} -> t == "Emoji" end)
|> Enum.map(fn %{"icon" => %{"url" => url}, "name" => name} ->
%{
@@ -29,7 +29,7 @@ def render("account.json", %{user: user} = opts) do
end)
fields =
- (user.info["source_data"]["attachment"] || [])
+ (user.info.source_data["attachment"] || [])
|> Enum.filter(fn %{"type" => t} -> t == "PropertyValue" end)
|> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
diff --git a/lib/pleroma/web/media_proxy/controller.ex b/lib/pleroma/web/media_proxy/controller.ex
index bb257c262..e1b87e026 100644
--- a/lib/pleroma/web/media_proxy/controller.ex
+++ b/lib/pleroma/web/media_proxy/controller.ex
@@ -1,135 +1,34 @@
defmodule Pleroma.Web.MediaProxy.MediaProxyController do
use Pleroma.Web, :controller
- require Logger
+ alias Pleroma.{Web.MediaProxy, ReverseProxy}
- @httpoison Application.get_env(:pleroma, :httpoison)
+ @default_proxy_opts [max_body_length: 25 * 1_048_576]
- @max_body_length 25 * 1_048_576
-
- @cache_control %{
- default: "public, max-age=1209600",
- error: "public, must-revalidate, max-age=160"
- }
-
- # Content-types that will not be returned as content-disposition attachments
- # Override with :media_proxy, :safe_content_types in the configuration
- @safe_content_types [
- "image/gif",
- "image/jpeg",
- "image/jpg",
- "image/png",
- "image/svg+xml",
- "audio/mpeg",
- "audio/mp3",
- "video/webm",
- "video/mp4"
- ]
-
- def remote(conn, params = %{"sig" => sig, "url" => url}) do
- config = Application.get_env(:pleroma, :media_proxy, [])
-
- with true <- Keyword.get(config, :enabled, false),
- {:ok, url} <- Pleroma.Web.MediaProxy.decode_url(sig, url),
+ def remote(conn, params = %{"sig" => sig64, "url" => url64}) do
+ with config <- Pleroma.Config.get([:media_proxy], []),
+ true <- Keyword.get(config, :enabled, false),
+ {:ok, url} <- MediaProxy.decode_url(sig64, url64),
filename <- Path.basename(URI.parse(url).path),
- true <-
- if(Map.get(params, "filename"),
- do: filename == Path.basename(conn.request_path),
- else: true
- ),
- {:ok, content_type, body} <- proxy_request(url),
- safe_content_type <-
- Enum.member?(
- Keyword.get(config, :safe_content_types, @safe_content_types),
- content_type
- ) do
- conn
- |> put_resp_content_type(content_type)
- |> set_cache_header(:default)
- |> put_resp_header(
- "content-security-policy",
- "default-src 'none'; style-src 'unsafe-inline'; media-src data:; img-src 'self' data:"
- )
- |> put_resp_header("x-xss-protection", "1; mode=block")
- |> put_resp_header("x-content-type-options", "nosniff")
- |> put_attachement_header(safe_content_type, filename)
- |> send_resp(200, body)
+ :ok <- filename_matches(Map.has_key?(params, "filename"), conn.request_path, url) do
+ ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
else
false ->
- send_error(conn, 404)
+ send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))
{:error, :invalid_signature} ->
- send_error(conn, 403)
+ send_resp(conn, 403, Plug.Conn.Status.reason_phrase(403))
- {:error, {:http, _, url}} ->
- redirect_or_error(conn, url, Keyword.get(config, :redirect_on_failure, true))
+ {:wrong_filename, filename} ->
+ redirect(conn, external: MediaProxy.build_url(sig64, url64, filename))
end
end
- defp proxy_request(link) do
- headers = [
- {"user-agent",
- "Pleroma/MediaProxy; #{Pleroma.Web.base_url()} <#{
- Application.get_env(:pleroma, :instance)[:email]
- }>"}
- ]
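+ # When the proxied path ends in a filename, it has to match the filename of the
+ # decoded URL; on mismatch the caller redirects to the canonical proxy URL.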
+ def filename_matches(has_filename, path, url) do
+ filename = MediaProxy.filename(url)
- options =
- @httpoison.process_request_options([:insecure, {:follow_redirect, true}]) ++
- [{:pool, :default}]
-
- with {:ok, 200, headers, client} <- :hackney.request(:get, link, headers, "", options),
- headers = Enum.into(headers, Map.new()),
- {:ok, body} <- proxy_request_body(client),
- content_type <- proxy_request_content_type(headers, body) do
- {:ok, content_type, body}
- else
- {:ok, status, _, _} ->
- Logger.warn("MediaProxy: request failed, status #{status}, link: #{link}")
- {:error, {:http, :bad_status, link}}
-
- {:error, error} ->
- Logger.warn("MediaProxy: request failed, error #{inspect(error)}, link: #{link}")
- {:error, {:http, error, link}}
+ cond do
+ has_filename && filename && Path.basename(path) != filename -> {:wrong_filename, filename}
+ true -> :ok
end
end
-
- defp set_cache_header(conn, key) do
- Plug.Conn.put_resp_header(conn, "cache-control", @cache_control[key])
- end
-
- defp redirect_or_error(conn, url, true), do: redirect(conn, external: url)
- defp redirect_or_error(conn, url, _), do: send_error(conn, 502, "Media proxy error: " <> url)
-
- defp send_error(conn, code, body \\ "") do
- conn
- |> set_cache_header(:error)
- |> send_resp(code, body)
- end
-
- defp proxy_request_body(client), do: proxy_request_body(client, <<>>)
-
- defp proxy_request_body(client, body) when byte_size(body) < @max_body_length do
- case :hackney.stream_body(client) do
- {:ok, data} -> proxy_request_body(client, <<body::binary, data::binary>>)
- :done -> {:ok, body}
- {:error, reason} -> {:error, reason}
- end
- end
-
- defp proxy_request_body(client, _) do
- :hackney.close(client)
- {:error, :body_too_large}
- end
-
- # TODO: the body is passed here as well because some hosts do not provide a content-type.
- # At some point we may want to use magic numbers to discover the content-type and reply a proper one.
- defp proxy_request_content_type(headers, _body) do
- headers["Content-Type"] || headers["content-type"] || "application/octet-stream"
- end
-
- defp put_attachement_header(conn, true, _), do: conn
-
- defp put_attachement_header(conn, false, filename) do
- put_resp_header(conn, "content-disposition", "attachment; filename='#{filename}'")
- end
end
diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex
index 0fc0a07b2..28aacb0b1 100644
--- a/lib/pleroma/web/media_proxy/media_proxy.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy.ex
@@ -17,10 +17,8 @@ def url(url) do
base64 = Base.url_encode64(url, @base64_opts)
sig = :crypto.hmac(:sha, secret, base64)
sig64 = sig |> Base.url_encode64(@base64_opts)
- filename = if path = URI.parse(url).path, do: "/" <> Path.basename(path), else: ""
- Keyword.get(config, :base_url, Pleroma.Web.base_url()) <>
- "/proxy/#{sig64}/#{base64}#{filename}"
+ build_url(sig64, base64, filename(url))
end
end
@@ -35,4 +33,20 @@ def decode_url(sig, url) do
{:error, :invalid_signature}
end
end
+
+ def filename(url_or_path) do
+ if path = URI.parse(url_or_path).path, do: Path.basename(path)
+ end
+
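+ # Builds a proxy URL of the form {base_url}/proxy/{sig}/{url}[/{filename}],
+ # omitting the filename segment when none is given.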
+ def build_url(sig_base64, url_base64, filename \\ nil) do
+ [
+ Pleroma.Config.get([:media_proxy, :base_url], Pleroma.Web.base_url()),
+ "proxy",
+ sig_base64,
+ url_base64,
+ filename
+ ]
+ |> Enum.filter(fn value -> value end)
+ |> Path.join()
+ end
end
diff --git a/lib/pleroma/web/ostatus/ostatus.ex b/lib/pleroma/web/ostatus/ostatus.ex
index 1d0019d3b..67df354db 100644
--- a/lib/pleroma/web/ostatus/ostatus.ex
+++ b/lib/pleroma/web/ostatus/ostatus.ex
@@ -226,25 +226,21 @@ def maybe_update_ostatus(doc, user) do
old_data = %{
avatar: user.avatar,
bio: user.bio,
- name: user.name,
- info: user.info
+ name: user.name
}
with false <- user.local,
avatar <- make_avatar_object(doc),
bio <- string_from_xpath("//author[1]/summary", doc),
name <- string_from_xpath("//author[1]/poco:displayName", doc),
- info <-
- Map.put(user.info, "banner", make_avatar_object(doc, "header") || user.info["banner"]),
new_data <- %{
avatar: avatar || old_data.avatar,
name: name || old_data.name,
- bio: bio || old_data.bio,
- info: info || old_data.info
+ bio: bio || old_data.bio
},
false <- new_data == old_data do
change = Ecto.Changeset.change(user, new_data)
- Repo.update(change)
+ User.update_and_set_cache(change)
else
_ ->
{:ok, user}
@@ -350,13 +346,15 @@ def get_atom_url(body) do
def fetch_activity_from_atom_url(url) do
with true <- String.starts_with?(url, "http"),
- {:ok, %{body: body, status_code: code}} when code in 200..299 <-
+ {:ok, %{body: body, status: code}} when code in 200..299 <-
@httpoison.get(
url,
[Accept: "application/atom+xml"],
follow_redirect: true,
- timeout: 10000,
- recv_timeout: 20000
+ adapter: [
+ timeout: 10000,
+ recv_timeout: 20000
+ ]
) do
Logger.debug("Got document from #{url}, handling...")
handle_incoming(body)
diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex
index d8af81992..75d965c6d 100644
--- a/lib/pleroma/web/router.ex
+++ b/lib/pleroma/web/router.ex
@@ -302,12 +302,6 @@ defmodule Pleroma.Web.Router do
post("/account/update_profile_banner", TwitterAPI.Controller, :update_banner)
post("/qvitter/update_background_image", TwitterAPI.Controller, :update_background)
- post(
- "/account/most_recent_notification",
- TwitterAPI.Controller,
- :update_most_recent_notification
- )
-
get("/statuses/home_timeline", TwitterAPI.Controller, :friends_timeline)
get("/statuses/friends_timeline", TwitterAPI.Controller, :friends_timeline)
get("/statuses/mentions", TwitterAPI.Controller, :mentions_timeline)
@@ -335,6 +329,7 @@ defmodule Pleroma.Web.Router do
post("/statusnet/media/upload", TwitterAPI.Controller, :upload)
post("/media/upload", TwitterAPI.Controller, :upload_json)
+ post("/media/metadata/create", TwitterAPI.Controller, :update_media)
post("/favorites/create/:id", TwitterAPI.Controller, :favorite)
post("/favorites/create", TwitterAPI.Controller, :favorite)
diff --git a/lib/pleroma/web/salmon/salmon.ex b/lib/pleroma/web/salmon/salmon.ex
index 562ec3d9c..97251c05e 100644
--- a/lib/pleroma/web/salmon/salmon.ex
+++ b/lib/pleroma/web/salmon/salmon.ex
@@ -157,15 +157,17 @@ def remote_users(%{data: %{"to" => to} = data}) do
|> Enum.filter(fn user -> user && !user.local end)
end
- defp send_to_user(%{info: %{"salmon" => salmon}}, feed, poster) do
- with {:ok, %{status_code: code}} <-
+ defp send_to_user(%{info: %{salmon: salmon}}, feed, poster) do
+ with {:ok, %{status: code}} <-
poster.(
salmon,
feed,
[{"Content-Type", "application/magic-envelope+xml"}],
- timeout: 10000,
- recv_timeout: 20000,
- hackney: [pool: :default]
+ adapter: [
+ timeout: 10000,
+ recv_timeout: 20000,
+ pool: :default
+ ]
) do
Logger.debug(fn -> "Pushed to #{salmon}, code #{code}" end)
else
@@ -185,7 +187,7 @@ defp send_to_user(_, _, _), do: nil
]
def publish(user, activity, poster \\ &@httpoison.post/4)
- def publish(%{info: %{"keys" => keys}} = user, %{data: %{"type" => type}} = activity, poster)
+ def publish(%{info: %{keys: keys}} = user, %{data: %{"type" => type}} = activity, poster)
when type in @supported_activities do
feed = ActivityRepresenter.to_simple_form(activity, user, true)
diff --git a/lib/pleroma/web/streamer.ex b/lib/pleroma/web/streamer.ex
index 306598157..99b8b7063 100644
--- a/lib/pleroma/web/streamer.ex
+++ b/lib/pleroma/web/streamer.ex
@@ -188,7 +188,7 @@ def push_to_socket(topics, topic, %Activity{data: %{"type" => "Announce"}} = ite
# Get the current user so we have up-to-date blocks etc.
if socket.assigns[:user] do
user = User.get_cached_by_ap_id(socket.assigns[:user].ap_id)
- blocks = user.info["blocks"] || []
+ blocks = user.info.blocks || []
parent = Object.normalize(item.data["object"])
@@ -206,7 +206,7 @@ def push_to_socket(topics, topic, item) do
# Get the current user so we have up-to-date blocks etc.
if socket.assigns[:user] do
user = User.get_cached_by_ap_id(socket.assigns[:user].ap_id)
- blocks = user.info["blocks"] || []
+ blocks = user.info.blocks || []
unless item.actor in blocks do
send(socket.transport_pid, {:text, represent_update(item, user)})
diff --git a/lib/pleroma/web/twitter_api/twitter_api.ex b/lib/pleroma/web/twitter_api/twitter_api.ex
index 5bfb83b1e..9c485d965 100644
--- a/lib/pleroma/web/twitter_api/twitter_api.ex
+++ b/lib/pleroma/web/twitter_api/twitter_api.ex
@@ -93,11 +93,11 @@ def unfav(%User{} = user, ap_id_or_id) do
end
end
- def upload(%Plug.Upload{} = file, format \\ "xml") do
- {:ok, object} = ActivityPub.upload(file)
+ def upload(%Plug.Upload{} = file, %User{} = user, format \\ "xml") do
+ {:ok, object} = ActivityPub.upload(file, actor: User.ap_id(user))
url = List.first(object.data["url"])
- href = url["href"] |> MediaProxy.url()
+ href = url["href"]
type = url["mediaType"]
case format do
@@ -132,7 +132,7 @@ def register_user(params) do
params = %{
nickname: params["nickname"],
name: params["fullname"],
- bio: params["bio"],
+ bio: User.parse_bio(params["bio"]),
email: params["email"],
password: params["password"],
password_confirmation: params["confirm"]
@@ -148,7 +148,7 @@ def register_user(params) do
cond do
registrations_open || (!is_nil(token) && !token.used) ->
- changeset = User.register_changeset(%User{}, params)
+ changeset = User.register_changeset(%User{info: %{}}, params)
with {:ok, user} <- Repo.insert(changeset) do
!registrations_open && UserInviteToken.mark_as_used(token.token)
@@ -279,14 +279,6 @@ def conversation_id_to_context(id) do
def get_external_profile(for_user, uri) do
with %User{} = user <- User.get_or_fetch(uri) do
- spawn(fn ->
- with url <- user.info["topic"],
- {:ok, %{body: body}} <-
- @httpoison.get(url, [], follow_redirect: true, timeout: 10000, recv_timeout: 20000) do
- OStatus.handle_incoming(body)
- end
- end)
-
{:ok, UserView.render("show.json", %{user: user, for: for_user})}
else
_e ->
diff --git a/lib/pleroma/web/twitter_api/twitter_api_controller.ex b/lib/pleroma/web/twitter_api/twitter_api_controller.ex
index cd0e2121c..0ccf937b0 100644
--- a/lib/pleroma/web/twitter_api/twitter_api_controller.ex
+++ b/lib/pleroma/web/twitter_api/twitter_api_controller.ex
@@ -4,7 +4,7 @@ defmodule Pleroma.Web.TwitterAPI.Controller do
alias Pleroma.Web.TwitterAPI.{TwitterAPI, UserView, ActivityView, NotificationView}
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.CommonAPI.Utils, as: CommonUtils
- alias Pleroma.{Repo, Activity, User, Notification}
+ alias Pleroma.{Repo, Activity, Object, User, Notification}
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Utils
alias Ecto.Changeset
@@ -226,16 +226,51 @@ def fetch_conversation(%{assigns: %{user: user}} = conn, %{"id" => id}) do
end
end
- def upload(conn, %{"media" => media}) do
- response = TwitterAPI.upload(media)
+ @doc """
+ Updates the metadata of an uploaded media object.
+ Derived from [Twitter API endpoint](https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-metadata-create).
+ """
+ def update_media(%{assigns: %{user: user}} = conn, %{"media_id" => id} = data) do
+ object = Repo.get(Object, id)
+ description = get_in(data, ["alt_text", "text"]) || data["name"] || data["description"]
+
+ {conn, status, response_body} =
+ cond do
+ !object ->
+ {halt(conn), :not_found, ""}
+
+ !Object.authorize_mutation(object, user) ->
+ {halt(conn), :forbidden, "You can only update your own uploads."}
+
+ !is_binary(description) ->
+ {conn, :not_modified, ""}
+
+ true ->
+ new_data = Map.put(object.data, "name", description)
+
+ {:ok, _} =
+ object
+ |> Object.change(%{data: new_data})
+ |> Repo.update()
+
+ {conn, :no_content, ""}
+ end
+
+ conn
+ |> put_status(status)
+ |> json(response_body)
+ end
+
+ def upload(%{assigns: %{user: user}} = conn, %{"media" => media}) do
+ response = TwitterAPI.upload(media, user)
conn
|> put_resp_content_type("application/atom+xml")
|> send_resp(200, response)
end
- def upload_json(conn, %{"media" => media}) do
- response = TwitterAPI.upload(media, "json")
+ def upload_json(%{assigns: %{user: user}} = conn, %{"media" => media}) do
+ response = TwitterAPI.upload(media, user, "json")
conn
|> json_reply(200, response)
@@ -290,11 +325,7 @@ def register(conn, params) do
end
def update_avatar(%{assigns: %{user: user}} = conn, params) do
- upload_limit =
- Application.get_env(:pleroma, :instance)
- |> Keyword.fetch(:avatar_upload_limit)
-
- {:ok, object} = ActivityPub.upload(params, upload_limit)
+ {:ok, object} = ActivityPub.upload(params, type: :avatar)
change = Changeset.change(user, %{avatar: object.data})
{:ok, user} = User.update_and_set_cache(change)
CommonAPI.update(user)
@@ -303,14 +334,11 @@ def update_avatar(%{assigns: %{user: user}} = conn, params) do
end
def update_banner(%{assigns: %{user: user}} = conn, params) do
- upload_limit =
- Application.get_env(:pleroma, :instance)
- |> Keyword.fetch(:banner_upload_limit)
-
- with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, upload_limit),
- new_info <- Map.put(user.info, "banner", object.data),
- change <- User.info_changeset(user, %{info: new_info}),
- {:ok, user} <- User.update_and_set_cache(change) do
+ with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, type: :banner),
+ new_info <- %{"banner" => object.data},
+ info_cng <- User.Info.profile_update(user.info, new_info),
+ changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
+ {:ok, user} <- User.update_and_set_cache(changeset) do
CommonAPI.update(user)
%{"url" => [%{"href" => href} | _]} = object.data
response = %{url: href} |> Jason.encode!()
@@ -321,14 +349,11 @@ def update_banner(%{assigns: %{user: user}} = conn, params) do
end
def update_background(%{assigns: %{user: user}} = conn, params) do
- upload_limit =
- Application.get_env(:pleroma, :instance)
- |> Keyword.fetch(:background_upload_limit)
-
- with {:ok, object} <- ActivityPub.upload(params, upload_limit),
- new_info <- Map.put(user.info, "background", object.data),
- change <- User.info_changeset(user, %{info: new_info}),
- {:ok, _user} <- User.update_and_set_cache(change) do
+ with {:ok, object} <- ActivityPub.upload(params, type: :background),
+ new_info <- %{"background" => object.data},
+ info_cng <- User.Info.profile_update(user.info, new_info),
+ changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
+ {:ok, _user} <- User.update_and_set_cache(changeset) do
%{"url" => [%{"href" => href} | _]} = object.data
response = %{url: href} |> Jason.encode!()
@@ -350,32 +375,32 @@ def external_profile(%{assigns: %{user: current_user}} = conn, %{"profileurl" =>
end
end
- def update_most_recent_notification(%{assigns: %{user: user}} = conn, %{"id" => id}) do
- with id when is_number(id) <- String.to_integer(id),
- info <- user.info,
- mrn <- max(id, user.info["most_recent_notification"] || 0),
- updated_info <- Map.put(info, "most_recent_notification", mrn),
- changeset <- User.info_changeset(user, %{info: updated_info}),
- {:ok, _user} <- User.update_and_set_cache(changeset) do
- conn
- |> json_reply(200, Jason.encode!(mrn))
- else
- _e -> bad_request_reply(conn, "Can't update.")
- end
- end
-
- def followers(conn, params) do
- with {:ok, user} <- TwitterAPI.get_user(conn.assigns[:user], params),
+ def followers(%{assigns: %{user: for_user}} = conn, params) do
+ with {:ok, user} <- TwitterAPI.get_user(for_user, params),
{:ok, followers} <- User.get_followers(user) do
+ followers =
+ cond do
+ for_user && user.id == for_user.id -> followers
+ user.info.hide_network -> []
+ true -> followers
+ end
+
render(conn, UserView, "index.json", %{users: followers, for: conn.assigns[:user]})
else
_e -> bad_request_reply(conn, "Can't get followers")
end
end
- def friends(conn, params) do
+ def friends(%{assigns: %{user: for_user}} = conn, params) do
with {:ok, user} <- TwitterAPI.get_user(conn.assigns[:user], params),
{:ok, friends} <- User.get_friends(user) do
+ friends =
+ cond do
+ for_user && user.id == for_user.id -> friends
+ user.info.hide_network -> []
+ true -> friends
+ end
+
render(conn, UserView, "index.json", %{users: friends, for: conn.assigns[:user]})
else
_e -> bad_request_reply(conn, "Can't get friends")
@@ -451,67 +476,41 @@ def raw_empty_array(conn, _params) do
json(conn, [])
end
+ defp build_info_cng(user, params) do
+ info_params =
+ ["no_rich_text", "locked", "hide_network"]
+ |> Enum.reduce(%{}, fn key, res ->
+ if value = params[key] do
+ Map.put(res, key, value == "true")
+ else
+ res
+ end
+ end)
+
+ info_params =
+ if value = params["default_scope"] do
+ Map.put(info_params, "default_scope", value)
+ else
+ info_params
+ end
+
+ User.Info.profile_update(user.info, info_params)
+ end
+
+ defp parse_profile_bio(user, params) do
+ if bio = params["description"] do
+ Map.put(params, "bio", User.parse_bio(bio, user))
+ else
+ params
+ end
+ end
+
def update_profile(%{assigns: %{user: user}} = conn, params) do
- params =
- if bio = params["description"] do
- mentions = Formatter.parse_mentions(bio)
- tags = Formatter.parse_tags(bio)
-
- emoji =
- (user.info["source_data"]["tag"] || [])
- |> Enum.filter(fn %{"type" => t} -> t == "Emoji" end)
- |> Enum.map(fn %{"icon" => %{"url" => url}, "name" => name} ->
- {String.trim(name, ":"), url}
- end)
-
- bio_html = CommonUtils.format_input(bio, mentions, tags, "text/plain")
- Map.put(params, "bio", bio_html |> Formatter.emojify(emoji))
- else
- params
- end
-
- user =
- if locked = params["locked"] do
- with locked <- locked == "true",
- new_info <- Map.put(user.info, "locked", locked),
- change <- User.info_changeset(user, %{info: new_info}),
- {:ok, user} <- User.update_and_set_cache(change) do
- user
- else
- _e -> user
- end
- else
- user
- end
-
- user =
- if no_rich_text = params["no_rich_text"] do
- with no_rich_text <- no_rich_text == "true",
- new_info <- Map.put(user.info, "no_rich_text", no_rich_text),
- change <- User.info_changeset(user, %{info: new_info}),
- {:ok, user} <- User.update_and_set_cache(change) do
- user
- else
- _e -> user
- end
- else
- user
- end
-
- user =
- if default_scope = params["default_scope"] do
- with new_info <- Map.put(user.info, "default_scope", default_scope),
- change <- User.info_changeset(user, %{info: new_info}),
- {:ok, user} <- User.update_and_set_cache(change) do
- user
- else
- _e -> user
- end
- else
- user
- end
+ params = parse_profile_bio(user, params)
+ info_cng = build_info_cng(user, params)
with changeset <- User.update_changeset(user, params),
+ changeset <- Ecto.Changeset.put_embed(changeset, :info, info_cng),
{:ok, user} <- User.update_and_set_cache(changeset) do
CommonAPI.update(user)
render(conn, UserView, "user.json", %{user: user, for: user})
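
A usage sketch for the media-metadata endpoint added above, written ConnCase-style (the route path assumes the usual `/api` scope; the media id and description are invented). `update_media/2` reads the text from `alt_text.text`, falling back to `name`/`description`, stores it as the object's `"name"`, and replies 204 when the caller owns the upload:

    post(conn, "/api/media/metadata/create", %{
      "media_id" => "123",
      "alt_text" => %{"text" => "a red bicycle leaning against a wall"}
    })
    # => %Plug.Conn{status: 204, ...}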
diff --git a/lib/pleroma/web/twitter_api/views/user_view.ex b/lib/pleroma/web/twitter_api/views/user_view.ex
index a100a1127..b78024ed7 100644
--- a/lib/pleroma/web/twitter_api/views/user_view.ex
+++ b/lib/pleroma/web/twitter_api/views/user_view.ex
@@ -31,7 +31,7 @@ def render("user.json", %{user: user = %User{}} = assigns) do
user_info = User.get_cached_user_info(user)
emoji =
- (user.info["source_data"]["tag"] || [])
+ (user.info.source_data["tag"] || [])
|> Enum.filter(fn %{"type" => t} -> t == "Emoji" end)
|> Enum.map(fn %{"icon" => %{"url" => url}, "name" => name} ->
{String.trim(name, ":"), url}
@@ -40,7 +40,7 @@ def render("user.json", %{user: user = %User{}} = assigns) do
# ``fields`` is an array of mastodon profile field, containing ``{"name": "…", "value": "…"}``.
# For example: [{"name": "Pronoun", "value": "she/her"}, …]
fields =
- (user.info["source_data"]["attachment"] || [])
+ (user.info.source_data["attachment"] || [])
|> Enum.filter(fn %{"type" => t} -> t == "PropertyValue" end)
|> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
@@ -66,17 +66,17 @@ def render("user.json", %{user: user = %User{}} = assigns) do
"profile_image_url_profile_size" => image,
"profile_image_url_original" => image,
"rights" => %{
- "delete_others_notice" => !!user.info["is_moderator"]
+ "delete_others_notice" => !!user.info.is_moderator
},
"screen_name" => user.nickname,
"statuses_count" => user_info[:note_count],
"statusnet_profile_url" => user.ap_id,
"cover_photo" => User.banner_url(user) |> MediaProxy.url(),
- "background_image" => image_url(user.info["background"]) |> MediaProxy.url(),
+ "background_image" => image_url(user.info.background) |> MediaProxy.url(),
"is_local" => user.local,
- "locked" => !!user.info["locked"],
- "default_scope" => user.info["default_scope"] || "public",
- "no_rich_text" => user.info["no_rich_text"] || false,
+ "locked" => user.info.locked,
+ "default_scope" => user.info.default_scope,
+ "no_rich_text" => user.info.no_rich_text,
"fields" => fields
}
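
For context, the transformation performed above, using the example from the inline comment (values illustrative):

    user.info.source_data["attachment"]
    # e.g. [%{"type" => "PropertyValue", "name" => "Pronoun", "value" => "she/her"}]
    # is filtered to "PropertyValue" entries and trimmed to name/value, so:
    # fields == [%{"name" => "Pronoun", "value" => "she/her"}]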
diff --git a/lib/pleroma/web/web_finger/web_finger.ex b/lib/pleroma/web/web_finger/web_finger.ex
index 9f554d286..99c65a6bf 100644
--- a/lib/pleroma/web/web_finger/web_finger.ex
+++ b/lib/pleroma/web/web_finger/web_finger.ex
@@ -45,7 +45,7 @@ def webfinger(resource, fmt) when fmt in ["XML", "JSON"] do
def represent_user(user, "JSON") do
{:ok, user} = ensure_keys_present(user)
- {:ok, _private, public} = Salmon.keys_from_pem(user.info["keys"])
+ {:ok, _private, public} = Salmon.keys_from_pem(user.info.keys)
magic_key = Salmon.encode_key(public)
%{
@@ -83,7 +83,7 @@ def represent_user(user, "JSON") do
def represent_user(user, "XML") do
{:ok, user} = ensure_keys_present(user)
- {:ok, _private, public} = Salmon.keys_from_pem(user.info["keys"])
+ {:ok, _private, public} = Salmon.keys_from_pem(user.info.keys)
magic_key = Salmon.encode_key(public)
{
@@ -113,16 +113,22 @@ def represent_user(user, "XML") do
# This seems a better fit in Salmon
def ensure_keys_present(user) do
- info = user.info || %{}
+ info = user.info
- if info["keys"] do
+ if info.keys do
{:ok, user}
else
{:ok, pem} = Salmon.generate_rsa_pem()
- info = Map.put(info, "keys", pem)
- Ecto.Changeset.change(user, info: info)
- |> User.update_and_set_cache()
+ info_cng =
+ info
+ |> Pleroma.User.Info.set_keys(pem)
+
+ cng =
+ Ecto.Changeset.change(user)
+ |> Ecto.Changeset.put_embed(:info, info_cng)
+
+ User.update_and_set_cache(cng)
end
end
@@ -214,7 +220,7 @@ def get_template_from_xml(body) do
end
def find_lrdd_template(domain) do
- with {:ok, %{status_code: status_code, body: body}} when status_code in 200..299 <-
+ with {:ok, %{status: status, body: body}} when status in 200..299 <-
@httpoison.get("http://#{domain}/.well-known/host-meta", [], follow_redirect: true) do
get_template_from_xml(body)
else
@@ -253,7 +259,7 @@ def finger(account) do
[Accept: "application/xrd+xml,application/jrd+json"],
follow_redirect: true
),
- {:ok, %{status_code: status_code, body: body}} when status_code in 200..299 <- response do
+ {:ok, %{status: status, body: body}} when status in 200..299 <- response do
doc = XML.parse_document(body)
if doc != :error do
diff --git a/lib/pleroma/web/websub/websub.ex b/lib/pleroma/web/websub/websub.ex
index 396dcf045..0761b5475 100644
--- a/lib/pleroma/web/websub/websub.ex
+++ b/lib/pleroma/web/websub/websub.ex
@@ -146,7 +146,7 @@ defp valid_topic(%{"hub.topic" => topic}, user) do
end
def subscribe(subscriber, subscribed, requester \\ &request_subscription/1) do
- topic = subscribed.info["topic"]
+ topic = subscribed.info.topic
# FIXME: Race condition, use transactions
{:ok, subscription} =
with subscription when not is_nil(subscription) <-
@@ -158,7 +158,7 @@ def subscribe(subscriber, subscribed, requester \\ &request_subscription/1) do
_e ->
subscription = %WebsubClientSubscription{
topic: topic,
- hub: subscribed.info["hub"],
+ hub: subscribed.info.hub,
subscribers: [subscriber.ap_id],
state: "requested",
secret: :crypto.strong_rand_bytes(8) |> Base.url_encode64(),
@@ -173,7 +173,7 @@ def subscribe(subscriber, subscribed, requester \\ &request_subscription/1) do
def gather_feed_data(topic, getter \\ &@httpoison.get/1) do
with {:ok, response} <- getter.(topic),
- status_code when status_code in 200..299 <- response.status_code,
+ status when status in 200..299 <- response.status,
body <- response.body,
doc <- XML.parse_document(body),
uri when not is_nil(uri) <- XML.string_from_xpath("/feed/author[1]/uri", doc),
@@ -221,7 +221,7 @@ def request_subscription(websub, poster \\ &@httpoison.post/3, timeout \\ 10_000
task = Task.async(websub_checker)
- with {:ok, %{status_code: 202}} <-
+ with {:ok, %{status: 202}} <-
poster.(websub.hub, {:form, data}, "Content-type": "application/x-www-form-urlencoded"),
{:ok, websub} <- Task.yield(task, timeout) do
{:ok, websub}
@@ -257,7 +257,7 @@ def publish_one(%{xml: xml, topic: topic, callback: callback, secret: secret}) d
signature = sign(secret || "", xml)
Logger.info(fn -> "Pushing #{topic} to #{callback}" end)
- with {:ok, %{status_code: code}} <-
+ with {:ok, %{status: code}} <-
@httpoison.post(
callback,
xml,
@@ -265,9 +265,11 @@ def publish_one(%{xml: xml, topic: topic, callback: callback, secret: secret}) d
{"Content-Type", "application/atom+xml"},
{"X-Hub-Signature", "sha1=#{signature}"}
],
- timeout: 10000,
- recv_timeout: 20000,
- hackney: [pool: :default]
+ adapter: [
+ timeout: 10000,
+ recv_timeout: 20000,
+ pool: :default
+ ]
) do
Logger.info(fn -> "Pushed to #{callback}, code #{code}" end)
{:ok, code}
diff --git a/mix.exs b/mix.exs
index d0a008108..bd9bce766 100644
--- a/mix.exs
+++ b/mix.exs
@@ -56,6 +56,7 @@ defp deps do
{:calendar, "~> 0.17.4"},
{:cachex, "~> 3.0.2"},
{:httpoison, "~> 1.2.0"},
+ {:tesla, "~> 1.2"},
{:jason, "~> 1.0"},
{:mogrify, "~> 0.6.1"},
{:ex_aws, "~> 2.0"},
diff --git a/mix.lock b/mix.lock
index e17111738..ff8e9fdca 100644
--- a/mix.lock
+++ b/mix.lock
@@ -49,6 +49,7 @@
"postgrex": {:hex, :postgrex, "0.13.5", "3d931aba29363e1443da167a4b12f06dcd171103c424de15e5f3fc2ba3e6d9c5", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 1.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm"},
"ranch": {:hex, :ranch, "1.3.2", "e4965a144dc9fbe70e5c077c65e73c57165416a901bd02ea899cfd95aa890986", [:rebar3], [], "hexpm"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.1", "28a4d65b7f59893bc2c7de786dec1e1555bd742d336043fe644ae956c3497fbe", [:make, :rebar], [], "hexpm"},
+ "tesla": {:hex, :tesla, "1.2.1", "864783cc27f71dd8c8969163704752476cec0f3a51eb3b06393b3971dc9733ff", [:mix], [{:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}], "hexpm"},
"trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"tzdata": {:hex, :tzdata, "0.5.17", "50793e3d85af49736701da1a040c415c97dc1caf6464112fd9bd18f425d3053b", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"},
"unicode_util_compat": {:hex, :unicode_util_compat, "0.3.1", "a1f612a7b512638634a603c8f401892afbf99b8ce93a45041f8aaca99cadb85e", [:rebar3], [], "hexpm"},
diff --git a/priv/repo/migrations/20181201104428_add_uuid_extension.exs b/priv/repo/migrations/20181201104428_add_uuid_extension.exs
new file mode 100644
index 000000000..2509e558d
--- /dev/null
+++ b/priv/repo/migrations/20181201104428_add_uuid_extension.exs
@@ -0,0 +1,7 @@
+defmodule Pleroma.Repo.Migrations.AddUUIDExtension do
+ use Ecto.Migration
+
+ def change do
+ execute("create extension if not exists \"uuid-ossp\"")
+ end
+end
diff --git a/priv/repo/migrations/20181201105617_add_uui_ds_to_user_info.exs b/priv/repo/migrations/20181201105617_add_uui_ds_to_user_info.exs
new file mode 100644
index 000000000..9571a1e4d
--- /dev/null
+++ b/priv/repo/migrations/20181201105617_add_uui_ds_to_user_info.exs
@@ -0,0 +1,7 @@
+defmodule Pleroma.Repo.Migrations.AddUUIDsToUserInfo do
+ use Ecto.Migration
+
+ def change do
+ execute("update users set info = jsonb_set(info, '{\"id\"}', to_jsonb(uuid_generate_v4()))")
+ end
+end
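
Roughly what this migration does to each row's `info` jsonb (UUID invented), presumably so the embedded `User.Info` changes elsewhere in this changeset have an id to work with:

    # before: %{"keys" => "...", "banner" => %{...}}
    # after:  %{"id" => "6ba7b810-9dad-11d1-80b4-00c04fd430c8", "keys" => "...", "banner" => %{...}}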
diff --git a/priv/static/index.html b/priv/static/index.html
index 19aa6a5f1..6f9830faa 100644
--- a/priv/static/index.html
+++ b/priv/static/index.html
@@ -1 +1 @@
-Pleroma
\ No newline at end of file
+Pleroma
\ No newline at end of file
diff --git a/priv/static/static/config.json b/priv/static/static/config.json
index 69a707415..67d84579f 100644
--- a/priv/static/static/config.json
+++ b/priv/static/static/config.json
@@ -11,6 +11,8 @@
"scopeOptionsEnabled": false,
"formattingOptionsEnabled": false,
"collapseMessageWithSubject": false,
+ "scopeCopy": false,
+ "subjectLineBehavior": "email",
"hidePostStats": false,
"hideUserStats": false,
"loginMethod": "password"
diff --git a/priv/static/static/js/app.065638d22ade92dea420.js b/priv/static/static/js/app.065638d22ade92dea420.js
deleted file mode 100644
index f4f243e27..000000000
Binary files a/priv/static/static/js/app.065638d22ade92dea420.js and /dev/null differ
diff --git a/priv/static/static/js/app.065638d22ade92dea420.js.map b/priv/static/static/js/app.065638d22ade92dea420.js.map
deleted file mode 100644
index 0a42ac51c..000000000
Binary files a/priv/static/static/js/app.065638d22ade92dea420.js.map and /dev/null differ
diff --git a/priv/static/static/js/app.3f7c9aaedc6b87fa9653.js b/priv/static/static/js/app.3f7c9aaedc6b87fa9653.js
new file mode 100644
index 000000000..ddc374bde
Binary files /dev/null and b/priv/static/static/js/app.3f7c9aaedc6b87fa9653.js differ
diff --git a/priv/static/static/js/app.3f7c9aaedc6b87fa9653.js.map b/priv/static/static/js/app.3f7c9aaedc6b87fa9653.js.map
new file mode 100644
index 000000000..ce305990e
Binary files /dev/null and b/priv/static/static/js/app.3f7c9aaedc6b87fa9653.js.map differ
diff --git a/priv/static/static/js/manifest.18df0da570d88ba76ec5.js b/priv/static/static/js/manifest.18df0da570d88ba76ec5.js
new file mode 100644
index 000000000..719c394b9
Binary files /dev/null and b/priv/static/static/js/manifest.18df0da570d88ba76ec5.js differ
diff --git a/priv/static/static/js/manifest.34667c2817916147413f.js.map b/priv/static/static/js/manifest.18df0da570d88ba76ec5.js.map
similarity index 92%
rename from priv/static/static/js/manifest.34667c2817916147413f.js.map
rename to priv/static/static/js/manifest.18df0da570d88ba76ec5.js.map
index 92289fa87..3034b6973 100644
Binary files a/priv/static/static/js/manifest.34667c2817916147413f.js.map and b/priv/static/static/js/manifest.18df0da570d88ba76ec5.js.map differ
diff --git a/priv/static/static/js/manifest.34667c2817916147413f.js b/priv/static/static/js/manifest.34667c2817916147413f.js
deleted file mode 100644
index 24c4176ae..000000000
Binary files a/priv/static/static/js/manifest.34667c2817916147413f.js and /dev/null differ
diff --git a/priv/static/static/js/vendor.0e895ca116d5ba12f2b6.js b/priv/static/static/js/vendor.0e895ca116d5ba12f2b6.js
new file mode 100644
index 000000000..b03b3b896
Binary files /dev/null and b/priv/static/static/js/vendor.0e895ca116d5ba12f2b6.js differ
diff --git a/priv/static/static/js/vendor.0e895ca116d5ba12f2b6.js.map b/priv/static/static/js/vendor.0e895ca116d5ba12f2b6.js.map
new file mode 100644
index 000000000..aa72e3f95
Binary files /dev/null and b/priv/static/static/js/vendor.0e895ca116d5ba12f2b6.js.map differ
diff --git a/priv/static/static/js/vendor.32c621c7157f34c20923.js b/priv/static/static/js/vendor.32c621c7157f34c20923.js
deleted file mode 100644
index b4e333f55..000000000
Binary files a/priv/static/static/js/vendor.32c621c7157f34c20923.js and /dev/null differ
diff --git a/priv/static/static/js/vendor.32c621c7157f34c20923.js.map b/priv/static/static/js/vendor.32c621c7157f34c20923.js.map
deleted file mode 100644
index a91276b2d..000000000
Binary files a/priv/static/static/js/vendor.32c621c7157f34c20923.js.map and /dev/null differ
diff --git a/test/config_test.exs b/test/config_test.exs
index 0124544c8..837cbb30c 100644
--- a/test/config_test.exs
+++ b/test/config_test.exs
@@ -53,4 +53,19 @@ test "put/2 with a list of keys" do
assert Pleroma.Config.get([:instance, :config_test]) == true
assert Pleroma.Config.get([:instance, :config_nested_test, :x]) == true
end
+
+ test "delete/1 with a key" do
+ Pleroma.Config.put([:delete_me], :delete_me)
+ Pleroma.Config.delete([:delete_me])
+ assert Pleroma.Config.get([:delete_me]) == nil
+ end
+
+ test "delete/2 with a list of keys" do
+ Pleroma.Config.put([:delete_me], hello: "world", world: "Hello")
+ Pleroma.Config.delete([:delete_me, :world])
+ assert Pleroma.Config.get([:delete_me]) == [hello: "world"]
+ Pleroma.Config.put([:delete_me, :delete_me], hello: "world", world: "Hello")
+ Pleroma.Config.delete([:delete_me, :delete_me, :world])
+ assert Pleroma.Config.get([:delete_me, :delete_me]) == [hello: "world"]
+ end
end
diff --git a/test/fixtures/httpoison_mock/framatube.org_host_meta b/test/fixtures/httpoison_mock/framatube.org_host_meta
new file mode 100644
index 000000000..91516ff6d
--- /dev/null
+++ b/test/fixtures/httpoison_mock/framatube.org_host_meta
@@ -0,0 +1,2 @@
+
+framatube.orgResource Descriptor
diff --git a/test/fixtures/httpoison_mock/gerzilla.de_host_meta b/test/fixtures/httpoison_mock/gerzilla.de_host_meta
new file mode 100644
index 000000000..fae8f37eb
--- /dev/null
+++ b/test/fixtures/httpoison_mock/gerzilla.de_host_meta
@@ -0,0 +1,10 @@
+
+
+ gerzilla.de
+
+
+
+
+
diff --git a/test/fixtures/httpoison_mock/gnusocial.de_host_meta b/test/fixtures/httpoison_mock/gnusocial.de_host_meta
new file mode 100644
index 000000000..a4affb102
--- /dev/null
+++ b/test/fixtures/httpoison_mock/gnusocial.de_host_meta
@@ -0,0 +1,2 @@
+
+gnusocial.deResource Descriptor
diff --git a/test/formatter_test.exs b/test/formatter_test.exs
index 13084baa7..abb9d882c 100644
--- a/test/formatter_test.exs
+++ b/test/formatter_test.exs
@@ -5,12 +5,17 @@ defmodule Pleroma.FormatterTest do
import Pleroma.Factory
+ setup_all do
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
describe ".add_hashtag_links" do
test "turns hashtags into links" do
text = "I love #cofe and #2hu"
expected_text =
- "I love #cofe and #2hu"
+ "I love #cofe and #2hu"
tags = Formatter.parse_tags(text)
@@ -110,7 +115,7 @@ test "gives a replacement for user links" do
archaeme =
insert(:user, %{
nickname: "archaeme",
- info: %{"source_data" => %{"url" => "https://archeme/@archaeme"}}
+ info: %Pleroma.User.Info{source_data: %{"url" => "https://archeme/@archaeme"}}
})
archaeme_remote = insert(:user, %{nickname: "archaeme@archae.me"})
@@ -123,11 +128,11 @@ test "gives a replacement for user links" do
Enum.each(subs, fn {uuid, _} -> assert String.contains?(text, uuid) end)
expected_text =
- "@gsimg According to @archaeme, that is @daggsy. Also hello @archaeme"
+ "@gsimg According to @archaeme, that is @daggsy. Also hello @archaeme"
assert expected_text == Formatter.finalize({subs, text})
end
@@ -145,7 +150,7 @@ test "gives a replacement for user links when the user is using Osada" do
Enum.each(subs, fn {uuid, _} -> assert String.contains?(text, uuid) end)
expected_text =
- "@mike test"
+ "@mike test"
assert expected_text == Formatter.finalize({subs, text})
end
@@ -161,7 +166,9 @@ test "gives a replacement for single-character local nicknames" do
assert length(subs) == 1
Enum.each(subs, fn {uuid, _} -> assert String.contains?(text, uuid) end)
- expected_text = "@o hi"
+ expected_text =
+ "@o hi"
+
assert expected_text == Formatter.finalize({subs, text})
end
diff --git a/test/http_test.exs b/test/http_test.exs
new file mode 100644
index 000000000..62f3ccb30
--- /dev/null
+++ b/test/http_test.exs
@@ -0,0 +1,55 @@
+defmodule Pleroma.HTTPTest do
+ use Pleroma.DataCase
+ import Tesla.Mock
+
+ setup do
+ mock(fn
+ %{
+ method: :get,
+ url: "http://example.com/hello",
+ headers: [{"content-type", "application/json"}]
+ } ->
+ json(%{"my" => "data"})
+
+ %{method: :get, url: "http://example.com/hello"} ->
+ %Tesla.Env{status: 200, body: "hello"}
+
+ %{method: :post, url: "http://example.com/world"} ->
+ %Tesla.Env{status: 200, body: "world"}
+ end)
+
+ :ok
+ end
+
+ describe "get/1" do
+ test "returns successfully result" do
+ assert Pleroma.HTTP.get("http://example.com/hello") == {
+ :ok,
+ %Tesla.Env{status: 200, body: "hello"}
+ }
+ end
+ end
+
+ describe "get/2 (with headers)" do
+ test "returns successfully result for json content-type" do
+ assert Pleroma.HTTP.get("http://example.com/hello", [{"content-type", "application/json"}]) ==
+ {
+ :ok,
+ %Tesla.Env{
+ status: 200,
+ body: "{\"my\":\"data\"}",
+ headers: [{"content-type", "application/json"}]
+ }
+ }
+ end
+ end
+
+ describe "post/2" do
+ test "returns successfully result" do
+ assert Pleroma.HTTP.post("http://example.com/world", "") == {
+ :ok,
+ %Tesla.Env{status: 200, body: "world"}
+ }
+ end
+ end
+end
diff --git a/test/media_proxy_test.exs b/test/media_proxy_test.exs
index c69ed7ea4..d71f9f13a 100644
--- a/test/media_proxy_test.exs
+++ b/test/media_proxy_test.exs
@@ -82,6 +82,23 @@ test "validates signature" do
[_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/")
assert decode_url(sig, base64) == {:error, :invalid_signature}
end
+
+ test "uses the configured base_url" do
+ base_url = Pleroma.Config.get([:media_proxy, :base_url])
+
+ if base_url do
+ on_exit(fn ->
+ Pleroma.Config.put([:media_proxy, :base_url], base_url)
+ end)
+ end
+
+ Pleroma.Config.put([:media_proxy, :base_url], "https://cache.pleroma.social")
+
+ url = "https://pleroma.soykaf.com/static/logo.png"
+ encoded = url(url)
+
+ assert String.starts_with?(encoded, Pleroma.Config.get([:media_proxy, :base_url]))
+ end
end
describe "when disabled" do
diff --git a/test/plugs/oauth_plug_test.exs b/test/plugs/oauth_plug_test.exs
new file mode 100644
index 000000000..4dd12f207
--- /dev/null
+++ b/test/plugs/oauth_plug_test.exs
@@ -0,0 +1,56 @@
+defmodule Pleroma.Plugs.OAuthPlugTest do
+ use Pleroma.Web.ConnCase, async: true
+
+ alias Pleroma.Plugs.OAuthPlug
+ import Pleroma.Factory
+
+ @session_opts [
+ store: :cookie,
+ key: "_test",
+ signing_salt: "cooldude"
+ ]
+
+ setup %{conn: conn} do
+ user = insert(:user)
+ {:ok, %{token: token}} = Pleroma.Web.OAuth.Token.create_token(insert(:oauth_app), user)
+ %{user: user, token: token, conn: conn}
+ end
+
+ test "with valid token(uppercase), it assigns the user", %{conn: conn} = opts do
+ conn =
+ conn
+ |> put_req_header("authorization", "BEARER #{opts[:token]}")
+ |> OAuthPlug.call(%{})
+
+ assert conn.assigns[:user] == opts[:user]
+ end
+
+ test "with valid token(downcase), it assigns the user", %{conn: conn} = opts do
+ conn =
+ conn
+ |> put_req_header("authorization", "bearer #{opts[:token]}")
+ |> OAuthPlug.call(%{})
+
+ assert conn.assigns[:user] == opts[:user]
+ end
+
+ test "with invalid token, it not assigns the user", %{conn: conn} do
+ conn =
+ conn
+ |> put_req_header("authorization", "bearer TTTTT")
+ |> OAuthPlug.call(%{})
+
+ refute conn.assigns[:user]
+ end
+
+ test "when token is missed but token in session, it assigns the user", %{conn: conn} = opts do
+ conn =
+ conn
+ |> Plug.Session.call(Plug.Session.init(@session_opts))
+ |> fetch_session()
+ |> put_session(:oauth_token, opts[:token])
+ |> OAuthPlug.call(%{})
+
+ assert conn.assigns[:user] == opts[:user]
+ end
+end
diff --git a/test/plugs/user_enabled_plug_test.exs b/test/plugs/user_enabled_plug_test.exs
index ee4f72ccf..eeb167933 100644
--- a/test/plugs/user_enabled_plug_test.exs
+++ b/test/plugs/user_enabled_plug_test.exs
@@ -13,7 +13,7 @@ test "doesn't do anything if the user isn't set", %{conn: conn} do
end
test "with a user that is deactivated, it removes that user", %{conn: conn} do
- user = insert(:user, info: %{"deactivated" => true})
+ user = insert(:user, info: %{deactivated: true})
conn =
conn
diff --git a/test/plugs/user_is_admin_plug_test.exs b/test/plugs/user_is_admin_plug_test.exs
index ddf9eb139..031b2f466 100644
--- a/test/plugs/user_is_admin_plug_test.exs
+++ b/test/plugs/user_is_admin_plug_test.exs
@@ -5,7 +5,7 @@ defmodule Pleroma.Plugs.UserIsAdminPlugTest do
import Pleroma.Factory
test "accepts a user that is admin", %{conn: conn} do
- user = insert(:user, info: %{"is_admin" => true})
+ user = insert(:user, info: %{is_admin: true})
conn =
build_conn()
diff --git a/test/support/data_case.ex b/test/support/data_case.ex
index 8eff0fd94..9dde6b5e5 100644
--- a/test/support/data_case.ex
+++ b/test/support/data_case.ex
@@ -36,6 +36,23 @@ defmodule Pleroma.DataCase do
:ok
end
+ def ensure_local_uploader(_context) do
+ uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
+ filters = Pleroma.Config.get([Pleroma.Upload, :filters])
+
+ unless uploader == Pleroma.Uploaders.Local || filters != [] do
+ Pleroma.Config.put([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
+ Pleroma.Config.put([Pleroma.Upload, :filters], [])
+
+ on_exit(fn ->
+ Pleroma.Config.put([Pleroma.Upload, :uploader], uploader)
+ Pleroma.Config.put([Pleroma.Upload, :filters], filters)
+ end)
+ end
+
+ :ok
+ end
+
@doc """
A helper that transform changeset errors to a map of messages.
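
Since `ensure_local_uploader/1` takes the ExUnit context, it is presumably intended to be registered as a setup callback from upload-related test modules, along these lines:

    # Sketch: force the local uploader (and no filters) for the tests in this module.
    use Pleroma.DataCase
    setup :ensure_local_uploader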
diff --git a/test/support/factory.ex b/test/support/factory.ex
index 4f5060abf..2889d8977 100644
--- a/test/support/factory.ex
+++ b/test/support/factory.ex
@@ -7,7 +7,8 @@ def user_factory do
email: sequence(:email, &"user#{&1}@example.com"),
nickname: sequence(:nickname, &"nick#{&1}"),
password_hash: Comeonin.Pbkdf2.hashpwsalt("test"),
- bio: sequence(:bio, &"Tester Number #{&1}")
+ bio: sequence(:bio, &"Tester Number #{&1}"),
+ info: %{}
}
%{
diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex
new file mode 100644
index 000000000..391342ad7
--- /dev/null
+++ b/test/support/http_request_mock.ex
@@ -0,0 +1,675 @@
+defmodule HttpRequestMock do
+ require Logger
+
+ def request(
+ %Tesla.Env{
+ url: url,
+ method: method,
+ headers: headers,
+ query: query,
+ body: body
+ } = _env
+ ) do
+ with {:ok, res} <- apply(__MODULE__, method, [url, query, body, headers]) do
+ res
+ else
+ {_, r} = error ->
+ # Logger.warn(r)
+ error
+ end
+ end
+
+ # GET Requests
+ #
+ def get(url, query \\ [], body \\ [], headers \\ [])
+
+ def get("https://osada.macgirvin.com/channel/mike", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!("test/fixtures/httpoison_mock/https___osada.macgirvin.com_channel_mike.json")
+ }}
+ end
+
+ def get(
+ "https://osada.macgirvin.com/.well-known/webfinger?resource=acct:mike@osada.macgirvin.com",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/mike@osada.macgirvin.com.json")
+ }}
+ end
+
+ def get(
+ "https://social.heldscal.la/.well-known/webfinger?resource=https://social.heldscal.la/user/29191",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/https___social.heldscal.la_user_29191.xml")
+ }}
+ end
+
+ def get("https://pawoo.net/users/pekorino.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.atom")
+ }}
+ end
+
+ def get(
+ "https://pawoo.net/.well-known/webfinger?resource=acct:https://pawoo.net/users/pekorino",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.xml")
+ }}
+ end
+
+ def get(
+ "https://social.stopwatchingus-heidelberg.de/api/statuses/user_timeline/18330.atom",
+ _,
+ _,
+ _
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/atarifrosch_feed.xml")
+ }}
+ end
+
+ def get(
+ "https://social.stopwatchingus-heidelberg.de/.well-known/webfinger?resource=acct:https://social.stopwatchingus-heidelberg.de/user/18330",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/atarifrosch_webfinger.xml")
+ }}
+ end
+
+ def get("https://mamot.fr/users/Skruyb.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/https___mamot.fr_users_Skruyb.atom")
+ }}
+ end
+
+ def get(
+ "https://mamot.fr/.well-known/webfinger?resource=acct:https://mamot.fr/users/Skruyb",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/skruyb@mamot.fr.atom")
+ }}
+ end
+
+ def get(
+ "https://social.heldscal.la/.well-known/webfinger?resource=nonexistant@social.heldscal.la",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/nonexistant@social.heldscal.la.xml")
+ }}
+ end
+
+ def get("https://squeet.me/xrd/?uri=lain@squeet.me", _, _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/lain_squeet.me_webfinger.xml")
+ }}
+ end
+
+ def get("https://mst3k.interlinked.me/users/luciferMysticus", _, _,
+ Accept: "application/activity+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/lucifermysticus.json")
+ }}
+ end
+
+ def get("https://prismo.news/@mxb", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/https___prismo.news__mxb.json")
+ }}
+ end
+
+ def get("https://hubzilla.example.org/channel/kaniini", _, _,
+ Accept: "application/activity+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/kaniini@hubzilla.example.org.json")
+ }}
+ end
+
+ def get("https://niu.moe/users/rye", _, _, Accept: "application/activity+json") do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/rye.json")
+ }}
+ end
+
+ def get("http://mastodon.example.org/users/admin/statuses/100787282858396771", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!(
+ "test/fixtures/httpoison_mock/http___mastodon.example.org_users_admin_status_1234.json"
+ )
+ }}
+ end
+
+ def get("https://puckipedia.com/", _, _, Accept: "application/activity+json") do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/puckipedia.com.json")
+ }}
+ end
+
+ def get("https://peertube.moe/accounts/7even", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/7even.json")
+ }}
+ end
+
+ def get("https://peertube.moe/videos/watch/df5f464b-be8d-46fb-ad81-2d4c2d1630e3", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/peertube.moe-vid.json")
+ }}
+ end
+
+ def get("https://baptiste.gelez.xyz/@/BaptisteGelez", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/baptiste.gelex.xyz-user.json")
+ }}
+ end
+
+ def get("https://baptiste.gelez.xyz/~/PlumeDevelopment/this-month-in-plume-june-2018/", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/baptiste.gelex.xyz-article.json")
+ }}
+ end
+
+ def get("http://mastodon.example.org/users/admin", _, _, Accept: "application/activity+json") do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/admin@mastdon.example.org.json")
+ }}
+ end
+
+ def get("http://mastodon.example.org/@admin/99541947525187367", _, _,
+ Accept: "application/activity+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/mastodon-note-object.json")
+ }}
+ end
+
+ def get("https://shitposter.club/notice/7369654", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/7369654.html")
+ }}
+ end
+
+ def get("https://mstdn.io/users/mayuutann", _, _, Accept: "application/activity+json") do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/mayumayu.json")
+ }}
+ end
+
+ def get("https://mstdn.io/users/mayuutann/statuses/99568293732299394", _, _,
+ Accept: "application/activity+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/mayumayupost.json")
+ }}
+ end
+
+ def get("https://pleroma.soykaf.com/users/lain/feed.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!(
+ "test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain_feed.atom.xml"
+ )
+ }}
+ end
+
+ def get(url, _, _, Accept: "application/xrd+xml,application/jrd+json")
+ when url in [
+ "https://pleroma.soykaf.com/.well-known/webfinger?resource=acct:https://pleroma.soykaf.com/users/lain",
+ "https://pleroma.soykaf.com/.well-known/webfinger?resource=https://pleroma.soykaf.com/users/lain"
+ ] do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain.xml")
+ }}
+ end
+
+ def get("https://shitposter.club/api/statuses/user_timeline/1.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!(
+ "test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_user_timeline_1.atom.xml"
+ )
+ }}
+ end
+
+ def get(
+ "https://shitposter.club/.well-known/webfinger?resource=https://shitposter.club/user/1",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/https___shitposter.club_user_1.xml")
+ }}
+ end
+
+ def get("https://shitposter.club/notice/2827873", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!("test/fixtures/httpoison_mock/https___shitposter.club_notice_2827873.html")
+ }}
+ end
+
+ def get("https://shitposter.club/api/statuses/show/2827873.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!(
+ "test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_show_2827873.atom.xml"
+ )
+ }}
+ end
+
+ def get("https://testing.pleroma.lol/objects/b319022a-4946-44c5-9de9-34801f95507b", _, _, _) do
+ {:ok, %Tesla.Env{status: 200}}
+ end
+
+ def get("https://shitposter.club/api/statuses/user_timeline/5381.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/spc_5381.atom")
+ }}
+ end
+
+ def get(
+ "https://shitposter.club/.well-known/webfinger?resource=https://shitposter.club/user/5381",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/spc_5381_xrd.xml")
+ }}
+ end
+
+ def get("http://shitposter.club/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/shitposter.club_host_meta")
+ }}
+ end
+
+ def get("https://shitposter.club/api/statuses/show/7369654.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/7369654.atom")
+ }}
+ end
+
+ def get("https://shitposter.club/notice/4027863", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/7369654.html")
+ }}
+ end
+
+ def get("https://social.sakamoto.gq/users/eal/feed.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/sakamoto_eal_feed.atom")
+ }}
+ end
+
+ def get("http://social.sakamoto.gq/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/social.sakamoto.gq_host_meta")
+ }}
+ end
+
+ def get(
+ "https://social.sakamoto.gq/.well-known/webfinger?resource=https://social.sakamoto.gq/users/eal",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/eal_sakamoto.xml")
+ }}
+ end
+
+ def get("https://social.sakamoto.gq/objects/0ccc1a2c-66b0-4305-b23a-7f7f2b040056", _, _,
+ Accept: "application/atom+xml"
+ ) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/httpoison_mock/sakamoto.atom")}}
+ end
+
+ def get("http://mastodon.social/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/mastodon.social_host_meta")
+ }}
+ end
+
+ def get(
+ "https://mastodon.social/.well-known/webfinger?resource=https://mastodon.social/users/lambadalambda",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!(
+ "test/fixtures/httpoison_mock/https___mastodon.social_users_lambadalambda.xml"
+ )
+ }}
+ end
+
+ def get("http://gs.example.org/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/gs.example.org_host_meta")
+ }}
+ end
+
+ def get(
+ "http://gs.example.org/.well-known/webfinger?resource=http://gs.example.org:4040/index.php/user/1",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!(
+ "test/fixtures/httpoison_mock/http___gs.example.org_4040_index.php_user_1.xml"
+ )
+ }}
+ end
+
+ def get("http://gs.example.org/index.php/api/statuses/user_timeline/1.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!(
+ "test/fixtures/httpoison_mock/http__gs.example.org_index.php_api_statuses_user_timeline_1.atom.xml"
+ )
+ }}
+ end
+
+ def get("https://social.heldscal.la/api/statuses/user_timeline/29191.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!(
+ "test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_29191.atom.xml"
+ )
+ }}
+ end
+
+ def get("http://squeet.me/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{status: 200, body: File.read!("test/fixtures/httpoison_mock/squeet.me_host_meta")}}
+ end
+
+ def get("https://squeet.me/xrd?uri=lain@squeet.me", _, _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/lain_squeet.me_webfinger.xml")
+ }}
+ end
+
+ def get(
+ "https://social.heldscal.la/.well-known/webfinger?resource=shp@social.heldscal.la",
+ _,
+ _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/shp@social.heldscal.la.xml")
+ }}
+ end
+
+ def get("http://framatube.org/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/framatube.org_host_meta")
+ }}
+ end
+
+ def get("http://framatube.org/main/xrd?uri=framasoft@framatube.org", _, _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ headers: [{"content-type", "application/json"}],
+ body: File.read!("test/fixtures/httpoison_mock/framasoft@framatube.org.json")
+ }}
+ end
+
+ def get("http://gnusocial.de/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/gnusocial.de_host_meta")
+ }}
+ end
+
+ def get("http://gnusocial.de/main/xrd?uri=winterdienst@gnusocial.de", _, _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/winterdienst_webfinger.json")
+ }}
+ end
+
+ def get("http://status.alpicola.com/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/status.alpicola.com_host_meta")
+ }}
+ end
+
+ def get("http://macgirvin.com/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/macgirvin.com_host_meta")
+ }}
+ end
+
+ def get("http://gerzilla.de/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/gerzilla.de_host_meta")
+ }}
+ end
+
+ def get("https://gerzilla.de/xrd/?uri=kaniini@gerzilla.de", _, _,
+ Accept: "application/xrd+xml,application/jrd+json"
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ headers: [{"content-type", "application/json"}],
+ body: File.read!("test/fixtures/httpoison_mock/kaniini@gerzilla.de.json")
+ }}
+ end
+
+ def get("https://social.heldscal.la/api/statuses/user_timeline/23211.atom", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!(
+ "test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_23211.atom.xml"
+ )
+ }}
+ end
+
+ def get(
+ "https://social.heldscal.la/.well-known/webfinger?resource=https://social.heldscal.la/user/23211",
+ _,
+ _,
+ _
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/https___social.heldscal.la_user_23211.xml")
+ }}
+ end
+
+ def get("http://social.heldscal.la/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/social.heldscal.la_host_meta")
+ }}
+ end
+
+ def get("https://social.heldscal.la/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/httpoison_mock/social.heldscal.la_host_meta")
+ }}
+ end
+
+ def get("https://mastodon.social/users/lambadalambda.atom", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/lambadalambda.atom")}}
+ end
+
+ def get("https://social.heldscal.la/user/23211", _, _, Accept: "application/activity+json") do
+ {:ok, Tesla.Mock.json(%{"id" => "https://social.heldscal.la/user/23211"}, status: 200)}
+ end
+
+ def get(url, query, body, headers) do
+ {:error,
+ "Not implemented the mock response for get #{inspect(url)}, #{query}, #{inspect(body)}, #{
+ inspect(headers)
+ }"}
+ end
+
+ # POST Requests
+ #
+
+ def post(url, query \\ [], body \\ [], headers \\ [])
+
+ def post("http://example.org/needs_refresh", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: ""
+ }}
+ end
+
+ def post(url, _query, _body, _headers) do
+ {:error, "Not implemented the mock response for post #{inspect(url)}"}
+ end
+end
diff --git a/test/support/httpoison_mock.ex b/test/support/httpoison_mock.ex
deleted file mode 100644
index 0be09b6ce..000000000
--- a/test/support/httpoison_mock.ex
+++ /dev/null
@@ -1,881 +0,0 @@
-defmodule HTTPoisonMock do
- alias HTTPoison.Response
-
- def get(url, body \\ [], headers \\ [])
-
- def get("https://prismo.news/@mxb", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___prismo.news__mxb.json")
- }}
- end
-
- def get("https://osada.macgirvin.com/channel/mike", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!("test/fixtures/httpoison_mock/https___osada.macgirvin.com_channel_mike.json")
- }}
- end
-
- def get(
- "https://osada.macgirvin.com/.well-known/webfinger?resource=acct:mike@osada.macgirvin.com",
- _,
- _
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/mike@osada.macgirvin.com.json")
- }}
- end
-
- def get("https://info.pleroma.site/activity.json", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https__info.pleroma.site_activity.json")
- }}
- end
-
- def get("https://info.pleroma.site/activity2.json", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https__info.pleroma.site_activity2.json")
- }}
- end
-
- def get("https://info.pleroma.site/activity3.json", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https__info.pleroma.site_activity3.json")
- }}
- end
-
- def get("https://info.pleroma.site/activity4.json", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https__info.pleroma.site_activity4.json")
- }}
- end
-
- def get("https://info.pleroma.site/actor.json", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___info.pleroma.site_actor.json")
- }}
- end
-
- def get("https://puckipedia.com/", [Accept: "application/activity+json"], _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/puckipedia.com.json")
- }}
- end
-
- def get(
- "https://gerzilla.de/.well-known/webfinger?resource=acct:kaniini@gerzilla.de",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/kaniini@gerzilla.de.json")
- }}
- end
-
- def get(
- "https://framatube.org/.well-known/webfinger?resource=acct:framasoft@framatube.org",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/framasoft@framatube.org.json")
- }}
- end
-
- def get(
- "https://gnusocial.de/.well-known/webfinger?resource=acct:winterdienst@gnusocial.de",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/winterdienst_webfinger.json")
- }}
- end
-
- def get(
- "https://social.heldscal.la/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "nonexistant@social.heldscal.la"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 500,
- body: File.read!("test/fixtures/httpoison_mock/nonexistant@social.heldscal.la.xml")
- }}
- end
-
- def get(
- "https://social.heldscal.la/.well-known/webfinger?resource=shp@social.heldscal.la",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/shp@social.heldscal.la.xml")
- }}
- end
-
- def get(
- "https://social.heldscal.la/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "shp@social.heldscal.la"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/shp@social.heldscal.la.xml")
- }}
- end
-
- def get(
- "https://social.heldscal.la/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "https://social.heldscal.la/user/23211"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___social.heldscal.la_user_23211.xml")
- }}
- end
-
- def get(
- "https://social.heldscal.la/.well-known/webfinger?resource=https://social.heldscal.la/user/23211",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___social.heldscal.la_user_23211.xml")
- }}
- end
-
- def get(
- "https://social.heldscal.la/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "https://social.heldscal.la/user/29191"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___social.heldscal.la_user_29191.xml")
- }}
- end
-
- def get(
- "https://social.heldscal.la/.well-known/webfinger?resource=https://social.heldscal.la/user/29191",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___social.heldscal.la_user_29191.xml")
- }}
- end
-
- def get(
- "https://mastodon.social/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "https://mastodon.social/users/lambadalambda"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/https___mastodon.social_users_lambadalambda.xml"
- )
- }}
- end
-
- def get(
- "https://mastodon.social/.well-known/webfinger?resource=https://mastodon.social/users/lambadalambda",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/https___mastodon.social_users_lambadalambda.xml"
- )
- }}
- end
-
- def get(
- "https://shitposter.club/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "https://shitposter.club/user/1"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___shitposter.club_user_1.xml")
- }}
- end
-
- def get(
- "https://shitposter.club/.well-known/webfinger?resource=https://shitposter.club/user/1",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___shitposter.club_user_1.xml")
- }}
- end
-
- def get(
- "https://shitposter.club/.well-known/webfinger?resource=https://shitposter.club/user/5381",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/spc_5381_xrd.xml")
- }}
- end
-
- def get(
- "http://gs.example.org/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "http://gs.example.org:4040/index.php/user/1"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/http___gs.example.org_4040_index.php_user_1.xml"
- )
- }}
- end
-
- def get(
- "http://gs.example.org/.well-known/webfinger?resource=http://gs.example.org:4040/index.php/user/1",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/http___gs.example.org_4040_index.php_user_1.xml"
- )
- }}
- end
-
- def get(
- "https://social.stopwatchingus-heidelberg.de/.well-known/webfinger?resource=https://social.stopwatchingus-heidelberg.de/user/18330",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/atarifrosch_webfinger.xml")
- }}
- end
-
- def get(
- "https://pleroma.soykaf.com/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "https://pleroma.soykaf.com/users/lain"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain.xml")
- }}
- end
-
- def get(
- "https://pleroma.soykaf.com/.well-known/webfinger?resource=https://pleroma.soykaf.com/users/lain",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain.xml")
- }}
- end
-
- def get("https://social.heldscal.la/api/statuses/user_timeline/29191.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_29191.atom.xml"
- )
- }}
- end
-
- def get("https://shitposter.club/api/statuses/user_timeline/5381.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/spc_5381.atom")
- }}
- end
-
- def get("https://social.heldscal.la/api/statuses/user_timeline/23211.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_23211.atom.xml"
- )
- }}
- end
-
- def get("https://mastodon.social/users/lambadalambda.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/https___mastodon.social_users_lambadalambda.atom"
- )
- }}
- end
-
- def get(
- "https://social.stopwatchingus-heidelberg.de/api/statuses/user_timeline/18330.atom",
- _body,
- _headers
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/atarifrosch_feed.xml")
- }}
- end
-
- def get("https://pleroma.soykaf.com/users/lain/feed.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain_feed.atom.xml"
- )
- }}
- end
-
- def get("https://social.sakamoto.gq/users/eal/feed.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/sakamoto_eal_feed.atom")
- }}
- end
-
- def get("http://gs.example.org/index.php/api/statuses/user_timeline/1.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/http__gs.example.org_index.php_api_statuses_user_timeline_1.atom.xml"
- )
- }}
- end
-
- def get("https://shitposter.club/notice/2827873", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!("test/fixtures/httpoison_mock/https___shitposter.club_notice_2827873.html")
- }}
- end
-
- def get("https://shitposter.club/api/statuses/show/2827873.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_show_2827873.atom.xml"
- )
- }}
- end
-
- def get("https://shitposter.club/api/statuses/user_timeline/1.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_user_timeline_1.atom.xml"
- )
- }}
- end
-
- def post(
- "https://social.heldscal.la/main/push/hub",
- {:form, _data},
- "Content-type": "application/x-www-form-urlencoded"
- ) do
- {:ok,
- %Response{
- status_code: 202
- }}
- end
-
- def get("http://mastodon.example.org/users/admin/statuses/100787282858396771", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!(
- "test/fixtures/httpoison_mock/http___mastodon.example.org_users_admin_status_1234.json"
- )
- }}
- end
-
- def get(
- "https://pawoo.net/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "https://pawoo.net/users/pekorino"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.xml")
- }}
- end
-
- def get(
- "https://pawoo.net/.well-known/webfinger?resource=https://pawoo.net/users/pekorino",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.xml")
- }}
- end
-
- def get("https://pawoo.net/users/pekorino.atom", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.atom")
- }}
- end
-
- def get(
- "https://mamot.fr/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "https://mamot.fr/users/Skruyb"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/skruyb@mamot.fr.atom")
- }}
- end
-
- def get(
- "https://mamot.fr/.well-known/webfinger?resource=https://mamot.fr/users/Skruyb",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/skruyb@mamot.fr.atom")
- }}
- end
-
- def get(
- "https://social.sakamoto.gq/.well-known/webfinger",
- [Accept: "application/xrd+xml,application/jrd+json"],
- params: [resource: "https://social.sakamoto.gq/users/eal"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/eal_sakamoto.xml")
- }}
- end
-
- def get(
- "https://social.sakamoto.gq/.well-known/webfinger?resource=https://social.sakamoto.gq/users/eal",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/eal_sakamoto.xml")
- }}
- end
-
- def get(
- "https://pleroma.soykaf.com/.well-known/webfinger?resource=https://pleroma.soykaf.com/users/shp",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/shp@pleroma.soykaf.com.webfigner")
- }}
- end
-
- def get(
- "https://squeet.me/xrd/?uri=lain@squeet.me",
- [Accept: "application/xrd+xml,application/jrd+json"],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/lain_squeet.me_webfinger.xml")
- }}
- end
-
- def get("https://mamot.fr/users/Skruyb.atom", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___mamot.fr_users_Skruyb.atom")
- }}
- end
-
- def get(
- "https://social.sakamoto.gq/objects/0ccc1a2c-66b0-4305-b23a-7f7f2b040056",
- [Accept: "application/atom+xml"],
- _
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/sakamoto.atom")
- }}
- end
-
- def get("https://pleroma.soykaf.com/users/shp/feed.atom", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/shp@pleroma.soykaf.com.feed")
- }}
- end
-
- def get("http://social.heldscal.la/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/social.heldscal.la_host_meta")
- }}
- end
-
- def get("http://status.alpicola.com/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/status.alpicola.com_host_meta")
- }}
- end
-
- def get("http://macgirvin.com/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/macgirvin.com_host_meta")
- }}
- end
-
- def get("http://mastodon.social/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/mastodon.social_host_meta")
- }}
- end
-
- def get("http://shitposter.club/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/shitposter.club_host_meta")
- }}
- end
-
- def get("http://pleroma.soykaf.com/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/pleroma.soykaf.com_host_meta")
- }}
- end
-
- def get("http://social.sakamoto.gq/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/social.sakamoto.gq_host_meta")
- }}
- end
-
- def get("http://gs.example.org/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/gs.example.org_host_meta")
- }}
- end
-
- def get("http://pawoo.net/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/pawoo.net_host_meta")
- }}
- end
-
- def get("http://mamot.fr/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/mamot.fr_host_meta")
- }}
- end
-
- def get("http://mastodon.xyz/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/mastodon.xyz_host_meta")
- }}
- end
-
- def get("http://social.wxcafe.net/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/social.wxcafe.net_host_meta")
- }}
- end
-
- def get("http://squeet.me/.well-known/host-meta", [], follow_redirect: true) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/squeet.me_host_meta")
- }}
- end
-
- def get(
- "http://social.stopwatchingus-heidelberg.de/.well-known/host-meta",
- [],
- follow_redirect: true
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body:
- File.read!("test/fixtures/httpoison_mock/social.stopwatchingus-heidelberg.de_host_meta")
- }}
- end
-
- def get("http://mastodon.example.org/users/admin", [Accept: "application/activity+json"], _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/admin@mastdon.example.org.json")
- }}
- end
-
- def get(
- "https://hubzilla.example.org/channel/kaniini",
- [Accept: "application/activity+json"],
- _
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/kaniini@hubzilla.example.org.json")
- }}
- end
-
- def get("https://masto.quad.moe/users/_HellPie", [Accept: "application/activity+json"], _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/hellpie.json")
- }}
- end
-
- def get("https://niu.moe/users/rye", [Accept: "application/activity+json"], _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/rye.json")
- }}
- end
-
- def get("https://n1u.moe/users/rye", [Accept: "application/activity+json"], _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/rye.json")
- }}
- end
-
- def get(
- "https://mst3k.interlinked.me/users/luciferMysticus",
- [Accept: "application/activity+json"],
- _
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/lucifermysticus.json")
- }}
- end
-
- def get("https://mstdn.io/users/mayuutann", [Accept: "application/activity+json"], _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/mayumayu.json")
- }}
- end
-
- def get(
- "http://mastodon.example.org/@admin/99541947525187367",
- [Accept: "application/activity+json"],
- _
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/mastodon-note-object.json")
- }}
- end
-
- def get(
- "https://mstdn.io/users/mayuutann/statuses/99568293732299394",
- [Accept: "application/activity+json"],
- _
- ) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/mayumayupost.json")
- }}
- end
-
- def get("https://shitposter.club/notice/7369654", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/7369654.html")
- }}
- end
-
- def get("https://shitposter.club/api/statuses/show/7369654.atom", _body, _headers) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/7369654.atom")
- }}
- end
-
- def get("https://baptiste.gelez.xyz/~/PlumeDevelopment/this-month-in-plume-june-2018/", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/baptiste.gelex.xyz-article.json")
- }}
- end
-
- def get("https://baptiste.gelez.xyz/@/BaptisteGelez", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/baptiste.gelex.xyz-user.json")
- }}
- end
-
- def get("https://peertube.moe/videos/watch/df5f464b-be8d-46fb-ad81-2d4c2d1630e3", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/peertube.moe-vid.json")
- }}
- end
-
- def get("https://peertube.moe/accounts/7even", _, _) do
- {:ok,
- %Response{
- status_code: 200,
- body: File.read!("test/fixtures/httpoison_mock/7even.json")
- }}
- end
-
- def get(url, body, headers) do
- {:error,
- "Not implemented the mock response for get #{inspect(url)}, #{inspect(body)}, #{
- inspect(headers)
- }"}
- end
-
- def post(url, _body, _headers) do
- {:error, "Not implemented the mock response for post #{inspect(url)}"}
- end
-
- def post(url, _body, _headers, _options) do
- {:error, "Not implemented the mock response for post #{inspect(url)}"}
- end
-end
diff --git a/test/upload_test.exs b/test/upload_test.exs
index d273ea5f6..f2cad4cf0 100644
--- a/test/upload_test.exs
+++ b/test/upload_test.exs
@@ -2,7 +2,43 @@ defmodule Pleroma.UploadTest do
alias Pleroma.Upload
use Pleroma.DataCase
- describe "Storing a file" do
+ describe "Storing a file with the Local uploader" do
+ setup [:ensure_local_uploader]
+
+ test "returns a media url" do
+ File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
+
+ file = %Plug.Upload{
+ content_type: "image/jpg",
+ path: Path.absname("test/fixtures/image_tmp.jpg"),
+ filename: "image.jpg"
+ }
+
+ {:ok, data} = Upload.store(file)
+
+ assert %{"url" => [%{"href" => url}]} = data
+
+ assert String.starts_with?(url, Pleroma.Web.base_url() <> "/media/")
+ end
+
+ test "returns a media url with configured base_url" do
+ base_url = "https://cache.pleroma.social"
+
+ File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
+
+ file = %Plug.Upload{
+ content_type: "image/jpg",
+ path: Path.absname("test/fixtures/image_tmp.jpg"),
+ filename: "image.jpg"
+ }
+
+ {:ok, data} = Upload.store(file, base_url: base_url)
+
+ assert %{"url" => [%{"href" => url}]} = data
+
+ assert String.starts_with?(url, base_url <> "/media/")
+ end
+
test "copies the file to the configured folder with deduping" do
File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
@@ -12,10 +48,11 @@ test "copies the file to the configured folder with deduping" do
filename: "an [image.jpg"
}
- data = Upload.store(file, true)
+ {:ok, data} = Upload.store(file, filters: [Pleroma.Upload.Filter.Dedupe])
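+      # Dedupe names the stored file after a hash of its contents, hence the fixed
+      # filename asserted below.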
- assert data["name"] ==
- "e7a6d0cf595bff76f14c9a98b6c199539559e8b844e02e51e5efcfd1f614a2df.jpeg"
+ assert List.first(data["url"])["href"] ==
+ Pleroma.Web.base_url() <>
+ "/media/e7a6d0cf595bff76f14c9a98b6c199539559e8b844e02e51e5efcfd1f614a2df.jpg"
end
test "copies the file to the configured folder without deduping" do
@@ -27,7 +64,7 @@ test "copies the file to the configured folder without deduping" do
filename: "an [image.jpg"
}
- data = Upload.store(file, false)
+ {:ok, data} = Upload.store(file)
assert data["name"] == "an [image.jpg"
end
@@ -40,7 +77,7 @@ test "fixes incorrect content type" do
filename: "an [image.jpg"
}
- data = Upload.store(file, true)
+ {:ok, data} = Upload.store(file, filters: [Pleroma.Upload.Filter.Dedupe])
assert hd(data["url"])["mediaType"] == "image/jpeg"
end
@@ -53,7 +90,7 @@ test "adds missing extension" do
filename: "an [image"
}
- data = Upload.store(file, false)
+ {:ok, data} = Upload.store(file)
assert data["name"] == "an [image.jpg"
end
@@ -66,7 +103,7 @@ test "fixes incorrect file extension" do
filename: "an [image.blah"
}
- data = Upload.store(file, false)
+ {:ok, data} = Upload.store(file)
assert data["name"] == "an [image.jpg"
end
@@ -79,8 +116,22 @@ test "don't modify filename of an unknown type" do
filename: "test.txt"
}
- data = Upload.store(file, false)
+ {:ok, data} = Upload.store(file)
assert data["name"] == "test.txt"
end
+
+ test "copies the file to the configured folder with anonymizing filename" do
+ File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
+
+ file = %Plug.Upload{
+ content_type: "image/jpg",
+ path: Path.absname("test/fixtures/image_tmp.jpg"),
+ filename: "an [image.jpg"
+ }
+
+ {:ok, data} = Upload.store(file, filters: [Pleroma.Upload.Filter.AnonymizeFilename])
+
+ refute data["name"] == "an [image.jpg"
+ end
end
end
diff --git a/test/user_test.exs b/test/user_test.exs
index 231f1d94d..3d2f7f4e0 100644
--- a/test/user_test.exs
+++ b/test/user_test.exs
@@ -9,6 +9,11 @@ defmodule Pleroma.UserTest do
import Pleroma.Factory
import Ecto.Query
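+  # mock_global/1 installs the stub for all processes, so HTTP requests made from
+  # spawned tasks during user fetches also hit HttpRequestMock.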
+ setup_all do
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
test "ap_id returns the activity pub id for the user" do
user = UserBuilder.build()
@@ -34,14 +39,14 @@ test "follow takes a user and another user" do
user = Repo.get(User, user.id)
followed = User.get_by_ap_id(followed.ap_id)
- assert followed.info["follower_count"] == 1
+ assert followed.info.follower_count == 1
assert User.ap_followers(followed) in user.following
end
test "can't follow a deactivated users" do
user = insert(:user)
- followed = insert(:user, info: %{"deactivated" => true})
+ followed = insert(:user, info: %{deactivated: true})
{:error, _} = User.follow(user, followed)
end
@@ -56,8 +61,8 @@ test "can't follow a user who blocked us" do
end
test "local users do not automatically follow local locked accounts" do
- follower = insert(:user, info: %{"locked" => true})
- followed = insert(:user, info: %{"locked" => true})
+ follower = insert(:user, info: %{locked: true})
+ followed = insert(:user, info: %{locked: true})
{:ok, follower} = User.maybe_direct_follow(follower, followed)
@@ -144,6 +149,18 @@ test "it sets the password_hash, ap_id and following fields" do
assert changeset.changes.follower_address == "#{changeset.changes.ap_id}/followers"
end
+
+ test "it ensures info is not nil" do
+ changeset = User.register_changeset(%User{}, @full_user_data)
+
+ assert changeset.valid?
+
+ {:ok, user} =
+ changeset
+ |> Repo.insert()
+
+ refute is_nil(user.info)
+ end
end
describe "fetching a user from nickname or trying to build one" do
@@ -185,12 +202,14 @@ test "updates an existing user, if stale" do
local: false,
nickname: "admin@mastodon.example.org",
ap_id: "http://mastodon.example.org/users/admin",
- last_refreshed_at: a_week_ago
+ last_refreshed_at: a_week_ago,
+ info: %{}
)
assert orig_user.last_refreshed_at == a_week_ago
user = User.get_or_fetch_by_ap_id("http://mastodon.example.org/users/admin")
+ assert user.info.source_data["endpoints"]
refute user.last_refreshed_at == orig_user.last_refreshed_at
end
@@ -311,45 +330,45 @@ test "it sets the info->note_count property" do
user = User.get_by_ap_id(note.data["actor"])
- assert user.info["note_count"] == nil
+ assert user.info.note_count == 0
{:ok, user} = User.update_note_count(user)
- assert user.info["note_count"] == 1
+ assert user.info.note_count == 1
end
test "it increases the info->note_count property" do
note = insert(:note)
user = User.get_by_ap_id(note.data["actor"])
- assert user.info["note_count"] == nil
+ assert user.info.note_count == 0
{:ok, user} = User.increase_note_count(user)
- assert user.info["note_count"] == 1
+ assert user.info.note_count == 1
{:ok, user} = User.increase_note_count(user)
- assert user.info["note_count"] == 2
+ assert user.info.note_count == 2
end
test "it decreases the info->note_count property" do
note = insert(:note)
user = User.get_by_ap_id(note.data["actor"])
- assert user.info["note_count"] == nil
+ assert user.info.note_count == 0
{:ok, user} = User.increase_note_count(user)
- assert user.info["note_count"] == 1
+ assert user.info.note_count == 1
{:ok, user} = User.decrease_note_count(user)
- assert user.info["note_count"] == 0
+ assert user.info.note_count == 0
{:ok, user} = User.decrease_note_count(user)
- assert user.info["note_count"] == 0
+ assert user.info.note_count == 0
end
test "it sets the info->follower_count property" do
@@ -358,11 +377,11 @@ test "it sets the info->follower_count property" do
User.follow(follower, user)
- assert user.info["follower_count"] == nil
+ assert user.info.follower_count == 0
{:ok, user} = User.update_follower_count(user)
- assert user.info["follower_count"] == 1
+ assert user.info.follower_count == 1
end
end
@@ -489,11 +508,11 @@ test "get recipients from activity" do
test ".deactivate can de-activate then re-activate a user" do
user = insert(:user)
- assert false == !!user.info["deactivated"]
+ assert false == user.info.deactivated
{:ok, user} = User.deactivate(user)
- assert true == user.info["deactivated"]
+ assert true == user.info.deactivated
{:ok, user} = User.deactivate(user, false)
- assert false == !!user.info["deactivated"]
+ assert false == user.info.deactivated
end
test ".delete deactivates a user, all follow relationships and all create activities" do
@@ -517,7 +536,7 @@ test ".delete deactivates a user, all follow relationships and all create activi
follower = Repo.get(User, follower.id)
user = Repo.get(User, user.id)
- assert user.info["deactivated"]
+ assert user.info.deactivated
refute User.following?(user, followed)
refute User.following?(followed, follower)
@@ -546,7 +565,7 @@ test "html_filter_policy returns nil when rich-text is enabled" do
end
test "html_filter_policy returns TwitterText scrubber when rich-text is disabled" do
- user = insert(:user, %{info: %{"no_rich_text" => true}})
+ user = insert(:user, %{info: %{no_rich_text: true}})
assert Pleroma.HTML.Scrubber.TwitterText == User.html_filter_policy(user)
end
diff --git a/test/web/activity_pub/activity_pub_controller_test.exs b/test/web/activity_pub/activity_pub_controller_test.exs
index 1c24b348c..b4af2df5a 100644
--- a/test/web/activity_pub/activity_pub_controller_test.exs
+++ b/test/web/activity_pub/activity_pub_controller_test.exs
@@ -5,6 +5,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
alias Pleroma.{Repo, User}
alias Pleroma.Activity
+ setup_all do
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
describe "/relay" do
test "with the relay active, it returns the relay user", %{conn: conn} do
res =
@@ -145,6 +150,20 @@ test "it returns the followers in a collection", %{conn: conn} do
assert result["first"]["orderedItems"] == [user.ap_id]
end
+ test "it returns returns empty if the user has 'hide_network' set", %{conn: conn} do
+ user = insert(:user)
+ user_two = insert(:user, %{info: %{hide_network: true}})
+ User.follow(user, user_two)
+
+ result =
+ conn
+ |> get("/users/#{user_two.nickname}/followers")
+ |> json_response(200)
+
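+    # hide_network hides the follower list itself, but the total count stays visible.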
+ assert result["first"]["orderedItems"] == []
+ assert result["totalItems"] == 1
+ end
+
test "it works for more than 10 users", %{conn: conn} do
user = insert(:user)
@@ -186,6 +205,20 @@ test "it returns the following in a collection", %{conn: conn} do
assert result["first"]["orderedItems"] == [user_two.ap_id]
end
+ test "it returns returns empty if the user has 'hide_network' set", %{conn: conn} do
+ user = insert(:user, %{info: %{hide_network: true}})
+ user_two = insert(:user)
+ User.follow(user, user_two)
+
+ result =
+ conn
+ |> get("/users/#{user.nickname}/following")
+ |> json_response(200)
+
+ assert result["first"]["orderedItems"] == []
+ assert result["totalItems"] == 1
+ end
+
test "it works for more than 10 users", %{conn: conn} do
user = insert(:user)
diff --git a/test/web/activity_pub/activity_pub_test.exs b/test/web/activity_pub/activity_pub_test.exs
index 35c381ac3..90f11ecd4 100644
--- a/test/web/activity_pub/activity_pub_test.exs
+++ b/test/web/activity_pub/activity_pub_test.exs
@@ -7,6 +7,12 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
alias Pleroma.Builders.ActivityBuilder
import Pleroma.Factory
+ import Tesla.Mock
+
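+  # Unlike mock_global/1, mock/1 scopes the stub to the current test process.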
+ setup do
+ mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
describe "building a user from his ap id" do
test "it returns a user" do
@@ -14,8 +20,8 @@ test "it returns a user" do
{:ok, user} = ActivityPub.make_user_from_ap_id(user_id)
assert user.ap_id == user_id
assert user.nickname == "admin@mastodon.example.org"
- assert user.info["source_data"]
- assert user.info["ap_enabled"]
+ assert user.info.source_data
+ assert user.info.ap_enabled
assert user.follower_address == "http://mastodon.example.org/users/admin/followers"
end
end
diff --git a/test/web/activity_pub/transmogrifier_test.exs b/test/web/activity_pub/transmogrifier_test.exs
index 829da0a65..fa526a222 100644
--- a/test/web/activity_pub/transmogrifier_test.exs
+++ b/test/web/activity_pub/transmogrifier_test.exs
@@ -12,6 +12,11 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
import Pleroma.Factory
alias Pleroma.Web.CommonAPI
+ setup_all do
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
describe "handle_incoming" do
test "it ignores an incoming notice if we already have it" do
activity = insert(:note_activity)
@@ -92,7 +97,7 @@ test "it works for incoming notices" do
user = User.get_by_ap_id(object["actor"])
- assert user.info["note_count"] == 1
+ assert user.info.note_count == 1
end
test "it works for incoming notices with hashtags" do
@@ -307,7 +312,7 @@ test "it works for incoming update activities" do
}
]
- assert user.info["banner"]["url"] == [
+ assert user.info.banner["url"] == [
%{
"href" =>
"https://cd.niu.moe/accounts/headers/000/033/323/original/850b3448fa5fd477.png"
@@ -337,7 +342,7 @@ test "it works for incoming update activities which lock the account" do
{:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(update_data)
user = User.get_cached_by_ap_id(data["actor"])
- assert user.info["locked"] == true
+ assert user.info.locked == true
end
test "it works for incoming deletes" do
@@ -543,7 +548,7 @@ test "it works for incoming accepts which were pre-accepted" do
test "it works for incoming accepts which were orphaned" do
follower = insert(:user)
- followed = insert(:user, %{info: %{"locked" => true}})
+ followed = insert(:user, %{info: %User.Info{locked: true}})
{:ok, follow_activity} = ActivityPub.follow(follower, followed)
@@ -565,7 +570,7 @@ test "it works for incoming accepts which were orphaned" do
test "it works for incoming accepts which are referenced by IRI only" do
follower = insert(:user)
- followed = insert(:user, %{info: %{"locked" => true}})
+ followed = insert(:user, %{info: %User.Info{locked: true}})
{:ok, follow_activity} = ActivityPub.follow(follower, followed)
@@ -585,7 +590,7 @@ test "it works for incoming accepts which are referenced by IRI only" do
test "it fails for incoming accepts which cannot be correlated" do
follower = insert(:user)
- followed = insert(:user, %{info: %{"locked" => true}})
+ followed = insert(:user, %{info: %User.Info{locked: true}})
accept_data =
File.read!("test/fixtures/mastodon-accept-activity.json")
@@ -604,7 +609,7 @@ test "it fails for incoming accepts which cannot be correlated" do
test "it fails for incoming rejects which cannot be correlated" do
follower = insert(:user)
- followed = insert(:user, %{info: %{"locked" => true}})
+ followed = insert(:user, %{info: %User.Info{locked: true}})
accept_data =
File.read!("test/fixtures/mastodon-reject-activity.json")
@@ -623,7 +628,7 @@ test "it fails for incoming rejects which cannot be correlated" do
test "it works for incoming rejects which are orphaned" do
follower = insert(:user)
- followed = insert(:user, %{info: %{"locked" => true}})
+ followed = insert(:user, %{info: %User.Info{locked: true}})
{:ok, follower} = User.follow(follower, followed)
{:ok, _follow_activity} = ActivityPub.follow(follower, followed)
@@ -648,7 +653,7 @@ test "it works for incoming rejects which are orphaned" do
test "it works for incoming rejects which are referenced by IRI only" do
follower = insert(:user)
- followed = insert(:user, %{info: %{"locked" => true}})
+ followed = insert(:user, %{info: %User.Info{locked: true}})
{:ok, follower} = User.follow(follower, followed)
{:ok, follow_activity} = ActivityPub.follow(follower, followed)
@@ -815,18 +820,18 @@ test "it upgrades a user to activitypub" do
assert "http://localhost:4001/users/rye@niu.moe/followers" in activity.recipients
user = Repo.get(User, user.id)
- assert user.info["note_count"] == 1
+ assert user.info.note_count == 1
{:ok, user} = Transmogrifier.upgrade_user_from_ap_id("https://niu.moe/users/rye")
- assert user.info["ap_enabled"]
- assert user.info["note_count"] == 1
+ assert user.info.ap_enabled
+ assert user.info.note_count == 1
assert user.follower_address == "https://niu.moe/users/rye/followers"
# Wait for the background task
:timer.sleep(1000)
user = Repo.get(User, user.id)
- assert user.info["note_count"] == 1
+ assert user.info.note_count == 1
activity = Repo.get(Activity, activity.id)
assert user.follower_address in activity.recipients
@@ -847,7 +852,7 @@ test "it upgrades a user to activitypub" do
"https://cdn.niu.moe/accounts/headers/000/033/323/original/850b3448fa5fd477.png"
}
]
- } = user.info["banner"]
+ } = user.info.banner
refute "..." in activity.recipients
diff --git a/test/web/admin_api/admin_api_controller_test.exs b/test/web/admin_api/admin_api_controller_test.exs
index fa0cb71bf..9634ad7c5 100644
--- a/test/web/admin_api/admin_api_controller_test.exs
+++ b/test/web/admin_api/admin_api_controller_test.exs
@@ -8,7 +8,7 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do
describe "/api/pleroma/admin/user" do
test "Delete" do
- admin = insert(:user, info: %{"is_admin" => true})
+ admin = insert(:user, info: %{is_admin: true})
user = insert(:user)
conn =
@@ -21,7 +21,7 @@ test "Delete" do
end
test "Create" do
- admin = insert(:user, info: %{"is_admin" => true})
+ admin = insert(:user, info: %{is_admin: true})
conn =
build_conn()
@@ -39,7 +39,7 @@ test "Create" do
describe "/api/pleroma/admin/permission_group" do
test "GET is giving user_info" do
- admin = insert(:user, info: %{"is_admin" => true})
+ admin = insert(:user, info: %{is_admin: true})
conn =
build_conn()
@@ -47,33 +47,30 @@ test "GET is giving user_info" do
|> put_req_header("accept", "application/json")
|> get("/api/pleroma/admin/permission_group/#{admin.nickname}")
- assert json_response(conn, 200) == admin.info
+ assert json_response(conn, 200) == %{
+ "is_admin" => true,
+ "is_moderator" => false
+ }
end
test "/:right POST, can add to a permission group" do
- admin = insert(:user, info: %{"is_admin" => true})
+ admin = insert(:user, info: %{is_admin: true})
user = insert(:user)
- user_info =
- user.info
- |> Map.put("is_admin", true)
-
conn =
build_conn()
|> assign(:user, admin)
|> put_req_header("accept", "application/json")
|> post("/api/pleroma/admin/permission_group/#{user.nickname}/admin")
- assert json_response(conn, 200) == user_info
+ assert json_response(conn, 200) == %{
+ "is_admin" => true
+ }
end
test "/:right DELETE, can remove from a permission group" do
- admin = insert(:user, info: %{"is_admin" => true})
- user = insert(:user, info: %{"is_admin" => true})
-
- user_info =
- user.info
- |> Map.put("is_admin", false)
+ admin = insert(:user, info: %{is_admin: true})
+ user = insert(:user, info: %{is_admin: true})
conn =
build_conn()
@@ -81,12 +78,14 @@ test "/:right DELETE, can remove from a permission group" do
|> put_req_header("accept", "application/json")
|> delete("/api/pleroma/admin/permission_group/#{user.nickname}/admin")
- assert json_response(conn, 200) == user_info
+ assert json_response(conn, 200) == %{
+ "is_admin" => false
+ }
end
end
test "/api/pleroma/admin/invite_token" do
- admin = insert(:user, info: %{"is_admin" => true})
+ admin = insert(:user, info: %{is_admin: true})
conn =
build_conn()
@@ -98,8 +97,8 @@ test "/api/pleroma/admin/invite_token" do
end
test "/api/pleroma/admin/password_reset" do
- admin = insert(:user, info: %{"is_admin" => true})
- user = insert(:user, info: %{"is_admin" => true})
+ admin = insert(:user, info: %{is_admin: true})
+ user = insert(:user)
conn =
build_conn()
diff --git a/test/web/common_api/common_api_test.exs b/test/web/common_api/common_api_test.exs
index cd36e409c..8fc65f4c0 100644
--- a/test/web/common_api/common_api_test.exs
+++ b/test/web/common_api/common_api_test.exs
@@ -17,7 +17,7 @@ test "it adds emoji when updating profiles" do
CommonAPI.update(user)
user = User.get_cached_by_ap_id(user.ap_id)
- [karjalanpiirakka] = user.info["source_data"]["tag"]
+ [karjalanpiirakka] = user.info.source_data["tag"]
assert karjalanpiirakka["name"] == ":karjalanpiirakka:"
end
diff --git a/test/web/federator_test.exs b/test/web/federator_test.exs
index 02e1ca76e..87bf73dbd 100644
--- a/test/web/federator_test.exs
+++ b/test/web/federator_test.exs
@@ -5,6 +5,11 @@ defmodule Pleroma.Web.FederatorTest do
import Pleroma.Factory
import Mock
+ setup_all do
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
test "enqueues an element according to priority" do
queue = [%{item: 1, priority: 2}]
diff --git a/test/web/http_sigs/http_sig_test.exs b/test/web/http_sigs/http_sig_test.exs
index b2bf8d61b..2e189d583 100644
--- a/test/web/http_sigs/http_sig_test.exs
+++ b/test/web/http_sigs/http_sig_test.exs
@@ -4,6 +4,12 @@ defmodule Pleroma.Web.HTTPSignaturesTest do
use Pleroma.DataCase
alias Pleroma.Web.HTTPSignatures
import Pleroma.Factory
+ import Tesla.Mock
+
+ setup do
+ mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
@private_key hd(:public_key.pem_decode(File.read!("test/web/http_sigs/priv.key")))
|> :public_key.pem_entry_decode()
diff --git a/test/web/mastodon_api/account_view_test.exs b/test/web/mastodon_api/account_view_test.exs
index dc52b92bc..a2d3a2547 100644
--- a/test/web/mastodon_api/account_view_test.exs
+++ b/test/web/mastodon_api/account_view_test.exs
@@ -17,7 +17,7 @@ test "Represent a user account" do
user =
insert(:user, %{
- info: %{"note_count" => 5, "follower_count" => 3, "source_data" => source_data},
+ info: %{note_count: 5, follower_count: 3, source_data: source_data},
nickname: "shp@shitposter.club",
name: ":karjalanpiirakka: shp",
bio: "valid html",
@@ -63,7 +63,7 @@ test "Represent a user account" do
test "Represent a Service(bot) account" do
user =
insert(:user, %{
- info: %{"note_count" => 5, "follower_count" => 3, "source_data" => %{"type" => "Service"}},
+ info: %{note_count: 5, follower_count: 3, source_data: %{"type" => "Service"}},
nickname: "shp@shitposter.club",
inserted_at: ~N[2017-08-15 15:47:06.597036]
})
diff --git a/test/web/mastodon_api/mastodon_api_controller_test.exs b/test/web/mastodon_api/mastodon_api_controller_test.exs
index ad67cae6b..092f0c9fc 100644
--- a/test/web/mastodon_api/mastodon_api_controller_test.exs
+++ b/test/web/mastodon_api/mastodon_api_controller_test.exs
@@ -2,12 +2,18 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIControllerTest do
use Pleroma.Web.ConnCase
alias Pleroma.Web.TwitterAPI.TwitterAPI
- alias Pleroma.{Repo, User, Activity, Notification}
+ alias Pleroma.{Repo, User, Object, Activity, Notification}
alias Pleroma.Web.{OStatus, CommonAPI}
alias Pleroma.Web.ActivityPub.ActivityPub
import Pleroma.Factory
import ExUnit.CaptureLog
+ import Tesla.Mock
+
+ setup do
+ mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
test "the home timeline", %{conn: conn} do
user = insert(:user)
@@ -252,7 +258,7 @@ test "verify_credentials", %{conn: conn} do
end
test "verify_credentials default scope unlisted", %{conn: conn} do
- user = insert(:user, %{info: %{"default_scope" => "unlisted"}})
+ user = insert(:user, %{info: %Pleroma.User.Info{default_scope: "unlisted"}})
conn =
conn
@@ -584,7 +590,7 @@ test "list of notifications", %{conn: conn} do
|> get("/api/v1/notifications")
expected_response =
- "hi @#{user.nickname}"
+ "hi @#{user.nickname}"
assert [%{"status" => %{"content" => response}} | _rest] = json_response(conn, 200)
assert response == expected_response
@@ -605,7 +611,7 @@ test "getting a single notification", %{conn: conn} do
|> get("/api/v1/notifications/#{notification.id}")
expected_response =
- "hi @#{user.nickname}"
+ "hi @#{user.nickname}"
assert %{"status" => %{"content" => response}} = json_response(conn, 200)
assert response == expected_response
@@ -804,7 +810,7 @@ test "gets an users media", %{conn: conn} do
}
media =
- TwitterAPI.upload(file, "json")
+ TwitterAPI.upload(file, user, "json")
|> Poison.decode!()
{:ok, image_post} =
@@ -845,7 +851,7 @@ test "returns the relationships for the current user", %{conn: conn} do
describe "locked accounts" do
test "/api/v1/follow_requests works" do
- user = insert(:user, %{info: %{"locked" => true}})
+ user = insert(:user, %{info: %Pleroma.User.Info{locked: true}})
other_user = insert(:user)
{:ok, activity} = ActivityPub.follow(other_user, user)
@@ -865,7 +871,7 @@ test "/api/v1/follow_requests works" do
end
test "/api/v1/follow_requests/:id/authorize works" do
- user = insert(:user, %{info: %{"locked" => true}})
+ user = insert(:user, %{info: %Pleroma.User.Info{locked: true}})
other_user = insert(:user)
{:ok, activity} = ActivityPub.follow(other_user, user)
@@ -890,7 +896,7 @@ test "/api/v1/follow_requests/:id/authorize works" do
end
test "verify_credentials", %{conn: conn} do
- user = insert(:user, %{info: %{"default_scope" => "private"}})
+ user = insert(:user, %{info: %Pleroma.User.Info{default_scope: "private"}})
conn =
conn
@@ -902,7 +908,7 @@ test "verify_credentials", %{conn: conn} do
end
test "/api/v1/follow_requests/:id/reject works" do
- user = insert(:user, %{info: %{"locked" => true}})
+ user = insert(:user, %{info: %Pleroma.User.Info{locked: true}})
other_user = insert(:user)
{:ok, activity} = ActivityPub.follow(other_user, user)
@@ -959,6 +965,10 @@ test "media upload", %{conn: conn} do
assert media["type"] == "image"
assert media["description"] == desc
+ assert media["id"]
+
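+      # The upload now also creates an Object whose actor is the uploading user.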
+ object = Repo.get(Object, media["id"])
+ assert object.data["actor"] == User.ap_id(user)
end
test "hashtag timeline", %{conn: conn} do
@@ -1002,6 +1012,31 @@ test "getting followers", %{conn: conn} do
assert id == to_string(user.id)
end
+ test "getting followers, hide_network", %{conn: conn} do
+ user = insert(:user)
+ other_user = insert(:user, %{info: %{hide_network: true}})
+ {:ok, user} = User.follow(user, other_user)
+
+ conn =
+ conn
+ |> get("/api/v1/accounts/#{other_user.id}/followers")
+
+ assert [] == json_response(conn, 200)
+ end
+
+ test "getting followers, hide_network, same user requesting", %{conn: conn} do
+ user = insert(:user)
+ other_user = insert(:user, %{info: %{hide_network: true}})
+ {:ok, user} = User.follow(user, other_user)
+
+ conn =
+ conn
+ |> assign(:user, other_user)
+ |> get("/api/v1/accounts/#{other_user.id}/followers")
+
+ refute [] == json_response(conn, 200)
+ end
+
test "getting following", %{conn: conn} do
user = insert(:user)
other_user = insert(:user)
@@ -1015,6 +1050,31 @@ test "getting following", %{conn: conn} do
assert id == to_string(other_user.id)
end
+ test "getting following, hide_network", %{conn: conn} do
+ user = insert(:user, %{info: %{hide_network: true}})
+ other_user = insert(:user)
+ {:ok, user} = User.follow(user, other_user)
+
+ conn =
+ conn
+ |> get("/api/v1/accounts/#{user.id}/following")
+
+ assert [] == json_response(conn, 200)
+ end
+
+ test "getting following, hide_network, same user requesting", %{conn: conn} do
+ user = insert(:user, %{info: %{hide_network: true}})
+ other_user = insert(:user)
+ {:ok, user} = User.follow(user, other_user)
+
+ conn =
+ conn
+ |> assign(:user, user)
+ |> get("/api/v1/accounts/#{user.id}/following")
+
+ refute [] == json_response(conn, 200)
+ end
+
test "following / unfollowing a user", %{conn: conn} do
user = insert(:user)
other_user = insert(:user)
@@ -1105,7 +1165,7 @@ test "blocking / unblocking a domain", %{conn: conn} do
refute User.blocks?(user, other_user)
end
- test "getting a list of domain blocks" do
+ test "getting a list of domain blocks", %{conn: conn} do
user = insert(:user)
{:ok, user} = User.block_domain(user, "bad.site")
@@ -1253,14 +1313,33 @@ test "returns the favorites of a user", %{conn: conn} do
describe "updating credentials" do
test "updates the user's bio", %{conn: conn} do
user = insert(:user)
+ user2 = insert(:user)
conn =
conn
|> assign(:user, user)
- |> patch("/api/v1/accounts/update_credentials", %{"note" => "I drink #cofe"})
+ |> patch("/api/v1/accounts/update_credentials", %{
+ "note" => "I drink #cofe with @#{user2.nickname}"
+ })
assert user = json_response(conn, 200)
- assert user["note"] == "I drink #cofe"
+
+ assert user["note"] ==
+ "I drink #cofe with @#{user2.nickname}"
+ end
+
+ test "updates the user's locking status", %{conn: conn} do
+ user = insert(:user)
+
+ conn =
+ conn
+ |> assign(:user, user)
+ |> patch("/api/v1/accounts/update_credentials", %{locked: "true"})
+
+ assert user = json_response(conn, 200)
+ assert user["locked"] == true
end
test "updates the user's name", %{conn: conn} do
@@ -1289,8 +1368,8 @@ test "updates the user's avatar", %{conn: conn} do
|> assign(:user, user)
|> patch("/api/v1/accounts/update_credentials", %{"avatar" => new_avatar})
- assert user = json_response(conn, 200)
- assert user["avatar"] != "https://placehold.it/48x48"
+ assert user_response = json_response(conn, 200)
+ assert user_response["avatar"] != User.avatar_url(user)
end
test "updates the user's banner", %{conn: conn} do
@@ -1307,8 +1386,8 @@ test "updates the user's banner", %{conn: conn} do
|> assign(:user, user)
|> patch("/api/v1/accounts/update_credentials", %{"header" => new_header})
- assert user = json_response(conn, 200)
- assert user["header"] != "https://placehold.it/700x335"
+ assert user_response = json_response(conn, 200)
+ assert user_response["header"] != User.banner_url(user)
end
end
diff --git a/test/web/mastodon_api/status_view_test.exs b/test/web/mastodon_api/status_view_test.exs
index 31554a07d..9e69b3189 100644
--- a/test/web/mastodon_api/status_view_test.exs
+++ b/test/web/mastodon_api/status_view_test.exs
@@ -6,6 +6,12 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest do
alias Pleroma.Web.OStatus
alias Pleroma.Web.CommonAPI
import Pleroma.Factory
+ import Tesla.Mock
+
+ setup do
+ mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
test "a note with null content" do
note = insert(:note_activity)
diff --git a/test/web/node_info_test.exs b/test/web/node_info_test.exs
index a6376453c..a5b0b7869 100644
--- a/test/web/node_info_test.exs
+++ b/test/web/node_info_test.exs
@@ -4,7 +4,7 @@ defmodule Pleroma.Web.NodeInfoTest do
import Pleroma.Factory
test "nodeinfo shows staff accounts", %{conn: conn} do
- user = insert(:user, %{local: true, info: %{"is_moderator" => true}})
+ user = insert(:user, %{local: true, info: %{is_moderator: true}})
conn =
conn
@@ -15,7 +15,7 @@ test "nodeinfo shows staff accounts", %{conn: conn} do
assert user.ap_id in result["metadata"]["staffAccounts"]
end
- test "returns 404 when federation is disabled" do
+ test "returns 404 when federation is disabled", %{conn: conn} do
instance =
Application.get_env(:pleroma, :instance)
|> Keyword.put(:federating, false)
@@ -37,7 +37,7 @@ test "returns 404 when federation is disabled" do
Application.put_env(:pleroma, :instance, instance)
end
- test "returns 200 when federation is enabled" do
+ test "returns 200 when federation is enabled", %{conn: conn} do
conn
|> get("/.well-known/nodeinfo")
|> json_response(200)
diff --git a/test/web/ostatus/activity_representer_test.exs b/test/web/ostatus/activity_representer_test.exs
index 8bf3bc775..a351510d8 100644
--- a/test/web/ostatus/activity_representer_test.exs
+++ b/test/web/ostatus/activity_representer_test.exs
@@ -7,6 +7,12 @@ defmodule Pleroma.Web.OStatus.ActivityRepresenterTest do
alias Pleroma.Web.OStatus
import Pleroma.Factory
+ import Tesla.Mock
+
+ setup do
+ mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
test "an external note activity" do
incoming = File.read!("test/fixtures/mastodon-note-cw.xml")
diff --git a/test/web/ostatus/ostatus_controller_test.exs b/test/web/ostatus/ostatus_controller_test.exs
index 371c835c0..411e89e94 100644
--- a/test/web/ostatus/ostatus_controller_test.exs
+++ b/test/web/ostatus/ostatus_controller_test.exs
@@ -5,6 +5,11 @@ defmodule Pleroma.Web.OStatus.OStatusControllerTest do
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.OStatus.ActivityRepresenter
+ setup_all do
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
test "decodes a salmon", %{conn: conn} do
user = insert(:user)
salmon = File.read!("test/fixtures/salmon.xml")
@@ -31,14 +36,16 @@ test "decodes a salmon with a changed magic key", %{conn: conn} do
# Set a wrong magic-key for a user so it has to refetch
salmon_user = User.get_by_ap_id("http://gs.example.org:4040/index.php/user/1")
# Wrong key
- info =
- salmon_user.info
- |> Map.put(
- "magic_key",
- "RSA.pu0s-halox4tu7wmES1FVSx6u-4wc0YrUFXcqWXZG4-27UmbCOpMQftRCldNRfyA-qLbz-eqiwrong1EwUvjsD4cYbAHNGHwTvDOyx5AKthQUP44ykPv7kjKGh3DWKySJvcs9tlUG87hlo7AvnMo9pwRS_Zz2CacQ-MKaXyDepk=.AQAB"
- )
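+    # user.info is now an embedded schema, so the magic key is replaced through a
+    # User.Info changeset and put_embed/3 instead of updating a raw map.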
+ info_cng =
+ User.Info.remote_user_creation(salmon_user.info, %{
+ magic_key:
+ "RSA.pu0s-halox4tu7wmES1FVSx6u-4wc0YrUFXcqWXZG4-27UmbCOpMQftRCldNRfyA-qLbz-eqiwrong1EwUvjsD4cYbAHNGHwTvDOyx5AKthQUP44ykPv7kjKGh3DWKySJvcs9tlUG87hlo7AvnMo9pwRS_Zz2CacQ-MKaXyDepk=.AQAB"
+ })
- Repo.update(User.info_changeset(salmon_user, %{info: info}))
+ cng =
+ Ecto.Changeset.change(salmon_user)
+ |> Ecto.Changeset.put_embed(:info, info_cng)
+ |> Repo.update()
conn =
build_conn()
diff --git a/test/web/ostatus/ostatus_test.exs b/test/web/ostatus/ostatus_test.exs
index f95da8b0a..f3268e83d 100644
--- a/test/web/ostatus/ostatus_test.exs
+++ b/test/web/ostatus/ostatus_test.exs
@@ -6,6 +6,11 @@ defmodule Pleroma.Web.OStatusTest do
import Pleroma.Factory
import ExUnit.CaptureLog
+ setup_all do
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
test "don't insert create notes twice" do
incoming = File.read!("test/fixtures/incoming_note_activity.xml")
{:ok, [activity]} = OStatus.handle_incoming(incoming)
@@ -17,7 +22,7 @@ test "handle incoming note - GS, Salmon" do
{:ok, [activity]} = OStatus.handle_incoming(incoming)
user = User.get_by_ap_id(activity.data["actor"])
- assert user.info["note_count"] == 1
+ assert user.info.note_count == 1
assert activity.data["type"] == "Create"
assert activity.data["object"]["type"] == "Note"
@@ -319,7 +324,7 @@ test "tries to use the information in poco fields" do
assert user.name == "Constance Variable"
assert user.nickname == "lambadalambda@social.heldscal.la"
assert user.local == false
- assert user.info["uri"] == uri
+ assert user.info.uri == uri
assert user.ap_id == uri
assert user.bio == "Call me Deacon Blues."
assert user.avatar["type"] == "Image"
@@ -329,6 +334,38 @@ test "tries to use the information in poco fields" do
assert user == user_again
end
+ test "find_or_make_user sets all the nessary input fields" do
+ uri = "https://social.heldscal.la/user/23211"
+ {:ok, user} = OStatus.find_or_make_user(uri)
+
+ assert user.info ==
+ %Pleroma.User.Info{
+ id: user.info.id,
+ ap_enabled: false,
+ background: %{},
+ banner: %{},
+ blocks: [],
+ deactivated: false,
+ default_scope: "public",
+ domain_blocks: [],
+ follower_count: 0,
+ is_admin: false,
+ is_moderator: false,
+ keys: nil,
+ locked: false,
+ no_rich_text: false,
+ note_count: 0,
+ settings: nil,
+ source_data: %{},
+ hub: "https://social.heldscal.la/main/push/hub",
+ magic_key:
+ "RSA.uzg6r1peZU0vXGADWxGJ0PE34WvmhjUmydbX5YYdOiXfODVLwCMi1umGoqUDm-mRu4vNEdFBVJU1CpFA7dKzWgIsqsa501i2XqElmEveXRLvNRWFB6nG03Q5OUY2as8eE54BJm0p20GkMfIJGwP6TSFb-ICp3QjzbatuSPJ6xCE=.AQAB",
+ salmon: "https://social.heldscal.la/main/salmon/user/23211",
+ topic: "https://social.heldscal.la/api/statuses/user_timeline/23211.atom",
+ uri: "https://social.heldscal.la/user/23211"
+ }
+ end
+
test "find_make_or_update_user takes an author element and returns an updated user" do
uri = "https://social.heldscal.la/user/23211"
@@ -447,7 +484,7 @@ test "it works for atom notes, too" do
end
end
- test "it doesn't add nil in the do field" do
+ test "it doesn't add nil in the to field" do
incoming = File.read!("test/fixtures/nil_mention_entry.xml")
{:ok, [activity]} = OStatus.handle_incoming(incoming)
diff --git a/test/web/ostatus/user_representer_test.exs b/test/web/ostatus/user_representer_test.exs
index e41dfeb3d..82fb8e793 100644
--- a/test/web/ostatus/user_representer_test.exs
+++ b/test/web/ostatus/user_representer_test.exs
@@ -6,7 +6,7 @@ defmodule Pleroma.Web.OStatus.UserRepresenterTest do
alias Pleroma.User
test "returns a user with id, uri, name and link" do
- user = build(:user, nickname: "レイン")
+ user = insert(:user, %{nickname: "レイン"})
tuple = UserRepresenter.to_simple_form(user)
res = :xmerl.export_simple_content(tuple, :xmerl_xml) |> to_string
diff --git a/test/web/salmon/salmon_test.exs b/test/web/salmon/salmon_test.exs
index 1b39b4b2d..23ccc038e 100644
--- a/test/web/salmon/salmon_test.exs
+++ b/test/web/salmon/salmon_test.exs
@@ -3,6 +3,7 @@ defmodule Pleroma.Web.Salmon.SalmonTest do
alias Pleroma.Web.Salmon
alias Pleroma.{Repo, Activity, User}
import Pleroma.Factory
+ import Tesla.Mock
@magickey "RSA.pu0s-halox4tu7wmES1FVSx6u-4wc0YrUFXcqWXZG4-27UmbCOpMQftRCldNRfyA-qLbz-eqiwQhh-1EwUvjsD4cYbAHNGHwTvDOyx5AKthQUP44ykPv7kjKGh3DWKySJvcs9tlUG87hlo7AvnMo9pwRS_Zz2CacQ-MKaXyDepk=.AQAB"
@@ -10,6 +11,11 @@ defmodule Pleroma.Web.Salmon.SalmonTest do
@magickey_friendica "RSA.AMwa8FUs2fWEjX0xN7yRQgegQffhBpuKNC6fa5VNSVorFjGZhRrlPMn7TQOeihlc9lBz2OsHlIedbYn2uJ7yCs0.AQAB"
+ setup do
+ mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
test "decodes a salmon" do
{:ok, salmon} = File.read("test/fixtures/salmon.xml")
{:ok, doc} = Salmon.decode_and_validate(@magickey, salmon)
diff --git a/test/web/twitter_api/representers/activity_representer_test.exs b/test/web/twitter_api/representers/activity_representer_test.exs
index 291fd5237..7cae4e4a1 100644
--- a/test/web/twitter_api/representers/activity_representer_test.exs
+++ b/test/web/twitter_api/representers/activity_representer_test.exs
@@ -58,7 +58,7 @@ test "a like activity" do
end
test "an activity" do
- {:ok, user} = UserBuilder.insert()
+ user = insert(:user)
# {:ok, mentioned_user } = UserBuilder.insert(%{nickname: "shp", ap_id: "shp"})
mentioned_user = insert(:user, %{nickname: "shp"})
diff --git a/test/web/twitter_api/twitter_api_controller_test.exs b/test/web/twitter_api/twitter_api_controller_test.exs
index 6bdcb4fd8..4119d1dd8 100644
--- a/test/web/twitter_api/twitter_api_controller_test.exs
+++ b/test/web/twitter_api/twitter_api_controller_test.exs
@@ -12,6 +12,42 @@ defmodule Pleroma.Web.TwitterAPI.ControllerTest do
import Pleroma.Factory
+ describe "POST /api/account/update_profile_banner" do
+ test "it updates the banner", %{conn: conn} do
+ user = insert(:user)
+
+ new_banner =
+ "data:image/gif;base64,R0lGODlhEAAQAMQAAORHHOVSKudfOulrSOp3WOyDZu6QdvCchPGolfO0o/XBs/fNwfjZ0frl3/zy7////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAkAABAALAAAAAAQABAAAAVVICSOZGlCQAosJ6mu7fiyZeKqNKToQGDsM8hBADgUXoGAiqhSvp5QAnQKGIgUhwFUYLCVDFCrKUE1lBavAViFIDlTImbKC5Gm2hB0SlBCBMQiB0UjIQA7"
+
+ response =
+ conn
+ |> assign(:user, user)
+ |> post(authenticated_twitter_api__path(conn, :update_banner), %{"banner" => new_banner})
+ |> json_response(200)
+
+ user = Repo.get(User, user.id)
+ assert user.info.banner["type"] == "Image"
+ end
+ end
+
+ describe "POST /api/qvitter/update_background_image" do
+ test "it updates the background", %{conn: conn} do
+ user = insert(:user)
+
+ new_bg =
+ "data:image/gif;base64,R0lGODlhEAAQAMQAAORHHOVSKudfOulrSOp3WOyDZu6QdvCchPGolfO0o/XBs/fNwfjZ0frl3/zy7////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAkAABAALAAAAAAQABAAAAVVICSOZGlCQAosJ6mu7fiyZeKqNKToQGDsM8hBADgUXoGAiqhSvp5QAnQKGIgUhwFUYLCVDFCrKUE1lBavAViFIDlTImbKC5Gm2hB0SlBCBMQiB0UjIQA7"
+
+ response =
+ conn
+ |> assign(:user, user)
+ |> post(authenticated_twitter_api__path(conn, :update_background), %{"img" => new_bg})
+ |> json_response(200)
+
+ user = Repo.get(User, user.id)
+ assert user.info.background["type"] == "Image"
+ end
+ end
+
describe "POST /api/account/verify_credentials" do
setup [:valid_user]
@@ -31,26 +67,6 @@ test "with credentials", %{conn: conn, user: user} do
end
end
- describe "POST /api/account/most_recent_notification" do
- setup [:valid_user]
-
- test "without valid credentials", %{conn: conn} do
- conn = post(conn, "/api/account/most_recent_notification.json")
- assert json_response(conn, 403) == %{"error" => "Invalid credentials."}
- end
-
- test "with credentials", %{conn: conn, user: user} do
- conn =
- conn
- |> with_credentials(user.nickname, "test")
- |> post("/api/account/most_recent_notification.json", %{id: "200"})
-
- assert json_response(conn, 200)
- user = User.get_by_nickname(user.nickname)
- assert user.info["most_recent_notification"] == 200
- end
- end
-
describe "POST /statuses/update.json" do
setup [:valid_user]
@@ -87,7 +103,7 @@ test "with credentials", %{conn: conn, user: user} do
describe "GET /statuses/public_timeline.json" do
test "returns statuses", %{conn: conn} do
- {:ok, user} = UserBuilder.insert()
+ user = insert(:user)
activities = ActivityBuilder.insert_list(30, %{}, %{user: user})
ActivityBuilder.insert_list(10, %{}, %{user: user})
since_id = List.last(activities).id
@@ -591,7 +607,7 @@ test "with credentials", %{conn: conn, user: current_user} do
|> post("/api/blocks/destroy.json", %{user_id: blocked.id})
current_user = Repo.get(User, current_user.id)
- assert current_user.info["blocks"] == []
+ assert current_user.info.blocks == []
assert json_response(conn, 200) ==
UserView.render("show.json", %{user: blocked, for: current_user})
@@ -845,6 +861,67 @@ test "it returns a user's followers", %{conn: conn} do
result = json_response(conn, 200)
assert Enum.sort(expected) == Enum.sort(result)
end
+
+ test "it returns a given user's followers with user_id", %{conn: conn} do
+ user = insert(:user)
+ follower_one = insert(:user)
+ follower_two = insert(:user)
+ not_follower = insert(:user)
+
+ {:ok, follower_one} = User.follow(follower_one, user)
+ {:ok, follower_two} = User.follow(follower_two, user)
+
+ conn =
+ conn
+ |> assign(:user, not_follower)
+ |> get("/api/statuses/followers", %{"user_id" => user.id})
+
+ assert MapSet.equal?(
+ MapSet.new(json_response(conn, 200)),
+ MapSet.new(
+ UserView.render("index.json", %{
+ users: [follower_one, follower_two],
+ for: not_follower
+ })
+ )
+ )
+ end
+
+ test "it returns empty for a hidden network", %{conn: conn} do
+ user = insert(:user, %{info: %{hide_network: true}})
+ follower_one = insert(:user)
+ follower_two = insert(:user)
+ not_follower = insert(:user)
+
+ {:ok, follower_one} = User.follow(follower_one, user)
+ {:ok, follower_two} = User.follow(follower_two, user)
+
+ conn =
+ conn
+ |> assign(:user, not_follower)
+ |> get("/api/statuses/followers", %{"user_id" => user.id})
+
+ assert [] == json_response(conn, 200)
+ end
+
+ test "it returns the followers for a hidden network if requested by the user themselves", %{
+ conn: conn
+ } do
+ user = insert(:user, %{info: %{hide_network: true}})
+ follower_one = insert(:user)
+ follower_two = insert(:user)
+ not_follower = insert(:user)
+
+ {:ok, follower_one} = User.follow(follower_one, user)
+ {:ok, follower_two} = User.follow(follower_two, user)
+
+ conn =
+ conn
+ |> assign(:user, user)
+ |> get("/api/statuses/followers", %{"user_id" => user.id})
+
+ refute [] == json_response(conn, 200)
+ end
end
describe "GET /api/statuses/friends" do
@@ -889,6 +966,42 @@ test "it returns a given user's friends with user_id", %{conn: conn} do
)
end
+ test "it returns empty for a hidden network", %{conn: conn} do
+ user = insert(:user, %{info: %{hide_network: true}})
+ followed_one = insert(:user)
+ followed_two = insert(:user)
+ not_followed = insert(:user)
+
+ {:ok, user} = User.follow(user, followed_one)
+ {:ok, user} = User.follow(user, followed_two)
+
+ conn =
+ conn
+ |> assign(:user, not_followed)
+ |> get("/api/statuses/friends", %{"user_id" => user.id})
+
+ assert [] == json_response(conn, 200)
+ end
+
+ test "it returns friends for a hidden network if the user themselves requests it", %{
+ conn: conn
+ } do
+ user = insert(:user, %{info: %{hide_network: true}})
+ followed_one = insert(:user)
+ followed_two = insert(:user)
+ not_followed = insert(:user)
+
+ {:ok, user} = User.follow(user, followed_one)
+ {:ok, user} = User.follow(user, followed_two)
+
+ conn =
+ conn
+ |> assign(:user, user)
+ |> get("/api/statuses/friends", %{"user_id" => user.id})
+
+ refute [] == json_response(conn, 200)
+ end
+
test "it returns a given user's friends with screen_name", %{conn: conn} do
user = insert(:user)
followed_one = insert(:user)
@@ -939,19 +1052,48 @@ test "it returns a user's friends", %{conn: conn} do
describe "POST /api/account/update_profile.json" do
test "it updates a user's profile", %{conn: conn} do
user = insert(:user)
+ user2 = insert(:user)
conn =
conn
|> assign(:user, user)
|> post("/api/account/update_profile.json", %{
"name" => "new name",
- "description" => "new description"
+ "description" => "hi @#{user2.nickname}"
})
user = Repo.get!(User, user.id)
assert user.name == "new name"
- assert user.bio == "new description"
+ assert user.bio ==
+ "hi @#{
+ user2.nickname
+ }"
+
+ assert json_response(conn, 200) == UserView.render("user.json", %{user: user, for: user})
+ end
+
+ test "it sets and un-sets hide_network", %{conn: conn} do
+ user = insert(:user)
+
+ conn
+ |> assign(:user, user)
+ |> post("/api/account/update_profile.json", %{
+ "hide_network" => "true"
+ })
+
+ user = Repo.get!(User, user.id)
+ assert user.info.hide_network == true
+
+ conn =
+ conn
+ |> assign(:user, user)
+ |> post("/api/account/update_profile.json", %{
+ "hide_network" => "false"
+ })
+
+ user = Repo.get!(User, user.id)
+ assert user.info.hide_network == false
assert json_response(conn, 200) == UserView.render("user.json", %{user: user, for: user})
end
@@ -966,7 +1108,7 @@ test "it locks an account", %{conn: conn} do
})
user = Repo.get!(User, user.id)
- assert user.info["locked"] == true
+ assert user.info.locked == true
assert json_response(conn, 200) == UserView.render("user.json", %{user: user, for: user})
end
@@ -982,7 +1124,7 @@ test "it unlocks an account", %{conn: conn} do
})
user = Repo.get!(User, user.id)
- assert user.info["locked"] == false
+ assert user.info.locked == false
assert json_response(conn, 200) == UserView.render("user.json", %{user: user, for: user})
end
@@ -1153,10 +1295,10 @@ test "with credentials and valid password", %{conn: conn, user: current_user} do
describe "GET /api/pleroma/friend_requests" do
test "it lists friend requests" do
- user = insert(:user, %{info: %{"locked" => true}})
+ user = insert(:user)
other_user = insert(:user)
- {:ok, activity} = ActivityPub.follow(other_user, user)
+ {:ok, _activity} = ActivityPub.follow(other_user, user)
user = Repo.get(User, user.id)
other_user = Repo.get(User, other_user.id)
@@ -1175,10 +1317,10 @@ test "it lists friend requests" do
describe "POST /api/pleroma/friendships/approve" do
test "it approves a friend request" do
- user = insert(:user, %{info: %{"locked" => true}})
+ user = insert(:user)
other_user = insert(:user)
- {:ok, activity} = ActivityPub.follow(other_user, user)
+ {:ok, _activity} = ActivityPub.follow(other_user, user)
user = Repo.get(User, user.id)
other_user = Repo.get(User, other_user.id)
@@ -1198,10 +1340,10 @@ test "it approves a friend request" do
describe "POST /api/pleroma/friendships/deny" do
test "it denies a friend request" do
- user = insert(:user, %{info: %{"locked" => true}})
+ user = insert(:user)
other_user = insert(:user)
- {:ok, activity} = ActivityPub.follow(other_user, user)
+ {:ok, _activity} = ActivityPub.follow(other_user, user)
user = Repo.get(User, user.id)
other_user = Repo.get(User, other_user.id)
@@ -1234,4 +1376,82 @@ test "it returns users, ordered by similarity", %{conn: conn} do
assert [user.id, user_two.id, user_three.id] == Enum.map(resp, fn %{"id" => id} -> id end)
end
end
+
+ describe "POST /api/media/upload" do
+ setup context do
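+ # Make sure these tests run against the local uploader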
+ Pleroma.DataCase.ensure_local_uploader(context)
+ end
+
+ test "it performs the upload and sets `data[actor]` with AP id of uploader user", %{
+ conn: conn
+ } do
+ user = insert(:user)
+
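+ # Work on a throwaway copy so the original fixture image is left untouched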
+ upload_filename = "test/fixtures/image_tmp.jpg"
+ File.cp!("test/fixtures/image.jpg", upload_filename)
+
+ file = %Plug.Upload{
+ content_type: "image/jpg",
+ path: Path.absname(upload_filename),
+ filename: "image.jpg"
+ }
+
+ response =
+ conn
+ |> assign(:user, user)
+ |> put_req_header("content-type", "application/octet-stream")
+ |> post("/api/media/upload", %{
+ "media" => file
+ })
+ |> json_response(:ok)
+
+ assert response["media_id"]
+ object = Repo.get(Object, response["media_id"])
+ assert object
+ assert object.data["actor"] == User.ap_id(user)
+ end
+ end
+
+ describe "POST /api/media/metadata/create" do
+ setup do
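+ # A media object and the user that owns it; only the owner may update its metadata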
+ object = insert(:note)
+ user = User.get_by_ap_id(object.data["actor"])
+ %{object: object, user: user}
+ end
+
+ test "it returns :forbidden status on attempt to modify someone else's upload", %{
+ conn: conn,
+ object: object
+ } do
+ initial_description = object.data["name"]
+ another_user = insert(:user)
+
+ conn
+ |> assign(:user, another_user)
+ |> post("/api/media/metadata/create", %{"media_id" => object.id})
+ |> json_response(:forbidden)
+
+ object = Repo.get(Object, object.id)
+ assert object.data["name"] == initial_description
+ end
+
+ test "it updates `data[name]` of referenced Object with provided value", %{
+ conn: conn,
+ object: object,
+ user: user
+ } do
+ description = "Informative description of the image. Initial value: #{object.data["name"]}"
+
+ conn
+ |> assign(:user, user)
+ |> post("/api/media/metadata/create", %{
+ "media_id" => object.id,
+ "alt_text" => %{"text" => description}
+ })
+ |> json_response(:no_content)
+
+ object = Repo.get(Object, object.id)
+ assert object.data["name"] == description
+ end
+ end
end
diff --git a/test/web/twitter_api/twitter_api_test.exs b/test/web/twitter_api/twitter_api_test.exs
index 8b9920bd9..05f832de0 100644
--- a/test/web/twitter_api/twitter_api_test.exs
+++ b/test/web/twitter_api/twitter_api_test.exs
@@ -10,7 +10,7 @@ defmodule Pleroma.Web.TwitterAPI.TwitterAPITest do
test "create a status" do
user = insert(:user)
- _mentioned_user = UserBuilder.insert(%{nickname: "shp", ap_id: "shp"})
+ mentioned_user = insert(:user, %{nickname: "shp", ap_id: "shp"})
object_data = %{
"type" => "Image",
@@ -35,7 +35,7 @@ test "create a status" do
{:ok, activity = %Activity{}} = TwitterAPI.create_status(user, input)
expected_text =
- "Hello again, @shp.<script></script>
This is on another :moominmamma: line. #2hu #epic #phantasmagoric
image.jpg"
+ "Hello again, @shp.<script></script>
This is on another :moominmamma: line. #2hu #epic #phantasmagoric
image.jpg"
assert get_in(activity.data, ["object", "content"]) == expected_text
assert get_in(activity.data, ["object", "type"]) == "Note"
@@ -67,7 +67,7 @@ test "create a status" do
user = User.get_by_ap_id(user.ap_id)
- assert user.info["note_count"] == 1
+ assert user.info.note_count == 1
end
test "create a status that is a reply" do
@@ -116,7 +116,7 @@ test "Follow another user using screen_name" do
assert User.ap_followers(followed) in user.following
followed = User.get_by_ap_id(followed.ap_id)
- assert followed.info["follower_count"] == 1
+ assert followed.info.follower_count == 1
{:error, msg} = TwitterAPI.follow(user, %{"screen_name" => followed.nickname})
assert msg == "Could not follow user: #{followed.nickname} is already on your list."
@@ -169,7 +169,7 @@ test "Unblock another user using user_id" do
{:ok, user, _unblocked} = TwitterAPI.block(user, %{"user_id" => unblocked.id})
{:ok, user, _unblocked} = TwitterAPI.unblock(user, %{"user_id" => unblocked.id})
- assert user.info["blocks"] == []
+ assert user.info.blocks == []
end
test "Unblock another user using screen_name" do
@@ -178,17 +178,19 @@ test "Unblock another user using screen_name" do
{:ok, user, _unblocked} = TwitterAPI.block(user, %{"screen_name" => unblocked.nickname})
{:ok, user, _unblocked} = TwitterAPI.unblock(user, %{"screen_name" => unblocked.nickname})
- assert user.info["blocks"] == []
+ assert user.info.blocks == []
end
test "upload a file" do
+ user = insert(:user)
+
file = %Plug.Upload{
content_type: "image/jpg",
path: Path.absname("test/fixtures/image.jpg"),
filename: "an_image.jpg"
}
- response = TwitterAPI.upload(file)
+ response = TwitterAPI.upload(file, user)
assert is_binary(response)
end
@@ -257,6 +259,35 @@ test "it registers a new user and returns the user." do
UserView.render("show.json", %{user: fetched_user})
end
+ test "it registers a new user and parses mentions in the bio" do
+ data1 = %{
+ "nickname" => "john",
+ "email" => "john@gmail.com",
+ "fullname" => "John Doe",
+ "bio" => "test",
+ "password" => "bear",
+ "confirm" => "bear"
+ }
+
+ {:ok, user1} = TwitterAPI.register_user(data1)
+
+ data2 = %{
+ "nickname" => "lain",
+ "email" => "lain@wired.jp",
+ "fullname" => "lain iwakura",
+ "bio" => "@john test",
+ "password" => "bear",
+ "confirm" => "bear"
+ }
+
+ {:ok, user2} = TwitterAPI.register_user(data2)
+
+ expected_text =
+ "@john test"
+
+ assert user2.bio == expected_text
+ end
+
@moduletag skip: "needs 'registrations_open: false' in config"
test "it registers a new user via invite token and returns the user." do
{:ok, token} = UserInviteToken.create_token()
diff --git a/test/web/twitter_api/views/activity_view_test.exs b/test/web/twitter_api/views/activity_view_test.exs
index 5cef06f88..bc36b0e90 100644
--- a/test/web/twitter_api/views/activity_view_test.exs
+++ b/test/web/twitter_api/views/activity_view_test.exs
@@ -47,7 +47,7 @@ test "a create activity with a note" do
"repeated" => false,
"statusnet_conversation_id" => convo_id,
"statusnet_html" =>
- "Hey @shp!",
+ "Hey @shp!",
"tags" => [],
"text" => "Hey @shp!",
"uri" => activity.data["object"]["id"],
diff --git a/test/web/twitter_api/views/user_view_test.exs b/test/web/twitter_api/views/user_view_test.exs
index 2c583c0d3..e69ca24a9 100644
--- a/test/web/twitter_api/views/user_view_test.exs
+++ b/test/web/twitter_api/views/user_view_test.exs
@@ -31,10 +31,10 @@ test "A user with emoji in username", %{user: user} do
expected =
" man"
- user = %{
- user
- | info: %{
- "source_data" => %{
+ user =
+ insert(:user, %{
+ info: %{
+ source_data: %{
"tag" => [
%{
"type" => "Emoji",
@@ -43,10 +43,10 @@ test "A user with emoji in username", %{user: user} do
}
]
}
- }
- }
+ },
+ name: ":karjalanpiirakka: man"
+ })
- user = %{user | name: ":karjalanpiirakka: man"}
represented = UserView.render("show.json", %{user: user})
assert represented["name_html"] == expected
end
@@ -103,7 +103,7 @@ test "A user" do
end
test "A user for a given other follower", %{user: user} do
- {:ok, follower} = UserBuilder.insert(%{following: [User.ap_followers(user)]})
+ follower = insert(:user, %{following: [User.ap_followers(user)]})
{:ok, user} = User.update_follower_count(user)
image = "http://localhost:4001/images/avi.png"
banner = "http://localhost:4001/images/banner.png"
@@ -186,7 +186,7 @@ test "A user that follows you", %{user: user} do
end
test "a user that is a moderator" do
- user = insert(:user, %{info: %{"is_moderator" => true}})
+ user = insert(:user, %{info: %{is_moderator: true}})
represented = UserView.render("show.json", %{user: user, for: user})
assert represented["rights"]["delete_others_notice"]
@@ -250,7 +250,7 @@ test "a user with mastodon fields" do
user =
insert(:user, %{
info: %{
- "source_data" => %{
+ source_data: %{
"attachment" =>
Enum.map(fields, fn field -> Map.put(field, "type", "PropertyValue") end)
}
diff --git a/test/web/web_finger/web_finger_test.exs b/test/web/web_finger/web_finger_test.exs
index 99bf210ea..32eff9b7c 100644
--- a/test/web/web_finger/web_finger_test.exs
+++ b/test/web/web_finger/web_finger_test.exs
@@ -2,6 +2,12 @@ defmodule Pleroma.Web.WebFingerTest do
use Pleroma.DataCase
alias Pleroma.Web.WebFinger
import Pleroma.Factory
+ import Tesla.Mock
+
+ setup do
+ mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
describe "host meta" do
test "returns a link to the xml lrdd" do
@@ -99,15 +105,15 @@ test "it gets the xrd endpoint for statusnet" do
describe "ensure_keys_present" do
test "it creates keys for a user and stores them in info" do
user = insert(:user)
- refute is_binary(user.info["keys"])
+ refute is_binary(user.info.keys)
{:ok, user} = WebFinger.ensure_keys_present(user)
- assert is_binary(user.info["keys"])
+ assert is_binary(user.info.keys)
end
test "it doesn't create keys if there already are some" do
- user = insert(:user, %{info: %{"keys" => "xxx"}})
+ user = insert(:user, %{info: %{keys: "xxx"}})
{:ok, user} = WebFinger.ensure_keys_present(user)
- assert user.info["keys"] == "xxx"
+ assert user.info.keys == "xxx"
end
end
end
diff --git a/test/web/websub/websub_test.exs b/test/web/websub/websub_test.exs
index 5914a37fc..47d1a88e1 100644
--- a/test/web/websub/websub_test.exs
+++ b/test/web/websub/websub_test.exs
@@ -10,6 +10,12 @@ defmodule Pleroma.Web.WebsubTest do
alias Pleroma.Web.Websub.{WebsubServerSubscription, WebsubClientSubscription}
import Pleroma.Factory
alias Pleroma.Web.Router.Helpers
+ import Tesla.Mock
+
+ setup do
+ mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
test "a verification of a request that is accepted" do
sub = insert(:websub_subscription)
@@ -26,8 +32,8 @@ test "a verification of a request that is accepted" do
assert String.to_integer(seconds) > 0
{:ok,
- %HTTPoison.Response{
- status_code: 200,
+ %Tesla.Env{
+ status: 200,
body: challenge
}}
end
@@ -41,8 +47,8 @@ test "a verification of a request that doesn't return 200" do
getter = fn _path, _headers, _options ->
{:ok,
- %HTTPoison.Response{
- status_code: 500,
+ %Tesla.Env{
+ status: 500,
body: ""
}}
end
@@ -99,7 +105,7 @@ def accepting_verifier(subscription) do
test "initiate a subscription for a given user and topic" do
subscriber = insert(:user)
- user = insert(:user, %{info: %{"topic" => "some_topic", "hub" => "some_hub"}})
+ user = insert(:user, %{info: %Pleroma.User.Info{topic: "some_topic", hub: "some_hub"}})
{:ok, websub} = Websub.subscribe(subscriber, user, &accepting_verifier/1)
assert websub.subscribers == [subscriber.ap_id]
@@ -113,12 +119,7 @@ test "initiate a subscription for a given user and topic" do
test "discovers the hub and canonical url" do
topic = "https://mastodon.social/users/lambadalambda.atom"
- getter = fn ^topic ->
- doc = File.read!("test/fixtures/lambadalambda.atom")
- {:ok, %{status_code: 200, body: doc}}
- end
-
- {:ok, discovered} = Websub.gather_feed_data(topic, getter)
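+ # The feed is now fetched through the mocked Tesla adapter set up above, so no getter is passed in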
+ {:ok, discovered} = Websub.gather_feed_data(topic)
expected = %{
"hub" => "https://mastodon.social/api/push",
@@ -158,7 +159,7 @@ test "calls the hub, requests topic" do
websub.id
)
- {:ok, %{status_code: 202}}
+ {:ok, %{status: 202}}
end
task = Task.async(fn -> Websub.request_subscription(websub, poster) end)
@@ -177,7 +178,7 @@ test "rejects the subscription if it can't be accepted" do
websub = insert(:websub_client_subscription, %{hub: hub, topic: topic})
poster = fn ^hub, {:form, _data}, _headers ->
- {:ok, %{status_code: 202}}
+ {:ok, %{status: 202}}
end
{:error, websub} = Websub.request_subscription(websub, poster, 1000)
@@ -186,7 +187,7 @@ test "rejects the subscription if it can't be accepted" do
websub = insert(:websub_client_subscription, %{hub: hub, topic: topic})
poster = fn ^hub, {:form, _data}, _headers ->
- {:ok, %{status_code: 400}}
+ {:ok, %{status: 400}}
end
{:error, websub} = Websub.request_subscription(websub, poster, 1000)
@@ -209,6 +210,7 @@ test "it renews subscriptions that have less than a day of time left" do
insert(:websub_client_subscription, %{
valid_until: NaiveDateTime.add(now, 2 * day),
topic: "http://example.org/still_good",
+ hub: "http://example.org/still_good",
state: "accepted"
})
@@ -216,6 +218,7 @@ test "it renews subscriptions that have less than a day of time left" do
insert(:websub_client_subscription, %{
valid_until: NaiveDateTime.add(now, day - 100),
topic: "http://example.org/needs_refresh",
+ hub: "http://example.org/needs_refresh",
state: "accepted"
})