Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into remake-remodel-dms

commit af6d01ec93 — 65 changed files with 431 additions and 139 deletions
@@ -274,7 +274,7 @@
 config :pleroma, :frontend_configurations,
 pleroma_fe: %{
 alwaysShowSubjectInput: true,
-background: "/static/aurora_borealis.jpg",
+background: "/images/city.jpg",
 collapseMessageWithSubject: false,
 disableChat: false,
 greentext: false,
@@ -511,7 +511,23 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
 - `discoverable`
 - `actor_type`

-- Response: none (code `200`)
+- Response:
+
+```json
+{"status": "success"}
+```
+
+```json
+{"errors":
+{"actor_type": "is invalid"},
+{"email": "has invalid format"},
+...
+}
+```
+
+```json
+{"error": "Unable to update user."}
+```

 ## `GET /api/pleroma/admin/reports`
docs/configuration/postgresql.md (new file, 31 lines)
@@ -0,0 +1,31 @@
+# Optimizing your PostgreSQL performance
+
+Pleroma performance depends to a large extent on good database performance. The default PostgreSQL settings are mostly fine, but often you can get better performance by changing a few settings.
+
+You can use [PGTune](https://pgtune.leopard.in.ua) to get recommendations for your setup. If you do, set the "Number of Connections" field to 20, as Pleroma will only use 10 concurrent connections anyway. If you don't, it will give you advice that might even hurt your performance.
+
+We also recommend not using the "Network Storage" option.
+
+## Example configurations
+
+Here are some configuration suggestions for PostgreSQL 10+.
+
+### 1GB RAM, 1 CPU
+```
+shared_buffers = 256MB
+effective_cache_size = 768MB
+maintenance_work_mem = 64MB
+work_mem = 13107kB
+```
+
+### 2GB RAM, 2 CPU
+```
+shared_buffers = 512MB
+effective_cache_size = 1536MB
+maintenance_work_mem = 128MB
+work_mem = 26214kB
+max_worker_processes = 2
+max_parallel_workers_per_gather = 1
+max_parallel_workers = 2
+```
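The "10 concurrent connections" figure in the new document corresponds to Pleroma's Ecto connection pool. As a sketch only (not part of this commit, and assuming the default pool size; adjust names and credentials to your own instance config), the relevant setting lives alongside the other Repo options:

```elixir
# Sketch: where the Ecto pool size is configured (typically
# config/prod.secret.exs). PGTune's "Number of Connections" should be
# set a bit above this value, as the new document recommends.
config :pleroma, Pleroma.Repo,
  username: "pleroma",
  password: "…",
  database: "pleroma",
  hostname: "localhost",
  pool_size: 10
```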
@@ -38,8 +38,8 @@ sudo apt install git build-essential postgresql postgresql-contrib
 * Download and add the Erlang repository:

 ```shell
-wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_1.0_all.deb
-sudo dpkg -i /tmp/erlang-solutions_1.0_all.deb
+wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_2.0_all.deb
+sudo dpkg -i /tmp/erlang-solutions_2.0_all.deb
 ```

 * Install Elixir and Erlang:
@@ -40,8 +40,8 @@ sudo apt install git build-essential postgresql postgresql-contrib

 * Erlangのリポジトリをダウンロードおよびインストールします。
 ```
-wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_1.0_all.deb
-sudo dpkg -i /tmp/erlang-solutions_1.0_all.deb
+wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_2.0_all.deb
+sudo dpkg -i /tmp/erlang-solutions_2.0_all.deb
 ```

 * ElixirとErlangをインストールします、
@@ -63,7 +63,7 @@ apt install postgresql-11-rum
 ```

 #### (Optional) Performance configuration
-For optimal performance, you may use [PGTune](https://pgtune.leopard.in.ua), don't forget to restart postgresql after editing the configuration
+It is encouraged to check [Optimizing your PostgreSQL performance](../configuration/postgresql.md) document, for tips on PostgreSQL tuning.

 ```sh tab="Alpine"
 rc-service postgresql restart
@@ -15,7 +15,7 @@ def run(["ls-packs" | args]) do
 {options, [], []} = parse_global_opts(args)

 url_or_path = options[:manifest] || default_manifest()
-manifest = fetch_manifest(url_or_path)
+manifest = fetch_and_decode(url_or_path)

 Enum.each(manifest, fn {name, info} ->
 to_print = [
@@ -42,12 +42,12 @@ def run(["get-packs" | args]) do

 url_or_path = options[:manifest] || default_manifest()

-manifest = fetch_manifest(url_or_path)
+manifest = fetch_and_decode(url_or_path)

 for pack_name <- pack_names do
 if Map.has_key?(manifest, pack_name) do
 pack = manifest[pack_name]
-src_url = pack["src"]
+src = pack["src"]

 IO.puts(
 IO.ANSI.format([
@@ -57,11 +57,11 @@ def run(["get-packs" | args]) do
 :normal,
 " from ",
 :underline,
-src_url
+src
 ])
 )

-binary_archive = Tesla.get!(client(), src_url).body
+{:ok, binary_archive} = fetch(src)
 archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()

 sha_status_text = ["SHA256 of ", :bright, pack_name, :normal, " source file is ", :bright]
@@ -74,8 +74,8 @@ def run(["get-packs" | args]) do
 raise "Bad SHA256 for #{pack_name}"
 end

-# The url specified in files should be in the same directory
-files_url =
+# The location specified in files should be in the same directory
+files_loc =
 url_or_path
 |> Path.dirname()
 |> Path.join(pack["files"])
@@ -88,11 +88,11 @@ def run(["get-packs" | args]) do
 :normal,
 " from ",
 :underline,
-files_url
+files_loc
 ])
 )

-files = Tesla.get!(client(), files_url).body |> Jason.decode!()
+files = fetch_and_decode(files_loc)

 IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))

@@ -237,16 +237,20 @@ def run(["gen-pack" | args]) do
 end
 end

-defp fetch_manifest(from) do
-Jason.decode!(
-if String.starts_with?(from, "http") do
-Tesla.get!(client(), from).body
-else
-File.read!(from)
+defp fetch_and_decode(from) do
+with {:ok, json} <- fetch(from) do
+Jason.decode!(json)
 end
-)
 end

+defp fetch("http" <> _ = from) do
+with {:ok, %{body: body}} <- Tesla.get(client(), from) do
+{:ok, body}
+end
+end
+
+defp fetch(path), do: File.read(path)
+
 defp parse_global_opts(args) do
 OptionParser.parse(
 args,
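For readability, the refactored manifest helpers from the hunks above assemble to roughly the following (a sketch; the surrounding task module and its existing `client/0` Tesla helper are omitted):

```elixir
# Decodes a manifest or files listing regardless of where it lives.
defp fetch_and_decode(from) do
  with {:ok, json} <- fetch(from) do
    Jason.decode!(json)
  end
end

# Remote manifests and packs are fetched over HTTP via Tesla...
defp fetch("http" <> _ = from) do
  with {:ok, %{body: body}} <- Tesla.get(client(), from) do
    {:ok, body}
  end
end

# ...while local paths are read straight from disk, which is what makes the
# new "install local emoji pack" test (later in this diff) possible.
defp fetch(path), do: File.read(path)
```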
@@ -24,10 +24,7 @@ def by_ap_id(query \\ Activity, ap_id) do

 @spec by_actor(query, String.t()) :: query
 def by_actor(query \\ Activity, actor) do
-from(
-activity in query,
-where: fragment("(?)->>'actor' = ?", activity.data, ^actor)
-)
+from(a in query, where: a.actor == ^actor)
 end

 @spec by_author(query, User.t()) :: query
@@ -63,7 +63,7 @@ def create_or_bump_for(activity, opts \\ []) do
 ap_id when is_binary(ap_id) and byte_size(ap_id) > 0 <- object.data["context"] do
 {:ok, conversation} = create_for_ap_id(ap_id)

-users = User.get_users_from_set(activity.recipients, false)
+users = User.get_users_from_set(activity.recipients, local_only: false)

 participations =
 Enum.map(users, fn user ->
@@ -499,7 +499,7 @@ defp download_archive(url, sha) do
 if Base.decode16!(sha) == :crypto.hash(:sha256, archive) do
 {:ok, archive}
 else
-{:error, :imvalid_checksum}
+{:error, :invalid_checksum}
 end
 end
 end
@@ -92,8 +92,9 @@ def for_user_query(user, opts \\ %{}) do
 |> join(:left, [n, a], object in Object,
 on:
 fragment(
-"(?->>'id') = COALESCE((? -> 'object'::text) ->> 'id'::text)",
+"(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
 object.data,
+a.data,
 a.data
 )
 )
@@ -224,18 +225,8 @@ def set_read_up_to(%{id: user_id} = user, id) do
 |> Marker.multi_set_last_read_id(user, "notifications")
 |> Repo.transaction()

-Notification
+for_user_query(user)
 |> where([n], n.id in ^notification_ids)
-|> join(:inner, [n], activity in assoc(n, :activity))
-|> join(:left, [n, a], object in Object,
-on:
-fragment(
-"(?->>'id') = COALESCE((? -> 'object'::text) ->> 'id'::text)",
-object.data,
-a.data
-)
-)
-|> preload([n, a, o], activity: {a, object: o})
 |> Repo.all()
 end

@@ -370,7 +361,8 @@ def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, lo
 when type in ["Create", "Like", "Announce", "Follow", "Move", "EmojiReact"] do
 potential_receiver_ap_ids = get_potential_receiver_ap_ids(activity)

-potential_receivers = User.get_users_from_set(potential_receiver_ap_ids, local_only)
+potential_receivers =
+User.get_users_from_set(potential_receiver_ap_ids, local_only: local_only)

 notification_enabled_ap_ids =
 potential_receiver_ap_ids
@@ -31,7 +31,7 @@ defp headers do
 {"x-content-type-options", "nosniff"},
 {"referrer-policy", referrer_policy},
 {"x-download-options", "noopen"},
-{"content-security-policy", csp_string() <> ";"}
+{"content-security-policy", csp_string()}
 ]

 if report_uri do
@@ -43,23 +43,46 @@ defp headers do
 ]
 }

-headers ++ [{"reply-to", Jason.encode!(report_group)}]
+[{"reply-to", Jason.encode!(report_group)} | headers]
 else
 headers
 end
 end

+static_csp_rules = [
+"default-src 'none'",
+"base-uri 'self'",
+"frame-ancestors 'none'",
+"style-src 'self' 'unsafe-inline'",
+"font-src 'self'",
+"manifest-src 'self'"
+]
+
+@csp_start [Enum.join(static_csp_rules, ";") <> ";"]
+
 defp csp_string do
 scheme = Config.get([Pleroma.Web.Endpoint, :url])[:scheme]
 static_url = Pleroma.Web.Endpoint.static_url()
 websocket_url = Pleroma.Web.Endpoint.websocket_url()
 report_uri = Config.get([:http_security, :report_uri])

-connect_src = "connect-src 'self' #{static_url} #{websocket_url}"
+img_src = "img-src 'self' data: blob:"
+media_src = "media-src 'self'"
+
+{img_src, media_src} =
+if Config.get([:media_proxy, :enabled]) &&
+!Config.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
+sources = get_proxy_and_attachment_sources()
+{[img_src, sources], [media_src, sources]}
+else
+{img_src, media_src}
+end
+
+connect_src = ["connect-src 'self' ", static_url, ?\s, websocket_url]

 connect_src =
 if Pleroma.Config.get(:env) == :dev do
-connect_src <> " http://localhost:3035/"
+[connect_src, " http://localhost:3035/"]
 else
 connect_src
 end
@@ -71,27 +94,46 @@ defp csp_string do
 "script-src 'self'"
 end

-main_part = [
-"default-src 'none'",
-"base-uri 'self'",
-"frame-ancestors 'none'",
-"img-src 'self' data: blob: https:",
-"media-src 'self' https:",
-"style-src 'self' 'unsafe-inline'",
-"font-src 'self'",
-"manifest-src 'self'",
-connect_src,
-script_src
-]
+report = if report_uri, do: ["report-uri ", report_uri, ";report-to csp-endpoint"]
+insecure = if scheme == "https", do: "upgrade-insecure-requests"

-report = if report_uri, do: ["report-uri #{report_uri}; report-to csp-endpoint"], else: []
-
-insecure = if scheme == "https", do: ["upgrade-insecure-requests"], else: []
-
-(main_part ++ report ++ insecure)
-|> Enum.join("; ")
+@csp_start
+|> add_csp_param(img_src)
+|> add_csp_param(media_src)
+|> add_csp_param(connect_src)
+|> add_csp_param(script_src)
+|> add_csp_param(insecure)
+|> add_csp_param(report)
+|> :erlang.iolist_to_binary()
 end
+
+defp get_proxy_and_attachment_sources do
+media_proxy_whitelist =
+Enum.reduce(Config.get([:media_proxy, :whitelist]), [], fn host, acc ->
+add_source(acc, host)
+end)
+
+upload_base_url =
+if Config.get([Pleroma.Upload, :base_url]),
+do: URI.parse(Config.get([Pleroma.Upload, :base_url])).host
+
+s3_endpoint =
+if Config.get([Pleroma.Upload, :uploader]) == Pleroma.Uploaders.S3,
+do: URI.parse(Config.get([Pleroma.Uploaders.S3, :public_endpoint])).host
+
+[]
+|> add_source(upload_base_url)
+|> add_source(s3_endpoint)
+|> add_source(media_proxy_whitelist)
+end
+
+defp add_source(iodata, nil), do: iodata
+defp add_source(iodata, source), do: [[?\s, source] | iodata]
+
+defp add_csp_param(csp_iodata, nil), do: csp_iodata
+
+defp add_csp_param(csp_iodata, param), do: [[param, ?;] | csp_iodata]

 def warn_if_disabled do
 unless Config.get([:http_security, :enabled]) do
 Logger.warn("
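To make the new assembly easier to follow, here is a self-contained sketch of the iodata approach used above (the sources are shortened, `add_csp_param/2` is reproduced as an anonymous function, and the example host is made up):

```elixir
# Sketch of the iodata-based CSP assembly from the diff above.
# Parameters are prepended to the iolist, so they come out in reverse order
# of addition, followed by the pre-joined static rules; the whole thing is
# flattened into a binary exactly once.
static_csp_rules = [
  "default-src 'none'",
  "base-uri 'self'",
  "frame-ancestors 'none'",
  "style-src 'self' 'unsafe-inline'",
  "font-src 'self'",
  "manifest-src 'self'"
]

csp_start = [Enum.join(static_csp_rules, ";") <> ";"]

add_csp_param = fn
  csp_iodata, nil -> csp_iodata
  csp_iodata, param -> [[param, ?;] | csp_iodata]
end

csp_start
|> add_csp_param.("img-src 'self' data: blob:")
|> add_csp_param.("connect-src 'self' https://example.social wss://example.social")
|> add_csp_param.(nil)  # e.g. `insecure` when the scheme is not https
|> :erlang.iolist_to_binary()
# => "connect-src 'self' https://example.social wss://example.social;
#     img-src 'self' data: blob:;default-src 'none';base-uri 'self';…"
```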
@@ -538,9 +538,10 @@ def update_as_admin_changeset(struct, params) do
 |> delete_change(:also_known_as)
 |> unique_constraint(:email)
 |> validate_format(:email, @email_regex)
+|> validate_inclusion(:actor_type, ["Person", "Service"])
 end

-@spec update_as_admin(%User{}, map) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
+@spec update_as_admin(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
 def update_as_admin(user, params) do
 params = Map.put(params, "password_confirmation", params["password"])
 changeset = update_as_admin_changeset(user, params)
@@ -561,7 +562,7 @@ def password_update_changeset(struct, params) do
 |> put_change(:password_reset_pending, false)
 end

-@spec reset_password(User.t(), map) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
+@spec reset_password(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
 def reset_password(%User{} = user, params) do
 reset_password(user, user, params)
 end
@@ -1208,8 +1209,9 @@ def increment_unread_conversation_count(conversation, %User{local: true} = user)

 def increment_unread_conversation_count(_, user), do: {:ok, user}

-@spec get_users_from_set([String.t()], boolean()) :: [User.t()]
-def get_users_from_set(ap_ids, local_only \\ true) do
+@spec get_users_from_set([String.t()], keyword()) :: [User.t()]
+def get_users_from_set(ap_ids, opts \\ []) do
+local_only = Keyword.get(opts, :local_only, true)
 criteria = %{ap_id: ap_ids, deactivated: false}
 criteria = if local_only, do: Map.put(criteria, :local, true), else: criteria
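A usage sketch of the new keyword-based signature, matching the call sites updated elsewhere in this commit (the `ap_ids` values are made-up examples; runnable in an IEx session on a Pleroma instance):

```elixir
# Sketch: get_users_from_set/2 now takes a keyword list instead of a bare
# boolean; `local_only` still defaults to true.
ap_ids = ["https://example.com/users/alice", "https://remote.example/users/bob"]

local_users = Pleroma.User.get_users_from_set(ap_ids)
all_users = Pleroma.User.get_users_from_set(ap_ids, local_only: false)
```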
@@ -1618,12 +1620,19 @@ def html_filter_policy(_), do: Pleroma.Config.get([:markup, :scrub_policy])
 def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)

 def get_or_fetch_by_ap_id(ap_id) do
-user = get_cached_by_ap_id(ap_id)
+cached_user = get_cached_by_ap_id(ap_id)

-if !is_nil(user) and !needs_update?(user) do
+maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id)
+
+case {cached_user, maybe_fetched_user} do
+{_, {:ok, %User{} = user}} ->
 {:ok, user}
-else
-fetch_by_ap_id(ap_id)
+
+{%User{} = user, _} ->
+{:ok, user}
+
+_ ->
+{:error, :not_found}
 end
 end
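The resulting behaviour, sketched against the cases exercised by the new tests later in this diff (the actor URL is taken from those tests):

```elixir
# Sketch: outcomes of the refactored get_or_fetch_by_ap_id/1.
case Pleroma.User.get_or_fetch_by_ap_id("https://mastodon.example.org/users/raymoo") do
  # A fresh cached user, a successfully refetched user, or a stale user
  # whose refetch failed all come back as {:ok, %User{}}.
  {:ok, %Pleroma.User{} = user} ->
    user

  # Neither cached nor fetchable: now an explicit error instead of a fetch result.
  {:error, :not_found} ->
    nil
end
```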
@@ -545,14 +545,27 @@ def fetch_latest_activity_id_for_context(context, opts \\ %{}) do
 |> Repo.one()
 end

-@spec fetch_public_activities(map(), Pagination.type()) :: [Activity.t()]
-def fetch_public_activities(opts \\ %{}, pagination \\ :keyset) do
+@spec fetch_public_or_unlisted_activities(map(), Pagination.type()) :: [Activity.t()]
+def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do
 opts = Map.drop(opts, ["user"])

-[Constants.as_public()]
-|> fetch_activities_query(opts)
-|> restrict_unlisted()
-|> Pagination.fetch_paginated(opts, pagination)
+query = fetch_activities_query([Constants.as_public()], opts)
+
+query =
+if opts["restrict_unlisted"] do
+restrict_unlisted(query)
+else
+query
+end
+
+Pagination.fetch_paginated(query, opts, pagination)
+end
+
+@spec fetch_public_activities(map(), Pagination.type()) :: [Activity.t()]
+def fetch_public_activities(opts \\ %{}, pagination \\ :keyset) do
+opts
+|> Map.put("restrict_unlisted", true)
+|> fetch_public_or_unlisted_activities(pagination)
 end

 @valid_visibilities ~w[direct unlisted public private]
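A usage sketch of the split (assuming the usual `alias Pleroma.Web.ActivityPub.ActivityPub` and a `user` in scope; the option keys are strings, and the map contents mirror the feed controller change below):

```elixir
# Sketch: public + unlisted (what the atom feed now asks for), versus
# public only (the old behaviour, now opting in via "restrict_unlisted").
opts = %{"type" => ["Create"], "actor_id" => user.ap_id}

public_and_unlisted = ActivityPub.fetch_public_or_unlisted_activities(opts)
public_only = ActivityPub.fetch_public_activities(opts)
```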
@@ -1165,7 +1178,7 @@ def fetch_favourites(user, params \\ %{}, pagination \\ :keyset) do
 |> Activity.with_joined_object()
 |> Object.with_joined_activity()
 |> select([_like, object, activity], %{activity | object: object})
-|> order_by([like, _, _], desc: like.id)
+|> order_by([like, _, _], desc_nulls_last: like.id)
 |> Pagination.fetch_paginated(
 Map.merge(params, %{"skip_order" => true}),
 pagination,
@@ -1055,10 +1055,14 @@ def add_hashtags(object) do
 Map.put(object, "tag", tags)
 end

+# TODO These should be added on our side on insertion, it doesn't make much
+# sense to regenerate these all the time
 def add_mention_tags(object) do
-{enabled_receivers, disabled_receivers} = Utils.get_notified_from_object(object)
-potential_receivers = enabled_receivers ++ disabled_receivers
-mentions = Enum.map(potential_receivers, &build_mention_tag/1)
+to = object["to"] || []
+cc = object["cc"] || []
+mentioned = User.get_users_from_set(to ++ cc, local_only: false)
+
+mentions = Enum.map(mentioned, &build_mention_tag/1)

 tags = object["tag"] || []
 Map.put(object, "tag", tags ++ mentions)
@@ -693,7 +693,7 @@ def update_user_credentials(
 %{assigns: %{user: admin}} = conn,
 %{"nickname" => nickname} = params
 ) do
-with {_, user} <- {:user, User.get_cached_by_nickname(nickname)},
+with {_, %User{} = user} <- {:user, User.get_cached_by_nickname(nickname)},
 {:ok, _user} <-
 User.update_as_admin(user, params) do
 ModerationLog.insert_log(%{
@@ -715,11 +715,12 @@ def update_user_credentials(
 json(conn, %{status: "success"})
 else
 {:error, changeset} ->
-{_, {error, _}} = Enum.at(changeset.errors, 0)
-json(conn, %{error: "New password #{error}."})
+errors = Map.new(changeset.errors, fn {key, {error, _}} -> {key, error} end)
+
+json(conn, %{errors: errors})

 _ ->
-json(conn, %{error: "Unable to change password."})
+json(conn, %{error: "Unable to update user."})
 end
 end
@@ -56,7 +56,7 @@ def feed(conn, %{"nickname" => nickname} = params) do
 "actor_id" => user.ap_id
 }
 |> put_if_exist("max_id", params["max_id"])
-|> ActivityPub.fetch_public_activities()
+|> ActivityPub.fetch_public_or_unlisted_activities()

 conn
 |> put_resp_content_type("application/#{format}+xml")
@@ -81,7 +81,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do

 plug(
 RateLimiter,
-[name: :relation_id_action, params: ["id", "uri"]] when action in @relationship_actions
+[name: :relation_id_action, params: [:id, :uri]] when action in @relationship_actions
 )

 plug(RateLimiter, [name: :relations_actions] when action in @relationship_actions)
@@ -84,13 +84,13 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do

 plug(
 RateLimiter,
-[name: :status_id_action, bucket_name: "status_id_action:reblog_unreblog", params: ["id"]]
+[name: :status_id_action, bucket_name: "status_id_action:reblog_unreblog", params: [:id]]
 when action in ~w(reblog unreblog)a
 )

 plug(
 RateLimiter,
-[name: :status_id_action, bucket_name: "status_id_action:fav_unfav", params: ["id"]]
+[name: :status_id_action, bucket_name: "status_id_action:fav_unfav", params: [:id]]
 when action in ~w(favourite unfavourite)a
 )

@@ -182,12 +182,14 @@ defp do_render("show.json", %{user: user} = opts) do
 bot = user.actor_type in ["Application", "Service"]

 emojis =
-Enum.map(user.emoji, fn {shortcode, url} ->
+Enum.map(user.emoji, fn {shortcode, raw_url} ->
+url = MediaProxy.url(raw_url)
+
 %{
-"shortcode" => shortcode,
-"url" => url,
-"static_url" => url,
-"visible_in_picker" => false
+shortcode: shortcode,
+url: url,
+static_url: url,
+visible_in_picker: false
 }
 end)

@@ -106,7 +106,7 @@ def download(%{body_params: %{url: url, name: name} = params} = conn, _) do
 |> put_status(:internal_server_error)
 |> json(%{error: "The requested instance does not support sharing emoji packs"})

-{:error, :imvalid_checksum} ->
+{:error, :invalid_checksum} ->
 conn
 |> put_status(:internal_server_error)
 |> json(%{error: "SHA256 for the pack doesn't match the one sent by the server"})
@@ -16,6 +16,8 @@ defmodule Pleroma.Workers.Cron.ClearOauthTokenWorker do
 def perform(_opts, _job) do
 if Config.get([:oauth2, :clean_expired_tokens], false) do
 Token.delete_expired_tokens()
+else
+:ok
 end
 end
 end
@@ -37,6 +37,8 @@ def perform(_opts, _job) do
 )
 |> Repo.all()
 |> send_emails
+else
+:ok
 end
 end

@@ -55,7 +55,11 @@ def perform(_args, _job) do
 |> Repo.all()
 |> Enum.map(&Pleroma.Emails.NewUsersDigestEmail.new_users(&1, users_and_statuses))
 |> Enum.each(&Pleroma.Emails.Mailer.deliver/1)
-end
+else
+:ok
+end
+else
+:ok
 end
 end
 end
@@ -23,6 +23,8 @@ defmodule Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker do
 def perform(_opts, _job) do
 if Config.get([ActivityExpiration, :enabled]) do
 Enum.each(ActivityExpiration.due_expirations(@interval), &delete_activity/1)
+else
+:ok
 end
 end

priv/repo/migrations/20200526144426_add_apps_indexes.exs (new file, 7 lines)
@@ -0,0 +1,7 @@
+defmodule Pleroma.Repo.Migrations.AddAppsIndexes do
+use Ecto.Migration
+
+def change do
+create(index(:apps, [:client_id, :client_secret]))
+end
+end
@@ -0,0 +1,8 @@
+defmodule Pleroma.Repo.Migrations.ChangeNotificationUserIndex do
+use Ecto.Migration
+
+def change do
+drop_if_exists(index(:notifications, [:user_id]))
+create_if_not_exists(index(:notifications, [:user_id, "id desc nulls last"]))
+end
+end
priv/static/adminfe/chunk-3384.d50ed383.css (new file, binary, not shown)
priv/static/adminfe/chunk-e458.6c0703cb.css (new file, binary, not shown)
@@ -1 +1 @@
-<!DOCTYPE html><html><head><meta charset=utf-8><meta http-equiv=X-UA-Compatible content="IE=edge,chrome=1"><meta name=renderer content=webkit><meta name=viewport content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=no"><title>Admin FE</title><link rel="shortcut icon" href=favicon.ico><link href=chunk-elementUI.1abbc9b8.css rel=stylesheet><link href=chunk-libs.686b5876.css rel=stylesheet><link href=app.796ca6d4.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=static/js/runtime.1b4f6ce0.js></script><script type=text/javascript src=static/js/chunk-elementUI.fba0efec.js></script><script type=text/javascript src=static/js/chunk-libs.b8c453ab.js></script><script type=text/javascript src=static/js/app.203f69f8.js></script></body></html>
+<!DOCTYPE html><html><head><meta charset=utf-8><meta http-equiv=X-UA-Compatible content="IE=edge,chrome=1"><meta name=renderer content=webkit><meta name=viewport content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=no"><title>Admin FE</title><link rel="shortcut icon" href=favicon.ico><link href=chunk-elementUI.1abbc9b8.css rel=stylesheet><link href=chunk-libs.686b5876.css rel=stylesheet><link href=app.796ca6d4.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=static/js/runtime.b08eb412.js></script><script type=text/javascript src=static/js/chunk-elementUI.fba0efec.js></script><script type=text/javascript src=static/js/chunk-libs.b8c453ab.js></script><script type=text/javascript src=static/js/app.0146039c.js></script></body></html>
priv/static/adminfe/static/js/app.0146039c.js and app.0146039c.js.map (new files, binary, not shown)
priv/static/adminfe/static/js/chunk-3384.b2ebeeca.js and chunk-3384.b2ebeeca.js.map (new files, binary, not shown)
priv/static/adminfe/static/js/chunk-7e30.ec42e302.js and chunk-7e30.ec42e302.js.map (new files, binary, not shown)
priv/static/adminfe/static/js/chunk-e458.bb460d81.js and chunk-e458.bb460d81.js.map (new files, binary, not shown)
test/instance_static/local_pack/files.json (new file, 3 lines)
@@ -0,0 +1,3 @@
+{
+"blank": "blank.png"
+}
test/instance_static/local_pack/manifest.json (new file, 10 lines)
@@ -0,0 +1,10 @@
+{
+"local": {
+"src_sha256": "384025A1AC6314473863A11AC7AB38A12C01B851A3F82359B89B4D4211D3291D",
+"src": "test/fixtures/emoji/packs/blank.png.zip",
+"license": "Apache 2.0",
+"homepage": "https://example.com",
+"files": "files.json",
+"description": "Some local pack"
+}
+}
@@ -454,8 +454,7 @@ test "it sets all notifications as read up to a specified notification ID" do
 status: "hey again @#{other_user.nickname}!"
 })

-[n2, n1] = notifs = Notification.for_user(other_user)
-assert length(notifs) == 2
+[n2, n1] = Notification.for_user(other_user)

 assert n2.id > n1.id

@@ -464,7 +463,9 @@ test "it sets all notifications as read up to a specified notification ID" do
 status: "hey yet again @#{other_user.nickname}!"
 })

-Notification.set_read_up_to(other_user, n2.id)
+[_, read_notification] = Notification.set_read_up_to(other_user, n2.id)
+
+assert read_notification.activity.object

 [n3, n2, n1] = Notification.for_user(other_user)

@@ -972,7 +973,9 @@ test "it returns notifications for muted user without notifications" do

 {:ok, _activity} = CommonAPI.post(muted, %{status: "hey @#{user.nickname}"})

-assert length(Notification.for_user(user)) == 1
+[notification] = Notification.for_user(user)
+
+assert notification.activity.object
 end

 test "it doesn't return notifications for muted user with notifications" do
@@ -68,6 +68,7 @@ test "with a bcrypt hash, it updates to a pkbdf2 hash", %{conn: conn} do
 assert "$pbkdf2" <> _ = user.password_hash
 end

+@tag :skip_on_mac
 test "with a crypt hash, it updates to a pkbdf2 hash", %{conn: conn} do
 user =
 insert(:user,
@@ -34,7 +34,8 @@ def user_factory do
 last_digest_emailed_at: NaiveDateTime.utc_now(),
 last_refreshed_at: NaiveDateTime.utc_now(),
 notification_settings: %Pleroma.User.NotificationSetting{},
-multi_factor_authentication_settings: %Pleroma.MFA.Settings{}
+multi_factor_authentication_settings: %Pleroma.MFA.Settings{},
+ap_enabled: true
 }

 %{
@@ -73,6 +73,19 @@ test "download pack from default manifest" do
 on_exit(fn -> File.rm_rf!("test/instance_static/emoji/finmoji") end)
 end

+test "install local emoji pack" do
+assert capture_io(fn ->
+Emoji.run([
+"get-packs",
+"local",
+"--manifest",
+"test/instance_static/local_pack/manifest.json"
+])
+end) =~ "Writing pack.json for"
+
+on_exit(fn -> File.rm_rf!("test/instance_static/emoji/local") end)
+end
+
 test "pack not found" do
 mock(fn
 %{
@@ -586,6 +586,26 @@ test "updates an existing user, if stale" do

 refute user.last_refreshed_at == orig_user.last_refreshed_at
 end

+@tag capture_log: true
+test "it returns the old user if stale, but unfetchable" do
+a_week_ago = NaiveDateTime.add(NaiveDateTime.utc_now(), -604_800)
+
+orig_user =
+insert(
+:user,
+local: false,
+nickname: "admin@mastodon.example.org",
+ap_id: "http://mastodon.example.org/users/raymoo",
+last_refreshed_at: a_week_ago
+)
+
+assert orig_user.last_refreshed_at == a_week_ago
+
+{:ok, user} = User.get_or_fetch_by_ap_id("http://mastodon.example.org/users/raymoo")
+
+assert user.last_refreshed_at == orig_user.last_refreshed_at
+end
+
 end

 test "returns an ap_id for a user" do
@@ -451,6 +451,36 @@ test "it inserts an incoming activity into the database", %{conn: conn} do
 assert Activity.get_by_ap_id(data["id"])
 end

+@tag capture_log: true
+test "it inserts an incoming activity into the database" <>
+"even if we can't fetch the user but have it in our db",
+%{conn: conn} do
+user =
+insert(:user,
+ap_id: "https://mastodon.example.org/users/raymoo",
+ap_enabled: true,
+local: false,
+last_refreshed_at: nil
+)
+
+data =
+File.read!("test/fixtures/mastodon-post-activity.json")
+|> Poison.decode!()
+|> Map.put("actor", user.ap_id)
+|> put_in(["object", "attridbutedTo"], user.ap_id)
+
+conn =
+conn
+|> assign(:valid_signature, true)
+|> put_req_header("content-type", "application/activity+json")
+|> post("/inbox", data)
+
+assert "ok" == json_response(conn, 200)
+
+ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
+assert Activity.get_by_ap_id(data["id"])
+end
+
 test "it clears `unreachable` federation status of the sender", %{conn: conn} do
 data = File.read!("test/fixtures/mastodon-post-activity.json") |> Poison.decode!()

@@ -1094,7 +1094,10 @@ test "it turns mentions into tags" do
 {:ok, activity} =
 CommonAPI.post(user, %{status: "hey, @#{other_user.nickname}, how are ya? #2hu"})

+with_mock Pleroma.Notification,
+get_notified_from_activity: fn _, _ -> [] end do
 {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data)

 object = modified["object"]

 expected_mention = %{
@@ -1109,9 +1112,11 @@ test "it turns mentions into tags" do
 "name" => "#2hu"
 }

+refute called(Pleroma.Notification.get_notified_from_activity(:_, :_))
 assert Enum.member?(object["tag"], expected_tag)
 assert Enum.member?(object["tag"], expected_mention)
 end
+end

 test "it adds the sensitive property" do
 user = insert(:user)
@@ -3191,8 +3191,12 @@ test "returns 403 if requested by a non-admin" do
 end

 describe "PATCH /users/:nickname/credentials" do
-test "changes password and email", %{conn: conn, admin: admin} do
+setup do
 user = insert(:user)
+[user: user]
+end
+
+test "changes password and email", %{conn: conn, admin: admin, user: user} do
 assert user.password_reset_pending == false

 conn =
@@ -3222,9 +3226,7 @@ test "changes password and email", %{conn: conn, admin: admin} do
 "@#{admin.nickname} forced password reset for users: @#{user.nickname}"
 end

-test "returns 403 if requested by a non-admin" do
-user = insert(:user)
+test "returns 403 if requested by a non-admin", %{user: user} do

 conn =
 build_conn()
 |> assign(:user, user)
@@ -3236,6 +3238,31 @@ test "returns 403 if requested by a non-admin" do

 assert json_response(conn, :forbidden)
 end

+test "changes actor type from permitted list", %{conn: conn, user: user} do
+assert user.actor_type == "Person"
+
+assert patch(conn, "/api/pleroma/admin/users/#{user.nickname}/credentials", %{
+"actor_type" => "Service"
+})
+|> json_response(200) == %{"status" => "success"}
+
+updated_user = User.get_by_id(user.id)
+
+assert updated_user.actor_type == "Service"
+
+assert patch(conn, "/api/pleroma/admin/users/#{user.nickname}/credentials", %{
+"actor_type" => "Application"
+})
+|> json_response(200) == %{"errors" => %{"actor_type" => "is invalid"}}
+end
+
+test "update non existing user", %{conn: conn} do
+assert patch(conn, "/api/pleroma/admin/users/non-existing/credentials", %{
+"password" => "new_password"
+})
+|> json_response(200) == %{"error" => "Unable to update user."}
+end
 end

 describe "PATCH /users/:nickname/force_password_reset" do
@@ -11,13 +11,14 @@ defmodule Pleroma.Web.Feed.UserControllerTest do
 alias Pleroma.Config
 alias Pleroma.Object
 alias Pleroma.User
+alias Pleroma.Web.CommonAPI

 setup do: clear_config([:instance, :federating], true)

 describe "feed" do
 setup do: clear_config([:feed])

-test "gets a feed", %{conn: conn} do
+test "gets an atom feed", %{conn: conn} do
 Config.put(
 [:feed, :post_title],
 %{max_length: 10, omission: "..."}
@@ -157,6 +158,29 @@ test "returns 404 for a missing feed", %{conn: conn} do

 assert response(conn, 404)
 end

+test "returns feed with public and unlisted activities", %{conn: conn} do
+user = insert(:user)
+
+{:ok, _} = CommonAPI.post(user, %{status: "public", visibility: "public"})
+{:ok, _} = CommonAPI.post(user, %{status: "direct", visibility: "direct"})
+{:ok, _} = CommonAPI.post(user, %{status: "unlisted", visibility: "unlisted"})
+{:ok, _} = CommonAPI.post(user, %{status: "private", visibility: "private"})
+
+resp =
+conn
+|> put_req_header("accept", "application/atom+xml")
+|> get(user_feed_path(conn, :feed, user.nickname))
+|> response(200)
+
+activity_titles =
+resp
+|> SweetXml.parse()
+|> SweetXml.xpath(~x"//entry/title/text()"l)
+|> Enum.sort()
+
+assert activity_titles == ['public', 'unlisted']
+end
 end

 # Note: see ActivityPubControllerTest for JSON format tests

@@ -54,10 +54,10 @@ test "Represent a user account" do
 header_static: "http://localhost:4001/images/banner.png",
 emojis: [
 %{
-"static_url" => "/file.png",
-"url" => "/file.png",
-"shortcode" => "karjalanpiirakka",
-"visible_in_picker" => false
+static_url: "/file.png",
+url: "/file.png",
+shortcode: "karjalanpiirakka",
+visible_in_picker: false
 }
 ],
 fields: [],
@@ -493,4 +493,31 @@ test "shows non-zero when historical unapproved requests are present" do
 AccountView.render("show.json", %{user: user, for: user})
 end
 end

+test "uses mediaproxy urls when it's enabled" do
+clear_config([:media_proxy, :enabled], true)
+
+user =
+insert(:user,
+avatar: %{"url" => [%{"href" => "https://evil.website/avatar.png"}]},
+banner: %{"url" => [%{"href" => "https://evil.website/banner.png"}]},
+emoji: %{"joker_smile" => "https://evil.website/society.png"}
+)
+
+AccountView.render("show.json", %{user: user})
+|> Enum.all?(fn
+{key, url} when key in [:avatar, :avatar_static, :header, :header_static] ->
+String.starts_with?(url, Pleroma.Web.base_url())
+
+{:emojis, emojis} ->
+Enum.all?(emojis, fn %{url: url, static_url: static_url} ->
+String.starts_with?(url, Pleroma.Web.base_url()) &&
+String.starts_with?(static_url, Pleroma.Web.base_url())
+end)
+
+_ ->
+true
+end)
+|> assert()
+end
 end

@@ -124,15 +124,7 @@ test "encoded url are tried to match for proxy as `conn.request_path` encodes th
 end

 test "uses the configured base_url" do
-base_url = Pleroma.Config.get([:media_proxy, :base_url])
-
-if base_url do
-on_exit(fn ->
-Pleroma.Config.put([:media_proxy, :base_url], base_url)
-end)
-end
-
-Pleroma.Config.put([:media_proxy, :base_url], "https://cache.pleroma.social")
+clear_config([:media_proxy, :base_url], "https://cache.pleroma.social")

 url = "https://pleroma.soykaf.com/static/logo.png"
 encoded = url(url)
@@ -213,8 +205,8 @@ test "mediaproxy whitelist" do
 end

 test "does not change whitelisted urls" do
-Pleroma.Config.put([:media_proxy, :whitelist], ["mycdn.akamai.com"])
-Pleroma.Config.put([:media_proxy, :base_url], "https://cache.pleroma.social")
+clear_config([:media_proxy, :whitelist], ["mycdn.akamai.com"])
+clear_config([:media_proxy, :base_url], "https://cache.pleroma.social")

 media_url = "https://mycdn.akamai.com"