forked from AkkomaGang/akkoma
Compare commits: 5992e8bb16...f101886709 (48 commits)
Commits:

f101886709 · 09fa7227f6 · d5b0720596 · 940792f8ba · 3c72b48a05 · 6475cf127e ·
a8a231c5b2 · 2901fda29c · bd14440386 · b312edac4c · 3bb31117e6 · 2c5c531c35 ·
3ff0f46b9f · 4ff5293093 · 4f0cb61782 · 5fdb5d69d2 · f66135ed08 · dc34328f15 ·
13e2a811ec · 1a4238bf98 · c3069b9478 · 51f09531c4 · 962847fdc3 · 83aab0859a ·
eb2b0d26e4 · 91870590ec · c442877c25 · 16af0bad55 · 16ee6ed500 · babf5df0e7 ·
5767f59294 · 72ce0b7759 · 0cf9b44179 · 3cf335c4d0 · 1556e2be8e · 629077dce4 ·
50256af6f6 · c5d36d9679 · fb4c5b97c7 · a715cf4b3c · 693a6486da · 4e353f0335 ·
3b197503d2 · c0b2bba55e · 4b765b1886 · cba2c5725f · 3947012691 · d61b7d4b49
@@ -12,9 +12,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## Added
- Implement [FEP-67ff](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md) (federation documentation)
- Meilisearch: it is now possible to use separate keys for search and admin actions
- New standalone `prune_orphaned_activities` mix task with configurable batch limit
- The `prune_objects` mix task now accepts a `--limit` parameter for initial object pruning

## Fixed
- Meilisearch: order of results returned from our REST API now actually matches how Meilisearch ranks results
- Emoji are now federated as anonymous objects, fixing issues with some strict servers, e.g. rejecting remote emoji reactions

## Changed
- Refactored Rich Media to cache the content in the database. Fetching operations that could block status rendering have been eliminated.
@@ -112,8 +116,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

- Akkoma API is now documented
- ability to auto-approve follow requests from users you are already following
- The SimplePolicy MRF can now strip user backgrounds from selected remote hosts
- New standalone `prune_orphaned_activities` mix task with configurable batch limit
- The `prune_objects` mix task now accepts a `--limit` parameter for initial object pruning

## Changed
- OTP builds are now built on erlang OTP26
@@ -11,4 +11,4 @@ echo "-- Running migrations..."

mix ecto.migrate

echo "-- Starting!"
mix phx.server
elixir --erl "+sbwt none +sbwtdcpu none +sbwtdio none" -S mix phx.server
@@ -4,12 +4,12 @@

1. Stop the Akkoma service.
2. Go to the working directory of Akkoma (default is `/opt/akkoma`)
3. Run[¹] `sudo -Hu postgres pg_dump -d akkoma --format=custom -f </path/to/backup_location/akkoma.pgdump>` (make sure the postgres user has write access to the destination file)
4. Copy `akkoma.pgdump`, `config/prod.secret.exs`[²], `config/setup_db.psql` (if still available) and the `uploads` folder to your backup destination. If you have other modifications, copy those changes too.
3. Run `sudo -Hu postgres pg_dump -d akkoma --format=custom -f </path/to/backup_location/akkoma.pgdump>`[¹] (make sure the postgres user has write access to the destination file)
4. Copy `akkoma.pgdump`, `config/config.exs`[²], the `uploads` folder, and the [static directory](../configuration/static_dir.md) to your backup destination. If you have other modifications, copy those changes too.
5. Restart the Akkoma service.

[¹]: We assume the database name is "akkoma". If not, you can find the correct name in your config files.
[²]: If you've installed using OTP, you need `config/config.exs` instead of `config/prod.secret.exs`.
[¹]: We assume the database name is "akkoma". If not, you can find the correct name in your configuration files.
[²]: If you have a from-source installation, you need `config/prod.secret.exs` instead of `config/config.exs`. The `config/config.exs` file also exists, but for from-source installations it only contains the default values and is tracked by Git, so you don't need to back it up.

## Restore/Move
@@ -17,19 +17,16 @@

2. Stop the Akkoma service.
3. Go to the working directory of Akkoma (default is `/opt/akkoma`)
4. Copy the above mentioned files back to their original position.
5. Drop the existing database and user if restoring in-place[¹]. `sudo -Hu postgres psql -c 'DROP DATABASE akkoma;'` `sudo -Hu postgres psql -c 'DROP USER akkoma;'`
6. Restore the database schema and akkoma role using either of the following options
  * You can use the original `setup_db.psql` if you have it[²]: `sudo -Hu postgres psql -f config/setup_db.psql`.
  * Or recreate the database and user yourself (replace the password with the one you find in the config file): `sudo -Hu postgres psql -c "CREATE USER akkoma WITH ENCRYPTED PASSWORD '<database-password-which-you-can-find-in-your-config-file>'; CREATE DATABASE akkoma OWNER akkoma;"`.
5. Drop the existing database and user[¹]. `sudo -Hu postgres psql -c 'DROP DATABASE akkoma;'` `sudo -Hu postgres psql -c 'DROP USER akkoma;'`
6. Restore the database schema and akkoma role[¹] (replace the password with the one you find in the configuration file): `sudo -Hu postgres psql -c "CREATE USER akkoma WITH ENCRYPTED PASSWORD '<database-password-which-you-can-find-in-your-configuration-file>';"` `sudo -Hu postgres psql -c "CREATE DATABASE akkoma OWNER akkoma;"`.
7. Now restore the Akkoma instance's data into the empty database schema[¹]: `sudo -Hu postgres pg_restore -d akkoma -v -1 </path/to/backup_location/akkoma.pgdump>`
8. If you installed a newer Akkoma version, you should run `MIX_ENV=prod mix ecto.migrate`[³]. This task performs database migrations, if there were any.
8. If you installed a newer Akkoma version, you should run the database migrations: `./bin/pleroma_ctl migrate`[²].
9. Restart the Akkoma service.
10. Run `sudo -Hu postgres vacuumdb --all --analyze-in-stages`. This will quickly generate the statistics so that postgres can properly plan queries.
11. If setting up on a new server, configure Nginx by using the `installation/akkoma.nginx` config sample or reference the Akkoma installation guide for your OS, which contains the Nginx configuration instructions.
11. If setting up on a new server, configure Nginx by using the `installation/nginx/akkoma.nginx` configuration sample or reference the Akkoma installation guide, which contains the Nginx configuration instructions.

[¹]: We assume the database name and user are both "akkoma". If not, you can find the correct name in your config files.
[²]: You can recreate the `config/setup_db.psql` by running the `mix pleroma.instance gen` task again. You can ignore most of the questions, but make the database user, name, and password the same as found in your backed-up config file. This will also create a new `config/generated_config.exs` file, which you may delete as it is not needed.
[³]: Prefix with `MIX_ENV=prod` to run it using the production config file.
[¹]: We assume the database name and user are both "akkoma". If not, you can find the correct name in your configuration files.
[²]: If you have a from-source installation, the command is `MIX_ENV=prod mix ecto.migrate`. Note that we prefix with `MIX_ENV=prod` to use the `config/prod.secret.exs` configuration file.

## Remove
@@ -6,7 +6,7 @@ as soon as the post is received by your instance.

## Nginx

The following are excerpts from the [suggested nginx config](../../../installation/nginx/akkoma.nginx) that demonstrate the necessary config for the media proxy to work.
The following are excerpts from the [suggested nginx config](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/installation/nginx/akkoma.nginx) that demonstrate the necessary config for the media proxy to work.

A `proxy_cache_path` must be defined, for example:
@@ -19,6 +19,9 @@ Environment="MIX_ENV=prod"

; Don't listen epmd on 0.0.0.0
Environment="ERL_EPMD_ADDRESS=127.0.0.1"

; Don't busy wait
Environment="ERL_AFLAGS=+sbwt none +sbwtdcpu none +sbwtdio none"

; Make sure that all paths fit your installation.
; Path to the home directory of the user running the Akkoma service.
Environment="HOME=/var/lib/akkoma"
@@ -12,26 +12,22 @@ example.tld {
    output file /var/log/caddy/akkoma.log
  }

  encode gzip

  # this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
  # and `localhost.` resolves to [::0] on some systems: see issue #930
  reverse_proxy 127.0.0.1:4000

  # Uncomment if using a separate media subdomain
  #@mediaproxy path /media/* /proxy/*
  #handle @mediaproxy {
  #  redir https://media.example.tld{uri} permanent
  #}
  @mediaproxy path /media/* /proxy/*
  handle @mediaproxy {
    redir https://media.example.tld{uri} permanent
  }
}

# Uncomment if using a separate media subdomain
#media.example.tld {
#  @mediaproxy path /media/* /proxy/*
#  reverse_proxy @mediaproxy 127.0.0.1:4000 {
#    transport http {
#      response_header_timeout 10s
#      read_timeout 15s
#    }
#  }
#}
media.example.tld {
  @mediaproxy path /media/* /proxy/*
  reverse_proxy @mediaproxy 127.0.0.1:4000 {
    transport http {
      response_header_timeout 10s
      read_timeout 15s
    }
  }
}
@@ -1,23 +1,43 @@
#!/sbin/openrc-run
supervisor=supervise-daemon
command_user=akkoma:akkoma
command_background=1
# Ask process to terminate within 30 seconds, otherwise kill it
retry="SIGTERM/30/SIGKILL/5"
pidfile="/var/run/akkoma.pid"
directory=/opt/akkoma
healthcheck_delay=60
healthcheck_timer=30
no_new_privs="yes"
pidfile="/var/run/akkoma.pid"

: ${akkoma_port:-4000}
# Ask process first to terminate itself within 60s, otherwise kill it
retry="SIGTERM/60/SIGKILL/5"

# Needs OpenRC >= 0.42
#respawn_max=0
#respawn_delay=5
# if you really want to use start-stop-daemon instead,
# also put the following in the config:
# command_background=1

# Adjust defaults as needed in /etc/conf.d/akkoma;
# no need to directly edit the service file
command_user="${command_user:-akkoma:akkoma}"
directory="${directory:-/var/lib/akkoma/akkoma}"
akkoma_port="${akkoma_port:-4000}"
# whether to allow connecting a remote elixir shell to the running Akkoma instance
akkoma_console=${akkoma_console:-NO}

output_log="${output_log:-/var/log/akkoma}"
error_log="${error_log:-/var/log/akkoma}"

# 0 means unlimited restarts
respawn_max="${respawn_max:-0}"
respawn_delay="${respawn_delay:-5}"
# define respawn period to only count crashes within a
# sliding time window towards respawn_max, e.g.:
# respawn_period=2850

healthcheck_delay="${healthcheck_delay:-60}"
healthcheck_timer="${healthcheck_timer:-30}"

MIX_ENV=prod
ERL_EPMD_ADDRESS="${ERL_EPMD_ADDRESS:-127.0.0.1}"
ERL_AFLAGS="${ERL_AFLAGS:-+sbwt none +sbwtdcpu none +sbwtdio none}"
supervise_daemon_args="${supervise_daemon_args} --env MIX_ENV=${MIX_ENV}"
supervise_daemon_args="${supervise_daemon_args} --env ERL_EPMD_ADDRESS=${ERL_EPMD_ADDRESS}"
supervise_daemon_args="${supervise_daemon_args} --env ERL_AFLAGS='${ERL_AFLAGS}'"

# put akkoma_console=YES in /etc/conf.d/akkoma if you want to be able to
# connect to akkoma via an elixir console
if yesno "${akkoma_console}"; then
	command=elixir
	command_args="--name akkoma@127.0.0.1 --erl '-kernel inet_dist_listen_min 9001 inet_dist_listen_max 9001 inet_dist_use_interface {127,0,0,1}' -S mix phx.server"
@@ -31,13 +51,24 @@ else
	command_args="phx.server"
fi

export MIX_ENV=prod
export ERL_EPMD_ADDRESS=127.0.0.1

depend() {
	need nginx postgresql
}

start_pre() {
	# Ensure logfile ownership and perms are alright
	checkpath --file --owner "$command_user" "$output_log" "$error_log" \
		|| eerror "Logfile(s) not owned by $command_user, or not a file!"
	checkpath --writable "$output_log" "$error_log" \
		|| eerror "Logfile(s) not writable!"

	# If a recompile is needed perform it with lowest prio
	# (delaying the actual start) to avoid hogging too much
	# CPU from other services
	cd "$directory"
	doas -u "${command_user%%:*}" env MIX_ENV="$MIX_ENV" nice -n 19 "$command" compile
}

healthcheck() {
	# put akkoma_health=YES in /etc/conf.d/akkoma if you want healthchecking
	# and make sure you have curl installed
@@ -12,8 +12,6 @@ defmodule Pleroma.Object.Containment do
  spoofing, therefore removal of object containment functions is NOT recommended.
  """

  alias Pleroma.Web.ActivityPub.Transmogrifier

  def get_actor(%{"actor" => actor}) when is_binary(actor) do
    actor
  end
@@ -50,16 +48,39 @@ def get_object(_) do
  defp compare_uris(%URI{host: host} = _id_uri, %URI{host: host} = _other_uri), do: :ok
  defp compare_uris(_id_uri, _other_uri), do: :error

  defp compare_uris_exact(uri, uri), do: :ok
  defp uri_strip_slash(%URI{path: path} = uri) when is_binary(path),
    do: %{uri | path: String.replace_suffix(path, "/", "")}

  defp compare_uris_exact(%URI{} = id, %URI{} = other),
    do: compare_uris_exact(URI.to_string(id), URI.to_string(other))

  defp uri_strip_slash(uri), do: uri

  defp compare_uris_exact(id_uri, other_uri)
       when is_binary(id_uri) and is_binary(other_uri) do
    norm_id = String.replace_suffix(id_uri, "/", "")
    norm_other = String.replace_suffix(other_uri, "/", "")
    if norm_id == norm_other, do: :ok, else: :error
  end

  # domain names are case-insensitive per spec (other parts of URIs aren’t necessarily)
  defp uri_normalise_host(%URI{host: host} = uri) when is_binary(host),
    do: %{uri | host: String.downcase(host, :ascii)}

  defp uri_normalise_host(uri), do: uri

  defp compare_uri_identities(uri, uri), do: :ok

  defp compare_uri_identities(id_uri, other_uri) when is_binary(id_uri) and is_binary(other_uri),
    do: compare_uri_identities(URI.parse(id_uri), URI.parse(other_uri))

  defp compare_uri_identities(%URI{} = id, %URI{} = other) do
    normid =
      %{id | fragment: nil}
      |> uri_strip_slash()
      |> uri_normalise_host()

    normother =
      %{other | fragment: nil}
      |> uri_strip_slash()
      |> uri_normalise_host()

    # Conversion back to binary avoids issues from non-normalised deprecated authority field
    if URI.to_string(normid) == URI.to_string(normother) do
      :ok
    else
      :error
    end
  end

  @doc """
@@ -93,21 +114,13 @@ def contain_origin(id, %{"attributedTo" => actor} = params),
  def contain_origin(_id, _data), do: :ok

  @doc """
  Check whether the fetch URL (after redirects) exactly (sans trailing slash) matches either
  the canonical ActivityPub id or the object's url field (for display URLs from *key and Mastodon)
  Check whether the fetch URL (after redirects) is the
  same location the canonical ActivityPub id points to.

  Since this is meant to be used for fetches, anonymous or transient objects are not accepted here.
  """
  def contain_id_to_fetch(url, %{"id" => id} = data) when is_binary(id) do
    with {:id, :error} <- {:id, compare_uris_exact(id, url)},
         # "url" can be a "Link" object and this is checked before full normalisation
         display_url <- Transmogrifier.fix_url(data)["url"],
         true <- display_url != nil do
      compare_uris_exact(display_url, url)
    else
      {:id, :ok} -> :ok
      _ -> :error
    end
  end

  def contain_id_to_fetch(url, %{"id" => id}) when is_binary(id) do
    compare_uri_identities(url, id)
  end

  def contain_id_to_fetch(_url, _data), do: :error
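The net effect of the two containment hunks above: the old strict exact match (with a display-URL fallback) is replaced by a single identity comparison that ignores fragments, trailing slashes, and host casing. The following is a standalone sketch of those normalisation rules using only public `URI` and `String` functions; the module name is illustrative and not part of this changeset:

```elixir
defmodule UriIdentitySketch do
  # Mirrors the diff's normalisation: drop the fragment, strip one trailing
  # slash from the path, and lowercase the host (domains are case-insensitive).
  def same_identity?(a, b) when is_binary(a) and is_binary(b) do
    normalise(URI.parse(a)) == normalise(URI.parse(b))
  end

  defp normalise(%URI{} = uri) do
    uri = %{uri | fragment: nil}

    uri =
      if is_binary(uri.path),
        do: %{uri | path: String.replace_suffix(uri.path, "/", "")},
        else: uri

    uri =
      if is_binary(uri.host),
        do: %{uri | host: String.downcase(uri.host, :ascii)},
        else: uri

    # Compare as strings to sidestep the deprecated authority field
    URI.to_string(uri)
  end
end

# UriIdentitySketch.same_identity?(
#   "http://EXAMPLE.com/users/capybara#main-key",
#   "http://example.com/users/capybara/"
# )
# # => true
```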
@@ -116,7 +116,7 @@ defp reinject_object(%Object{} = object, new_data) do
  @doc "Assumes object already is in our database and refetches from remote to update (e.g. for polls)"
  def refetch_object(%Object{data: %{"id" => id}} = object) do
    with {:local, false} <- {:local, Object.local?(object)},
         {:ok, new_data} <- fetch_and_contain_remote_object_from_id(id),
         {:ok, new_data} <- fetch_and_contain_remote_object_from_id(id, true),
         {:id, true} <- {:id, new_data["id"] == id},
         {:ok, object} <- reinject_object(object, new_data) do
      {:ok, object}
@@ -253,14 +253,17 @@ defp maybe_date_fetch(headers, date) do
    end
  end

  @doc "Fetches arbitrary remote object and performs basic safety and authenticity checks"
  def fetch_and_contain_remote_object_from_id(id)
  @doc """
  Fetches arbitrary remote object and performs basic safety and authenticity checks.
  When the fetch URL is known to already be a canonical AP id, checks are stricter.
  """
  def fetch_and_contain_remote_object_from_id(id, is_ap_id \\ false)

  def fetch_and_contain_remote_object_from_id(%{"id" => id}),
    do: fetch_and_contain_remote_object_from_id(id)
  def fetch_and_contain_remote_object_from_id(%{"id" => id}, is_ap_id),
    do: fetch_and_contain_remote_object_from_id(id, is_ap_id)

  def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
    Logger.debug("Fetching object #{id} via AP")
  def fetch_and_contain_remote_object_from_id(id, is_ap_id) when is_binary(id) do
    Logger.debug("Fetching object #{id} via AP [ap_id=#{is_ap_id}]")

    with {:valid_uri_scheme, true} <- {:valid_uri_scheme, String.starts_with?(id, "http")},
         %URI{} = uri <- URI.parse(id),
@@ -270,18 +273,31 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
         {:mrf_accept_check, Pleroma.Web.ActivityPub.MRF.SimplePolicy.check_accept(uri)},
         {:local_fetch, :ok} <- {:local_fetch, Containment.contain_local_fetch(id)},
         {:ok, final_id, body} <- get_object(id),
         # a canonical ID shouldn't be a redirect
         true <- !is_ap_id || final_id == id,
         {:ok, data} <- safe_json_decode(body),
         {_, :ok} <- {:strict_id, Containment.contain_id_to_fetch(final_id, data)},
         {_, :ok} <- {:containment, Containment.contain_origin(final_id, data)} do
         {_, :ok} <- {:containment, Containment.contain_origin(final_id, data)},
         {_, _, :ok} <- {:strict_id, data["id"], Containment.contain_id_to_fetch(final_id, data)} do
      unless Instances.reachable?(final_id) do
        Instances.set_reachable(final_id)
      end

      {:ok, data}
    else
      {:strict_id, _} = e ->
        log_fetch_error(id, e)
        {:error, :id_mismatch}

      # E.g. Mastodon and *key serve the AP object directly under their display URLs without
      # redirecting to their canonical location first, thus ids will expectedly differ.
      # Similarly, keys either use a fragment ID and are subobjects, or have a distinct ID
      # but for compatibility are still a subobject presenting their owning actor's ID at the toplevel.
      # Refetching _once_ from the listed id should yield a strict match afterwards.
      {:strict_id, ap_id, _} = e ->
        case is_ap_id do
          false ->
            fetch_and_contain_remote_object_from_id(ap_id, true)

          true ->
            log_fetch_error(id, e)
            {:error, :id_mismatch}
        end

      {:mrf_reject_check, _} = e ->
        log_fetch_error(id, e)
@@ -301,7 +317,7 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do

      {:containment, reason} ->
        log_fetch_error(id, reason)
        {:error, reason}
        {:error, {:containment, reason}}

      {:error, e} ->
        {:error, e}
@@ -311,25 +327,13 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
    end
  end

  def fetch_and_contain_remote_object_from_id(_id),
  def fetch_and_contain_remote_object_from_id(_id, _is_ap_id),
    do: {:error, :invalid_id}

  defp check_crossdomain_redirect(final_host, original_url)

  # HOPEFULLY TEMPORARY
  # Basically none of our Tesla mocks in tests set the (supposed to
  # exist for Tesla proper) url parameter for their responses
  # causing almost every fetch in test to fail otherwise
  if @mix_env == :test do
    defp check_crossdomain_redirect(nil, _) do
      {:cross_domain_redirect, false}
    end
  end

  defp check_crossdomain_redirect(final_host, original_url) do
    {:cross_domain_redirect, final_host != URI.parse(original_url).host}
  end

  if @mix_env == :test do
    defp get_final_id(nil, initial_url), do: initial_url
    defp get_final_id("", initial_url), do: initial_url

@@ -355,10 +359,6 @@ def get_object(id) do
    with {:ok, %{body: body, status: code, headers: headers, url: final_url}}
         when code in 200..299 <-
           HTTP.Backoff.get(id, headers),
         remote_host <-
           URI.parse(final_url).host,
         {:cross_domain_redirect, false} <-
           check_crossdomain_redirect(remote_host, id),
         {:has_content_type, {_, content_type}} <-
           {:has_content_type, List.keyfind(headers, "content-type", 0)},
         {:parse_content_type, {:ok, "application", subtype, type_params}} <-
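Taken together, the fetcher hunks implement a retry-once scheme: a fetch from a display URL whose payload lists a different canonical id is retried once from that listed id with `is_ap_id: true`, and any mismatch on the second attempt is fatal. A stripped-down sketch of that control flow with a stubbed fetch function; the module and callback are illustrative, not from this changeset:

```elixir
defmodule FetchRetrySketch do
  # fetch_fun.(url) stands in for the HTTP fetch + JSON decode and is assumed
  # to return {:ok, data} where data["id"] is the object's canonical AP id.
  def fetch_and_contain(url, fetch_fun, is_ap_id \\ false) do
    with {:ok, data} <- fetch_fun.(url) do
      cond do
        # strict check passes: the fetch URL is the canonical id
        data["id"] == url ->
          {:ok, data}

        # first attempt came from a display URL: retry once from the listed id
        not is_ap_id ->
          fetch_and_contain(data["id"], fetch_fun, true)

        # second attempt still disagrees: treat as spoofed
        true ->
          {:error, :id_mismatch}
      end
    end
  end
end
```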
@@ -1624,8 +1624,12 @@ def blocks_user?(%User{} = user, %User{} = target) do

  def blocks_user?(_, _), do: false

  def blocks_domain?(%User{} = user, %User{} = target) do
    %{host: host} = URI.parse(target.ap_id)
  def blocks_domain?(%User{} = user, %User{ap_id: ap_id}) do
    blocks_domain?(user, ap_id)
  end

  def blocks_domain?(%User{} = user, url) when is_binary(url) do
    %{host: host} = URI.parse(url)
    Enum.member?(user.domain_blocks, host)
  end

  # TODO: functionality should probably be changed such that subdomains block as well,
  # but as it stands, this just hecks up the relationships endpoint
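The new binary-accepting `blocks_domain?/2` clause lets callers check a raw ap_id against a user's domain blocks without first loading a full `%User{}` for the target; the emoji-reaction filtering further down relies on exactly this. A hypothetical call, assuming only the `domain_blocks` field matters for the check:

```elixir
# Hypothetical data, illustrating the new binary-accepting clause:
viewer = %Pleroma.User{domain_blocks: ["blocked.com"]}

Pleroma.User.blocks_domain?(viewer, "https://blocked.com/@someone")
# => true
Pleroma.User.blocks_domain?(viewer, "https://example.com/@friend")
# => false
```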
@@ -233,7 +233,7 @@ def config_descriptions(policies) do
    if function_exported?(policy, :config_description, 0) do
      description =
        @default_description
        |> Map.merge(policy.config_description)
        |> Map.merge(policy.config_description())
        |> Map.put(:group, :pleroma)
        |> Map.put(:tab, :mrf)
        |> Map.put(:type, :group)
@@ -951,7 +951,7 @@ defp build_emoji_tag({name, url}) do
      "name" => ":" <> name <> ":",
      "type" => "Emoji",
      "updated" => "1970-01-01T00:00:00Z",
      "id" => url
      "id" => nil
    }
  end
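This one-line change is what backs the changelog entry about federating emoji as anonymous objects: with `"id" => nil`, the Emoji tag no longer claims the icon URL as its object id, which stricter servers accept where they previously rejected e.g. remote emoji reactions. Based on the updated tests further down, the resulting tag looks like:

```elixir
# Shape of build_emoji_tag({"firefox", "https://example.org/firefox.png"})
# after this change (taken from the updated take_emoji_tags/1 test):
%{
  "icon" => %{"type" => "Image", "url" => "https://example.org/firefox.png"},
  "id" => nil,
  "name" => ":firefox:",
  "type" => "Emoji",
  "updated" => "1970-01-01T00:00:00Z"
}
```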
@@ -18,6 +18,8 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
  @timeout :timer.seconds(60)
  # Hibernate every X messages
  @hibernate_every 100
  # Tune garbage collection for the long-lived websocket process
  @fullsweep_after 20

  def init(%{qs: qs} = req, state) do
    with params <- Enum.into(:cow_qs.parse_qs(qs), %{}),

@@ -59,6 +61,10 @@ def websocket_init(state) do
      "#{__MODULE__} accepted websocket connection for user #{(state.user || %{id: "anonymous"}).id}, topic #{state.topic}"
    )

    # The process is long-lived and can sometimes accumulate stale data in such a way that it's
    # not freed by young garbage cycles, thus make full collection sweeps more frequent
    :erlang.process_flag(:fullsweep_after, @fullsweep_after)

    Streamer.add_socket(state.topic, state.oauth_token)
    {:ok, %{state | timer: timer()}}
  end
@@ -52,6 +52,14 @@ defp filter_allowed_user_by_ap_id(ap_ids, excluded_ap_ids) do
    end)
  end

  defp filter_allowed_users_by_domain(ap_ids, %User{} = for_user) do
    Enum.reject(ap_ids, fn ap_id ->
      User.blocks_domain?(for_user, ap_id)
    end)
  end

  defp filter_allowed_users_by_domain(ap_ids, nil), do: ap_ids

  def filter_allowed_users(reactions, user, with_muted) do
    exclude_ap_ids =
      if is_nil(user) do

@@ -62,7 +70,10 @@ def filter_allowed_users(reactions, user, with_muted) do
      end

    filter_emoji = fn emoji, users, url ->
      case filter_allowed_user_by_ap_id(users, exclude_ap_ids) do
      users
      |> filter_allowed_user_by_ap_id(exclude_ap_ids)
      |> filter_allowed_users_by_domain(user)
      |> case do
        [] -> nil
        users -> {emoji, users, url}
      end
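The refactor turns the reaction filter into a single pipeline: ap_ids are first filtered against individual blocks/mutes, then against the viewer's domain blocks, and an emoji whose user list ends up empty is dropped entirely. A simplified sketch of that data flow, with hypothetical stand-ins for the two filter functions:

```elixir
# Hypothetical stand-ins for filter_allowed_user_by_ap_id/2 and
# filter_allowed_users_by_domain/2, showing the pipeline shape:
exclude_ap_ids = []
blocked_domains = ["blocked.com"]

filter_emoji = fn emoji, users, url ->
  users
  |> Enum.reject(&(&1 in exclude_ap_ids))
  |> Enum.reject(&(URI.parse(&1).host in blocked_domains))
  |> case do
    [] -> nil
    users -> {emoji, users, url}
  end
end

filter_emoji.("😈", ["https://blocked.com/@blocked"], nil)
# => nil (the whole reaction disappears for this viewer)
```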
@@ -9,13 +9,13 @@
  xmlns:ostatus="http://ostatus.org/schema/1.0"
  xmlns:statusnet="http://status.net/schema/api/1/">

  <id><%= '#{url(~p"/tags/#{@tag}")}.rss' %></id>
  <id><%= "#{url(~p"/tags/#{@tag}")}.rss" %></id>
  <title>#<%= @tag %></title>

  <subtitle><%= Gettext.dpgettext("static_pages", "tag feed description", "These are public toots tagged with #%{tag}. You can interact with them if you have an account anywhere in the fediverse.", tag: @tag) %></subtitle>
  <logo><%= feed_logo() %></logo>
  <updated><%= most_recent_update(@activities) %></updated>
  <link rel="self" href="<%= '#{url(~p"/tags/#{@tag}")}.atom' %>" type="application/atom+xml"/>
  <link rel="self" href="<%= "#{url(~p"/tags/#{@tag}")}.atom" %>" type="application/atom+xml"/>
  <%= for activity <- @activities do %>
    <%= render @view_module, "_tag_activity.atom", Map.merge(assigns, prepare_activity(activity, actor: true)) %>
  <% end %>
@@ -5,7 +5,7 @@

  <title>#<%= @tag %></title>
  <description><%= Gettext.dpgettext("static_pages", "tag feed description", "These are public toots tagged with #%{tag}. You can interact with them if you have an account anywhere in the fediverse.", tag: @tag) %></description>
  <link><%= '#{url(~p"/tags/#{@tag}")}.rss' %></link>
  <link><%= "#{url(~p"/tags/#{@tag}")}.rss" %></link>
  <webfeeds:logo><%= feed_logo() %></webfeeds:logo>
  <webfeeds:accentColor>2b90d9</webfeeds:accentColor>
  <%= for activity <- @activities do %>
@@ -10,12 +10,12 @@
  <title><%= @user.nickname <> "'s timeline" %></title>
  <updated><%= most_recent_update(@activities, @user) %></updated>
  <logo><%= logo(@user) %></logo>
  <link rel="self" href="<%= '#{url(~p"/users/#{@user.nickname}/feed")}.atom' %>" type="application/atom+xml"/>
  <link rel="self" href="<%= "#{url(~p"/users/#{@user.nickname}/feed")}.atom" %>" type="application/atom+xml"/>

  <%= render @view_module, "_author.atom", assigns %>

  <%= if last_activity(@activities) do %>
    <link rel="next" href="<%= '#{url(~p"/users/#{@user.nickname}/feed")}.atom?max_id=#{last_activity(@activities).id}' %>" type="application/atom+xml"/>
    <link rel="next" href="<%= "#{url(~p"/users/#{@user.nickname}/feed")}.atom?max_id=#{last_activity(@activities).id}" %>" type="application/atom+xml"/>
  <% end %>

  <%= for activity <- @activities do %>
@@ -5,12 +5,12 @@
  <title><%= @user.nickname <> "'s timeline" %></title>
  <updated><%= most_recent_update(@activities, @user) %></updated>
  <image><%= logo(@user) %></image>
  <link><%= '#{url(~p"/users/#{@user.nickname}/feed")}.rss' %></link>
  <link><%= "#{url(~p"/users/#{@user.nickname}/feed")}.rss" %></link>

  <%= render @view_module, "_author.rss", assigns %>

  <%= if last_activity(@activities) do %>
    <link rel="next"><%= '#{url(~p"/users/#{@user.nickname}/feed")}.rss?max_id=#{last_activity(@activities).id}' %></link>
    <link rel="next"><%= "#{url(~p"/users/#{@user.nickname}/feed")}.rss?max_id=#{last_activity(@activities).id}" %></link>
  <% end %>

  <%= for activity <- @activities do %>
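All four feed-template hunks make the same substitution: interpolation inside single quotes becomes interpolation inside double quotes. In Elixir, `'...'` builds an Erlang charlist, and single-quoted charlist literals are deprecated in recent Elixir releases in favour of the `~c` sigil, so interpolating URLs into them triggers deprecation warnings. A minimal illustration:

```elixir
tag = "cats"

'#{tag}.rss'  # charlist ~c"cats.rss"; single-quoted literals warn on recent Elixir
"#{tag}.rss"  # binary "cats.rss"; what the templates now emit
```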
mix.lock
@@ -18,7 +18,7 @@
"cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"},
"cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
"cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"},
"credo": {:hex, :credo, "1.7.6", "b8f14011a5443f2839b04def0b252300842ce7388f3af177157c86da18dfbeea", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "146f347fb9f8cbc5f7e39e3f22f70acbef51d441baa6d10169dd604bfbc55296"},
"credo": {:hex, :credo, "1.7.7", "771445037228f763f9b2afd612b6aa2fd8e28432a95dbbc60d8e03ce71ba4446", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8bc87496c9aaacdc3f90f01b7b0582467b69b4bd2441fe8aae3109d843cc2f2e"},
"custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm", "8df019facc5ec9603e94f7270f1ac73ddf339f56ade76a721eaa57c1493ba463"},
"db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"},
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
@@ -94,7 +94,7 @@
"phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"},
"phoenix_swoosh": {:hex, :phoenix_swoosh, "1.2.1", "b74ccaa8046fbc388a62134360ee7d9742d5a8ae74063f34eb050279de7a99e1", [:mix], [{:finch, "~> 0.8", [hex: :finch, repo: "hexpm", optional: true]}, {:hackney, "~> 1.10", [hex: :hackney, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_view, "~> 1.0 or ~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:swoosh, "~> 1.5", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm", "4000eeba3f9d7d1a6bf56d2bd56733d5cadf41a7f0d8ffe5bb67e7d667e204a2"},
"phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"},
"phoenix_view": {:hex, :phoenix_view, "2.0.3", "4d32c4817fce933693741deeb99ef1392619f942633dde834a5163124813aad3", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "cd34049af41be2c627df99cd4eaa71fc52a328c0c3d8e7d4aa28f880c30e7f64"},
"phoenix_view": {:hex, :phoenix_view, "2.0.4", "b45c9d9cf15b3a1af5fb555c674b525391b6a1fe975f040fb4d913397b31abf4", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "4e992022ce14f31fe57335db27a28154afcc94e9983266835bb3040243eb620b"},
"plug": {:hex, :plug, "1.16.0", "1d07d50cb9bb05097fdf187b31cf087c7297aafc3fed8299aac79c128a707e47", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cbf53aa1f5c4d758a7559c0bd6d59e286c2be0c6a1fac8cc3eee2f638243b93e"},
"plug_cowboy": {:hex, :plug_cowboy, "2.7.1", "87677ffe3b765bc96a89be7960f81703223fe2e21efa42c125fcd0127dd9d6b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "02dbd5f9ab571b864ae39418db7811618506256f6d13b4a45037e5fe78dc5de3"},
"plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"},
@@ -5,8 +5,8 @@ msgstr ""
"POT-Creation-Date: 2022-07-28 09:35+0000\n"
"PO-Revision-Date: 2023-08-04 14:19+0000\n"
"Last-Translator: Anonymous <noreply@weblate.org>\n"
"Language-Team: Catalan <http://translate.akkoma.dev/projects/akkoma/"
"akkoma-backend-config-descriptions/ca/>\n"
"Language-Team: Catalan <http://translate.akkoma.dev/projects/akkoma/akkoma-"
"backend-config-descriptions/ca/>\n"
"Language: ca\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"

(1 file diff suppressed because it is too large)
@@ -5,8 +5,8 @@ msgstr ""
"POT-Creation-Date: 2022-08-06 22:23+0000\n"
"PO-Revision-Date: 2023-08-04 14:19+0000\n"
"Last-Translator: Anonymous <noreply@weblate.org>\n"
"Language-Team: Spanish <http://translate.akkoma.dev/projects/akkoma/"
"akkoma-backend-config-descriptions/es/>\n"
"Language-Team: Spanish <http://translate.akkoma.dev/projects/akkoma/akkoma-"
"backend-config-descriptions/es/>\n"
"Language: es\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"

(5 file diffs suppressed because they are too large)
@@ -5,8 +5,8 @@ msgstr ""
"POT-Creation-Date: 2022-08-06 21:54+0000\n"
"PO-Revision-Date: 2023-08-04 14:26+0000\n"
"Last-Translator: Anonymous <noreply@weblate.org>\n"
"Language-Team: Dutch <http://translate.akkoma.dev/projects/akkoma/"
"akkoma-backend-config-descriptions/nl/>\n"
"Language-Team: Dutch <http://translate.akkoma.dev/projects/akkoma/akkoma-"
"backend-config-descriptions/nl/>\n"
"Language: nl\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"

(4 file diffs suppressed because they are too large)
@@ -5,8 +5,8 @@ msgstr ""
"POT-Creation-Date: 2023-07-07 18:47+0000\n"
"PO-Revision-Date: 2023-08-04 14:19+0000\n"
"Last-Translator: Anonymous <noreply@weblate.org>\n"
"Language-Team: Thai <http://translate.akkoma.dev/projects/akkoma/"
"akkoma-backend-config-descriptions/th/>\n"
"Language-Team: Thai <http://translate.akkoma.dev/projects/akkoma/akkoma-"
"backend-config-descriptions/th/>\n"
"Language: th\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"

(4 file diffs suppressed because they are too large)
@@ -1,12 +1,10 @@
defmodule Pleroma.Repo.Migrations.AddContextIndex do
  use Ecto.Migration
  @disable_ddl_transaction true

  def change do
    create(
      index(:activities, ["(data->>'type')", "(data->>'context')"],
        name: :activities_context_index,
        concurrently: true
        name: :activities_context_index
      )
    )
  end
@@ -1,11 +1,9 @@
defmodule Pleroma.Repo.Migrations.AddFTSIndexToActivities do
  use Ecto.Migration
  @disable_ddl_transaction true

  def change do
    create(
      index(:activities, ["(to_tsvector('english', data->'object'->>'content'))"],
        concurrently: true,
        using: :gin,
        name: :activities_fts
      )
@@ -1,12 +1,9 @@
defmodule Pleroma.Repo.Migrations.AddTagIndex do
  use Ecto.Migration

  @disable_ddl_transaction true

  def change do
    create(
      index(:activities, ["(data #> '{\"object\",\"tag\"}')"],
        concurrently: true,
        using: :gin,
        name: :activities_tags
      )
@@ -1,8 +1,6 @@
defmodule Pleroma.Repo.Migrations.AddSecondObjectIndexToActivty do
  use Ecto.Migration

  @disable_ddl_transaction true

  def change do
    drop_if_exists(
      index(:activities, ["(data->'object'->>'id')", "(data->>'type')"],

@@ -12,8 +10,7 @@ def change do

    create(
      index(:activities, ["(coalesce(data->'object'->>'id', data->>'object'))"],
        name: :activities_create_objects_index,
        concurrently: true
        name: :activities_create_objects_index
      )
    )
  end
@@ -1,14 +1,7 @@
defmodule Pleroma.Repo.Migrations.AddObjectActorIndex do
  use Ecto.Migration

  @disable_ddl_transaction true

  def change do
    create(
      index(:objects, ["(data->>'actor')", "(data->>'type')"],
        concurrently: true,
        name: :objects_actor_type
      )
    )
    create(index(:objects, ["(data->>'actor')", "(data->>'type')"], name: :objects_actor_type))
  end
end
@@ -1,14 +1,12 @@
defmodule Pleroma.Repo.Migrations.AddActorToActivity do
  use Ecto.Migration

  @disable_ddl_transaction true

  def up do
    alter table(:activities) do
      add(:actor, :string)
    end

    create(index(:activities, [:actor, "id DESC NULLS LAST"], concurrently: true))
    create(index(:activities, [:actor, "id DESC NULLS LAST"]))
  end

  def down do
@@ -1,8 +1,7 @@
defmodule Pleroma.Repo.Migrations.AddSortIndexToActivities do
  use Ecto.Migration
  @disable_ddl_transaction true

  def change do
    create(index(:activities, ["id desc nulls last"], concurrently: true))
    create(index(:activities, ["id desc nulls last"]))
  end
end
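The migration hunks in this stretch all follow one pattern: `concurrently: true` is dropped from `create(index(...))`, and the matching `@disable_ddl_transaction true` goes with it. The two travel together because Postgres refuses `CREATE INDEX CONCURRENTLY` inside a transaction, so Ecto has to be told to skip the migration's DDL transaction; once the index is created normally, neither is needed. A sketch of the two variants (illustrative module name):

```elixir
defmodule MyApp.Repo.Migrations.ExampleSortIndex do
  use Ecto.Migration

  # Required only for the concurrent variant below, since Postgres
  # rejects CREATE INDEX CONCURRENTLY inside a transaction:
  # @disable_ddl_transaction true

  def change do
    # Concurrent (non-blocking) variant, needs the attribute above:
    # create(index(:activities, ["id desc nulls last"], concurrently: true))

    # Plain variant, as these migrations now use; runs inside the
    # migration transaction and blocks writes while it builds:
    create(index(:activities, ["id desc nulls last"]))
  end
end
```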
@@ -1,9 +1,8 @@
defmodule Pleroma.Repo.Migrations.AddFollowerAddressIndexToUsers do
  use Ecto.Migration

  @disable_ddl_transaction true
  def change do
    create(index(:users, [:follower_address], concurrently: true))
    create(index(:users, [:following], concurrently: true, using: :gin))
    create(index(:users, [:follower_address]))
    create(index(:users, [:following], using: :gin))
  end
end
@@ -1,9 +1,8 @@
defmodule Pleroma.Repo.Migrations.ModifyActivityIndex do
  use Ecto.Migration
  @disable_ddl_transaction true

  def change do
    create(index(:activities, ["id desc nulls last", "local"], concurrently: true))
    create(index(:activities, ["id desc nulls last", "local"]))
    drop_if_exists(index(:activities, ["id desc nulls last"]))
  end
end
@@ -1,13 +1,7 @@
defmodule Pleroma.Repo.Migrations.CreateApidHostExtractionIndex do
  use Ecto.Migration
  @disable_ddl_transaction true

  def change do
    create(
      index(:activities, ["(split_part(actor, '/', 3))"],
        concurrently: true,
        name: :activities_hosts
      )
    )
    create(index(:activities, ["(split_part(actor, '/', 3))"], name: :activities_hosts))
  end
end
@@ -1,13 +1,7 @@
defmodule Pleroma.Repo.Migrations.CreateActivitiesInReplyToIndex do
  use Ecto.Migration
  @disable_ddl_transaction true

  def change do
    create(
      index(:activities, ["(data->'object'->>'inReplyTo')"],
        concurrently: true,
        name: :activities_in_reply_to
      )
    )
    create(index(:activities, ["(data->'object'->>'inReplyTo')"], name: :activities_in_reply_to))
  end
end
@@ -1,6 +1,5 @@
defmodule Pleroma.Repo.Migrations.AddVisibilityFunction do
  use Ecto.Migration
  @disable_ddl_transaction true

  def up do
    definition = """

@@ -30,8 +29,7 @@ def up do

    create(
      index(:activities, ["activity_visibility(actor, recipients, data)"],
        name: :activities_visibility_index,
        concurrently: true
        name: :activities_visibility_index
      )
    )
  end
@@ -1,11 +1,9 @@
defmodule Pleroma.Repo.Migrations.AddActivitiesLikesIndex do
  use Ecto.Migration
  @disable_ddl_transaction true

  def change do
    create(
      index(:activities, ["((data #> '{\"object\",\"likes\"}'))"],
        concurrently: true,
        name: :activities_likes,
        using: :gin
      )
@@ -1,6 +1,5 @@
defmodule Pleroma.Repo.Migrations.AddCorrectDMIndex do
  use Ecto.Migration
  @disable_ddl_transaction true

  def up do
    drop_if_exists(

@@ -12,7 +11,6 @@ def up do
    create(
      index(:activities, ["activity_visibility(actor, recipients, data)", "id DESC NULLS LAST"],
        name: :activities_visibility_index,
        concurrently: true,
        where: "data->>'type' = 'Create'"
      )
    )

@@ -22,7 +20,6 @@ def down do
    drop_if_exists(
      index(:activities, ["activity_visibility(actor, recipients, data)", "id DESC"],
        name: :activities_visibility_index,
        concurrently: true,
        where: "data->>'type' = 'Create'"
      )
    )
@@ -1,13 +1,11 @@
defmodule Pleroma.Repo.Migrations.AddIndexOnSubscribers do
  use Ecto.Migration

  @disable_ddl_transaction true
  def change do
    create(
      index(:users, ["(info->'subscribers')"],
        name: :users_subscribers_index,
        using: :gin,
        concurrently: true
        using: :gin
      )
    )
  end
@@ -1,8 +1,7 @@
defmodule Pleroma.Repo.Migrations.AddFollowingAddressIndexToUser do
  use Ecto.Migration

  @disable_ddl_transaction true
  def change do
    create(index(:users, [:following_address], concurrently: true))
    create(index(:users, [:following_address]))
  end
end
@@ -100,7 +100,7 @@ def update_follower_count(%{id: user_id} = user) do

    "users"
    |> where(id: ^user_id)
    |> join(:inner, [u], s in subquery(follower_count_query))
    |> join(:inner, [u], s in subquery(follower_count_query), on: true)
    |> update([u, s],
      set: [follower_count: s.count]
    )
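The `update_follower_count/1` change adds an explicit `on: true` to the inner join against the subquery, presumably to satisfy newer Ecto, which warns when a non-cross join has no explicit `:on` clause; `on: true` reproduces the old unconditional join behaviour. A minimal sketch of the same construction; the table names and the subquery shape here are illustrative stand-ins, not taken from this changeset:

```elixir
import Ecto.Query

user_id = 1

# A stand-in for the follower_count_query subquery in the diff:
follower_count_query =
  from(f in "following_relationships",
    where: f.following_id == ^user_id,
    select: %{count: count(f.id)}
  )

"users"
|> where(id: ^user_id)
# on: true keeps the join unconditional, matching the old implicit behaviour
# while giving Ecto the explicit :on clause it expects:
|> join(:inner, [u], s in subquery(follower_count_query), on: true)
|> update([u, s], set: [follower_count: s.count])
```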
@@ -9,7 +9,6 @@ defmodule Pleroma.Object.ContainmentTest do
  alias Pleroma.User

  import Pleroma.Factory
  import ExUnit.CaptureLog

  setup_all do
    Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)

@@ -136,23 +135,17 @@ test "contain_id_to_fetch() allows matching IDs" do
      )
  end

  test "contain_id_to_fetch() allows display URLs" do
  test "contain_id_to_fetch() allows fragments and normalises domain casing" do
    data = %{
      "id" => "http://example.com/~alyssa/activities/1234.json",
      "url" => "http://example.com/@alyssa/status/1234"
      "id" => "http://example.com/users/capybara",
      "url" => "http://example.com/@capybara"
    }

    :ok =
      Containment.contain_id_to_fetch(
        "http://example.com/@alyssa/status/1234",
        data
      )

    :ok =
      Containment.contain_id_to_fetch(
        "http://example.com/@alyssa/status/1234/",
        data
      )
    assert :ok ==
             Containment.contain_id_to_fetch(
               "http://EXAMPLE.com/users/capybara#key",
               data
             )
  end

  test "users cannot be collided through fake direction spoofing attempts" do

@@ -164,10 +157,14 @@ test "users cannot be collided through fake direction spoofing attempts" do
      follower_address: User.ap_followers(%User{nickname: "rye@niu.moe"})
    })

    assert capture_log(fn ->
             {:error, _} = User.get_or_fetch_by_ap_id("https://n1u.moe/users/rye")
           end) =~
             "[error] Could not decode user at fetch https://n1u.moe/users/rye"
    # Fetch from an attempted spoof id will succeed, but automatically retrieve
    # the real data from the homeserver instead of naïvely using the spoof
    {:ok, fetched_user} = User.get_or_fetch_by_ap_id("https://n1u.moe/users/rye")

    refute fetched_user.name == "evil rye"
    refute fetched_user.raw_bio == "boooo!"
    assert fetched_user.name == "♡ rye ♡"
    assert fetched_user.nickname == "rye@niu.moe"
  end

  test "contain_origin_from_id() gracefully handles cases where no ID is present" do
@@ -22,6 +22,7 @@ defp spoofed_object_with_ids(
    |> Jason.decode!()
    |> Map.put("id", id)
    |> Map.put("actor", actor_id)
    |> Map.put("attributedTo", actor_id)
    |> Jason.encode!()
  end

@@ -109,7 +110,7 @@ defp spoofed_object_with_ids(
          body: spoofed_object_with_ids("https://patch.cx/objects/spoof_media_redirect1")
        }

      # Spoof: cross-domain redirect with final domain id
      # Spoof: cross-domain redirect with final domain id, but original id actor
      %{method: :get, url: "https://patch.cx/objects/spoof_media_redirect2"} ->
        %Tesla.Env{
          status: 200,

@@ -118,6 +119,19 @@ defp spoofed_object_with_ids(
          body: spoofed_object_with_ids("https://media.patch.cx/objects/spoof_media_redirect2")
        }

      # No-Spoof: cross-domain redirect with id and actor from final domain
      %{method: :get, url: "https://patch.cx/objects/spoof_media_redirect3"} ->
        %Tesla.Env{
          status: 200,
          url: "https://media.patch.cx/objects/spoof_media_redirect3",
          headers: [{"content-type", "application/activity+json"}],
          body:
            spoofed_object_with_ids(
              "https://media.patch.cx/objects/spoof_media_redirect3",
              "https://media.patch.cx/users/rin"
            )
        }

      # No-Spoof: same domain redirect
      %{method: :get, url: "https://patch.cx/objects/spoof_redirect"} ->
        %Tesla.Env{

@@ -252,7 +266,7 @@ test "it does not fetch a spoofed object with wrong content type" do
  end

  test "it does not fetch a spoofed object with id different from URL" do
    assert {:error, :id_mismatch} =
    assert {:error, :not_found} =
             Fetcher.fetch_and_contain_remote_object_from_id(
               "https://patch.cx/media/03ca3c8b4ac3ddd08bf0f84be7885f2f88de0f709112131a22d83650819e36c2.json"
             )

@@ -264,19 +278,29 @@ test "it does not fetch a spoofed object with id different from URL" do
  end

  test "it does not fetch an object via cross-domain redirects (initial id)" do
    assert {:error, {:cross_domain_redirect, true}} =
    assert {:error, {:containment, _}} =
             Fetcher.fetch_and_contain_remote_object_from_id(
               "https://patch.cx/objects/spoof_media_redirect1"
             )
  end

  test "it does not fetch an object via cross-domain redirects (final id)" do
    assert {:error, {:cross_domain_redirect, true}} =
  test "it does not fetch an object via cross-domain redirect if the actor is from the original domain" do
    assert {:error, {:containment, :error}} =
             Fetcher.fetch_and_contain_remote_object_from_id(
               "https://patch.cx/objects/spoof_media_redirect2"
             )
  end

  test "it allows cross-domain redirects when id and author are from final domain" do
    assert {:ok, %{"id" => id, "attributedTo" => author}} =
             Fetcher.fetch_and_contain_remote_object_from_id(
               "https://patch.cx/objects/spoof_media_redirect3"
             )

    assert URI.parse(id).host == "media.patch.cx"
    assert URI.parse(author).host == "media.patch.cx"
  end

  test "it accepts same-domain redirects" do
    assert {:ok, %{"id" => id} = _object} =
             Fetcher.fetch_and_contain_remote_object_from_id(
@@ -1149,6 +1149,18 @@ test "it blocks people" do
    assert User.blocks?(user, blocked_user)
  end

  test "it blocks domains" do
    user = insert(:user)
    blocked_user = insert(:user)

    refute User.blocks_domain?(user, blocked_user)

    url = URI.parse(blocked_user.ap_id)
    {:ok, user} = User.block_domain(user, url.host)

    assert User.blocks_domain?(user, blocked_user)
  end

  test "it unblocks users" do
    user = insert(:user)
    blocked_user = insert(:user)

@@ -1159,6 +1171,17 @@ test "it unblocks users" do
    refute User.blocks?(user, blocked_user)
  end

  test "it unblocks domains" do
    user = insert(:user)
    blocked_user = insert(:user)

    url = URI.parse(blocked_user.ap_id)
    {:ok, user} = User.block_domain(user, url.host)
    {:ok, user} = User.unblock_domain(user, url.host)

    refute User.blocks_domain?(user, blocked_user)
  end

  test "blocks tear down cyclical follow relationships" do
    blocker = insert(:user)
    blocked = insert(:user)
@@ -700,7 +700,7 @@ test "take_emoji_tags/1" do
    assert Transmogrifier.take_emoji_tags(user) == [
             %{
               "icon" => %{"type" => "Image", "url" => "https://example.org/firefox.png"},
               "id" => "https://example.org/firefox.png",
               "id" => nil,
               "name" => ":firefox:",
               "type" => "Emoji",
               "updated" => "1970-01-01T00:00:00Z"
@@ -43,7 +43,7 @@ test "Renders with emoji tags" do
      "tag" => [
        %{
          "icon" => %{"type" => "Image", "url" => "/test"},
          "id" => "/test",
          "id" => nil,
          "name" => ":bib:",
          "type" => "Emoji",
          "updated" => "1970-01-01T00:00:00Z"
@@ -33,6 +33,10 @@ test "has an emoji reaction list" do
    user = insert(:user)
    other_user = insert(:user)
    third_user = insert(:user)
    domain_blocked_user = insert(:user, %{ap_id: "https://blocked.com/@blocked"})

    {:ok, user} = User.block_domain(user, "blocked.com")

    {:ok, activity} = CommonAPI.post(user, %{status: "dae cofe??"})

    {:ok, _} = CommonAPI.react_with_emoji(activity.id, user, "☕")

@@ -40,6 +44,8 @@ test "has an emoji reaction list" do
    {:ok, _} = CommonAPI.react_with_emoji(activity.id, third_user, "🍵")
    {:ok, _} = CommonAPI.react_with_emoji(activity.id, other_user, "☕")
    {:ok, _} = CommonAPI.react_with_emoji(activity.id, other_user, ":dinosaur:")
    # this should not show up when the user is viewing the status
    {:ok, _} = CommonAPI.react_with_emoji(activity.id, domain_blocked_user, "😈")

    activity = Repo.get(Activity, activity.id)
    status = StatusView.render("show.json", activity: activity)

@@ -55,7 +61,8 @@ test "has an emoji reaction list" do
            url: "http://localhost:4001/emoji/dino walking.gif",
            account_ids: [other_user.id, user.id]
          },
          %{name: "🍵", count: 1, me: false, url: nil, account_ids: [third_user.id]}
          %{name: "🍵", count: 1, me: false, url: nil, account_ids: [third_user.id]},
          %{name: "😈", count: 1, me: false, url: nil, account_ids: [domain_blocked_user.id]}
        ]

    status = StatusView.render("show.json", activity: activity, for: user)

@@ -73,6 +80,8 @@ test "has an emoji reaction list" do
          },
          %{name: "🍵", count: 1, me: false, url: nil, account_ids: [third_user.id]}
        ]

    refute Enum.any?(status[:pleroma][:emoji_reactions], fn reaction -> reaction[:name] == "😈" end)
  end

  test "works correctly with badly formatted emojis" do
@@ -263,7 +263,12 @@ def get("https://n1u.moe/users/rye", _, _, @activitypub_accept_headers) do
    {:ok,
     %Tesla.Env{
       status: 200,
       body: File.read!("test/fixtures/tesla_mock/rye.json"),
       body:
         File.read!("test/fixtures/tesla_mock/rye.json")
         |> Jason.decode!()
         |> Map.put("name", "evil rye")
         |> Map.put("bio", "boooo!")
         |> Jason.encode!(),
       headers: activitypub_object_headers()
     }}
  end