Merge branch 'develop' of https://git.pleroma.social/pleroma/pleroma into develop

This commit is contained in:
sadposter 2019-07-09 21:08:12 +01:00
commit dbeb8f43fe
155 changed files with 1971 additions and 477 deletions
CHANGELOG.md
config
docs
lib
mix.exs
mix.lock
priv/templates
test
  activity_test.exs
  bbs
  conversation_test.exs
  fixtures/tesla_mock
    7369654.atom, 7369654.html, 7even.json, admin@mastdon.example.org.json, atarifrosch_feed.xml,
    atarifrosch_webfinger.xml, baptiste.gelex.xyz-article.json, baptiste.gelex.xyz-user.json,
    eal_sakamoto.xml, emelie.atom, emelie.json, framasoft@framatube.org.json, framatube.org_host_meta,
    gerzilla.de_host_meta, gnusocial.de_host_meta, gs.example.org_host_meta, hellpie.json,
    http___gs.example.org_4040_index.php_user_1.xml, http___mastodon.example.org_users_admin_status_1234.json,
    http__gs.example.org_index.php_api_statuses_user_timeline_1.atom.xml, https___info.pleroma.site_actor.json,
    https___mamot.fr_users_Skruyb.atom, https___mastodon.social_users_lambadalambda.atom,
    https___mastodon.social_users_lambadalambda.xml, https___osada.macgirvin.com_channel_mike.json,
    https___pawoo.net_users_aqidaqidaqid.xml, https___pawoo.net_users_pekorino.atom,
    https___pawoo.net_users_pekorino.xml, https___pleroma.soykaf.com_users_lain.xml,
    https___pleroma.soykaf.com_users_lain_feed.atom.xml, https___prismo.news__mxb.json,
    https___shitposter.club_api_statuses_show_2827873.atom.xml,
    https___shitposter.club_api_statuses_user_timeline_1.atom.xml, https___shitposter.club_notice_2827873.html,
    https___shitposter.club_user_1.xml, https___social.heldscal.la_api_statuses_user_timeline_23211.atom.xml,
    https___social.heldscal.la_api_statuses_user_timeline_29191.atom.xml, https___social.heldscal.la_user_23211.xml,
    https___social.heldscal.la_user_29191.xml, https__info.pleroma.site_activity.json,
    https__info.pleroma.site_activity2.json, https__info.pleroma.site_activity3.json,
    https__info.pleroma.site_activity4.json, kaniini@gerzilla.de.json, kaniini@hubzilla.example.org.json,
    lain_squeet.me_webfinger.xml, lucifermysticus.json, macgirvin.com_host_meta, mamot.fr_host_meta,
    mastodon.social_host_meta, mastodon.xyz_host_meta, mayumayu.json, mayumayupost.json,
    mike@osada.macgirvin.com.json, nonexistant@social.heldscal.la.xml, pawoo.net_host_meta,
    peertube.moe-vid.json, pleroma.soykaf.com_host_meta, puckipedia.com.json, rinpatch.json

View file

@ -6,15 +6,23 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [Unreleased]
### Added
- MRF: Support for priming the mediaproxy cache (`Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`)
- Configuration: `federation_incoming_replies_max_depth` option
- Mastodon API: Support for the [`tagged` filter](https://github.com/tootsuite/mastodon/pull/9755) in [`GET /api/v1/accounts/:id/statuses`](https://docs.joinmastodon.org/api/rest/accounts/#get-api-v1-accounts-id-statuses)
- Mastodon API, streaming: Add support for passing the token in the `Sec-WebSocket-Protocol` header
- Mastodon API, extension: Ability to reset avatar, profile banner, and background
- Admin API: Return users' tags when querying reports
- Admin API: Return avatar and display name when querying users
- Admin API: Allow querying user by ID
- Added synchronization of following/followers counters for external users
### Fixed
- Not being able to pin unlisted posts
- Metadata rendering errors resulting in the entire page being inaccessible
- Mastodon API: Handling of search timeouts (`/api/v1/search` and `/api/v2/search`)
- Mastodon API: Embedded relationships not being properly rendered in the Account entity of Status entity
### Changed
- Configuration: OpenGraph and TwitterCard providers enabled by default
- Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
### Changed

View file

@ -218,6 +218,7 @@
  },
  registrations_open: true,
  federating: true,
  federation_incoming_replies_max_depth: 100,
  federation_reachability_timeout_days: 7,
  federation_publisher_modules: [
    Pleroma.Web.ActivityPub.Publisher,
@ -248,7 +249,14 @@
  remote_post_retention_days: 90,
  skip_thread_containment: true,
  limit_to_local_content: :unauthenticated,
  dynamic_configuration: false,
  external_user_synchronization: [
    enabled: false,
    # every 2 hours
    interval: 60 * 60 * 2,
    max_retries: 3,
    limit: 500
  ]

config :pleroma, :markup,
  # XXX - unfortunately, inline images must be enabled by default right now, because
@ -358,7 +366,11 @@
  port: 9999

config :pleroma, Pleroma.Web.Metadata,
  providers: [
    Pleroma.Web.Metadata.Providers.OpenGraph,
    Pleroma.Web.Metadata.Providers.TwitterCard,
    Pleroma.Web.Metadata.Providers.RelMe
  ],
  unfurl_nsfw: false

config :pleroma, :suggestions,

View file

@ -28,7 +28,8 @@
config :pleroma, :instance,
  email: "admin@example.com",
  notify_email: "noreply@example.com",
  skip_thread_containment: false,
  federating: false

# Configure your database
config :pleroma, Pleroma.Repo,
@ -74,6 +75,8 @@
config :pleroma, :database, rum_enabled: rum_enabled
IO.puts("RUM enabled: #{rum_enabled}")

config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock

try do
  import_config "test.secret.exs"
rescue

View file

@ -176,13 +176,13 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- `nickname`
- `status` BOOLEAN field, false value means deactivation.

## `/api/pleroma/admin/users/:nickname_or_id`

### Retrieve the details of a user

- Method: `GET`
- Params:
  - `nickname` or `id`
- Response:
  - On failure: `Not found`
  - On success: JSON of the user

View file

@ -46,6 +46,14 @@ Has these additional fields under the `pleroma` object:
- `settings_store`: A generic map of settings for frontends. Opaque to the backend. Only returned in `verify_credentials` and `update_credentials`
- `chat_token`: The token needed for Pleroma chat. Only returned in `verify_credentials`

### Extensions for PleromaFE

These endpoints are added for controlling PleromaFE features over the Mastodon API (an example call is sketched below the list):

- PATCH `/api/v1/accounts/update_avatar`: Set/clear user avatar image
- PATCH `/api/v1/accounts/update_banner`: Set/clear user banner image
- PATCH `/api/v1/accounts/update_background`: Set/clear user background image
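A hypothetical client call for one of these endpoints, sketched with Tesla. The instance URL, access token, file name, and the use of a multipart upload are illustrative assumptions; only the route and the `img` field name come from this commit.

```elixir
# Illustrative only: upload a new avatar through the PleromaFE extension endpoint.
client =
  Tesla.client([
    {Tesla.Middleware.BaseUrl, "https://pleroma.example"},
    {Tesla.Middleware.Headers, [{"authorization", "Bearer <access token>"}]}
  ])

body =
  Tesla.Multipart.new()
  |> Tesla.Multipart.add_file("avatar.png", name: "img")

{:ok, %Tesla.Env{status: 200} = _response} =
  Tesla.patch(client, "/api/v1/accounts/update_avatar", body)
```

Sending an empty `img` value instead clears the avatar, as the controller code further down in this commit shows.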
### Source
Has these additional fields under the `pleroma` object:

View file

@ -87,6 +87,7 @@ config :pleroma, Pleroma.Emails.Mailer,
* `invites_enabled`: Enable user invitations for admins (depends on `registrations_open: false`).
* `account_activation_required`: Require users to confirm their emails before signing in.
* `federating`: Enable federation with other instances
* `federation_incoming_replies_max_depth`: Max. depth of reply-to activities fetching on incoming federation, to prevent out-of-memory situations while fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.
* `federation_reachability_timeout_days`: Timeout (in days) of each external federation target being unreachable prior to pausing federating to it.
* `allow_relay`: Enable Pleroma's Relay, which makes it possible to follow a whole instance
* `rewrite_policy`: Message Rewrite Policy, either one or a list. Here are the ones available by default:
@ -124,6 +125,12 @@ config :pleroma, Pleroma.Emails.Mailer,
* `skip_thread_containment`: Skip filtering out broken threads. The default is `false`.
* `limit_to_local_content`: Limit unauthenticated users to searching for local statuses and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
* `dynamic_configuration`: Allow transferring configuration to the DB with subsequent customization from the Admin API.
* `external_user_synchronization`: Following/followers counters synchronization settings (see the example below).
  * `enabled`: Enables synchronization
  * `interval`: Interval between synchronization runs.
  * `max_retries`: Max retries per host. After exceeding the limit, the check will not be carried out for users from this host.
  * `limit`: Batch size of users to process at one time.
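A minimal sketch of how these settings could look in an instance config (e.g. `config/prod.secret.exs`); the values mirror the defaults added to `config/config.exs` in this commit and are not recommendations.

```elixir
config :pleroma, :instance,
  # Stop walking reply chains deeper than this on incoming federation.
  federation_incoming_replies_max_depth: 100,
  external_user_synchronization: [
    enabled: true,
    # Pause between synchronization runs.
    interval: 60 * 60 * 2,
    max_retries: 3,
    limit: 500
  ]
```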
## :logger

View file

@ -1,6 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.Ecto do
  @doc """
  Ensures the given repository's migrations path exists on the file system.

View file

@ -151,7 +151,11 @@ def start(_type, _args) do
        start: {Pleroma.Web.Endpoint, :start_link, []},
        type: :supervisor
      },
      %{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}},
      %{
        id: Pleroma.User.SynchronizationWorker,
        start: {Pleroma.User.SynchronizationWorker, :start_link, []}
      }
    ]

    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html

View file

@ -44,44 +44,46 @@ def get_by_ap_id(ap_id) do
    Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id)))
  end

  defp warn_on_no_object_preloaded(ap_id) do
    "Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object"
    |> Logger.debug()

    Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
  end

  def normalize(_, fetch_remote \\ true, options \\ [])

  # If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
  # Use this whenever possible, especially when walking graphs in an O(N) loop!
  def normalize(%Object{} = object, _, _), do: object
  def normalize(%Activity{object: %Object{} = object}, _, _), do: object

  # A hack for fake activities
  def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _, _) do
    %Object{id: "pleroma:fake_object_id", data: data}
  end

  # No preloaded object
  def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote, _) do
    warn_on_no_object_preloaded(ap_id)
    normalize(ap_id, fetch_remote)
  end

  # No preloaded object
  def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote, _) do
    warn_on_no_object_preloaded(ap_id)
    normalize(ap_id, fetch_remote)
  end

  # Old way, try fetching the object through cache.
  def normalize(%{"id" => ap_id}, fetch_remote, _), do: normalize(ap_id, fetch_remote)
  def normalize(ap_id, false, _) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)

  def normalize(ap_id, true, options) when is_binary(ap_id) do
    Fetcher.fetch_object_from_id!(ap_id, options)
  end

  def normalize(_, _, _), do: nil

  # Owned objects can only be mutated by their owner
  def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}),
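The new third argument is threaded through to the remote fetcher, which lets callers carry the current reply depth along. A hypothetical call site (the URL and `depth: 1` are made-up values):

```elixir
# Resolve an AP id to an Object, allowing a remote fetch and passing options through
# to Fetcher.fetch_object_from_id!/2 so the reply-depth limit can be enforced downstream.
object = Pleroma.Object.normalize("https://remote.example/objects/1", true, depth: 1)
```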

View file

@ -22,7 +22,7 @@ defp reinject_object(data) do
  # TODO:
  # This will create a Create activity, which we need internally at the moment.
  def fetch_object_from_id(id, options \\ []) do
    if object = Object.get_cached_by_ap_id(id) do
      {:ok, object}
    else
@ -38,7 +38,7 @@ def fetch_object_from_id(id) do
             "object" => data
           },
           :ok <- Containment.contain_origin(id, params),
           {:ok, activity} <- Transmogrifier.handle_incoming(params, options),
           {:object, _data, %Object{} = object} <-
             {:object, data, Object.normalize(activity, false)} do
        {:ok, object}
@ -63,8 +63,8 @@ def fetch_object_from_id(id) do
    end
  end

  def fetch_object_from_id!(id, options \\ []) do
    with {:ok, object} <- fetch_object_from_id(id, options) do
      object
    else
      _e ->

View file

@ -0,0 +1,24 @@
defmodule Pleroma.ReverseProxy.Client do
@callback request(atom(), String.t(), [tuple()], String.t(), list()) ::
{:ok, pos_integer(), [tuple()], reference() | map()}
| {:ok, pos_integer(), [tuple()]}
| {:ok, reference()}
| {:error, term()}
@callback stream_body(reference() | pid() | map()) ::
{:ok, binary()} | :done | {:error, String.t()}
@callback close(reference() | pid() | map()) :: :ok
def request(method, url, headers, "", opts \\ []) do
client().request(method, url, headers, "", opts)
end
def stream_body(ref), do: client().stream_body(ref)
def close(ref), do: client().close(ref)
defp client do
Pleroma.Config.get([Pleroma.ReverseProxy.Client], :hackney)
end
end
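The test configuration above points `Pleroma.ReverseProxy.Client` at `Pleroma.ReverseProxy.ClientMock`. A minimal hand-written stub that satisfies this behaviour might look like the following; the project's actual test mock may be generated differently (e.g. with Mox).

```elixir
# Hypothetical stub implementing the Pleroma.ReverseProxy.Client behaviour for tests.
defmodule Pleroma.ReverseProxy.ClientStub do
  @behaviour Pleroma.ReverseProxy.Client

  @impl true
  def request(_method, _url, _headers, _body, _opts) do
    # Pretend the upstream answered 200 OK; the reference stands in for a client handle.
    {:ok, 200, [{"content-type", "text/plain"}], make_ref()}
  end

  @impl true
  def stream_body(_ref), do: :done

  @impl true
  def close(_ref), do: :ok
end
```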

View file

@ -146,7 +146,7 @@ defp request(method, url, headers, hackney_opts) do
    Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
    method = method |> String.downcase() |> String.to_existing_atom()

    case client().request(method, url, headers, "", hackney_opts) do
      {:ok, code, headers, client} when code in @valid_resp_codes ->
        {:ok, code, downcase_headers(headers), client}
@ -173,7 +173,7 @@ defp response(conn, client, url, status, headers, opts) do
        halt(conn)

      {:error, :closed, conn} ->
        client().close(client)
        halt(conn)

      {:error, error, conn} ->
@ -181,7 +181,7 @@ defp response(conn, client, url, status, headers, opts) do
          "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
        )

        client().close(client)
        halt(conn)
    end
  end
@ -196,7 +196,7 @@ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
           duration,
           Keyword.get(opts, :max_read_duration, @max_read_duration)
         ),
         {:ok, data} <- client().stream_body(client),
         {:ok, duration} <- increase_read_duration(duration),
         sent_so_far = sent_so_far + byte_size(data),
         :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)),
@ -378,5 +378,5 @@ defp increase_read_duration(_) do
    {:ok, :no_duration_limit, :no_duration_limit}
  end

  defp client, do: Pleroma.ReverseProxy.Client
end

View file

@ -1,51 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.Swift.Keystone do
use HTTPoison.Base
def process_url(url) do
Enum.join(
[Pleroma.Config.get!([Pleroma.Uploaders.Swift, :auth_url]), url],
"/"
)
end
def process_response_body(body) do
body
|> Jason.decode!()
end
def get_token do
settings = Pleroma.Config.get(Pleroma.Uploaders.Swift)
username = Keyword.fetch!(settings, :username)
password = Keyword.fetch!(settings, :password)
tenant_id = Keyword.fetch!(settings, :tenant_id)
case post(
"/tokens",
make_auth_body(username, password, tenant_id),
["Content-Type": "application/json"],
hackney: [:insecure]
) do
{:ok, %Tesla.Env{status: 200, body: body}} ->
body["access"]["token"]["id"]
{:ok, %Tesla.Env{status: _}} ->
""
end
end
def make_auth_body(username, password, tenant) do
Jason.encode!(%{
:auth => %{
:passwordCredentials => %{
:username => username,
:password => password
},
:tenantId => tenant
}
})
end
end

View file

@ -1,29 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.Swift.Client do
use HTTPoison.Base
def process_url(url) do
Enum.join(
[Pleroma.Config.get!([Pleroma.Uploaders.Swift, :storage_url]), url],
"/"
)
end
def upload_file(filename, body, content_type) do
token = Pleroma.Uploaders.Swift.Keystone.get_token()
case put("#{filename}", body, "X-Auth-Token": token, "Content-Type": content_type) do
{:ok, %Tesla.Env{status: 201}} ->
{:ok, {:file, filename}}
{:ok, %Tesla.Env{status: 401}} ->
{:error, "Unauthorized, Bad Token"}
{:error, _} ->
{:error, "Swift Upload Error"}
end
end
end

View file

@ -1,19 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.Swift do
@behaviour Pleroma.Uploaders.Uploader
def get_file(name) do
{:ok, {:url, Path.join([Pleroma.Config.get!([__MODULE__, :object_url]), name])}}
end
def put_file(upload) do
Pleroma.Uploaders.Swift.Client.upload_file(
upload.path,
File.read!(upload.tmpfile),
upload.content_type
)
end
end

View file

@ -107,15 +107,25 @@ def ap_id(%User{nickname: nickname}) do
  def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
  def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"

  def user_info(%User{} = user, args \\ %{}) do
    following_count =
      if args[:following_count], do: args[:following_count], else: following_count(user)

    follower_count =
      if args[:follower_count], do: args[:follower_count], else: user.info.follower_count

    %{
      note_count: user.info.note_count,
      locked: user.info.locked,
      confirmation_pending: user.info.confirmation_pending,
      default_scope: user.info.default_scope
    }
    |> Map.put(:following_count, following_count)
    |> Map.put(:follower_count, follower_count)
  end

  def set_info_cache(user, args) do
    Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user, args))
  end

  def restrict_deactivated(query) do
@ -1000,6 +1010,56 @@ def perform(:follow_import, %User{} = follower, followed_identifiers)
) )
end end
@spec sync_follow_counter() :: :ok
def sync_follow_counter,
do: PleromaJobQueue.enqueue(:background, __MODULE__, [:sync_follow_counters])
@spec perform(:sync_follow_counters) :: :ok
def perform(:sync_follow_counters) do
{:ok, _pid} = Agent.start_link(fn -> %{} end, name: :domain_errors)
config = Pleroma.Config.get([:instance, :external_user_synchronization])
:ok = sync_follow_counters(config)
Agent.stop(:domain_errors)
end
@spec sync_follow_counters(keyword()) :: :ok
def sync_follow_counters(opts \\ []) do
users = external_users(opts)
if length(users) > 0 do
errors = Agent.get(:domain_errors, fn state -> state end)
{last, updated_errors} = User.Synchronization.call(users, errors, opts)
Agent.update(:domain_errors, fn _state -> updated_errors end)
sync_follow_counters(max_id: last.id, limit: opts[:limit])
else
:ok
end
end
@spec external_users(keyword()) :: [User.t()]
def external_users(opts \\ []) do
query =
User.Query.build(%{
external: true,
active: true,
order_by: :id,
select: [:id, :ap_id, :info]
})
query =
if opts[:max_id],
do: where(query, [u], u.id > ^opts[:max_id]),
else: query
query =
if opts[:limit],
do: limit(query, ^opts[:limit]),
else: query
Repo.all(query)
end
def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers), def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
do: do:
PleromaJobQueue.enqueue(:background, __MODULE__, [ PleromaJobQueue.enqueue(:background, __MODULE__, [

View file

@ -7,7 +7,7 @@ defmodule Pleroma.User.Query do
  User query builder module. Builds query from new query or another user query.

  ## Example:
      query = Pleroma.User.Query.build(%{nickname: "nickname"})
      another_query = Pleroma.User.Query.build(query, %{email: "email@example.com"})
      Pleroma.Repo.all(query)
      Pleroma.Repo.all(another_query)
@ -47,7 +47,10 @@ defmodule Pleroma.User.Query do
          friends: User.t(),
          recipients_from_activity: [String.t()],
          nickname: [String.t()],
          ap_id: [String.t()],
          order_by: term(),
          select: term(),
          limit: pos_integer()
        }
        | %{}
@ -141,6 +144,18 @@ defp compose_query({:recipients_from_activity, to}, query) do
    where(query, [u], u.ap_id in ^to or fragment("? && ?", u.following, ^to))
  end

  defp compose_query({:order_by, key}, query) do
    order_by(query, [u], field(u, ^key))
  end

  defp compose_query({:select, keys}, query) do
    select(query, [u], ^keys)
  end

  defp compose_query({:limit, limit}, query) do
    limit(query, ^limit)
  end

  defp compose_query(_unsupported_param, query), do: query

  defp prepare_tag_criteria(tag, query) do
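These new `order_by`, `select`, and `limit` criteria are what `User.external_users/1` (in the user.ex hunk above) relies on for batched pagination; a standalone sketch of the same query:

```elixir
# Fetch one lightweight batch of remote users, ordered by id, selecting only the
# fields needed for counter synchronization (mirrors User.external_users/1).
query =
  Pleroma.User.Query.build(%{
    external: true,
    active: true,
    order_by: :id,
    select: [:id, :ap_id, :info],
    limit: 500
  })

users = Pleroma.Repo.all(query)
```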

View file

@ -150,7 +150,7 @@ defp boost_search_rank_query(query, for_user) do
  @spec fts_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
  defp fts_search_subquery(query, term) do
    processed_query =
      String.trim_trailing(term, "@" <> local_domain())
      |> String.replace(~r/\W+/, " ")
      |> String.trim()
      |> String.split()
@ -192,6 +192,8 @@ defp fts_search_subquery(query, term) do
  @spec trigram_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
  defp trigram_search_subquery(query, term) do
    term = String.trim_trailing(term, "@" <> local_domain())

    from(
      u in query,
      select_merge: %{
@ -209,4 +211,6 @@ defp trigram_search_subquery(query, term) do
    )
    |> User.restrict_deactivated()
  end

  defp local_domain, do: Pleroma.Config.get([Pleroma.Web.Endpoint, :url, :host])
end

View file

@ -0,0 +1,60 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User.Synchronization do
alias Pleroma.HTTP
alias Pleroma.User
@spec call([User.t()], map(), keyword()) :: {User.t(), map()}
def call(users, errors, opts \\ []) do
do_call(users, errors, opts)
end
defp do_call([user | []], errors, opts) do
updated = fetch_counters(user, errors, opts)
{user, updated}
end
defp do_call([user | others], errors, opts) do
updated = fetch_counters(user, errors, opts)
do_call(others, updated, opts)
end
defp fetch_counters(user, errors, opts) do
%{host: host} = URI.parse(user.ap_id)
info = %{}
{following, errors} = fetch_counter(user.ap_id <> "/following", host, errors, opts)
info = if following, do: Map.put(info, :following_count, following), else: info
{followers, errors} = fetch_counter(user.ap_id <> "/followers", host, errors, opts)
info = if followers, do: Map.put(info, :follower_count, followers), else: info
User.set_info_cache(user, info)
errors
end
defp available_domain?(domain, errors, opts) do
max_retries = Keyword.get(opts, :max_retries, 3)
not (Map.has_key?(errors, domain) && errors[domain] >= max_retries)
end
defp fetch_counter(url, host, errors, opts) do
with true <- available_domain?(host, errors, opts),
{:ok, %{body: body, status: code}} when code in 200..299 <-
HTTP.get(
url,
[{:Accept, "application/activity+json"}]
),
{:ok, data} <- Jason.decode(body) do
{data["totalItems"], errors}
else
false ->
{nil, errors}
_ ->
{nil, Map.update(errors, host, 1, &(&1 + 1))}
end
end
end
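A rough sketch of how this module is driven by `User.sync_follow_counters/1`; the batch size is a made-up value and the batch is assumed to be non-empty.

```elixir
# Pull one batch of remote users and synchronize their counters, starting from an
# empty per-host error map. Hosts exceeding :max_retries failures are skipped.
users = Pleroma.User.external_users(limit: 50)
{last_user, errors} = Pleroma.User.Synchronization.call(users, %{}, max_retries: 3)

# last_user.id is fed back as `max_id` for the next batch, which is what
# Pleroma.User.sync_follow_counters/1 does recursively until no users remain.
```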

View file

@ -0,0 +1,32 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User.SynchronizationWorker do
use GenServer
def start_link do
config = Pleroma.Config.get([:instance, :external_user_synchronization])
if config[:enabled] do
GenServer.start_link(__MODULE__, interval: config[:interval])
else
:ignore
end
end
def init(opts) do
schedule_next(opts)
{:ok, opts}
end
def handle_info(:sync_follow_counters, opts) do
Pleroma.User.sync_follow_counter()
schedule_next(opts)
{:noreply, opts}
end
defp schedule_next(opts) do
Process.send_after(self(), :sync_follow_counters, opts[:interval])
end
end

View file

@ -14,6 +14,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.Federator
import Ecto.Query import Ecto.Query
@ -22,20 +23,20 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
  @doc """
  Modifies an incoming AP object (mastodon format) to our internal format.
  """
  def fix_object(object, options \\ []) do
    object
    |> fix_actor
    |> fix_url
    |> fix_attachments
    |> fix_context
    |> fix_in_reply_to(options)
    |> fix_emoji
    |> fix_tag
    |> fix_content_map
    |> fix_likes
    |> fix_addressing
    |> fix_summary
    |> fix_type(options)
  end

  def fix_summary(%{"summary" => nil} = object) do
@ -164,7 +165,9 @@ def fix_likes(object) do
    object
  end

  def fix_in_reply_to(object, options \\ [])

  def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
      when not is_nil(in_reply_to) do
    in_reply_to_id =
      cond do
@ -182,7 +185,10 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
          ""
      end

    object = Map.put(object, "inReplyToAtomUri", in_reply_to_id)

    if Federator.allowed_incoming_reply_depth?(options[:depth]) do
      case get_obj_helper(in_reply_to_id, options) do
        {:ok, replied_object} ->
          with %Activity{} = _activity <-
                 Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
@ -201,9 +207,12 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
          Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
          object
      end
    else
      object
    end
  end

  def fix_in_reply_to(object, _options), do: object

  def fix_context(object) do
    context = object["context"] || object["conversation"] || Utils.generate_context_id()
@ -336,8 +345,13 @@ def fix_content_map(%{"contentMap" => content_map} = object) do
  def fix_content_map(object), do: object

  def fix_type(object, options \\ [])

  def fix_type(%{"inReplyTo" => reply_id} = object, options) when is_binary(reply_id) do
    reply =
      if Federator.allowed_incoming_reply_depth?(options[:depth]) do
        Object.normalize(reply_id, true)
      end

    if reply && (reply.data["type"] == "Question" and object["name"]) do
      Map.put(object, "type", "Answer")
@ -346,7 +360,7 @@ def fix_type(%{"inReplyTo" => reply_id} = object) when is_binary(reply_id) do
    end
  end

  def fix_type(object, _), do: object

  defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
    with true <- id =~ "follows",
@ -374,9 +388,11 @@ defp get_follow_activity(follow_object, followed) do
    end
  end

  def handle_incoming(data, options \\ [])

  # Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
  # with nil ID.
  def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
    with context <- data["context"] || Utils.generate_context_id(),
         content <- data["content"] || "",
         %User{} = actor <- User.get_cached_by_ap_id(actor),
@ -409,15 +425,19 @@ def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} =
  end

  # disallow objects with bogus IDs
  def handle_incoming(%{"id" => nil}, _options), do: :error
  def handle_incoming(%{"id" => ""}, _options), do: :error
  # length of https:// = 8, should validate better, but good enough for now.
  def handle_incoming(%{"id" => id}, _options) when not (is_binary(id) and length(id) > 8),
    do: :error

  # TODO: validate those with a Ecto scheme
  # - tags
  # - emoji
  def handle_incoming(
        %{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
        options
      )
      when objtype in ["Article", "Note", "Video", "Page", "Question", "Answer"] do
    actor = Containment.get_actor(data)
@ -427,7 +447,8 @@ def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = obj
    with nil <- Activity.get_create_by_object_ap_id(object["id"]),
         {:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
      options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)
      object = fix_object(data["object"], options)

      params = %{
        to: data["to"],
@ -452,7 +473,8 @@ def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = obj
end end
def handle_incoming( def handle_incoming(
%{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data,
_options
) do ) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed), with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower), {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
@ -503,7 +525,8 @@ def handle_incoming(
end end
def handle_incoming( def handle_incoming(
%{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data %{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data,
_options
) do ) do
with actor <- Containment.get_actor(data), with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor), {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
@ -524,7 +547,8 @@ def handle_incoming(
end end
def handle_incoming( def handle_incoming(
%{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data %{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data,
_options
) do ) do
with actor <- Containment.get_actor(data), with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor), {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
@ -548,7 +572,8 @@ def handle_incoming(
end end
def handle_incoming( def handle_incoming(
%{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data %{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data,
_options
) do ) do
with actor <- Containment.get_actor(data), with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor), {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@ -561,7 +586,8 @@ def handle_incoming(
end end
def handle_incoming( def handle_incoming(
%{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data %{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data,
_options
) do ) do
with actor <- Containment.get_actor(data), with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor), {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@ -576,7 +602,8 @@ def handle_incoming(
def handle_incoming( def handle_incoming(
%{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} = %{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} =
data data,
_options
) )
when object_type in ["Person", "Application", "Service", "Organization"] do when object_type in ["Person", "Application", "Service", "Organization"] do
with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
@ -614,7 +641,8 @@ def handle_incoming(
# an error or a tombstone. This would allow us to verify that a deletion actually took # an error or a tombstone. This would allow us to verify that a deletion actually took
# place. # place.
def handle_incoming( def handle_incoming(
%{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data %{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data,
_options
) do ) do
object_id = Utils.get_ap_id(object_id) object_id = Utils.get_ap_id(object_id)
@ -635,7 +663,8 @@ def handle_incoming(
"object" => %{"type" => "Announce", "object" => object_id}, "object" => %{"type" => "Announce", "object" => object_id},
"actor" => _actor, "actor" => _actor,
"id" => id "id" => id
} = data } = data,
_options
) do ) do
with actor <- Containment.get_actor(data), with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor), {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@ -653,7 +682,8 @@ def handle_incoming(
"object" => %{"type" => "Follow", "object" => followed}, "object" => %{"type" => "Follow", "object" => followed},
"actor" => follower, "actor" => follower,
"id" => id "id" => id
} = _data } = _data,
_options
) do ) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed), with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower), {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
@ -671,7 +701,8 @@ def handle_incoming(
"object" => %{"type" => "Block", "object" => blocked}, "object" => %{"type" => "Block", "object" => blocked},
"actor" => blocker, "actor" => blocker,
"id" => id "id" => id
} = _data } = _data,
_options
) do ) do
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]), with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
%User{local: true} = blocked <- User.get_cached_by_ap_id(blocked), %User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
@ -685,7 +716,8 @@ def handle_incoming(
end end
def handle_incoming( def handle_incoming(
%{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data %{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data,
_options
) do ) do
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]), with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
%User{local: true} = blocked = User.get_cached_by_ap_id(blocked), %User{local: true} = blocked = User.get_cached_by_ap_id(blocked),
@ -705,7 +737,8 @@ def handle_incoming(
"object" => %{"type" => "Like", "object" => object_id}, "object" => %{"type" => "Like", "object" => object_id},
"actor" => _actor, "actor" => _actor,
"id" => id "id" => id
} = data } = data,
_options
) do ) do
with actor <- Containment.get_actor(data), with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor), {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@ -717,10 +750,10 @@ def handle_incoming(
end end
end end
  def handle_incoming(_, _), do: :error

  def get_obj_helper(id, options \\ []) do
    if object = Object.normalize(id, true, options), do: {:ok, object}, else: nil
  end

  def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do

View file

@ -170,6 +170,7 @@ def create_context(context) do
  Enqueues an activity for federation if it's local
  """
  def maybe_federate(%Activity{local: true} = activity) do
    if Pleroma.Config.get!([:instance, :federating]) do
      priority =
        case activity.data["type"] do
          "Delete" -> 10
@ -178,6 +179,8 @@ def maybe_federate(%Activity{local: true} = activity) do
        end

      Pleroma.Web.Federator.publish(activity, priority)
    end

    :ok
  end

View file

@ -74,7 +74,7 @@ def user_create(
  end

  def user_show(conn, %{"nickname" => nickname}) do
    with %User{} = user <- User.get_cached_by_nickname_or_id(nickname) do
      conn
      |> json(AccountView.render("show.json", %{user: user}))
    else

View file

@ -22,6 +22,18 @@ def init do
    refresh_subscriptions()
  end

  @doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"
  # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
  def allowed_incoming_reply_depth?(depth) do
    max_replies_depth = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth])

    if max_replies_depth do
      (depth || 1) <= max_replies_depth
    else
      true
    end
  end
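To illustrate the depth accounting (the limit value here is hypothetical): each nested Create handled by the Transmogrifier increments `options[:depth]` by one before its parent is fetched, and fetching stops once this check fails.

```elixir
# Assuming federation_incoming_replies_max_depth is set to 100:
Pleroma.Web.Federator.allowed_incoming_reply_depth?(nil) # => true (nil is treated as depth 1)
Pleroma.Web.Federator.allowed_incoming_reply_depth?(100) # => true
Pleroma.Web.Federator.allowed_incoming_reply_depth?(101) # => false
```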
  # Client API

  def incoming_doc(doc) do

View file

@ -167,6 +167,69 @@ def update_credentials(%{assigns: %{user: user}} = conn, params) do
end end
end end
def update_avatar(%{assigns: %{user: user}} = conn, %{"img" => ""}) do
change = Changeset.change(user, %{avatar: nil})
{:ok, user} = User.update_and_set_cache(change)
CommonAPI.update(user)
json(conn, %{url: nil})
end
def update_avatar(%{assigns: %{user: user}} = conn, params) do
{:ok, object} = ActivityPub.upload(params, type: :avatar)
change = Changeset.change(user, %{avatar: object.data})
{:ok, user} = User.update_and_set_cache(change)
CommonAPI.update(user)
%{"url" => [%{"href" => href} | _]} = object.data
json(conn, %{url: href})
end
def update_banner(%{assigns: %{user: user}} = conn, %{"banner" => ""}) do
with new_info <- %{"banner" => %{}},
info_cng <- User.Info.profile_update(user.info, new_info),
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
{:ok, user} <- User.update_and_set_cache(changeset) do
CommonAPI.update(user)
json(conn, %{url: nil})
end
end
def update_banner(%{assigns: %{user: user}} = conn, params) do
with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, type: :banner),
new_info <- %{"banner" => object.data},
info_cng <- User.Info.profile_update(user.info, new_info),
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
{:ok, user} <- User.update_and_set_cache(changeset) do
CommonAPI.update(user)
%{"url" => [%{"href" => href} | _]} = object.data
json(conn, %{url: href})
end
end
def update_background(%{assigns: %{user: user}} = conn, %{"img" => ""}) do
with new_info <- %{"background" => %{}},
info_cng <- User.Info.profile_update(user.info, new_info),
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
{:ok, _user} <- User.update_and_set_cache(changeset) do
json(conn, %{url: nil})
end
end
def update_background(%{assigns: %{user: user}} = conn, params) do
with {:ok, object} <- ActivityPub.upload(params, type: :background),
new_info <- %{"background" => object.data},
info_cng <- User.Info.profile_update(user.info, new_info),
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
{:ok, _user} <- User.update_and_set_cache(changeset) do
%{"url" => [%{"href" => href} | _]} = object.data
json(conn, %{url: href})
end
end
def verify_credentials(%{assigns: %{user: user}} = conn, _) do def verify_credentials(%{assigns: %{user: user}} = conn, _) do
chat_token = Phoenix.Token.sign(conn, "user socket", user.id) chat_token = Phoenix.Token.sign(conn, "user socket", user.id)

View file

@ -104,7 +104,7 @@ def render(
      id: to_string(activity.id),
      uri: activity_object.data["id"],
      url: activity_object.data["id"],
      account: AccountView.render("account.json", %{user: user, for: opts[:for]}),
      in_reply_to_id: nil,
      in_reply_to_account_id: nil,
      reblog: reblogged,
@ -221,7 +221,7 @@ def render("status.json", %{activity: %{data: %{"object" => _object}} = activity
      id: to_string(activity.id),
      uri: object.data["id"],
      url: url,
      account: AccountView.render("account.json", %{user: user, for: opts[:for]}),
      in_reply_to_id: reply_to && to_string(reply_to.id),
      in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id),
      reblog: nil,

View file

@ -29,9 +29,10 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
  def init(%{qs: qs} = req, state) do
    with params <- :cow_qs.parse_qs(qs),
         sec_websocket <- :cowboy_req.header("sec-websocket-protocol", req, nil),
         access_token <- List.keyfind(params, "access_token", 0),
         {_, stream} <- List.keyfind(params, "stream", 0),
         {:ok, user} <- allow_request(stream, [access_token, sec_websocket]),
         topic when is_binary(topic) <- expand_topic(stream, params) do
      {:cowboy_websocket, req, %{user: user, topic: topic}, %{idle_timeout: @timeout}}
    else
@ -84,13 +85,21 @@ def terminate(reason, _req, state) do
  end

  # Public streams without authentication.
  defp allow_request(stream, [nil, nil]) when stream in @anonymous_streams do
    {:ok, nil}
  end

  # Authenticated streams.
  defp allow_request(stream, [access_token, sec_websocket]) when stream in @streams do
    token =
      with {"access_token", token} <- access_token do
        token
      else
        _ -> sec_websocket
      end

    with true <- is_bitstring(token),
         %Token{user_id: user_id} <- Repo.get_by(Token, token: token),
         user = %User{} <- User.get_cached_by_id(user_id) do
      {:ok, user}
    else

View file

@ -33,20 +33,7 @@ defp whitelisted?(url) do
def encode_url(url) do def encode_url(url) do
secret = Pleroma.Config.get([Pleroma.Web.Endpoint, :secret_key_base]) secret = Pleroma.Config.get([Pleroma.Web.Endpoint, :secret_key_base])
base64 = Base.url_encode64(url, @base64_opts)
# Must preserve `%2F` for compatibility with S3
# https://git.pleroma.social/pleroma/pleroma/issues/580
replacement = get_replacement(url, ":2F:")
# The URL is url-decoded and encoded again to ensure it is correctly encoded and not twice.
base64 =
url
|> String.replace("%2F", replacement)
|> URI.decode()
|> URI.encode()
|> String.replace(replacement, "%2F")
|> Base.url_encode64(@base64_opts)
sig = :crypto.hmac(:sha, secret, base64) sig = :crypto.hmac(:sha, secret, base64)
sig64 = sig |> Base.url_encode64(@base64_opts) sig64 = sig |> Base.url_encode64(@base64_opts)
@ -80,12 +67,4 @@ def build_url(sig_base64, url_base64, filename \\ nil) do
|> Enum.filter(fn value -> value end) |> Enum.filter(fn value -> value end)
|> Path.join() |> Path.join()
end end
defp get_replacement(url, replacement) do
if String.contains?(url, replacement) do
get_replacement(url, replacement <> replacement)
else
replacement
end
end
end end
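A minimal sketch of the simplified signing scheme above, assuming `@base64_opts` stands for `[padding: false]` (the attribute itself is not shown in this hunk); the URL and secret are placeholders.

```elixir
url = "https://remote.example/media/image.png"
secret = "dummy secret for illustration"

# The whole URL is base64url-encoded, then signed with HMAC-SHA1, mirroring encode_url/1.
base64 = Base.url_encode64(url, padding: false)
sig64 = :sha |> :crypto.hmac(secret, base64) |> Base.url_encode64(padding: false)
```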

View file

@ -121,4 +121,6 @@ defp build_attachments(%{data: %{"attachment" => attachments}}) do
acc ++ rendered_tags acc ++ rendered_tags
end) end)
end end
defp build_attachments(_), do: []
end end

View file

@ -117,6 +117,8 @@ defp build_attachments(id, %{data: %{"attachment" => attachments}}) do
end) end)
end end
defp build_attachments(_id, _object), do: []
defp player_url(id) do defp player_url(id) do
Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice_player, id) Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice_player, id)
end end

View file

@ -10,6 +10,7 @@ defmodule Pleroma.Web.OStatus.NoteHandler do
alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.CommonAPI alias Pleroma.Web.CommonAPI
alias Pleroma.Web.Federator
alias Pleroma.Web.OStatus alias Pleroma.Web.OStatus
alias Pleroma.Web.XML alias Pleroma.Web.XML
@ -88,14 +89,15 @@ def add_external_url(note, entry) do
    Map.put(note, "external_url", url)
  end

  def fetch_replied_to_activity(entry, in_reply_to, options \\ []) do
    with %Activity{} = activity <- Activity.get_create_by_object_ap_id(in_reply_to) do
      activity
    else
      _e ->
        with true <- Federator.allowed_incoming_reply_depth?(options[:depth]),
             in_reply_to_href when not is_nil(in_reply_to_href) <-
               XML.string_from_xpath("//thr:in-reply-to[1]/@href", entry),
             {:ok, [activity | _]} <- OStatus.fetch_activity_from_url(in_reply_to_href, options) do
          activity
        else
          _e -> nil
@ -104,7 +106,7 @@ def fetch_replied_to_activity(entry, in_reply_to) do
  end

  # TODO: Clean this up a bit.
  def handle_note(entry, doc \\ nil, options \\ []) do
    with id <- XML.string_from_xpath("//id", entry),
         activity when is_nil(activity) <- Activity.get_create_by_object_ap_id_with_object(id),
         [author] <- :xmerl_xpath.string('//author[1]', doc),
@ -112,7 +114,8 @@ def handle_note(entry, doc \\ nil) do
         content_html <- OStatus.get_content(entry),
         cw <- OStatus.get_cw(entry),
         in_reply_to <- XML.string_from_xpath("//thr:in-reply-to[1]/@ref", entry),
         options <- Keyword.put(options, :depth, (options[:depth] || 0) + 1),
         in_reply_to_activity <- fetch_replied_to_activity(entry, in_reply_to, options),
         in_reply_to_object <-
           (in_reply_to_activity && Object.normalize(in_reply_to_activity)) || nil,
         in_reply_to <- (in_reply_to_object && in_reply_to_object.data["id"]) || in_reply_to,

View file

@ -54,7 +54,7 @@ def remote_follow_path do
"#{Web.base_url()}/ostatus_subscribe?acct={uri}" "#{Web.base_url()}/ostatus_subscribe?acct={uri}"
end end
  def handle_incoming(xml_string, options \\ []) do
with doc when doc != :error <- parse_document(xml_string) do with doc when doc != :error <- parse_document(xml_string) do
with {:ok, actor_user} <- find_make_or_update_user(doc), with {:ok, actor_user} <- find_make_or_update_user(doc),
do: Pleroma.Instances.set_reachable(actor_user.ap_id) do: Pleroma.Instances.set_reachable(actor_user.ap_id)
@ -91,10 +91,12 @@ def handle_incoming(xml_string) do
_ -> _ ->
case object_type do case object_type do
'http://activitystrea.ms/schema/1.0/note' -> 'http://activitystrea.ms/schema/1.0/note' ->
              with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
                   do: activity

            'http://activitystrea.ms/schema/1.0/comment' ->
              with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
                   do: activity
_ -> _ ->
Logger.error("Couldn't parse incoming document") Logger.error("Couldn't parse incoming document")
@ -359,7 +361,7 @@ def get_atom_url(body) do
end end
end end
def fetch_activity_from_atom_url(url) do def fetch_activity_from_atom_url(url, options \\ []) do
with true <- String.starts_with?(url, "http"), with true <- String.starts_with?(url, "http"),
{:ok, %{body: body, status: code}} when code in 200..299 <- {:ok, %{body: body, status: code}} when code in 200..299 <-
HTTP.get( HTTP.get(
@ -367,7 +369,7 @@ def fetch_activity_from_atom_url(url) do
[{:Accept, "application/atom+xml"}] [{:Accept, "application/atom+xml"}]
) do ) do
Logger.debug("Got document from #{url}, handling...") Logger.debug("Got document from #{url}, handling...")
handle_incoming(body) handle_incoming(body, options)
else else
e -> e ->
Logger.debug("Couldn't get #{url}: #{inspect(e)}") Logger.debug("Couldn't get #{url}: #{inspect(e)}")
@ -375,13 +377,13 @@ def fetch_activity_from_atom_url(url) do
end end
end end
def fetch_activity_from_html_url(url) do def fetch_activity_from_html_url(url, options \\ []) do
Logger.debug("Trying to fetch #{url}") Logger.debug("Trying to fetch #{url}")
with true <- String.starts_with?(url, "http"), with true <- String.starts_with?(url, "http"),
{:ok, %{body: body}} <- HTTP.get(url, []), {:ok, %{body: body}} <- HTTP.get(url, []),
{:ok, atom_url} <- get_atom_url(body) do {:ok, atom_url} <- get_atom_url(body) do
fetch_activity_from_atom_url(atom_url) fetch_activity_from_atom_url(atom_url, options)
else else
e -> e ->
Logger.debug("Couldn't get #{url}: #{inspect(e)}") Logger.debug("Couldn't get #{url}: #{inspect(e)}")
@ -389,11 +391,11 @@ def fetch_activity_from_html_url(url) do
end end
end end
def fetch_activity_from_url(url) do def fetch_activity_from_url(url, options \\ []) do
with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url) do with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url, options) do
{:ok, activities} {:ok, activities}
else else
_e -> fetch_activity_from_html_url(url) _e -> fetch_activity_from_html_url(url, options)
end end
rescue rescue
e -> e ->
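Taken together, these hunks thread a keyword list of options (notably `:depth`) through every OStatus fetch path, so recursive ancestor fetching carries its depth along. A hedged usage sketch, with an illustrative URL and starting depth:

  # handle_note/3 increments :depth via Keyword.put/3 for each nested reply,
  # so a sufficiently deep thread eventually fails allowed_incoming_reply_depth?
  # and fetching stops instead of recursing without bound.
  {:ok, activities} =
    OStatus.fetch_activity_from_url("https://remote.example/notice/1", depth: 0)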


@@ -322,6 +322,10 @@ defmodule Pleroma.Web.Router do
      patch("/accounts/update_credentials", MastodonAPIController, :update_credentials)

+     patch("/accounts/update_avatar", MastodonAPIController, :update_avatar)
+     patch("/accounts/update_banner", MastodonAPIController, :update_banner)
+     patch("/accounts/update_background", MastodonAPIController, :update_background)
+
      post("/statuses", MastodonAPIController, :post_status)
      delete("/statuses/:id", MastodonAPIController, :delete_status)
@@ -724,6 +728,7 @@ defmodule Pleroma.Web.Router do
  defmodule Fallback.RedirectController do
    use Pleroma.Web, :controller
+   require Logger
    alias Pleroma.User
    alias Pleroma.Web.Metadata
@@ -750,7 +755,20 @@ def redirector_with_meta(conn, %{"maybe_nickname_or_id" => maybe_nickname_or_id}
  def redirector_with_meta(conn, params) do
    {:ok, index_content} = File.read(index_file_path())

-   tags = Metadata.build_tags(params)
+   tags =
+     try do
+       Metadata.build_tags(params)
+     rescue
+       e ->
+         Logger.error(
+           "Metadata rendering for #{conn.request_path} failed.\n" <>
+             Exception.format(:error, e, __STACKTRACE__)
+         )
+
+         ""
+     end

    response = String.replace(index_content, "<!--server-generated-meta-->", tags)

    conn


@@ -456,6 +456,16 @@ def resend_confirmation_email(conn, params) do
    end
  end

+ def update_avatar(%{assigns: %{user: user}} = conn, %{"img" => ""}) do
+   change = Changeset.change(user, %{avatar: nil})
+   {:ok, user} = User.update_and_set_cache(change)
+   CommonAPI.update(user)
+
+   conn
+   |> put_view(UserView)
+   |> render("show.json", %{user: user, for: user})
+ end
+
  def update_avatar(%{assigns: %{user: user}} = conn, params) do
    {:ok, object} = ActivityPub.upload(params, type: :avatar)
    change = Changeset.change(user, %{avatar: object.data})
@@ -467,6 +477,19 @@ def update_avatar(%{assigns: %{user: user}} = conn, params) do
    |> render("show.json", %{user: user, for: user})
  end

+ def update_banner(%{assigns: %{user: user}} = conn, %{"banner" => ""}) do
+   with new_info <- %{"banner" => %{}},
+        info_cng <- User.Info.profile_update(user.info, new_info),
+        changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
+        {:ok, user} <- User.update_and_set_cache(changeset) do
+     CommonAPI.update(user)
+     response = %{url: nil} |> Jason.encode!()
+
+     conn
+     |> json_reply(200, response)
+   end
+ end
+
  def update_banner(%{assigns: %{user: user}} = conn, params) do
    with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, type: :banner),
         new_info <- %{"banner" => object.data},
@@ -482,6 +505,18 @@ def update_banner(%{assigns: %{user: user}} = conn, params) do
    end
  end

+ def update_background(%{assigns: %{user: user}} = conn, %{"img" => ""}) do
+   with new_info <- %{"background" => %{}},
+        info_cng <- User.Info.profile_update(user.info, new_info),
+        changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
+        {:ok, _user} <- User.update_and_set_cache(changeset) do
+     response = %{url: nil} |> Jason.encode!()
+
+     conn
+     |> json_reply(200, response)
+   end
+ end
+
  def update_background(%{assigns: %{user: user}} = conn, params) do
    with {:ok, object} <- ActivityPub.upload(params, type: :background),
         new_info <- %{"background" => object.data},


@@ -109,7 +109,6 @@ defp deps do
      {:phoenix_html, "~> 2.10"},
      {:calendar, "~> 0.17.4"},
      {:cachex, "~> 3.0.2"},
-     {:httpoison, "~> 1.2.0"},
      {:poison, "~> 3.0", override: true},
      {:tesla, "~> 1.2"},
      {:jason, "~> 1.0"},
@@ -151,7 +150,8 @@ defp deps do
      {:esshd, "~> 0.1.0", runtime: Application.get_env(:esshd, :enabled, false)},
      {:ex_rated, "~> 1.3"},
      {:plug_static_index_html, "~> 1.0.0"},
-     {:excoveralls, "~> 0.11.1", only: :test}
+     {:excoveralls, "~> 0.11.1", only: :test},
+     {:mox, "~> 0.5", only: :test}
    ] ++ oauth_deps()
  end
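The dependency changes drop the direct `httpoison` dependency (presumably because HTTP calls already go through `tesla`) and add `mox`, the standard behaviour-based mocking library, for tests only. A generic sketch of how Mox is typically wired up — the module and behaviour names here are illustrative, not taken from this diff:

  # Sketch only: MyHTTPMock and MyHTTPBehaviour are hypothetical names.
  # In test support code, define a mock module implementing a behaviour:
  Mox.defmock(MyHTTPMock, for: MyHTTPBehaviour)

  # In a test, set an expectation before exercising the code under test:
  Mox.expect(MyHTTPMock, :get, fn _url, _headers, _opts ->
    {:ok, %Tesla.Env{status: 200, body: "<feed/>"}}
  end)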


@@ -52,6 +52,7 @@
  "mochiweb": {:hex, :mochiweb, "2.15.0", "e1daac474df07651e5d17cc1e642c4069c7850dc4508d3db7263a0651330aacc", [:rebar3], [], "hexpm"},
  "mock": {:hex, :mock, "0.3.3", "42a433794b1291a9cf1525c6d26b38e039e0d3a360732b5e467bfc77ef26c914", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
  "mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
+ "mox": {:hex, :mox, "0.5.1", "f86bb36026aac1e6f924a4b6d024b05e9adbed5c63e8daa069bd66fb3292165b", [:mix], [], "hexpm"},
  "nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"},
  "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
  "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.3", "6706a148809a29c306062862c803406e88f048277f6e85b68faf73291e820b84", [:mix], [], "hexpm"},
@@ -65,14 +66,12 @@
  "plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
  "plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
  "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
- "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm"},
  "postgrex": {:hex, :postgrex, "0.14.3", "5754dee2fdf6e9e508cbf49ab138df964278700b764177e8f3871e658b345a1e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
  "prometheus": {:hex, :prometheus, "4.2.2", "a830e77b79dc6d28183f4db050a7cac926a6c58f1872f9ef94a35cd989aceef8", [:mix, :rebar3], [], "hexpm"},
  "prometheus_ecto": {:hex, :prometheus_ecto, "1.4.1", "6c768ea9654de871e5b32fab2eac348467b3021604ebebbcbd8bcbe806a65ed5", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
  "prometheus_ex": {:hex, :prometheus_ex, "3.0.5", "fa58cfd983487fc5ead331e9a3e0aa622c67232b3ec71710ced122c4c453a02f", [:mix], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"},
  "prometheus_phoenix": {:hex, :prometheus_phoenix, "1.2.1", "964a74dfbc055f781d3a75631e06ce3816a2913976d1df7830283aa3118a797a", [:mix], [{:phoenix, "~> 1.3", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
  "prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
- "prometheus_process_collector": {:hex, :prometheus_process_collector, "1.4.0", "6dbd39e3165b9ef1c94a7a820e9ffe08479f949dcdd431ed4aaea7b250eebfde", [:rebar3], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"},
  "quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
  "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
  "recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]},


@@ -67,20 +67,3 @@ config :pleroma, Pleroma.Uploaders.Local, uploads: "<%= uploads_dir %>"
  # For using third-party S3 clones like wasabi, also do:
  # config :ex_aws, :s3,
  #   host: "s3.wasabisys.com"
-
- # Configure Openstack Swift support if desired.
- #
- # Many openstack deployments are different, so config is left very open with
- # no assumptions made on which provider you're using. This should allow very
- # wide support without needing separate handlers for OVH, Rackspace, etc.
- #
- # config :pleroma, Pleroma.Uploaders.Swift,
- #   container: "some-container",
- #   username: "api-username-yyyy",
- #   password: "api-key-xxxx",
- #   tenant_id: "<openstack-project/tenant-id>",
- #   auth_url: "https://keystone-endpoint.provider.com",
- #   storage_url: "https://swift-endpoint.prodider.com/v1/AUTH_<tenant>/<container>",
- #   object_url: "https://cdn-endpoint.provider.com/<container>"
- #


@@ -6,6 +6,7 @@ defmodule Pleroma.ActivityTest do
  use Pleroma.DataCase
  alias Pleroma.Activity
  alias Pleroma.Bookmark
+ alias Pleroma.Object
  alias Pleroma.ThreadMute

  import Pleroma.Factory
@@ -18,15 +19,18 @@ test "returns an activity by it's AP id" do
  test "returns activities by it's objects AP ids" do
    activity = insert(:note_activity)
-   [found_activity] = Activity.get_all_create_by_object_ap_id(activity.data["object"]["id"])
+   object_data = Object.normalize(activity).data
+
+   [found_activity] = Activity.get_all_create_by_object_ap_id(object_data["id"])

    assert activity == found_activity
  end

  test "returns the activity that created an object" do
    activity = insert(:note_activity)
+   object_data = Object.normalize(activity).data

-   found_activity = Activity.get_create_by_object_ap_id(activity.data["object"]["id"])
+   found_activity = Activity.get_create_by_object_ap_id(object_data["id"])
    assert activity == found_activity
  end
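These test updates switch from reading the embedded `activity.data["object"]` map to resolving the object with `Object.normalize/1` and asserting on its `data`. A condensed restatement of the pattern, shown purely for illustration:

  # Sketch only: normalize an activity to its object before asserting on data.
  activity = insert(:note_activity)
  object = Object.normalize(activity)

  assert is_binary(object.data["id"])
  assert [%Activity{}] = Activity.get_all_create_by_object_ap_id(object.data["id"])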


@@ -59,6 +59,7 @@ test "replying" do
    another_user = insert(:user)
    {:ok, activity} = CommonAPI.post(another_user, %{"status" => "this is a test post"})
+   activity_object = Object.normalize(activity)

    output =
      capture_io(fn ->
@@ -76,8 +77,9 @@ test "replying" do
      )

    assert reply.actor == user.ap_id
-   object = Object.normalize(reply)
-   assert object.data["content"] == "this is a reply"
-   assert object.data["inReplyTo"] == activity.data["object"]
+
+   reply_object_data = Object.normalize(reply).data
+   assert reply_object_data["content"] == "this is a reply"
+   assert reply_object_data["inReplyTo"] == activity_object.data["id"]
  end
end


@@ -11,6 +11,16 @@ defmodule Pleroma.ConversationTest do
  import Pleroma.Factory

+ setup_all do
+   config_path = [:instance, :federating]
+   initial_setting = Pleroma.Config.get(config_path)
+
+   Pleroma.Config.put(config_path, true)
+   on_exit(fn -> Pleroma.Config.put(config_path, initial_setting) end)
+
+   :ok
+ end
+
  test "it goes through old direct conversations" do
    user = insert(:user)
    other_user = insert(:user)

Some files were not shown because too many files have changed in this diff.