diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..c5ef89b86 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,12 @@ +.* +*.md +AGPL-3 +CC-BY-SA-4.0 +COPYING +*file +elixir_buildpack.config +docs/ +test/ + +# Required to get version +!.git diff --git a/CHANGELOG.md b/CHANGELOG.md index bd64b2259..dccc36965 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Fixed - Not being able to pin unlisted posts +- Objects being re-embedded to activities after being updated (e.g faved/reposted). Running 'mix pleroma.database prune_objects' again is advised. - Metadata rendering errors resulting in the entire page being inaccessible - Federation/MediaProxy not working with instances that have wrong certificate order - Mastodon API: Handling of search timeouts (`/api/v1/search` and `/api/v2/search`) @@ -34,6 +35,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - ActivityPub S2S: sharedInbox usage has been mostly aligned with the rules in the AP specification. - ActivityPub S2S: remote user deletions now work the same as local user deletions. - Not being able to access the Mastodon FE login page on private instances +- Invalid SemVer version generation, when the current branch does not have commits ahead of tag/checked out on a tag +- Pleroma.Upload base_url was not automatically whitelisted by MediaProxy. Now your custom CDN or file hosting will be accessed directly as expected. +- Report email not being sent to admins when the reporter is a remote user ### Added - MRF: Support for priming the mediaproxy cache (`Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`) @@ -67,6 +71,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - ActivityPub: Optional signing of ActivityPub object fetches. - Admin API: Endpoint for fetching latest user's statuses - Pleroma API: Add `/api/v1/pleroma/accounts/confirmation_resend?email=` for resending account confirmation. +- Relays: Added a task to list relay subscriptions. ### Changed - Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text @@ -74,6 +79,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - RichMedia: parsers and their order are configured in `rich_media` config. - RichMedia: add the rich media ttl based on image expiration time. +### Removed +- Emoji: Remove longfox emojis. +- Remove `Reply-To` header from report emails for admins. + ## [1.0.1] - 2019-07-14 ### Security - OStatus: fix an object spoofing vulnerability. @@ -84,6 +93,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Rich media: Do not crawl private IP ranges ### Added +- Digest email for inactive users - Add a generic settings store for frontends / clients to use. - Explicit addressing option for posting. - Optional SSH access mode. (Needs `erlang-ssh` package on some distributions). @@ -110,6 +120,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
- Configuration: `notify_email` option - Configuration: Media proxy `whitelist` option - Configuration: `report_uri` option +- Configuration: `email_notifications` option - Configuration: `limit_to_local_content` option - Pleroma API: User subscriptions - Pleroma API: Healthcheck endpoint diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..268ec61dc --- /dev/null +++ b/Dockerfile @@ -0,0 +1,39 @@ +FROM rinpatch/elixir:1.9.0-rc.0-alpine as build + +COPY . . + +ENV MIX_ENV=prod + +RUN apk add git gcc g++ musl-dev make &&\ + echo "import Mix.Config" > config/prod.secret.exs &&\ + mix local.hex --force &&\ + mix local.rebar --force &&\ + mix deps.get --only prod &&\ + mkdir release &&\ + mix release --path release + +FROM alpine:latest + +ARG HOME=/opt/pleroma +ARG DATA=/var/lib/pleroma + +RUN echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories &&\ + apk update &&\ + apk add ncurses postgresql-client &&\ + adduser --system --shell /bin/false --home ${HOME} pleroma &&\ + mkdir -p ${DATA}/uploads &&\ + mkdir -p ${DATA}/static &&\ + chown -R pleroma ${DATA} &&\ + mkdir -p /etc/pleroma &&\ + chown -R pleroma /etc/pleroma + +USER pleroma + +COPY --from=build --chown=pleroma:0 /release ${HOME} + +COPY ./config/docker.exs /etc/pleroma/config.exs +COPY ./docker-entrypoint.sh ${HOME} + +EXPOSE 4000 + +ENTRYPOINT ["/opt/pleroma/docker-entrypoint.sh"] diff --git a/README.md b/README.md index 41d454a03..5aad34ccc 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ If you want to run your own server, feel free to contact us at @lain@pleroma.soy Currently Pleroma is not packaged by any OS/Distros, but feel free to reach out to us at [#pleroma-dev on freenode](https://webchat.freenode.net/?channels=%23pleroma-dev) or via matrix at for assistance. If you want to change default options in your Pleroma package, please **discuss it with us first**. ### Docker -While we don’t provide docker files, other people have written very good ones. Take a look at or . +While we don’t provide docker files, other people have written very good ones. Take a look at or . 
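The Docker image added in this changeset (the `Dockerfile` above and `config/docker.exs` below) reads most instance settings from environment variables and imports `/var/lib/pleroma/config.exs` for anything else. A minimal sketch of such an override file, assuming the stock image paths; the `registrations_open` value is purely illustrative:

```elixir
# /var/lib/pleroma/config.exs, imported by config/docker.exs at boot
import Config

# Illustrative override: open registrations on this instance
config :pleroma, :instance, registrations_open: true
```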
### Dependencies diff --git a/config/config.exs b/config/config.exs index 5fd64365c..258e4d274 100644 --- a/config/config.exs +++ b/config/config.exs @@ -524,6 +524,14 @@ config :pleroma, Pleroma.ScheduledActivity, total_user_limit: 300, enabled: true +config :pleroma, :email_notifications, + digest: %{ + active: false, + schedule: "0 0 * * 0", + interval: 7, + inactivity_threshold: 7 + } + config :pleroma, :oauth2, token_expires_in: 600, issue_new_refresh_token: true, diff --git a/config/docker.exs b/config/docker.exs new file mode 100644 index 000000000..63ab4cdee --- /dev/null +++ b/config/docker.exs @@ -0,0 +1,68 @@ +import Config + +config :pleroma, Pleroma.Web.Endpoint, + url: [host: System.get_env("DOMAIN", "localhost"), scheme: "https", port: 443], + http: [ip: {0, 0, 0, 0}, port: 4000] + +config :pleroma, :instance, + name: System.get_env("INSTANCE_NAME", "Pleroma"), + email: System.get_env("ADMIN_EMAIL"), + notify_email: System.get_env("NOTIFY_EMAIL"), + limit: 5000, + registrations_open: false, + dynamic_configuration: true + +config :pleroma, Pleroma.Repo, + adapter: Ecto.Adapters.Postgres, + username: System.get_env("DB_USER", "pleroma"), + password: System.fetch_env!("DB_PASS"), + database: System.get_env("DB_NAME", "pleroma"), + hostname: System.get_env("DB_HOST", "db"), + pool_size: 10 + +# Configure web push notifications +config :web_push_encryption, :vapid_details, subject: "mailto:#{System.get_env("NOTIFY_EMAIL")}" + +config :pleroma, :database, rum_enabled: false +config :pleroma, :instance, static_dir: "/var/lib/pleroma/static" +config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads" + +# We can't store the secrets in this file, since this is baked into the docker image +if not File.exists?("/var/lib/pleroma/secret.exs") do + secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64) + signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8) + {web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1) + + secret_file = + EEx.eval_string( + """ + import Config + + config :pleroma, Pleroma.Web.Endpoint, + secret_key_base: "<%= secret %>", + signing_salt: "<%= signing_salt %>" + + config :web_push_encryption, :vapid_details, + public_key: "<%= web_push_public_key %>", + private_key: "<%= web_push_private_key %>" + """, + secret: secret, + signing_salt: signing_salt, + web_push_public_key: Base.url_encode64(web_push_public_key, padding: false), + web_push_private_key: Base.url_encode64(web_push_private_key, padding: false) + ) + + File.write("/var/lib/pleroma/secret.exs", secret_file) +end + +import_config("/var/lib/pleroma/secret.exs") + +# For additional user config +if File.exists?("/var/lib/pleroma/config.exs"), + do: import_config("/var/lib/pleroma/config.exs"), + else: + File.write("/var/lib/pleroma/config.exs", """ + import Config + + # For additional configuration outside of environmental variables + """) diff --git a/config/emoji.txt b/config/emoji.txt index 79246f239..200768ad1 100644 --- a/config/emoji.txt +++ b/config/emoji.txt @@ -1,30 +1,2 @@ firefox, /emoji/Firefox.gif, Gif,Fun blank, /emoji/blank.png, Fun -f_00b, /emoji/f_00b.png -f_00b11b, /emoji/f_00b11b.png -f_00b33b, /emoji/f_00b33b.png -f_00h, /emoji/f_00h.png -f_00t, /emoji/f_00t.png -f_01b, /emoji/f_01b.png -f_03b, /emoji/f_03b.png -f_10b, /emoji/f_10b.png -f_11b, /emoji/f_11b.png -f_11b00b, /emoji/f_11b00b.png -f_11b22b, /emoji/f_11b22b.png -f_11h, /emoji/f_11h.png -f_11t, /emoji/f_11t.png -f_12b, 
/emoji/f_12b.png -f_21b, /emoji/f_21b.png -f_22b, /emoji/f_22b.png -f_22b11b, /emoji/f_22b11b.png -f_22b33b, /emoji/f_22b33b.png -f_22h, /emoji/f_22h.png -f_22t, /emoji/f_22t.png -f_23b, /emoji/f_23b.png -f_30b, /emoji/f_30b.png -f_32b, /emoji/f_32b.png -f_33b, /emoji/f_33b.png -f_33b00b, /emoji/f_33b00b.png -f_33b22b, /emoji/f_33b22b.png -f_33h, /emoji/f_33h.png -f_33t, /emoji/f_33t.png diff --git a/config/test.exs b/config/test.exs index 23d9bf779..f897b5d48 100644 --- a/config/test.exs +++ b/config/test.exs @@ -29,7 +29,8 @@ config :pleroma, :instance, email: "admin@example.com", notify_email: "noreply@example.com", skip_thread_containment: false, - federating: false + federating: false, + external_user_synchronization: false config :pleroma, :activitypub, sign_object_fetches: false @@ -84,6 +85,8 @@ rum_enabled = System.get_env("RUM_ENABLED") == "true" config :pleroma, :database, rum_enabled: rum_enabled IO.puts("RUM enabled: #{rum_enabled}") +config :joken, default_signer: "yU8uHKq+yyAkZ11Hx//jcdacWc8yQ1bxAAGrplzB0Zwwjkp35v0RK9SO8WTPr6QZ" + config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock try do diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh new file mode 100755 index 000000000..f56f8c50a --- /dev/null +++ b/docker-entrypoint.sh @@ -0,0 +1,14 @@ +#!/bin/ash + +set -e + +echo "-- Waiting for database..." +while ! pg_isready -U ${DB_USER:-pleroma} -d postgres://${DB_HOST:-db}:5432/${DB_NAME:-pleroma} -t 1; do + sleep 1s +done + +echo "-- Running migrations..." +$HOME/bin/pleroma_ctl migrate + +echo "-- Starting!" +exec $HOME/bin/pleroma start diff --git a/docs/api/admin_api.md b/docs/api/admin_api.md index 22873dde9..7ccb90836 100644 --- a/docs/api/admin_api.md +++ b/docs/api/admin_api.md @@ -627,6 +627,9 @@ Tuples can be passed as `{"tuple": ["first_val", Pleroma.Module, []]}`. Keywords can be passed as lists with 2 child tuples, e.g. `[{"tuple": ["first_val", Pleroma.Module]}, {"tuple": ["second_val", true]}]`. +If value contains list of settings `[subkey: val1, subkey2: val2, subkey3: val3]`, it's possible to remove only subkeys instead of all settings passing `subkeys` parameter. E.g.: +{"group": "pleroma", "key": "some_key", "delete": "true", "subkeys": [":subkey", ":subkey3"]}. + Compile time settings (need instance reboot): - all settings by this keys: - `:hackney_pools` @@ -645,6 +648,7 @@ Compile time settings (need instance reboot): - `key` (string or string with leading `:` for atoms) - `value` (string, [], {} or {"tuple": []}) - `delete` = true (optional, if parameter must be deleted) + - `subkeys` [(string with leading `:` for atoms)] (optional, works only if `delete=true` parameter is passed, otherwise will be ignored) ] - Request (example): diff --git a/docs/config.md b/docs/config.md index 5c18ffdbf..8f8bd22f4 100644 --- a/docs/config.md +++ b/docs/config.md @@ -25,7 +25,7 @@ At this time, write CNAME to CDN in public_endpoint. ## Pleroma.Upload.Filter.Mogrify -* `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", "auto-orient", {"impode", "1"}]`. +* `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", "auto-orient", {"implode", "1"}]`. ## Pleroma.Upload.Filter.Dedupe @@ -529,6 +529,18 @@ Authentication / authorization settings. * `oauth_consumer_template`: OAuth consumer mode authentication form template. By default it's `consumer.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/consumer.html.eex`. 
* `oauth_consumer_strategies`: the list of enabled OAuth consumer strategies; by default it's set by `OAUTH_CONSUMER_STRATEGIES` environment variable. Each entry in this space-delimited string should be of format `` or `:` (e.g. `twitter` or `keycloak:ueberauth_keycloak_strategy` in case dependency is named differently than `ueberauth_`). +## :email_notifications + +Email notifications settings. + + - digest - emails of "what you've missed" for users who have been + inactive for a while. + - active: globally enable or disable digest emails + - schedule: When to send digest email, in [crontab format](https://en.wikipedia.org/wiki/Cron). + "0 0 * * 0" is the default, meaning "once a week at midnight on Sunday morning" + - interval: Minimum interval between digest emails to one user + - inactivity_threshold: Minimum user inactivity threshold + ## OAuth consumer mode OAuth consumer mode allows sign in / sign up via external OAuth providers (e.g. Twitter, Facebook, Google, Microsoft, etc.). diff --git a/docs/config/howto_mediaproxy.md b/docs/config/howto_mediaproxy.md index ed70c3ed4..16c40c5db 100644 --- a/docs/config/howto_mediaproxy.md +++ b/docs/config/howto_mediaproxy.md @@ -1,8 +1,8 @@ # How to activate mediaproxy ## Explanation -Without the `mediaproxy` function, Pleroma don't store any remote content like pictures, video etc. locally. So every time you open Pleroma, the content is loaded from the source server, from where the post is coming. This can result in slowly loading content or/and increased bandwidth usage on the source server. -With the `mediaproxy` function you can use the cache ability of nginx, to cache these content, so user can access it faster, cause it's loaded from your server. +Without the `mediaproxy` function, Pleroma doesn't store any remote content like pictures, video etc. locally. So every time you open Pleroma, the content is loaded from the source server, from where the post is coming. This can result in slowly loading content or/and increased bandwidth usage on the source server. +With the `mediaproxy` function you can use nginx to cache this content, so users can access it faster, because it's loaded from your server. ## Activate it diff --git a/lib/mix/tasks/pleroma/digest.ex b/lib/mix/tasks/pleroma/digest.ex new file mode 100644 index 000000000..81c207e10 --- /dev/null +++ b/lib/mix/tasks/pleroma/digest.ex @@ -0,0 +1,33 @@ +defmodule Mix.Tasks.Pleroma.Digest do + use Mix.Task + + @shortdoc "Manages digest emails" + @moduledoc """ + Manages digest emails + + ## Send digest email since given date (user registration date by default) + ignoring user activity status. 
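The scheduled digest job, by contrast, honors the `:email_notifications` settings documented earlier in this changeset. A sketch of an enabled configuration, mirroring the defaults added to `config/config.exs` (only `active: true` differs from those defaults):

      config :pleroma, :email_notifications,
        digest: %{
          # enable the scheduled digest job
          active: true,
          # crontab-style schedule: midnight every Sunday
          schedule: "0 0 * * 0",
          # send at most one digest per user every 7 days
          interval: 7,
          # only users inactive for at least 7 days are considered
          inactivity_threshold: 7
        }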
+ + ``mix pleroma.digest test `` + + Example: ``mix pleroma.digest test donaldtheduck 2019-05-20`` + """ + def run(["test", nickname | opts]) do + Mix.Pleroma.start_pleroma() + + user = Pleroma.User.get_by_nickname(nickname) + + last_digest_emailed_at = + with [date] <- opts, + {:ok, datetime} <- Timex.parse(date, "{YYYY}-{0M}-{0D}") do + datetime + else + _ -> user.inserted_at + end + + patched_user = %{user | last_digest_emailed_at: last_digest_emailed_at} + + _user = Pleroma.DigestEmailWorker.perform(patched_user) + Mix.shell().info("Digest email have been sent to #{nickname} (#{user.email})") + end +end diff --git a/lib/mix/tasks/pleroma/instance.ex b/lib/mix/tasks/pleroma/instance.ex index 9080adb52..b9b1991c2 100644 --- a/lib/mix/tasks/pleroma/instance.ex +++ b/lib/mix/tasks/pleroma/instance.ex @@ -183,6 +183,7 @@ defmodule Mix.Tasks.Pleroma.Instance do ) secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64) + jwt_secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64) signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8) {web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1) template_dir = Application.app_dir(:pleroma, "priv") <> "/templates" @@ -200,6 +201,7 @@ defmodule Mix.Tasks.Pleroma.Instance do dbuser: dbuser, dbpass: dbpass, secret: secret, + jwt_secret: jwt_secret, signing_salt: signing_salt, web_push_public_key: Base.url_encode64(web_push_public_key, padding: false), web_push_private_key: Base.url_encode64(web_push_private_key, padding: false), diff --git a/lib/mix/tasks/pleroma/relay.ex b/lib/mix/tasks/pleroma/relay.ex index 83ed0ed02..c7324fff6 100644 --- a/lib/mix/tasks/pleroma/relay.ex +++ b/lib/mix/tasks/pleroma/relay.ex @@ -5,6 +5,7 @@ defmodule Mix.Tasks.Pleroma.Relay do use Mix.Task import Mix.Pleroma + alias Pleroma.User alias Pleroma.Web.ActivityPub.Relay @shortdoc "Manages remote relays" @@ -22,6 +23,10 @@ defmodule Mix.Tasks.Pleroma.Relay do ``mix pleroma.relay unfollow `` Example: ``mix pleroma.relay unfollow https://example.org/relay`` + + ## List relay subscriptions + + ``mix pleroma.relay list`` """ def run(["follow", target]) do start_pleroma() @@ -44,4 +49,19 @@ defmodule Mix.Tasks.Pleroma.Relay do {:error, e} -> shell_error("Error while following #{target}: #{inspect(e)}") end end + + def run(["list"]) do + start_pleroma() + + with %User{} = user <- Relay.get_actor() do + user.following + |> Enum.each(fn entry -> + URI.parse(entry) + |> Map.get(:host) + |> shell_info() + end) + else + e -> shell_error("Error while fetching relay subscription list: #{inspect(e)}") + end + end end diff --git a/lib/mix/tasks/pleroma/user.ex b/lib/mix/tasks/pleroma/user.ex index c9b84b8f9..a3f8bc945 100644 --- a/lib/mix/tasks/pleroma/user.ex +++ b/lib/mix/tasks/pleroma/user.ex @@ -31,8 +31,8 @@ defmodule Mix.Tasks.Pleroma.User do mix pleroma.user invite [OPTION...] Options: - - `--expires_at DATE` - last day on which token is active (e.g. "2019-04-05") - - `--max_use NUMBER` - maximum numbers of token uses + - `--expires-at DATE` - last day on which token is active (e.g. 
"2019-04-05") + - `--max-use NUMBER` - maximum numbers of token uses ## List generated invites diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index ce7d8c4b2..5550a4902 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -162,7 +162,9 @@ defmodule Pleroma.Application do # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html # for other strategies and supported options opts = [strategy: :one_for_one, name: Pleroma.Supervisor] - Supervisor.start_link(children, opts) + result = Supervisor.start_link(children, opts) + :ok = after_supervisor_start() + result end defp setup_instrumenters do @@ -227,4 +229,17 @@ defmodule Pleroma.Application do :hackney_pool.child_spec(pool, options) end end + + defp after_supervisor_start do + with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest], + true <- digest_config[:active] do + PleromaJobQueue.schedule( + digest_config[:schedule], + :digest_emails, + Pleroma.DigestEmailWorker + ) + end + + :ok + end end diff --git a/lib/pleroma/digest_email_worker.ex b/lib/pleroma/digest_email_worker.ex new file mode 100644 index 000000000..18e67d39b --- /dev/null +++ b/lib/pleroma/digest_email_worker.ex @@ -0,0 +1,35 @@ +defmodule Pleroma.DigestEmailWorker do + import Ecto.Query + + @queue_name :digest_emails + + def perform do + config = Pleroma.Config.get([:email_notifications, :digest]) + negative_interval = -Map.fetch!(config, :interval) + inactivity_threshold = Map.fetch!(config, :inactivity_threshold) + inactive_users_query = Pleroma.User.list_inactive_users_query(inactivity_threshold) + + now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second) + + from(u in inactive_users_query, + where: fragment(~s(? #> '{"email_notifications","digest"}' @> 'true'), u.info), + where: u.last_digest_emailed_at < datetime_add(^now, ^negative_interval, "day"), + select: u + ) + |> Pleroma.Repo.all() + |> Enum.each(&PleromaJobQueue.enqueue(@queue_name, __MODULE__, [&1])) + end + + @doc """ + Send digest email to the given user. + Updates `last_digest_emailed_at` field for the user and returns the updated user. 
+ """ + @spec perform(Pleroma.User.t()) :: Pleroma.User.t() + def perform(user) do + with %Swoosh.Email{} = email <- Pleroma.Emails.UserEmail.digest_email(user) do + Pleroma.Emails.Mailer.deliver_async(email) + end + + Pleroma.User.touch_last_digest_emailed_at(user) + end +end diff --git a/lib/pleroma/emails/admin_email.ex b/lib/pleroma/emails/admin_email.ex index d0e254362..c14be02dd 100644 --- a/lib/pleroma/emails/admin_email.ex +++ b/lib/pleroma/emails/admin_email.ex @@ -63,7 +63,6 @@ defmodule Pleroma.Emails.AdminEmail do new() |> to({to.name, to.email}) |> from({instance_name(), instance_notify_email()}) - |> reply_to({reporter.name, reporter.email}) |> subject("#{instance_name()} Report") |> html_body(html_body) end diff --git a/lib/pleroma/emails/user_email.ex b/lib/pleroma/emails/user_email.ex index 934620765..49046bb8b 100644 --- a/lib/pleroma/emails/user_email.ex +++ b/lib/pleroma/emails/user_email.ex @@ -5,7 +5,7 @@ defmodule Pleroma.Emails.UserEmail do @moduledoc "User emails" - import Swoosh.Email + use Phoenix.Swoosh, view: Pleroma.Web.EmailView, layout: {Pleroma.Web.LayoutView, :email} alias Pleroma.Web.Endpoint alias Pleroma.Web.Router @@ -87,4 +87,73 @@ defmodule Pleroma.Emails.UserEmail do |> subject("#{instance_name()} account confirmation") |> html_body(html_body) end + + @doc """ + Email used in digest email notifications + Includes Mentions and New Followers data + If there are no mentions (even when new followers exist), the function will return nil + """ + @spec digest_email(Pleroma.User.t()) :: Swoosh.Email.t() | nil + def digest_email(user) do + new_notifications = + Pleroma.Notification.for_user_since(user, user.last_digest_emailed_at) + |> Enum.reduce(%{followers: [], mentions: []}, fn + %{activity: %{data: %{"type" => "Create"}, actor: actor} = activity} = notification, + acc -> + new_mention = %{ + data: notification, + object: Pleroma.Object.normalize(activity), + from: Pleroma.User.get_by_ap_id(actor) + } + + %{acc | mentions: [new_mention | acc.mentions]} + + %{activity: %{data: %{"type" => "Follow"}, actor: actor} = activity} = notification, + acc -> + new_follower = %{ + data: notification, + object: Pleroma.Object.normalize(activity), + from: Pleroma.User.get_by_ap_id(actor) + } + + %{acc | followers: [new_follower | acc.followers]} + + _, acc -> + acc + end) + + with [_ | _] = mentions <- new_notifications.mentions do + html_data = %{ + instance: instance_name(), + user: user, + mentions: mentions, + followers: new_notifications.followers, + unsubscribe_link: unsubscribe_url(user, "digest") + } + + new() + |> to(recipient(user)) + |> from(sender()) + |> subject("Your digest from #{instance_name()}") + |> render_body("digest.html", html_data) + else + _ -> + nil + end + end + + @doc """ + Generate unsubscribe link for given user and notifications type. + The link contains JWT token with the data, and subscription can be modified without + authorization. 
+ """ + @spec unsubscribe_url(Pleroma.User.t(), String.t()) :: String.t() + def unsubscribe_url(user, notifications_type) do + token = + %{"sub" => user.id, "act" => %{"unsubscribe" => notifications_type}, "exp" => false} + |> Pleroma.JWT.generate_and_sign!() + |> Base.encode64() + + Router.Helpers.subscription_url(Pleroma.Web.Endpoint, :unsubscribe, token) + end end diff --git a/lib/pleroma/jwt.ex b/lib/pleroma/jwt.ex new file mode 100644 index 000000000..10102ff5d --- /dev/null +++ b/lib/pleroma/jwt.ex @@ -0,0 +1,9 @@ +defmodule Pleroma.JWT do + use Joken.Config + + @impl true + def token_config do + default_claims(skip: [:aud]) + |> add_claim("aud", &Pleroma.Web.Endpoint.url/0, &(&1 == Pleroma.Web.Endpoint.url())) + end +end diff --git a/lib/pleroma/notification.ex b/lib/pleroma/notification.ex index d47229258..5d29af853 100644 --- a/lib/pleroma/notification.ex +++ b/lib/pleroma/notification.ex @@ -18,6 +18,8 @@ defmodule Pleroma.Notification do import Ecto.Query import Ecto.Changeset + @type t :: %__MODULE__{} + schema "notifications" do field(:seen, :boolean, default: false) belongs_to(:user, User, type: Pleroma.FlakeId) @@ -31,7 +33,7 @@ defmodule Pleroma.Notification do |> cast(attrs, [:seen]) end - def for_user_query(user, opts) do + def for_user_query(user, opts \\ []) do query = Notification |> where(user_id: ^user.id) @@ -75,6 +77,25 @@ defmodule Pleroma.Notification do |> Pagination.fetch_paginated(opts) end + @doc """ + Returns notifications for user received since given date. + + ## Examples + + iex> Pleroma.Notification.for_user_since(%Pleroma.User{}, ~N[2019-04-13 11:22:33]) + [%Pleroma.Notification{}, %Pleroma.Notification{}] + + iex> Pleroma.Notification.for_user_since(%Pleroma.User{}, ~N[2019-04-15 11:22:33]) + [] + """ + @spec for_user_since(Pleroma.User.t(), NaiveDateTime.t()) :: [t()] + def for_user_since(user, date) do + from(n in for_user_query(user), + where: n.updated_at > ^date + ) + |> Repo.all() + end + def set_read_up_to(%{id: user_id} = _user, id) do query = from( @@ -82,7 +103,10 @@ defmodule Pleroma.Notification do where: n.user_id == ^user_id, where: n.id <= ^id, update: [ - set: [seen: true] + set: [ + seen: true, + updated_at: ^NaiveDateTime.utc_now() + ] ] ) diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex index 305ce8357..8d79ddb1f 100644 --- a/lib/pleroma/object/fetcher.ex +++ b/lib/pleroma/object/fetcher.ex @@ -114,7 +114,7 @@ defmodule Pleroma.Object.Fetcher do end end - def fetch_and_contain_remote_object_from_id(id) do + def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do Logger.info("Fetching object #{id} via AP") date = @@ -141,4 +141,9 @@ defmodule Pleroma.Object.Fetcher do {:error, e} end end + + def fetch_and_contain_remote_object_from_id(%{"id" => id}), + do: fetch_and_contain_remote_object_from_id(id) + + def fetch_and_contain_remote_object_from_id(_id), do: {:error, "id must be a string"} end diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index 1adb82f32..7d18f099e 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -57,6 +57,7 @@ defmodule Pleroma.User do field(:search_type, :integer, virtual: true) field(:tags, {:array, :string}, default: []) field(:last_refreshed_at, :naive_datetime_usec) + field(:last_digest_emailed_at, :naive_datetime) has_many(:notifications, Notification) has_many(:registrations, Registration) embeds_one(:info, User.Info) @@ -114,7 +115,9 @@ defmodule Pleroma.User do def user_info(%User{} = user, args \\ %{}) do following_count = - if 
args[:following_count], do: args[:following_count], else: following_count(user) + if args[:following_count], + do: args[:following_count], + else: user.info.following_count || following_count(user) follower_count = if args[:follower_count], do: args[:follower_count], else: user.info.follower_count @@ -406,6 +409,8 @@ defmodule Pleroma.User do {1, [follower]} = Repo.update_all(q, []) + follower = maybe_update_following_count(follower) + {:ok, _} = update_follower_count(followed) set_cache(follower) @@ -425,6 +430,8 @@ defmodule Pleroma.User do {1, [follower]} = Repo.update_all(q, []) + follower = maybe_update_following_count(follower) + {:ok, followed} = update_follower_count(followed) set_cache(follower) @@ -709,32 +716,73 @@ defmodule Pleroma.User do |> update_and_set_cache() end - def update_follower_count(%User{} = user) do - follower_count_query = - User.Query.build(%{followers: user, deactivated: false}) - |> select([u], %{count: count(u.id)}) + def maybe_fetch_follow_information(user) do + with {:ok, user} <- fetch_follow_information(user) do + user + else + e -> + Logger.error("Follower/Following counter update for #{user.ap_id} failed.\n#{inspect(e)}") - User - |> where(id: ^user.id) - |> join(:inner, [u], s in subquery(follower_count_query)) - |> update([u, s], - set: [ - info: - fragment( - "jsonb_set(?, '{follower_count}', ?::varchar::jsonb, true)", - u.info, - s.count - ) - ] - ) - |> select([u], u) - |> Repo.update_all([]) - |> case do - {1, [user]} -> set_cache(user) - _ -> {:error, user} + user end end + def fetch_follow_information(user) do + with {:ok, info} <- ActivityPub.fetch_follow_information_for_user(user) do + info_cng = User.Info.follow_information_update(user.info, info) + + changeset = + user + |> change() + |> put_embed(:info, info_cng) + + update_and_set_cache(changeset) + else + {:error, _} = e -> e + e -> {:error, e} + end + end + + def update_follower_count(%User{} = user) do + if user.local or !Pleroma.Config.get([:instance, :external_user_synchronization]) do + follower_count_query = + User.Query.build(%{followers: user, deactivated: false}) + |> select([u], %{count: count(u.id)}) + + User + |> where(id: ^user.id) + |> join(:inner, [u], s in subquery(follower_count_query)) + |> update([u, s], + set: [ + info: + fragment( + "jsonb_set(?, '{follower_count}', ?::varchar::jsonb, true)", + u.info, + s.count + ) + ] + ) + |> select([u], u) + |> Repo.update_all([]) + |> case do + {1, [user]} -> set_cache(user) + _ -> {:error, user} + end + else + {:ok, maybe_fetch_follow_information(user)} + end + end + + def maybe_update_following_count(%User{local: false} = user) do + if Pleroma.Config.get([:instance, :external_user_synchronization]) do + {:ok, maybe_fetch_follow_information(user)} + else + user + end + end + + def maybe_update_following_count(user), do: user + def remove_duplicated_following(%User{following: following} = user) do uniq_following = Enum.uniq(following) @@ -1372,6 +1420,80 @@ defmodule Pleroma.User do target.ap_id not in user.info.muted_reblogs end + @doc """ + The function returns a query to get users with no activity for given interval of days. + Inactive users are those who didn't read any notification, or had any activity where + the user is the activity's actor, during `inactivity_threshold` days. + Deactivated users will not appear in this list. 
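  A minimal usage sketch (the 30-day threshold is illustrative; `Pleroma.DigestEmailWorker` reads the real value from the `:email_notifications` digest config):

      Pleroma.User.list_inactive_users_query(30)
      |> Pleroma.Repo.all()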
+ + ## Examples + + iex> Pleroma.User.list_inactive_users() + %Ecto.Query{} + """ + @spec list_inactive_users_query(integer()) :: Ecto.Query.t() + def list_inactive_users_query(inactivity_threshold \\ 7) do + negative_inactivity_threshold = -inactivity_threshold + now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second) + # Subqueries are not supported in `where` clauses, join gets too complicated. + has_read_notifications = + from(n in Pleroma.Notification, + where: n.seen == true, + group_by: n.id, + having: max(n.updated_at) > datetime_add(^now, ^negative_inactivity_threshold, "day"), + select: n.user_id + ) + |> Pleroma.Repo.all() + + from(u in Pleroma.User, + left_join: a in Pleroma.Activity, + on: u.ap_id == a.actor, + where: not is_nil(u.nickname), + where: fragment("not (?->'deactivated' @> 'true')", u.info), + where: u.id not in ^has_read_notifications, + group_by: u.id, + having: + max(a.inserted_at) < datetime_add(^now, ^negative_inactivity_threshold, "day") or + is_nil(max(a.inserted_at)) + ) + end + + @doc """ + Enable or disable email notifications for user + + ## Examples + + iex> Pleroma.User.switch_email_notifications(Pleroma.User{info: %{email_notifications: %{"digest" => false}}}, "digest", true) + Pleroma.User{info: %{email_notifications: %{"digest" => true}}} + + iex> Pleroma.User.switch_email_notifications(Pleroma.User{info: %{email_notifications: %{"digest" => true}}}, "digest", false) + Pleroma.User{info: %{email_notifications: %{"digest" => false}}} + """ + @spec switch_email_notifications(t(), String.t(), boolean()) :: + {:ok, t()} | {:error, Ecto.Changeset.t()} + def switch_email_notifications(user, type, status) do + info = Pleroma.User.Info.update_email_notifications(user.info, %{type => status}) + + change(user) + |> put_embed(:info, info) + |> update_and_set_cache() + end + + @doc """ + Set `last_digest_emailed_at` value for the user to current time + """ + @spec touch_last_digest_emailed_at(t()) :: t() + def touch_last_digest_emailed_at(user) do + now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second) + + {:ok, updated_user} = + user + |> change(%{last_digest_emailed_at: now}) + |> update_and_set_cache() + + updated_user + end + @spec toggle_confirmation(User.t()) :: {:ok, User.t()} | {:error, Changeset.t()} def toggle_confirmation(%User{} = user) do need_confirmation? = !user.info.confirmation_pending diff --git a/lib/pleroma/user/info.ex b/lib/pleroma/user/info.ex index 9beb3ddbd..22eb9a182 100644 --- a/lib/pleroma/user/info.ex +++ b/lib/pleroma/user/info.ex @@ -16,6 +16,8 @@ defmodule Pleroma.User.Info do field(:source_data, :map, default: %{}) field(:note_count, :integer, default: 0) field(:follower_count, :integer, default: 0) + # Should be filled in only for remote users + field(:following_count, :integer, default: nil) field(:locked, :boolean, default: false) field(:confirmation_pending, :boolean, default: false) field(:confirmation_token, :string, default: nil) @@ -43,6 +45,7 @@ defmodule Pleroma.User.Info do field(:hide_follows, :boolean, default: false) field(:hide_favorites, :boolean, default: true) field(:pinned_activities, {:array, :string}, default: []) + field(:email_notifications, :map, default: %{"digest" => false}) field(:mascot, :map, default: nil) field(:emoji, {:array, :map}, default: []) field(:pleroma_settings_store, :map, default: %{}) @@ -93,6 +96,30 @@ defmodule Pleroma.User.Info do |> validate_required([:notification_settings]) end + @doc """ + Update email notifications in the given User.Info struct. 
+ + Examples: + + iex> update_email_notifications(%Pleroma.User.Info{email_notifications: %{"digest" => false}}, %{"digest" => true}) + %Pleroma.User.Info{email_notifications: %{"digest" => true}} + + """ + @spec update_email_notifications(t(), map()) :: Ecto.Changeset.t() + def update_email_notifications(info, settings) do + email_notifications = + info.email_notifications + |> Map.merge(settings) + |> Map.take(["digest"]) + + params = %{email_notifications: email_notifications} + fields = [:email_notifications] + + info + |> cast(params, fields) + |> validate_required(fields) + end + def add_to_note_count(info, number) do set_note_count(info, info.note_count + number) end @@ -223,7 +250,11 @@ defmodule Pleroma.User.Info do :uri, :hub, :topic, - :salmon + :salmon, + :hide_followers, + :hide_follows, + :follower_count, + :following_count ]) end @@ -234,7 +265,11 @@ defmodule Pleroma.User.Info do :source_data, :banner, :locked, - :magic_key + :magic_key, + :follower_count, + :following_count, + :hide_follows, + :hide_followers ]) end @@ -348,4 +383,14 @@ defmodule Pleroma.User.Info do cast(info, params, [:muted_reblogs]) end + + def follow_information_update(info, params) do + info + |> cast(params, [ + :hide_followers, + :hide_follows, + :follower_count, + :following_count + ]) + end end diff --git a/lib/pleroma/user/search.ex b/lib/pleroma/user/search.ex index 46620b89a..6fb2c2352 100644 --- a/lib/pleroma/user/search.ex +++ b/lib/pleroma/user/search.ex @@ -44,7 +44,7 @@ defmodule Pleroma.User.Search do query_string = String.trim_leading(query_string, "@") with [name, domain] <- String.split(query_string, "@"), - formatted_domain <- String.replace(domain, ~r/[!-\-|@|[-`|{-~|\/|:]+/, "") do + formatted_domain <- String.replace(domain, ~r/[!-\-|@|[-`|{-~|\/|:|\s]+/, "") do name <> "@" <> to_string(:idna.encode(formatted_domain)) else _ -> query_string diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 6fd7fef92..1a279a7df 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -267,6 +267,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do else {:fake, true, activity} -> {:ok, activity} + + {:error, message} -> + {:error, message} end end @@ -746,8 +749,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do defp restrict_favorited_by(query, %{"favorited_by" => ap_id}) do from( - activity in query, - where: fragment(~s(? <@ (? #> '{"object","likes"}'\)), ^ap_id, activity.data) + [_activity, object] in query, + where: fragment("(?)->'likes' \\? 
(?)", object.data, ^ap_id) ) end @@ -1009,10 +1012,10 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do user_data = %{ ap_id: data["id"], info: %{ - "ap_enabled" => true, - "source_data" => data, - "banner" => banner, - "locked" => locked + ap_enabled: true, + source_data: data, + banner: banner, + locked: locked }, avatar: avatar, name: data["name"], @@ -1036,6 +1039,71 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do {:ok, user_data} end + def fetch_follow_information_for_user(user) do + with {:ok, following_data} <- + Fetcher.fetch_and_contain_remote_object_from_id(user.following_address), + following_count when is_integer(following_count) <- following_data["totalItems"], + {:ok, hide_follows} <- collection_private(following_data), + {:ok, followers_data} <- + Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address), + followers_count when is_integer(followers_count) <- followers_data["totalItems"], + {:ok, hide_followers} <- collection_private(followers_data) do + {:ok, + %{ + hide_follows: hide_follows, + follower_count: followers_count, + following_count: following_count, + hide_followers: hide_followers + }} + else + {:error, _} = e -> + e + + e -> + {:error, e} + end + end + + defp maybe_update_follow_information(data) do + with {:enabled, true} <- + {:enabled, Pleroma.Config.get([:instance, :external_user_synchronization])}, + {:ok, info} <- fetch_follow_information_for_user(data) do + info = Map.merge(data.info, info) + Map.put(data, :info, info) + else + {:enabled, false} -> + data + + e -> + Logger.error( + "Follower/Following counter update for #{data.ap_id} failed.\n" <> inspect(e) + ) + + data + end + end + + defp collection_private(data) do + if is_map(data["first"]) and + data["first"]["type"] in ["CollectionPage", "OrderedCollectionPage"] do + {:ok, false} + else + with {:ok, %{"type" => type}} when type in ["CollectionPage", "OrderedCollectionPage"] <- + Fetcher.fetch_and_contain_remote_object_from_id(data["first"]) do + {:ok, false} + else + {:error, {:ok, %{status: code}}} when code in [401, 403] -> + {:ok, true} + + {:error, _} = e -> + e + + e -> + {:error, e} + end + end + end + def user_data_from_user_object(data) do with {:ok, data} <- MRF.filter(data), {:ok, data} <- object_to_user_data(data) do @@ -1047,7 +1115,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do def fetch_and_prepare_user_from_ap_id(ap_id) do with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id), - {:ok, data} <- user_data_from_user_object(data) do + {:ok, data} <- user_data_from_user_object(data), + data <- maybe_update_follow_information(data) do {:ok, data} else e -> Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}") diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 44bb1cb9a..5403b71d8 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -608,13 +608,13 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do {:ok, new_user_data} = ActivityPub.user_data_from_user_object(object) - banner = new_user_data[:info]["banner"] - locked = new_user_data[:info]["locked"] || false + banner = new_user_data[:info][:banner] + locked = new_user_data[:info][:locked] || false update_data = new_user_data |> Map.take([:name, :bio, :avatar]) - |> Map.put(:info, %{"banner" => banner, "locked" => locked}) + |> Map.put(:info, %{banner: banner, 
locked: locked}) actor |> User.upgrade_changeset(update_data) @@ -1076,10 +1076,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do PleromaJobQueue.enqueue(:transmogrifier, __MODULE__, [:user_upgrade, user]) end - if Pleroma.Config.get([:instance, :external_user_synchronization]) do - update_following_followers_counters(user) - end - {:ok, user} else %User{} = user -> {:ok, user} @@ -1112,27 +1108,4 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do data |> maybe_fix_user_url end - - def update_following_followers_counters(user) do - info = %{} - - following = fetch_counter(user.following_address) - info = if following, do: Map.put(info, :following_count, following), else: info - - followers = fetch_counter(user.follower_address) - info = if followers, do: Map.put(info, :follower_count, followers), else: info - - User.set_info_cache(user, info) - end - - defp fetch_counter(url) do - with {:ok, %{body: body, status: code}} when code in 200..299 <- - Pleroma.HTTP.get( - url, - [{:Accept, "application/activity+json"}] - ), - {:ok, data} <- Jason.decode(body) do - data["totalItems"] - end - end end diff --git a/lib/pleroma/web/activity_pub/utils.ex b/lib/pleroma/web/activity_pub/utils.ex index f0917f9d4..4f68acc78 100644 --- a/lib/pleroma/web/activity_pub/utils.ex +++ b/lib/pleroma/web/activity_pub/utils.ex @@ -244,20 +244,6 @@ defmodule Pleroma.Web.ActivityPub.Utils do def insert_full_object(map), do: {:ok, map, nil} - def update_object_in_activities(%{data: %{"id" => id}} = object) do - # TODO - # Update activities that already had this. Could be done in a seperate process. - # Alternatively, just don't do this and fetch the current object each time. Most - # could probably be taken from cache. - relevant_activities = Activity.get_all_create_by_object_ap_id(id) - - Enum.map(relevant_activities, fn activity -> - new_activity_data = activity.data |> Map.put("object", object.data) - changeset = Changeset.change(activity, data: new_activity_data) - Repo.update(changeset) - end) - end - #### Like-related helpers @doc """ @@ -340,8 +326,7 @@ defmodule Pleroma.Web.ActivityPub.Utils do |> Map.put("#{property}_count", length(element)) |> Map.put("#{property}s", element), changeset <- Changeset.change(object, data: new_data), - {:ok, object} <- Object.update_and_set_cache(changeset), - _ <- update_object_in_activities(object) do + {:ok, object} <- Object.update_and_set_cache(changeset) do {:ok, object} end end diff --git a/lib/pleroma/web/activity_pub/views/object_view.ex b/lib/pleroma/web/activity_pub/views/object_view.ex index 6028b773c..94d05f49b 100644 --- a/lib/pleroma/web/activity_pub/views/object_view.ex +++ b/lib/pleroma/web/activity_pub/views/object_view.ex @@ -66,8 +66,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do "orderedItems" => items } - if offset < total do + if offset + length(items) < total do Map.put(map, "next", "#{iri}?page=#{page + 1}") + else + map end end end diff --git a/lib/pleroma/web/activity_pub/views/user_view.ex b/lib/pleroma/web/activity_pub/views/user_view.ex index 639519e0a..06c9e1c71 100644 --- a/lib/pleroma/web/activity_pub/views/user_view.ex +++ b/lib/pleroma/web/activity_pub/views/user_view.ex @@ -65,7 +65,7 @@ defmodule Pleroma.Web.ActivityPub.UserView do do: render("service.json", %{user: user}) def render("user.json", %{user: %User{nickname: "internal." 
<> _} = user}), - do: render("service.json", %{user: user}) + do: render("service.json", %{user: user}) |> Map.put("preferredUsername", user.nickname) def render("user.json", %{user: user}) do {:ok, user} = User.ensure_keys_present(user) diff --git a/lib/pleroma/web/admin_api/admin_api_controller.ex b/lib/pleroma/web/admin_api/admin_api_controller.ex index fcda57b3e..2d3d0adc4 100644 --- a/lib/pleroma/web/admin_api/admin_api_controller.ex +++ b/lib/pleroma/web/admin_api/admin_api_controller.ex @@ -402,9 +402,9 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do if Pleroma.Config.get([:instance, :dynamic_configuration]) do updated = Enum.map(configs, fn - %{"group" => group, "key" => key, "delete" => "true"} -> - {:ok, _} = Config.delete(%{group: group, key: key}) - nil + %{"group" => group, "key" => key, "delete" => "true"} = params -> + {:ok, config} = Config.delete(%{group: group, key: key, subkeys: params["subkeys"]}) + config %{"group" => group, "key" => key, "value" => value} -> {:ok, config} = Config.update_or_create(%{group: group, key: key, value: value}) diff --git a/lib/pleroma/web/admin_api/config.ex b/lib/pleroma/web/admin_api/config.ex index dde05ea7b..a10cc779b 100644 --- a/lib/pleroma/web/admin_api/config.ex +++ b/lib/pleroma/web/admin_api/config.ex @@ -55,8 +55,19 @@ defmodule Pleroma.Web.AdminAPI.Config do @spec delete(map()) :: {:ok, Config.t()} | {:error, Changeset.t()} def delete(params) do - with %Config{} = config <- Config.get_by_params(params) do - Repo.delete(config) + with %Config{} = config <- Config.get_by_params(Map.delete(params, :subkeys)) do + if params[:subkeys] do + updated_value = + Keyword.drop( + :erlang.binary_to_term(config.value), + Enum.map(params[:subkeys], &do_transform_string(&1)) + ) + + Config.update(config, %{value: updated_value}) + else + Repo.delete(config) + {:ok, nil} + end else nil -> err = diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex index c8a743e8e..22c44a0a3 100644 --- a/lib/pleroma/web/common_api/utils.ex +++ b/lib/pleroma/web/common_api/utils.ex @@ -47,26 +47,43 @@ defmodule Pleroma.Web.CommonAPI.Utils do def get_replied_to_activity(_), do: nil - def attachments_from_ids(data) do - if Map.has_key?(data, "descriptions") do - attachments_from_ids_descs(data["media_ids"], data["descriptions"]) - else - attachments_from_ids_no_descs(data["media_ids"]) - end + def attachments_from_ids(%{"media_ids" => ids, "descriptions" => desc} = _) do + attachments_from_ids_descs(ids, desc) end - def attachments_from_ids_no_descs(ids) do - Enum.map(ids || [], fn media_id -> - Repo.get(Object, media_id).data - end) + def attachments_from_ids(%{"media_ids" => ids} = _) do + attachments_from_ids_no_descs(ids) end + def attachments_from_ids(_), do: [] + + def attachments_from_ids_no_descs([]), do: [] + + def attachments_from_ids_no_descs(ids) do + Enum.map(ids, fn media_id -> + case Repo.get(Object, media_id) do + %Object{data: data} = _ -> data + _ -> nil + end + end) + |> Enum.filter(& &1) + end + + def attachments_from_ids_descs([], _), do: [] + def attachments_from_ids_descs(ids, descs_str) do {_, descs} = Jason.decode(descs_str) - Enum.map(ids || [], fn media_id -> - Map.put(Repo.get(Object, media_id).data, "name", descs[media_id]) + Enum.map(ids, fn media_id -> + case Repo.get(Object, media_id) do + %Object{data: data} = _ -> + Map.put(data, "name", descs[media_id]) + + _ -> + nil + end end) + |> Enum.filter(& &1) end @spec get_to_and_cc(User.t(), list(String.t()), Activity.t() | nil, String.t()) :: @@ 
-247,20 +264,18 @@ defmodule Pleroma.Web.CommonAPI.Utils do end def add_attachments(text, attachments) do - attachment_text = - Enum.map(attachments, fn - %{"url" => [%{"href" => href} | _]} = attachment -> - name = attachment["name"] || URI.decode(Path.basename(href)) - href = MediaProxy.url(href) - "#{shortname(name)}" - - _ -> - "" - end) - + attachment_text = Enum.map(attachments, &build_attachment_link/1) Enum.join([text | attachment_text], "
") end + defp build_attachment_link(%{"url" => [%{"href" => href} | _]} = attachment) do + name = attachment["name"] || URI.decode(Path.basename(href)) + href = MediaProxy.url(href) + "#{shortname(name)}" + end + + defp build_attachment_link(_), do: "" + def format_input(text, format, options \\ []) @doc """ @@ -320,7 +335,7 @@ defmodule Pleroma.Web.CommonAPI.Utils do sensitive \\ false, merge \\ %{} ) do - object = %{ + %{ "type" => "Note", "to" => to, "cc" => cc, @@ -330,18 +345,20 @@ defmodule Pleroma.Web.CommonAPI.Utils do "context" => context, "attachment" => attachments, "actor" => actor, - "tag" => tags |> Enum.map(fn {_, tag} -> tag end) |> Enum.uniq() + "tag" => Keyword.values(tags) |> Enum.uniq() } + |> add_in_reply_to(in_reply_to) + |> Map.merge(merge) + end - object = - with false <- is_nil(in_reply_to), - %Object{} = in_reply_to_object <- Object.normalize(in_reply_to) do - Map.put(object, "inReplyTo", in_reply_to_object.data["id"]) - else - _ -> object - end + defp add_in_reply_to(object, nil), do: object - Map.merge(object, merge) + defp add_in_reply_to(object, in_reply_to) do + with %Object{} = in_reply_to_object <- Object.normalize(in_reply_to) do + Map.put(object, "inReplyTo", in_reply_to_object.data["id"]) + else + _ -> object + end end def format_naive_asctime(date) do @@ -373,17 +390,16 @@ defmodule Pleroma.Web.CommonAPI.Utils do |> String.replace(~r/(\.\d+)?$/, ".000Z", global: false) end - def to_masto_date(date) do - try do - date - |> NaiveDateTime.from_iso8601!() - |> NaiveDateTime.to_iso8601() - |> String.replace(~r/(\.\d+)?$/, ".000Z", global: false) - rescue - _e -> "" + def to_masto_date(date) when is_binary(date) do + with {:ok, date} <- NaiveDateTime.from_iso8601(date) do + to_masto_date(date) + else + _ -> "" end end + def to_masto_date(_), do: "" + defp shortname(name) do if String.length(name) < 30 do name @@ -428,7 +444,7 @@ defmodule Pleroma.Web.CommonAPI.Utils do object_data = cond do - !is_nil(object) -> + not is_nil(object) -> object.data is_map(data["object"]) -> @@ -472,9 +488,9 @@ defmodule Pleroma.Web.CommonAPI.Utils do def maybe_extract_mentions(%{"tag" => tag}) do tag - |> Enum.filter(fn x -> is_map(x) end) - |> Enum.filter(fn x -> x["type"] == "Mention" end) + |> Enum.filter(fn x -> is_map(x) && x["type"] == "Mention" end) |> Enum.map(fn x -> x["href"] end) + |> Enum.uniq() end def maybe_extract_mentions(_), do: [] diff --git a/lib/pleroma/web/mailer/subscription_controller.ex b/lib/pleroma/web/mailer/subscription_controller.ex new file mode 100644 index 000000000..478a83518 --- /dev/null +++ b/lib/pleroma/web/mailer/subscription_controller.ex @@ -0,0 +1,20 @@ +defmodule Pleroma.Web.Mailer.SubscriptionController do + use Pleroma.Web, :controller + + alias Pleroma.JWT + alias Pleroma.Repo + alias Pleroma.User + + def unsubscribe(conn, %{"token" => encoded_token}) do + with {:ok, token} <- Base.decode64(encoded_token), + {:ok, claims} <- JWT.verify_and_validate(token), + %{"act" => %{"unsubscribe" => type}, "sub" => uid} <- claims, + %User{} = user <- Repo.get(User, uid), + {:ok, _user} <- User.switch_email_notifications(user, type, false) do + render(conn, "unsubscribe_success.html", email: user.email) + else + _err -> + render(conn, "unsubscribe_failure.html") + end + end +end diff --git a/lib/pleroma/web/mastodon_api/views/account_view.ex b/lib/pleroma/web/mastodon_api/views/account_view.ex index b2b06eeb9..de084fd6e 100644 --- a/lib/pleroma/web/mastodon_api/views/account_view.ex +++ b/lib/pleroma/web/mastodon_api/views/account_view.ex @@ 
-28,7 +28,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do id: to_string(user.id), acct: user.nickname, username: username_from_nickname(user.nickname), - url: user.ap_id + url: User.profile_url(user) } end @@ -106,7 +106,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do following_count: user_info.following_count, statuses_count: user_info.note_count, note: bio || "", - url: user.ap_id, + url: User.profile_url(user), avatar: image, avatar_static: image, header: header, diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex index a661e9bb7..1725ab071 100644 --- a/lib/pleroma/web/media_proxy/media_proxy.ex +++ b/lib/pleroma/web/media_proxy/media_proxy.ex @@ -4,6 +4,7 @@ defmodule Pleroma.Web.MediaProxy do alias Pleroma.Config + alias Pleroma.Upload alias Pleroma.Web @base64_opts [padding: false] @@ -26,7 +27,18 @@ defmodule Pleroma.Web.MediaProxy do defp whitelisted?(url) do %{host: domain} = URI.parse(url) - Enum.any?(Config.get([:media_proxy, :whitelist]), fn pattern -> + mediaproxy_whitelist = Config.get([:media_proxy, :whitelist]) + + upload_base_url_domain = + if !is_nil(Config.get([Upload, :base_url])) do + [URI.parse(Config.get([Upload, :base_url])).host] + else + [] + end + + whitelist = mediaproxy_whitelist ++ upload_base_url_domain + + Enum.any?(whitelist, fn pattern -> String.equivalent?(domain, pattern) end) end diff --git a/lib/pleroma/web/ostatus/activity_representer.ex b/lib/pleroma/web/ostatus/activity_representer.ex index 760345301..8e55b9f0b 100644 --- a/lib/pleroma/web/ostatus/activity_representer.ex +++ b/lib/pleroma/web/ostatus/activity_representer.ex @@ -183,6 +183,7 @@ defmodule Pleroma.Web.OStatus.ActivityRepresenter do author = if with_author, do: [{:author, UserRepresenter.to_simple_form(user)}], else: [] retweeted_activity = Activity.get_create_by_object_ap_id(activity.data["object"]) + retweeted_object = Object.normalize(retweeted_activity) retweeted_user = User.get_cached_by_ap_id(retweeted_activity.data["actor"]) retweeted_xml = to_simple_form(retweeted_activity, retweeted_user, true) @@ -197,7 +198,7 @@ defmodule Pleroma.Web.OStatus.ActivityRepresenter do {:"activity:verb", ['http://activitystrea.ms/schema/1.0/share']}, {:id, h.(activity.data["id"])}, {:title, ['#{user.nickname} repeated a notice']}, - {:content, [type: 'html'], ['RT #{retweeted_activity.data["object"]["content"]}']}, + {:content, [type: 'html'], ['RT #{retweeted_object.data["content"]}']}, {:published, h.(inserted_at)}, {:updated, h.(updated_at)}, {:"ostatus:conversation", [ref: h.(activity.data["context"])], diff --git a/lib/pleroma/web/rich_media/parsers/twitter_card.ex b/lib/pleroma/web/rich_media/parsers/twitter_card.ex index e4efe2dd0..afaa98f3d 100644 --- a/lib/pleroma/web/rich_media/parsers/twitter_card.ex +++ b/lib/pleroma/web/rich_media/parsers/twitter_card.ex @@ -3,13 +3,20 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.RichMedia.Parsers.TwitterCard do + alias Pleroma.Web.RichMedia.Parsers.MetaTagsParser + + @spec parse(String.t(), map()) :: {:ok, map()} | {:error, String.t()} def parse(html, data) do - Pleroma.Web.RichMedia.Parsers.MetaTagsParser.parse( - html, - data, - "twitter", - "No twitter card metadata found", - "name" - ) + data + |> parse_name_attrs(html) + |> parse_property_attrs(html) + end + + defp parse_name_attrs(data, html) do + MetaTagsParser.parse(html, data, "twitter", %{}, "name") + end + + defp parse_property_attrs({_, data}, html) do + MetaTagsParser.parse(html, data, "twitter", "No 
twitter card metadata found", "property") end end diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index d475fc973..c8c1c22dd 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -608,6 +608,8 @@ defmodule Pleroma.Web.Router do post("/push/hub/:nickname", Websub.WebsubController, :websub_subscription_request) get("/push/subscriptions/:id", Websub.WebsubController, :websub_subscription_confirmation) post("/push/subscriptions/:id", Websub.WebsubController, :websub_incoming) + + get("/mailer/unsubscribe/:token", Mailer.SubscriptionController, :unsubscribe) end pipeline :activitypub do diff --git a/lib/pleroma/web/templates/email/digest.html.eex b/lib/pleroma/web/templates/email/digest.html.eex new file mode 100644 index 000000000..c9dd699fd --- /dev/null +++ b/lib/pleroma/web/templates/email/digest.html.eex @@ -0,0 +1,20 @@ +

Hey <%= @user.nickname %>, here is what you've missed!

+ +

New Mentions:

+
    +<%= for %{data: mention, object: object, from: from} <- @mentions do %> +
  • <%= link from.nickname, to: mention.activity.actor %>: <%= raw object.data["content"] %>
  • +<% end %> +
+ +<%= if @followers != [] do %> +

<%= length(@followers) %> New Followers:

+
    +<%= for %{data: follow, from: from} <- @followers do %> +
  • <%= link from.nickname, to: follow.activity.actor %>
  • +<% end %> +
+<% end %> + +

You have received this email because you have signed up to receive digest emails from <%= @instance %> Pleroma instance.

+

The email address you are subscribed as is <%= @user.email %>. To unsubscribe, please go <%= link "here", to: @unsubscribe_link %>.

\ No newline at end of file diff --git a/lib/pleroma/web/templates/layout/app.html.eex b/lib/pleroma/web/templates/layout/app.html.eex index b3cf9ed11..5836ec1e0 100644 --- a/lib/pleroma/web/templates/layout/app.html.eex +++ b/lib/pleroma/web/templates/layout/app.html.eex @@ -36,6 +36,11 @@ margin-bottom: 20px; } + a { + color: #d8a070; + text-decoration: none; + } + form { width: 100%; } diff --git a/lib/pleroma/web/templates/layout/email.html.eex b/lib/pleroma/web/templates/layout/email.html.eex new file mode 100644 index 000000000..f6dcd7f0f --- /dev/null +++ b/lib/pleroma/web/templates/layout/email.html.eex @@ -0,0 +1,10 @@ + + + + + <%= @email.subject %> + + + <%= render @view_module, @view_template, assigns %> + + \ No newline at end of file diff --git a/lib/pleroma/web/templates/mailer/subscription/unsubscribe_failure.html.eex b/lib/pleroma/web/templates/mailer/subscription/unsubscribe_failure.html.eex new file mode 100644 index 000000000..7b476f02d --- /dev/null +++ b/lib/pleroma/web/templates/mailer/subscription/unsubscribe_failure.html.eex @@ -0,0 +1 @@ +

UNSUBSCRIBE FAILURE

diff --git a/lib/pleroma/web/templates/mailer/subscription/unsubscribe_success.html.eex b/lib/pleroma/web/templates/mailer/subscription/unsubscribe_success.html.eex new file mode 100644 index 000000000..6dfa2c185 --- /dev/null +++ b/lib/pleroma/web/templates/mailer/subscription/unsubscribe_success.html.eex @@ -0,0 +1 @@ +

UNSUBSCRIBE SUCCESSFUL
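The unsubscribe route added to the router above takes a signed :token, and further down this diff adds joken ~> 2.0 plus a `config :joken, default_signer:` entry to the sample config. A sketch of how such a token could be issued and checked with Joken's default signer follows — the claim names are illustrative assumptions, not the shape Pleroma actually uses, and the real token handling lives in code outside this diff.

    # Sketch only: issue and verify a token with Joken 2.x using the
    # :default_signer resolved from `config :joken, default_signer: "..."`.
    # The "user_id" and "act" claims are assumptions for illustration.
    token_config = Joken.Config.default_claims()

    {:ok, token, _claims} =
      Joken.generate_and_sign(token_config, %{
        "user_id" => "some-user-id",
        "act" => "unsubscribe_digest"
      })

    # The :token path segment handed to Mailer.SubscriptionController.unsubscribe/2
    # can then be verified and its claims recovered:
    {:ok, %{"user_id" => _user_id}} = Joken.verify_and_validate(token_config, token)

Depending on whether verification succeeds, the controller can render the unsubscribe_success or unsubscribe_failure template above.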

diff --git a/lib/pleroma/web/views/email_view.ex b/lib/pleroma/web/views/email_view.ex new file mode 100644 index 000000000..b63eb162c --- /dev/null +++ b/lib/pleroma/web/views/email_view.ex @@ -0,0 +1,5 @@ +defmodule Pleroma.Web.EmailView do + use Pleroma.Web, :view + import Phoenix.HTML + import Phoenix.HTML.Link +end diff --git a/lib/pleroma/web/views/mailer/subscription_view.ex b/lib/pleroma/web/views/mailer/subscription_view.ex new file mode 100644 index 000000000..fc3d20816 --- /dev/null +++ b/lib/pleroma/web/views/mailer/subscription_view.ex @@ -0,0 +1,3 @@ +defmodule Pleroma.Web.Mailer.SubscriptionView do + use Pleroma.Web, :view +end diff --git a/mix.exs b/mix.exs index 1ca7a4a77..4c670509e 100644 --- a/mix.exs +++ b/mix.exs @@ -128,6 +128,7 @@ defmodule Pleroma.Mixfile do {:ex_doc, "~> 0.20.2", only: :dev, runtime: false}, {:web_push_encryption, "~> 0.2.1"}, {:swoosh, "~> 0.23.2"}, + {:phoenix_swoosh, "~> 0.2"}, {:gen_smtp, "~> 0.13"}, {:websocket_client, git: "https://github.com/jeremyong/websocket_client.git", only: :test}, {:floki, "~> 0.20.0"}, @@ -140,7 +141,7 @@ defmodule Pleroma.Mixfile do {:http_signatures, git: "https://git.pleroma.social/pleroma/http_signatures.git", ref: "293d77bb6f4a67ac8bde1428735c3b42f22cbb30"}, - {:pleroma_job_queue, "~> 0.2.0"}, + {:pleroma_job_queue, "~> 0.3"}, {:telemetry, "~> 0.3"}, {:prometheus_ex, "~> 3.0"}, {:prometheus_plugs, "~> 1.1"}, @@ -148,6 +149,7 @@ defmodule Pleroma.Mixfile do {:prometheus_ecto, "~> 1.4"}, {:recon, github: "ferd/recon", tag: "2.4.0"}, {:quack, "~> 0.1.1"}, + {:joken, "~> 2.0"}, {:benchee, "~> 1.0"}, {:esshd, "~> 0.1.0", runtime: Application.get_env(:esshd, :enabled, false)}, {:ex_rated, "~> 1.3"}, @@ -191,12 +193,13 @@ defmodule Pleroma.Mixfile do tag = String.trim(tag), {describe, 0} <- System.cmd("git", ["describe", "--tags", "--abbrev=8"]), describe = String.trim(describe), - ahead <- String.replace(describe, tag, "") do + ahead <- String.replace(describe, tag, ""), + ahead <- String.trim_leading(ahead, "-") do {String.replace_prefix(tag, "v", ""), if(ahead != "", do: String.trim(ahead))} else _ -> {commit_hash, 0} = System.cmd("git", ["rev-parse", "--short", "HEAD"]) - {nil, "-0-g" <> String.trim(commit_hash)} + {nil, "0-g" <> String.trim(commit_hash)} end if git_tag && version != git_tag do @@ -208,14 +211,15 @@ defmodule Pleroma.Mixfile do # Branch name as pre-release version component, denoted with a dot branch_name = with {branch_name, 0} <- System.cmd("git", ["rev-parse", "--abbrev-ref", "HEAD"]), + branch_name <- String.trim(branch_name), branch_name <- System.get_env("PLEROMA_BUILD_BRANCH") || branch_name, - true <- branch_name != "master" do + true <- branch_name not in ["master", "HEAD"] do branch_name = branch_name |> String.trim() |> String.replace(identifier_filter, "-") - "." 
<> branch_name + branch_name end build_name = @@ -235,6 +239,17 @@ defmodule Pleroma.Mixfile do env_override -> env_override end + # Pre-release version, denoted by appending a hyphen + # and a series of dot separated identifiers + pre_release = + [git_pre_release, branch_name] + |> Enum.filter(fn string -> string && string != "" end) + |> Enum.join(".") + |> (fn + "" -> nil + string -> "-" <> String.replace(string, identifier_filter, "-") + end).() + # Build metadata, denoted with a plus sign build_metadata = [build_name, env_name] @@ -245,7 +260,7 @@ defmodule Pleroma.Mixfile do string -> "+" <> String.replace(string, identifier_filter, "-") end).() - [version, git_pre_release, branch_name, build_metadata] + [version, pre_release, build_metadata] |> Enum.filter(fn string -> string && string != "" end) |> Enum.join() end diff --git a/mix.lock b/mix.lock index 8c0b9734e..a48cfac77 100644 --- a/mix.lock +++ b/mix.lock @@ -5,16 +5,17 @@ "bbcode": {:hex, :bbcode, "0.1.1", "0023e2c7814119b2e620b7add67182e3f6019f92bfec9a22da7e99821aceba70", [:mix], [{:nimble_parsec, "~> 0.5", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"}, "benchee": {:hex, :benchee, "1.0.1", "66b211f9bfd84bd97e6d1beaddf8fc2312aaabe192f776e8931cb0c16f53a521", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}], "hexpm"}, "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm"}, - "cachex": {:hex, :cachex, "3.0.2", "1351caa4e26e29f7d7ec1d29b53d6013f0447630bbf382b4fb5d5bad0209f203", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm"}, + "cachex": {:hex, :cachex, "3.0.3", "4e2d3e05814a5738f5ff3903151d5c25636d72a3527251b753f501ad9c657967", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm"}, "calendar": {:hex, :calendar, "0.17.6", "ec291cb2e4ba499c2e8c0ef5f4ace974e2f9d02ae9e807e711a9b0c7850b9aee", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm"}, "certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm"}, "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm"}, - "comeonin": {:hex, :comeonin, "4.1.1", "c7304fc29b45b897b34142a91122bc72757bc0c295e9e824999d5179ffc08416", [:mix], [{:argon2_elixir, "~> 1.2", [hex: :argon2_elixir, repo: "hexpm", optional: true]}, {:bcrypt_elixir, "~> 0.12.1 or ~> 1.0", [hex: :bcrypt_elixir, repo: "hexpm", optional: true]}, {:pbkdf2_elixir, "~> 0.12", [hex: :pbkdf2_elixir, repo: "hexpm", optional: true]}], "hexpm"}, + "comeonin": {:hex, :comeonin, "4.1.2", "3eb5620fd8e35508991664b4c2b04dd41e52f1620b36957be837c1d7784b7592", [:mix], [{:argon2_elixir, "~> 1.2", [hex: :argon2_elixir, repo: "hexpm", optional: true]}, {:bcrypt_elixir, "~> 0.12.1 or ~> 1.0", [hex: :bcrypt_elixir, repo: "hexpm", optional: true]}, {:pbkdf2_elixir, "~> 0.12", [hex: :pbkdf2_elixir, repo: "hexpm", optional: true]}], "hexpm"}, "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm"}, "cors_plug": {:hex, :cors_plug, "1.5.2", 
"72df63c87e4f94112f458ce9d25800900cc88608c1078f0e4faddf20933eda6e", [:mix], [{:plug, "~> 1.3 or ~> 1.4 or ~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "cowboy": {:hex, :cowboy, "2.6.3", "99aa50e94e685557cad82e704457336a453d4abcb77839ad22dbe71f311fcc06", [:rebar3], [{:cowlib, "~> 2.7.3", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.7.1", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm"}, "cowlib": {:hex, :cowlib, "2.7.3", "a7ffcd0917e6d50b4d5fb28e9e2085a0ceb3c97dea310505f7460ff5ed764ce9", [:rebar3], [], "hexpm"}, "credo": {:hex, :credo, "0.9.3", "76fa3e9e497ab282e0cf64b98a624aa11da702854c52c82db1bf24e54ab7c97a", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:poison, ">= 0.0.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"}, + "crontab": {:hex, :crontab, "1.1.7", "b9219f0bdc8678b94143655a8f229716c5810c0636a4489f98c0956137e53985", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"}, "crypt": {:git, "https://github.com/msantos/crypt", "1f2b58927ab57e72910191a7ebaeff984382a1d3", [ref: "1f2b58927ab57e72910191a7ebaeff984382a1d3"]}, "db_connection": {:hex, :db_connection, "2.0.6", "bde2f85d047969c5b5800cb8f4b3ed6316c8cb11487afedac4aa5f93fd39abfa", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"}, "decimal": {:hex, :decimal, "1.8.0", "ca462e0d885f09a1c5a342dbd7c1dcf27ea63548c65a65e67334f4b61803822e", [:mix], [], "hexpm"}, @@ -43,14 +44,15 @@ "httpoison": {:hex, :httpoison, "1.2.0", "2702ed3da5fd7a8130fc34b11965c8cfa21ade2f232c00b42d96d4967c39a3a3", [:mix], [{:hackney, "~> 1.8", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"}, "idna": {:hex, :idna, "6.0.0", "689c46cbcdf3524c44d5f3dde8001f364cd7608a99556d8fbd8239a5798d4c10", [:rebar3], [{:unicode_util_compat, "0.4.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm"}, "jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm"}, - "jose": {:hex, :jose, "1.8.4", "7946d1e5c03a76ac9ef42a6e6a20001d35987afd68c2107bcd8f01a84e75aa73", [:mix, :rebar3], [{:base64url, "~> 0.0.1", [hex: :base64url, repo: "hexpm", optional: false]}], "hexpm"}, + "joken": {:hex, :joken, "2.0.1", "ec9ab31bf660f343380da033b3316855197c8d4c6ef597fa3fcb451b326beb14", [:mix], [{:jose, "~> 1.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm"}, + "jose": {:hex, :jose, "1.9.0", "4167c5f6d06ffaebffd15cdb8da61a108445ef5e85ab8f5a7ad926fdf3ada154", [:mix, :rebar3], [{:base64url, "~> 0.0.1", [hex: :base64url, repo: "hexpm", optional: false]}], "hexpm"}, "makeup": {:hex, :makeup, "0.8.0", "9cf32aea71c7fe0a4b2e9246c2c4978f9070257e5c9ce6d4a28ec450a839b55f", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"}, "makeup_elixir": {:hex, :makeup_elixir, "0.13.0", "be7a477997dcac2e48a9d695ec730b2d22418292675c75aa2d34ba0909dcdeda", [:mix], [{:makeup, "~> 0.8", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"}, "meck": {:hex, :meck, "0.8.13", "ffedb39f99b0b99703b8601c6f17c7f76313ee12de6b646e671e3188401f7866", [:rebar3], [], "hexpm"}, "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"}, "mime": {:hex, :mime, "1.3.1", "30ce04ab3175b6ad0bdce0035cba77bba68b813d523d1aac73d9781b4d193cf8", [:mix], [], "hexpm"}, 
"mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm"}, - "mochiweb": {:hex, :mochiweb, "2.15.0", "e1daac474df07651e5d17cc1e642c4069c7850dc4508d3db7263a0651330aacc", [:rebar3], [], "hexpm"}, + "mochiweb": {:hex, :mochiweb, "2.18.0", "eb55f1db3e6e960fac4e6db4e2db9ec3602cc9f30b86cd1481d56545c3145d2e", [:rebar3], [], "hexpm"}, "mock": {:hex, :mock, "0.3.3", "42a433794b1291a9cf1525c6d26b38e039e0d3a360732b5e467bfc77ef26c914", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"}, "mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"}, "mox": {:hex, :mox, "0.5.1", "f86bb36026aac1e6f924a4b6d024b05e9adbed5c63e8daa069bd66fb3292165b", [:mix], [], "hexpm"}, @@ -62,20 +64,19 @@ "phoenix_ecto": {:hex, :phoenix_ecto, "4.0.0", "c43117a136e7399ea04ecaac73f8f23ee0ffe3e07acfcb8062fe5f4c9f0f6531", [:mix], [{:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.9", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "phoenix_html": {:hex, :phoenix_html, "2.13.1", "fa8f034b5328e2dfa0e4131b5569379003f34bc1fafdaa84985b0b9d2f12e68b", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "1.1.2", "496c303bdf1b2e98a9d26e89af5bba3ab487ba3a3735f74bf1f4064d2a845a3e", [:mix], [], "hexpm"}, - "pleroma_job_queue": {:hex, :pleroma_job_queue, "0.2.0", "879e660aa1cebe8dc6f0aaaa6aa48b4875e89cd961d4a585fd128e0773b31a18", [:mix], [], "hexpm"}, + "phoenix_swoosh": {:hex, :phoenix_swoosh, "0.2.0", "a7e0b32077cd6d2323ae15198839b05d9caddfa20663fd85787479e81f89520e", [:mix], [{:phoenix, "~> 1.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.2", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:swoosh, "~> 0.1", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm"}, + "pleroma_job_queue": {:hex, :pleroma_job_queue, "0.3.0", "b84538d621f0c3d6fcc1cff9d5648d3faaf873b8b21b94e6503428a07a48ec47", [:mix], [{:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}], "hexpm"}, "plug": {:hex, :plug, "1.8.2", "0bcce1daa420f189a6491f3940cc77ea7fb1919761175c9c3b59800d897440fc", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"}, "plug_cowboy": {:hex, :plug_cowboy, "2.1.0", "b75768153c3a8a9e8039d4b25bb9b14efbc58e9c4a6e6a270abff1cd30cbe320", [:mix], [{:cowboy, "~> 2.5", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"}, "plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"}, - "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm"}, "postgrex": {:hex, :postgrex, "0.14.3", 
"5754dee2fdf6e9e508cbf49ab138df964278700b764177e8f3871e658b345a1e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"}, "prometheus": {:hex, :prometheus, "4.4.1", "1e96073b3ed7788053768fea779cbc896ddc3bdd9ba60687f2ad50b252ac87d6", [:mix, :rebar3], [], "hexpm"}, "prometheus_ecto": {:hex, :prometheus_ecto, "1.4.1", "6c768ea9654de871e5b32fab2eac348467b3021604ebebbcbd8bcbe806a65ed5", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"}, "prometheus_ex": {:hex, :prometheus_ex, "3.0.5", "fa58cfd983487fc5ead331e9a3e0aa622c67232b3ec71710ced122c4c453a02f", [:mix], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"}, "prometheus_phoenix": {:hex, :prometheus_phoenix, "1.3.0", "c4b527e0b3a9ef1af26bdcfbfad3998f37795b9185d475ca610fe4388fdd3bb5", [:mix], [{:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"}, "prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"}, - "prometheus_process_collector": {:hex, :prometheus_process_collector, "1.4.0", "6dbd39e3165b9ef1c94a7a820e9ffe08479f949dcdd431ed4aaea7b250eebfde", [:rebar3], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"}, "quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"}, "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"}, "recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]}, @@ -89,7 +90,7 @@ "tzdata": {:hex, :tzdata, "1.0.1", "f6027a331af7d837471248e62733c6ebee86a72e57c613aa071ebb1f750fc71a", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"}, "ueberauth": {:hex, :ueberauth, "0.6.1", "9e90d3337dddf38b1ca2753aca9b1e53d8a52b890191cdc55240247c89230412", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"}, "unicode_util_compat": {:hex, :unicode_util_compat, "0.4.1", "d869e4c68901dd9531385bb0c8c40444ebf624e60b6962d95952775cac5e90cd", [:rebar3], [], "hexpm"}, - "unsafe": {:hex, :unsafe, "1.0.0", "7c21742cd05380c7875546b023481d3a26f52df8e5dfedcb9f958f322baae305", [:mix], [], "hexpm"}, + "unsafe": {:hex, :unsafe, "1.0.1", "a27e1874f72ee49312e0a9ec2e0b27924214a05e3ddac90e91727bc76f8613d8", [:mix], [], "hexpm"}, "web_push_encryption": {:hex, :web_push_encryption, "0.2.1", 
"d42cecf73420d9dc0053ba3299cc8c8d6ff2be2487d67ca2a57265868e4d9a98", [:mix], [{:httpoison, "~> 1.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jose, "~> 1.8", [hex: :jose, repo: "hexpm", optional: false]}, {:poison, "~> 3.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"}, "websocket_client": {:git, "https://github.com/jeremyong/websocket_client.git", "9a6f65d05ebf2725d62fb19262b21f1805a59fbf", []}, } diff --git a/priv/repo/migrations/20190412052952_add_user_info_fields.exs b/priv/repo/migrations/20190412052952_add_user_info_fields.exs new file mode 100644 index 000000000..646c91f32 --- /dev/null +++ b/priv/repo/migrations/20190412052952_add_user_info_fields.exs @@ -0,0 +1,20 @@ +defmodule Pleroma.Repo.Migrations.AddEmailNotificationsToUserInfo do + use Ecto.Migration + + def up do + execute(" + UPDATE users + SET info = info || '{ + \"email_notifications\": { + \"digest\": false + } + }'") + end + + def down do + execute(" + UPDATE users + SET info = info - 'email_notifications' + ") + end +end diff --git a/priv/repo/migrations/20190413085040_add_signin_and_last_digest_dates_to_user.exs b/priv/repo/migrations/20190413085040_add_signin_and_last_digest_dates_to_user.exs new file mode 100644 index 000000000..4312b171f --- /dev/null +++ b/priv/repo/migrations/20190413085040_add_signin_and_last_digest_dates_to_user.exs @@ -0,0 +1,9 @@ +defmodule Pleroma.Repo.Migrations.AddSigninAndLastDigestDatesToUser do + use Ecto.Migration + + def change do + alter table(:users) do + add(:last_digest_emailed_at, :naive_datetime, default: fragment("now()")) + end + end +end diff --git a/priv/static/emoji/f_00b.png b/priv/static/emoji/f_00b.png deleted file mode 100644 index 3d00b89b0..000000000 Binary files a/priv/static/emoji/f_00b.png and /dev/null differ diff --git a/priv/static/emoji/f_00b11b.png b/priv/static/emoji/f_00b11b.png deleted file mode 100644 index 3e99ce464..000000000 Binary files a/priv/static/emoji/f_00b11b.png and /dev/null differ diff --git a/priv/static/emoji/f_00b33b.png b/priv/static/emoji/f_00b33b.png deleted file mode 100644 index 8f4929297..000000000 Binary files a/priv/static/emoji/f_00b33b.png and /dev/null differ diff --git a/priv/static/emoji/f_00h.png b/priv/static/emoji/f_00h.png deleted file mode 100644 index ba3da57c6..000000000 Binary files a/priv/static/emoji/f_00h.png and /dev/null differ diff --git a/priv/static/emoji/f_00t.png b/priv/static/emoji/f_00t.png deleted file mode 100644 index 31d98b433..000000000 Binary files a/priv/static/emoji/f_00t.png and /dev/null differ diff --git a/priv/static/emoji/f_01b.png b/priv/static/emoji/f_01b.png deleted file mode 100644 index 7bd2582c5..000000000 Binary files a/priv/static/emoji/f_01b.png and /dev/null differ diff --git a/priv/static/emoji/f_03b.png b/priv/static/emoji/f_03b.png deleted file mode 100644 index 9e4ff1bf7..000000000 Binary files a/priv/static/emoji/f_03b.png and /dev/null differ diff --git a/priv/static/emoji/f_10b.png b/priv/static/emoji/f_10b.png deleted file mode 100644 index 67c6493fc..000000000 Binary files a/priv/static/emoji/f_10b.png and /dev/null differ diff --git a/priv/static/emoji/f_11b.png b/priv/static/emoji/f_11b.png deleted file mode 100644 index b53328ba9..000000000 Binary files a/priv/static/emoji/f_11b.png and /dev/null differ diff --git a/priv/static/emoji/f_11b00b.png b/priv/static/emoji/f_11b00b.png deleted file mode 100644 index c4c30e11f..000000000 Binary files a/priv/static/emoji/f_11b00b.png and /dev/null differ diff --git a/priv/static/emoji/f_11b22b.png 
b/priv/static/emoji/f_11b22b.png deleted file mode 100644 index 47425e06e..000000000 Binary files a/priv/static/emoji/f_11b22b.png and /dev/null differ diff --git a/priv/static/emoji/f_11h.png b/priv/static/emoji/f_11h.png deleted file mode 100644 index 28342363a..000000000 Binary files a/priv/static/emoji/f_11h.png and /dev/null differ diff --git a/priv/static/emoji/f_11t.png b/priv/static/emoji/f_11t.png deleted file mode 100644 index dca67dc70..000000000 Binary files a/priv/static/emoji/f_11t.png and /dev/null differ diff --git a/priv/static/emoji/f_12b.png b/priv/static/emoji/f_12b.png deleted file mode 100644 index 9925adb7c..000000000 Binary files a/priv/static/emoji/f_12b.png and /dev/null differ diff --git a/priv/static/emoji/f_21b.png b/priv/static/emoji/f_21b.png deleted file mode 100644 index aa56d2cb2..000000000 Binary files a/priv/static/emoji/f_21b.png and /dev/null differ diff --git a/priv/static/emoji/f_22b.png b/priv/static/emoji/f_22b.png deleted file mode 100644 index 426878986..000000000 Binary files a/priv/static/emoji/f_22b.png and /dev/null differ diff --git a/priv/static/emoji/f_22b11b.png b/priv/static/emoji/f_22b11b.png deleted file mode 100644 index 4bdfb3107..000000000 Binary files a/priv/static/emoji/f_22b11b.png and /dev/null differ diff --git a/priv/static/emoji/f_22b33b.png b/priv/static/emoji/f_22b33b.png deleted file mode 100644 index adf94f811..000000000 Binary files a/priv/static/emoji/f_22b33b.png and /dev/null differ diff --git a/priv/static/emoji/f_22h.png b/priv/static/emoji/f_22h.png deleted file mode 100644 index 3b27e2de8..000000000 Binary files a/priv/static/emoji/f_22h.png and /dev/null differ diff --git a/priv/static/emoji/f_22t.png b/priv/static/emoji/f_22t.png deleted file mode 100644 index addd9fec7..000000000 Binary files a/priv/static/emoji/f_22t.png and /dev/null differ diff --git a/priv/static/emoji/f_23b.png b/priv/static/emoji/f_23b.png deleted file mode 100644 index beb69ab36..000000000 Binary files a/priv/static/emoji/f_23b.png and /dev/null differ diff --git a/priv/static/emoji/f_30b.png b/priv/static/emoji/f_30b.png deleted file mode 100644 index 41dbb2a5d..000000000 Binary files a/priv/static/emoji/f_30b.png and /dev/null differ diff --git a/priv/static/emoji/f_32b.png b/priv/static/emoji/f_32b.png deleted file mode 100644 index d8261e8a8..000000000 Binary files a/priv/static/emoji/f_32b.png and /dev/null differ diff --git a/priv/static/emoji/f_33b.png b/priv/static/emoji/f_33b.png deleted file mode 100644 index 71b8b914a..000000000 Binary files a/priv/static/emoji/f_33b.png and /dev/null differ diff --git a/priv/static/emoji/f_33b00b.png b/priv/static/emoji/f_33b00b.png deleted file mode 100644 index 65b6e24b8..000000000 Binary files a/priv/static/emoji/f_33b00b.png and /dev/null differ diff --git a/priv/static/emoji/f_33b22b.png b/priv/static/emoji/f_33b22b.png deleted file mode 100644 index d71a8ddd4..000000000 Binary files a/priv/static/emoji/f_33b22b.png and /dev/null differ diff --git a/priv/static/emoji/f_33h.png b/priv/static/emoji/f_33h.png deleted file mode 100644 index e141c5184..000000000 Binary files a/priv/static/emoji/f_33h.png and /dev/null differ diff --git a/priv/static/emoji/f_33t.png b/priv/static/emoji/f_33t.png deleted file mode 100644 index d5a23073d..000000000 Binary files a/priv/static/emoji/f_33t.png and /dev/null differ diff --git a/priv/templates/sample_config.eex b/priv/templates/sample_config.eex index ca9c7a2c2..dc75d4008 100644 --- a/priv/templates/sample_config.eex +++ 
b/priv/templates/sample_config.eex @@ -68,3 +68,5 @@ config :pleroma, Pleroma.Uploaders.Local, uploads: "<%= uploads_dir %>" # For using third-party S3 clones like wasabi, also do: # config :ex_aws, :s3, # host: "s3.wasabisys.com" + +config :joken, default_signer: "<%= jwt_secret %>" diff --git a/test/emails/admin_email_test.exs b/test/emails/admin_email_test.exs index 4bf54b0c2..9e83c73c6 100644 --- a/test/emails/admin_email_test.exs +++ b/test/emails/admin_email_test.exs @@ -24,7 +24,6 @@ defmodule Pleroma.Emails.AdminEmailTest do assert res.to == [{to_user.name, to_user.email}] assert res.from == {config[:name], config[:notify_email]} - assert res.reply_to == {reporter.name, reporter.email} assert res.subject == "#{config[:name]} Report" assert res.html_body == @@ -34,4 +33,17 @@ defmodule Pleroma.Emails.AdminEmailTest do status_url }\">#{status_url}\n \n

\n\n" end + + test "it works when the reporter is a remote user without email" do + config = Pleroma.Config.get(:instance) + to_user = insert(:user) + reporter = insert(:user, email: nil, local: false) + account = insert(:user) + + res = + AdminEmail.report(to_user, reporter, account, [%{name: "Test", id: "12"}], "Test comment") + + assert res.to == [{to_user.name, to_user.email}] + assert res.from == {config[:name], config[:notify_email]} + end end diff --git a/test/fixtures/nypd-facial-recognition-children-teenagers.html b/test/fixtures/nypd-facial-recognition-children-teenagers.html new file mode 100644 index 000000000..5702c4484 --- /dev/null +++ b/test/fixtures/nypd-facial-recognition-children-teenagers.html @@ -0,0 +1,227 @@ + + + + She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database. - The New York Times + + + + + + + + + + + + + + + + + + + + + +

Advertisement

She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.

With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.

Image
CreditCreditSarah Blesener for The New York Times

[What you need to know to start the day: Get New York Today in your inbox.]

The New York Police Department has been loading thousands of arrest photos of children and teenagers into a facial recognition database despite evidence the technology has a higher risk of false matches in younger faces.

For about four years, internal records show, the department has used the technology to compare crime scene images with its collection of juvenile mug shots, the photos that are taken at an arrest. Most of the photos are of teenagers, largely 13 to 16 years old, but children as young as 11 have been included.

Elected officials and civil rights groups said the disclosure that the city was deploying a powerful surveillance tool on adolescents — whose privacy seems sacrosanct and whose status is protected in the criminal justice system — was a striking example of the Police Department’s ability to adopt advancing technology with little public scrutiny.

Several members of the City Council as well as a range of civil liberties groups said they were unaware of the policy until they were contacted by The New York Times.

Police Department officials defended the decision, saying it was just the latest evolution of a longstanding policing technique: using arrest photos to identify suspects.

“I don’t think this is any secret decision that’s made behind closed doors,” the city’s chief of detectives, Dermot F. Shea, said in an interview. “This is just process, and making sure we’re doing everything to fight crime.”

Other cities have begun to debate whether law enforcement should use facial recognition, which relies on an algorithm to quickly pore through images and suggest matches. In May, San Francisco blocked city agencies, including the police, from using the tool amid unease about potential government abuse. Detroit is facing public resistance to a technology that has been shown to have lower accuracy with people with darker skin.

In New York, the state Education Department recently told the Lockport, N.Y., school district to delay a plan to use facial recognition on students, citing privacy concerns.

“At the end of the day, it should be banned — no young people,” said Councilman Donovan Richards, a Queens Democrat who heads the Public Safety Committee, which oversees the Police Department.

The department said its legal bureau had approved using facial recognition on juveniles. The algorithm may suggest a lead, but detectives would not make an arrest based solely on that, Chief Shea said.

Image
CreditChang W. Lee/The New York Times

Still, facial recognition has not been widely tested on children. Most algorithms are taught to “think” based on adult faces, and there is growing evidence that they do not work as well on children.

The National Institute of Standards and Technology, which is part of the Commerce Department and evaluates facial recognition algorithms for accuracy, recently found the vast majority of more than 100 facial recognition algorithms had a higher rate of mistaken matches among children. The error rate was most pronounced in young children but was also seen in those aged 10 to 16.

Aging poses another problem: The appearance of children and adolescents can change drastically as bones stretch and shift, altering the underlying facial structure.

“I would use extreme caution in using those algorithms,” said Karl Ricanek Jr., a computer science professor and co-founder of the Face Aging Group at the University of North Carolina-Wilmington.

Technology that can match an image of a younger teenager to a recent arrest photo may be less effective at finding the same person even one or two years later, he said.

“The systems do not have the capacity to understand the dynamic changes that occur to a child’s face,” Dr. Ricanek said.

Idemia and DataWorks Plus, the two companies that provide facial recognition software to the Police Department, did not respond to requests for comment.

The New York Police Department can take arrest photos of minors as young as 11 who are charged with a felony, depending on the severity of the charge.

And in many cases, the department keeps the photos for years, making facial recognition comparisons to what may have effectively become outdated images. There are photos of 5,500 individuals in the juvenile database, 4,100 of whom are no longer 16 or under, the department said. Teenagers 17 and older are considered adults in the criminal justice system.

Police officials declined to provide statistics on how often their facial recognition systems provide false matches, or to explain how they evaluate the system’s effectiveness.

“We are comfortable with this technology because it has proved to be a valuable investigative method,” Chief Shea said. Facial recognition has helped lead to thousands of arrests of both adults and juveniles, the department has said.

Mayor Bill de Blasio had been aware the department was using the technology on minors, said Freddi Goldstein, a spokeswoman for the mayor.

She said the Police Department followed “strict guidelines” in applying the technology and City Hall monitored the agency’s compliance with the policies.

The Times learned details of the department’s use of facial recognition by reviewing documents posted online earlier this year by Clare Garvie, a senior associate at the Center on Privacy and Technology at Georgetown Law. Ms. Garvie received the documents as part of an open records lawsuit.

It could not be determined whether other large police departments used facial recognition with juveniles because very few have written policies governing the use of the technology, Ms. Garvie said.

New York detectives rely on a vast network of surveillance cameras throughout the city to provide images of people believed to have committed a crime. Since 2011, the department has had a dedicated unit of officers who use facial recognition to compare those images against millions of photos, usually mug shots. The software proposes matches, which have led to thousands of arrests, the department said.

By 2013, top police officials were meeting to discuss including juveniles in the program, the documents reviewed by The Times showed.

The documents showed that the juvenile database had been integrated into the system by 2015.

“We have these photos. It makes sense,” Chief Shea said in the interview.

State law requires that arrest photos be destroyed if the case is resolved in the juvenile’s favor, or if the child is found to have committed only a misdemeanor, rather than a felony. The photos also must be destroyed if a person reaches age 21 without a criminal record.

When children are charged with crimes, the court system usually takes some steps to prevent their acts from defining them in later years. Children who are 16 and under, for instance, are generally sent to Family Court, where records are not public.

Yet including their photos in a facial recognition database runs the risk that an imperfect algorithm identifies them as possible suspects in later crimes, civil rights advocates said. A mistaken match could lead investigators to focus on the wrong person from the outset, they said.

“It’s very disturbing to know that no matter what I’m doing at that moment, someone might be scanning my picture to try to find someone who committed a crime,” said Bailey, a 17-year-old Brooklyn girl who had admitted guilt in Family Court to a group attack that happened when she was 14. She said she was present at the attack but did not participate.

Bailey, who asked that she be identified only by her last name because she did not want her juvenile arrest to be public, has not been arrested again and is now a student at John Jay College of Criminal Justice.

Recent studies indicate that people of color, as well as children and women, have a greater risk of misidentification than their counterparts, said Joy Buolamwini, the founder of the Algorithmic Justice League and graduate researcher at the M.I.T. Media Lab, who has examined how human biases are built into artificial intelligence.

The racial disparities in the juvenile justice system are stark: In New York, black and Latino juveniles were charged with crimes at far higher rates than whites in 2017, the most recent year for which numbers were available. Black juveniles outnumbered white juveniles more than 15 to 1.

“If the facial recognition algorithm has a negative bias toward a black population, that will get magnified more toward children,” Dr. Ricanek said, adding that in terms of diminished accuracy, “you’re now putting yourself in unknown territory.”

Joseph Goldstein writes about policing and the criminal justice system. He has been a reporter at The Times since 2011, and is based in New York. He also worked for a year in the Kabul bureau, reporting on Afghanistan. @JoeKGoldstein

Ali Watkins is a reporter on the Metro Desk, covering courts and social services. Previously, she covered national security in Washington for The Times, BuzzFeed and McClatchy Newspapers. @AliWatkins

A version of this article appears in print on , Section A, Page 1 of the New York edition with the headline: In New York, Police Computers Scan Faces, Some as Young as 11. Order Reprints | Today’s Paper | Subscribe

Advertisement

+ + + + + + + + + + +
+ +
+ + + + \ No newline at end of file diff --git a/test/fixtures/nypd-facial-recognition-children-teenagers2.html b/test/fixtures/nypd-facial-recognition-children-teenagers2.html new file mode 100644 index 000000000..ae8b26aff --- /dev/null +++ b/test/fixtures/nypd-facial-recognition-children-teenagers2.html @@ -0,0 +1,226 @@ + + + + She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database. - The New York Times + + + + + + + + + + + + + + + + + + + + +

[Fixture body omitted here: it repeats, verbatim, the article text of nypd-facial-recognition-children-teenagers.html above.]
+ + + + diff --git a/test/fixtures/nypd-facial-recognition-children-teenagers3.html b/test/fixtures/nypd-facial-recognition-children-teenagers3.html new file mode 100644 index 000000000..53454d23e --- /dev/null +++ b/test/fixtures/nypd-facial-recognition-children-teenagers3.html @@ -0,0 +1,227 @@ + + + + She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database. - The New York Times + + + + + + + + + + + + + + + + + + + + + +

[Fixture body omitted here: it repeats, verbatim, the article text of nypd-facial-recognition-children-teenagers.html above.]
+ + + + diff --git a/test/fixtures/users_mock/masto_closed_followers_page.json b/test/fixtures/users_mock/masto_closed_followers_page.json new file mode 100644 index 000000000..04ab0c4d3 --- /dev/null +++ b/test/fixtures/users_mock/masto_closed_followers_page.json @@ -0,0 +1 @@ +{"@context":"https://www.w3.org/ns/activitystreams","id":"http://localhost:4001/users/masto_closed/followers?page=1","type":"OrderedCollectionPage","totalItems":437,"next":"http://localhost:4001/users/masto_closed/followers?page=2","partOf":"http://localhost:4001/users/masto_closed/followers","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]} diff --git a/test/fixtures/users_mock/masto_closed_following_page.json b/test/fixtures/users_mock/masto_closed_following_page.json new file mode 100644 index 000000000..8d8324699 --- /dev/null +++ b/test/fixtures/users_mock/masto_closed_following_page.json @@ -0,0 +1 @@ +{"@context":"https://www.w3.org/ns/activitystreams","id":"http://localhost:4001/users/masto_closed/following?page=1","type":"OrderedCollectionPage","totalItems":152,"next":"http://localhost:4001/users/masto_closed/following?page=2","partOf":"http://localhost:4001/users/masto_closed/following","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]} diff --git a/test/mix/tasks/pleroma.digest_test.exs b/test/mix/tasks/pleroma.digest_test.exs new file mode 100644 index 000000000..595f64ed7 --- /dev/null +++ b/test/mix/tasks/pleroma.digest_test.exs @@ -0,0 +1,51 @@ +defmodule Mix.Tasks.Pleroma.DigestTest do + use Pleroma.DataCase + + import Pleroma.Factory + import Swoosh.TestAssertions + + alias Pleroma.Web.CommonAPI + + setup_all do + Mix.shell(Mix.Shell.Process) + + on_exit(fn -> + Mix.shell(Mix.Shell.IO) + end) + + :ok + end + + describe "pleroma.digest test" do + test "Sends digest to the given user" do + user1 = insert(:user) + user2 = insert(:user) + + Enum.each(0..10, fn i -> + {:ok, _activity} = + CommonAPI.post(user1, %{ + "status" => "hey ##{i} @#{user2.nickname}!" 
+ }) + end) + + yesterday = + NaiveDateTime.add( + NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second), + -60 * 60 * 24, + :second + ) + + {:ok, yesterday_date} = Timex.format(yesterday, "%F", :strftime) + + :ok = Mix.Tasks.Pleroma.Digest.run(["test", user2.nickname, yesterday_date]) + + assert_receive {:mix_shell, :info, [message]} + assert message =~ "Digest email have been sent" + + assert_email_sent( + to: {user2.name, user2.email}, + html_body: ~r/new mentions:/i + ) + end + end +end diff --git a/test/notification_test.exs b/test/notification_test.exs index c88ac54bd..80ea2a085 100644 --- a/test/notification_test.exs +++ b/test/notification_test.exs @@ -4,13 +4,15 @@ defmodule Pleroma.NotificationTest do use Pleroma.DataCase + + import Pleroma.Factory + alias Pleroma.Notification alias Pleroma.User alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.CommonAPI alias Pleroma.Web.Streamer alias Pleroma.Web.TwitterAPI.TwitterAPI - import Pleroma.Factory describe "create_notifications" do test "notifies someone when they are directly addressed" do @@ -352,6 +354,51 @@ defmodule Pleroma.NotificationTest do end end + describe "for_user_since/2" do + defp days_ago(days) do + NaiveDateTime.add( + NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second), + -days * 60 * 60 * 24, + :second + ) + end + + test "Returns recent notifications" do + user1 = insert(:user) + user2 = insert(:user) + + Enum.each(0..10, fn i -> + {:ok, _activity} = + CommonAPI.post(user1, %{ + "status" => "hey ##{i} @#{user2.nickname}!" + }) + end) + + {old, new} = Enum.split(Notification.for_user(user2), 5) + + Enum.each(old, fn notification -> + notification + |> cast(%{updated_at: days_ago(10)}, [:updated_at]) + |> Pleroma.Repo.update!() + end) + + recent_notifications_ids = + user2 + |> Notification.for_user_since( + NaiveDateTime.add(NaiveDateTime.utc_now(), -5 * 86_400, :second) + ) + |> Enum.map(& &1.id) + + Enum.each(old, fn %{id: id} -> + refute id in recent_notifications_ids + end) + + Enum.each(new, fn %{id: id} -> + assert id in recent_notifications_ids + end) + end + end + describe "notification target determination" do test "it sends notifications to addressed users in new messages" do user = insert(:user) diff --git a/test/support/builders/user_builder.ex b/test/support/builders/user_builder.ex index f58e1b0ad..6da16f71a 100644 --- a/test/support/builders/user_builder.ex +++ b/test/support/builders/user_builder.ex @@ -9,7 +9,8 @@ defmodule Pleroma.Builders.UserBuilder do nickname: "testname", password_hash: Comeonin.Pbkdf2.hashpwsalt("test"), bio: "A tester.", - ap_id: "some id" + ap_id: "some id", + last_digest_emailed_at: NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second) } Map.merge(user, data) diff --git a/test/support/factory.ex b/test/support/factory.ex index c751546ce..1787c1088 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -31,7 +31,8 @@ defmodule Pleroma.Factory do nickname: sequence(:nickname, &"nick#{&1}"), password_hash: Comeonin.Pbkdf2.hashpwsalt("test"), bio: sequence(:bio, &"Tester Number #{&1}"), - info: %{} + info: %{}, + last_digest_emailed_at: NaiveDateTime.utc_now() } %{ @@ -182,8 +183,8 @@ defmodule Pleroma.Factory do } end - def like_activity_factory do - note_activity = insert(:note_activity) + def like_activity_factory(attrs \\ %{}) do + note_activity = attrs[:note_activity] || insert(:note_activity) object = Object.normalize(note_activity) user = insert(:user) diff --git a/test/support/http_request_mock.ex 
b/test/support/http_request_mock.ex index d767ab9d4..3adb5ba3b 100644 --- a/test/support/http_request_mock.ex +++ b/test/support/http_request_mock.ex @@ -796,6 +796,14 @@ defmodule HttpRequestMock do }} end + def get("http://localhost:4001/users/masto_closed/followers?page=1", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/users_mock/masto_closed_followers_page.json") + }} + end + def get("http://localhost:4001/users/masto_closed/following", _, _, _) do {:ok, %Tesla.Env{ @@ -804,6 +812,14 @@ defmodule HttpRequestMock do }} end + def get("http://localhost:4001/users/masto_closed/following?page=1", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/users_mock/masto_closed_following_page.json") + }} + end + def get("http://localhost:4001/users/fuser2/followers", _, _, _) do {:ok, %Tesla.Env{ diff --git a/test/user_info_test.exs b/test/user_info_test.exs new file mode 100644 index 000000000..2d795594e --- /dev/null +++ b/test/user_info_test.exs @@ -0,0 +1,24 @@ +defmodule Pleroma.UserInfoTest do + alias Pleroma.Repo + alias Pleroma.User.Info + + use Pleroma.DataCase + + import Pleroma.Factory + + describe "update_email_notifications/2" do + setup do + user = insert(:user, %{info: %{email_notifications: %{"digest" => true}}}) + + {:ok, user: user} + end + + test "Notifications are updated", %{user: user} do + true = user.info.email_notifications["digest"] + changeset = Info.update_email_notifications(user.info, %{"digest" => false}) + assert changeset.valid? + {:ok, result} = Ecto.Changeset.apply_action(changeset, :insert) + assert result.email_notifications["digest"] == false + end + end +end diff --git a/test/user_search_test.exs b/test/user_search_test.exs index 4de6c82a5..48ce973ad 100644 --- a/test/user_search_test.exs +++ b/test/user_search_test.exs @@ -193,7 +193,14 @@ defmodule Pleroma.UserSearchTest do user = User.get_cached_by_ap_id("http://mastodon.example.org/users/admin") assert length(results) == 1 - assert user == result |> Map.put(:search_rank, nil) |> Map.put(:search_type, nil) + + expected = + result + |> Map.put(:search_rank, nil) + |> Map.put(:search_type, nil) + |> Map.put(:last_digest_emailed_at, nil) + + assert user == expected end test "excludes a blocked users from search result" do diff --git a/test/user_test.exs b/test/user_test.exs index ee6d8e8f3..8617752d7 100644 --- a/test/user_test.exs +++ b/test/user_test.exs @@ -1239,6 +1239,109 @@ defmodule Pleroma.UserTest do assert Map.get(user_show, "followers_count") == 2 end + describe "list_inactive_users_query/1" do + defp days_ago(days) do + NaiveDateTime.add( + NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second), + -days * 60 * 60 * 24, + :second + ) + end + + test "Users are inactive by default" do + total = 10 + + users = + Enum.map(1..total, fn _ -> + insert(:user, last_digest_emailed_at: days_ago(20), info: %{deactivated: false}) + end) + + inactive_users_ids = + Pleroma.User.list_inactive_users_query() + |> Pleroma.Repo.all() + |> Enum.map(& &1.id) + + Enum.each(users, fn user -> + assert user.id in inactive_users_ids + end) + end + + test "Only includes users who has no recent activity" do + total = 10 + + users = + Enum.map(1..total, fn _ -> + insert(:user, last_digest_emailed_at: days_ago(20), info: %{deactivated: false}) + end) + + {inactive, active} = Enum.split(users, trunc(total / 2)) + + Enum.map(active, fn user -> + to = Enum.random(users -- [user]) + + {:ok, _} = + Pleroma.Web.TwitterAPI.TwitterAPI.create_status(user, %{ + "status" 
=> "hey @#{to.nickname}" + }) + end) + + inactive_users_ids = + Pleroma.User.list_inactive_users_query() + |> Pleroma.Repo.all() + |> Enum.map(& &1.id) + + Enum.each(active, fn user -> + refute user.id in inactive_users_ids + end) + + Enum.each(inactive, fn user -> + assert user.id in inactive_users_ids + end) + end + + test "Only includes users with no read notifications" do + total = 10 + + users = + Enum.map(1..total, fn _ -> + insert(:user, last_digest_emailed_at: days_ago(20), info: %{deactivated: false}) + end) + + [sender | recipients] = users + {inactive, active} = Enum.split(recipients, trunc(total / 2)) + + Enum.each(recipients, fn to -> + {:ok, _} = + Pleroma.Web.TwitterAPI.TwitterAPI.create_status(sender, %{ + "status" => "hey @#{to.nickname}" + }) + + {:ok, _} = + Pleroma.Web.TwitterAPI.TwitterAPI.create_status(sender, %{ + "status" => "hey again @#{to.nickname}" + }) + end) + + Enum.each(active, fn user -> + [n1, _n2] = Pleroma.Notification.for_user(user) + {:ok, _} = Pleroma.Notification.read_one(user, n1.id) + end) + + inactive_users_ids = + Pleroma.User.list_inactive_users_query() + |> Pleroma.Repo.all() + |> Enum.map(& &1.id) + + Enum.each(active, fn user -> + refute user.id in inactive_users_ids + end) + + Enum.each(inactive, fn user -> + assert user.id in inactive_users_ids + end) + end + end + describe "toggle_confirmation/1" do test "if user is confirmed" do user = insert(:user, info: %{confirmation_pending: false}) @@ -1395,4 +1498,78 @@ defmodule Pleroma.UserTest do assert %User{bio: "test-bio"} = User.get_cached_by_ap_id(user.ap_id) end end + + describe "following/followers synchronization" do + setup do + sync = Pleroma.Config.get([:instance, :external_user_synchronization]) + on_exit(fn -> Pleroma.Config.put([:instance, :external_user_synchronization], sync) end) + end + + test "updates the counters normally on following/getting a follow when disabled" do + Pleroma.Config.put([:instance, :external_user_synchronization], false) + user = insert(:user) + + other_user = + insert(:user, + local: false, + follower_address: "http://localhost:4001/users/masto_closed/followers", + following_address: "http://localhost:4001/users/masto_closed/following", + info: %{ap_enabled: true} + ) + + assert User.user_info(other_user).following_count == 0 + assert User.user_info(other_user).follower_count == 0 + + {:ok, user} = Pleroma.User.follow(user, other_user) + other_user = Pleroma.User.get_by_id(other_user.id) + + assert User.user_info(user).following_count == 1 + assert User.user_info(other_user).follower_count == 1 + end + + test "syncronizes the counters with the remote instance for the followed when enabled" do + Pleroma.Config.put([:instance, :external_user_synchronization], false) + + user = insert(:user) + + other_user = + insert(:user, + local: false, + follower_address: "http://localhost:4001/users/masto_closed/followers", + following_address: "http://localhost:4001/users/masto_closed/following", + info: %{ap_enabled: true} + ) + + assert User.user_info(other_user).following_count == 0 + assert User.user_info(other_user).follower_count == 0 + + Pleroma.Config.put([:instance, :external_user_synchronization], true) + {:ok, _user} = User.follow(user, other_user) + other_user = User.get_by_id(other_user.id) + + assert User.user_info(other_user).follower_count == 437 + end + + test "syncronizes the counters with the remote instance for the follower when enabled" do + Pleroma.Config.put([:instance, :external_user_synchronization], false) + + user = insert(:user) + + 
other_user = + insert(:user, + local: false, + follower_address: "http://localhost:4001/users/masto_closed/followers", + following_address: "http://localhost:4001/users/masto_closed/following", + info: %{ap_enabled: true} + ) + + assert User.user_info(other_user).following_count == 0 + assert User.user_info(other_user).follower_count == 0 + + Pleroma.Config.put([:instance, :external_user_synchronization], true) + {:ok, other_user} = User.follow(other_user, user) + + assert User.user_info(other_user).following_count == 152 + end + end end diff --git a/test/web/activity_pub/activity_pub_controller_test.exs b/test/web/activity_pub/activity_pub_controller_test.exs index 1d809164f..d7f0a8264 100644 --- a/test/web/activity_pub/activity_pub_controller_test.exs +++ b/test/web/activity_pub/activity_pub_controller_test.exs @@ -184,18 +184,65 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do end describe "/object/:uuid/likes" do - test "it returns the like activities in a collection", %{conn: conn} do + setup do like = insert(:like_activity) like_object_ap_id = Object.normalize(like).data["id"] - uuid = String.split(like_object_ap_id, "/") |> List.last() + uuid = + like_object_ap_id + |> String.split("/") + |> List.last() + + [id: like.data["id"], uuid: uuid] + end + + test "it returns the like activities in a collection", %{conn: conn, id: id, uuid: uuid} do result = conn |> put_req_header("accept", "application/activity+json") |> get("/objects/#{uuid}/likes") |> json_response(200) - assert List.first(result["first"]["orderedItems"])["id"] == like.data["id"] + assert List.first(result["first"]["orderedItems"])["id"] == id + assert result["type"] == "OrderedCollection" + assert result["totalItems"] == 1 + refute result["first"]["next"] + end + + test "it does not crash when page number is exceeded total pages", %{conn: conn, uuid: uuid} do + result = + conn + |> put_req_header("accept", "application/activity+json") + |> get("/objects/#{uuid}/likes?page=2") + |> json_response(200) + + assert result["type"] == "OrderedCollectionPage" + assert result["totalItems"] == 1 + refute result["next"] + assert Enum.empty?(result["orderedItems"]) + end + + test "it contains the next key when likes count is more than 10", %{conn: conn} do + note = insert(:note_activity) + insert_list(11, :like_activity, note_activity: note) + + uuid = + note + |> Object.normalize() + |> Map.get(:data) + |> Map.get("id") + |> String.split("/") + |> List.last() + + result = + conn + |> put_req_header("accept", "application/activity+json") + |> get("/objects/#{uuid}/likes?page=1") + |> json_response(200) + + assert result["totalItems"] == 11 + assert length(result["orderedItems"]) == 10 + assert result["next"] end end diff --git a/test/web/activity_pub/activity_pub_test.exs b/test/web/activity_pub/activity_pub_test.exs index 1c0b274cb..d723f331f 100644 --- a/test/web/activity_pub/activity_pub_test.exs +++ b/test/web/activity_pub/activity_pub_test.exs @@ -677,14 +677,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do assert object.data["likes"] == [user.ap_id] assert object.data["like_count"] == 1 - [note_activity] = Activity.get_all_create_by_object_ap_id(object.data["id"]) - assert note_activity.data["object"]["like_count"] == 1 - {:ok, _like_activity, object} = ActivityPub.like(user_two, object) assert object.data["like_count"] == 2 - - [note_activity] = Activity.get_all_create_by_object_ap_id(object.data["id"]) - assert note_activity.data["object"]["like_count"] == 2 end end @@ -1128,4 +1122,65 @@ defmodule 
Pleroma.Web.ActivityPub.ActivityPubTest do assert result.id == activity.id end end + + describe "fetch_follow_information_for_user" do + test "syncronizes following/followers counters" do + user = + insert(:user, + local: false, + follower_address: "http://localhost:4001/users/fuser2/followers", + following_address: "http://localhost:4001/users/fuser2/following" + ) + + {:ok, info} = ActivityPub.fetch_follow_information_for_user(user) + assert info.follower_count == 527 + assert info.following_count == 267 + end + + test "detects hidden followers" do + mock(fn env -> + case env.url do + "http://localhost:4001/users/masto_closed/followers?page=1" -> + %Tesla.Env{status: 403, body: ""} + + _ -> + apply(HttpRequestMock, :request, [env]) + end + end) + + user = + insert(:user, + local: false, + follower_address: "http://localhost:4001/users/masto_closed/followers", + following_address: "http://localhost:4001/users/masto_closed/following" + ) + + {:ok, info} = ActivityPub.fetch_follow_information_for_user(user) + assert info.hide_followers == true + assert info.hide_follows == false + end + + test "detects hidden follows" do + mock(fn env -> + case env.url do + "http://localhost:4001/users/masto_closed/following?page=1" -> + %Tesla.Env{status: 403, body: ""} + + _ -> + apply(HttpRequestMock, :request, [env]) + end + end) + + user = + insert(:user, + local: false, + follower_address: "http://localhost:4001/users/masto_closed/followers", + following_address: "http://localhost:4001/users/masto_closed/following" + ) + + {:ok, info} = ActivityPub.fetch_follow_information_for_user(user) + assert info.hide_followers == false + assert info.hide_follows == true + end + end end diff --git a/test/web/activity_pub/transmogrifier_test.exs b/test/web/activity_pub/transmogrifier_test.exs index a1f5f6e36..e7498e005 100644 --- a/test/web/activity_pub/transmogrifier_test.exs +++ b/test/web/activity_pub/transmogrifier_test.exs @@ -1373,32 +1373,4 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do refute recipient.follower_address in fixed_object["to"] end end - - test "update_following_followers_counters/1" do - user1 = - insert(:user, - local: false, - follower_address: "http://localhost:4001/users/masto_closed/followers", - following_address: "http://localhost:4001/users/masto_closed/following" - ) - - user2 = - insert(:user, - local: false, - follower_address: "http://localhost:4001/users/fuser2/followers", - following_address: "http://localhost:4001/users/fuser2/following" - ) - - Transmogrifier.update_following_followers_counters(user1) - Transmogrifier.update_following_followers_counters(user2) - - %{follower_count: followers, following_count: following} = User.get_cached_user_info(user1) - assert followers == 437 - assert following == 152 - - %{follower_count: followers, following_count: following} = User.get_cached_user_info(user2) - - assert followers == 527 - assert following == 267 - end end diff --git a/test/web/admin_api/admin_api_controller_test.exs b/test/web/admin_api/admin_api_controller_test.exs index 824ad23e6..bcbc18639 100644 --- a/test/web/admin_api/admin_api_controller_test.exs +++ b/test/web/admin_api/admin_api_controller_test.exs @@ -1914,6 +1914,38 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do ] } end + + test "delete part of settings by atom subkeys", %{conn: conn} do + config = + insert(:config, + key: "keyaa1", + value: :erlang.term_to_binary(subkey1: "val1", subkey2: "val2", subkey3: "val3") + ) + + conn = + post(conn, "/api/pleroma/admin/config", %{ + configs: 
[ + %{ + group: config.group, + key: config.key, + subkeys: [":subkey1", ":subkey3"], + delete: "true" + } + ] + }) + + assert( + json_response(conn, 200) == %{ + "configs" => [ + %{ + "group" => "pleroma", + "key" => "keyaa1", + "value" => [%{"tuple" => [":subkey2", "val2"]}] + } + ] + } + ) + end end describe "config mix tasks run" do @@ -1922,7 +1954,10 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do temp_file = "config/test.exported_from_db.secret.exs" + Mix.shell(Mix.Shell.Quiet) + on_exit(fn -> + Mix.shell(Mix.Shell.IO) :ok = File.rm(temp_file) end) diff --git a/test/web/common_api/common_api_utils_test.exs b/test/web/common_api/common_api_utils_test.exs index 4b5666c29..5989d7d29 100644 --- a/test/web/common_api/common_api_utils_test.exs +++ b/test/web/common_api/common_api_utils_test.exs @@ -306,7 +306,6 @@ defmodule Pleroma.Web.CommonAPI.UtilsTest do mentions = [mentioned_user.ap_id] {to, cc} = Utils.get_to_and_cc(user, mentions, nil, "private") - assert length(to) == 2 assert length(cc) == 0 @@ -380,4 +379,222 @@ defmodule Pleroma.Web.CommonAPI.UtilsTest do assert like.data["object"] == activity.data["object"] end end + + describe "to_master_date/1" do + test "removes microseconds from date (NaiveDateTime)" do + assert Utils.to_masto_date(~N[2015-01-23 23:50:07.123]) == "2015-01-23T23:50:07.000Z" + end + + test "removes microseconds from date (String)" do + assert Utils.to_masto_date("2015-01-23T23:50:07.123Z") == "2015-01-23T23:50:07.000Z" + end + + test "returns empty string when date invalid" do + assert Utils.to_masto_date("2015-01?23T23:50:07.123Z") == "" + end + end + + describe "conversation_id_to_context/1" do + test "returns id" do + object = insert(:note) + assert Utils.conversation_id_to_context(object.id) == object.data["id"] + end + + test "returns error if object not found" do + assert Utils.conversation_id_to_context("123") == {:error, "No such conversation"} + end + end + + describe "maybe_notify_mentioned_recipients/2" do + test "returns recipients when activity is not `Create`" do + activity = insert(:like_activity) + assert Utils.maybe_notify_mentioned_recipients(["test"], activity) == ["test"] + end + + test "returns recipients from tag" do + user = insert(:user) + + object = + insert(:note, + user: user, + data: %{ + "tag" => [ + %{"type" => "Hashtag"}, + "", + %{"type" => "Mention", "href" => "https://testing.pleroma.lol/users/lain"}, + %{"type" => "Mention", "href" => "https://shitposter.club/user/5381"}, + %{"type" => "Mention", "href" => "https://shitposter.club/user/5381"} + ] + } + ) + + activity = insert(:note_activity, user: user, note: object) + + assert Utils.maybe_notify_mentioned_recipients(["test"], activity) == [ + "test", + "https://testing.pleroma.lol/users/lain", + "https://shitposter.club/user/5381" + ] + end + + test "returns recipients when object is map" do + user = insert(:user) + object = insert(:note, user: user) + + activity = + insert(:note_activity, + user: user, + note: object, + data_attrs: %{ + "object" => %{ + "tag" => [ + %{"type" => "Hashtag"}, + "", + %{"type" => "Mention", "href" => "https://testing.pleroma.lol/users/lain"}, + %{"type" => "Mention", "href" => "https://shitposter.club/user/5381"}, + %{"type" => "Mention", "href" => "https://shitposter.club/user/5381"} + ] + } + } + ) + + Pleroma.Repo.delete(object) + + assert Utils.maybe_notify_mentioned_recipients(["test"], activity) == [ + "test", + "https://testing.pleroma.lol/users/lain", + "https://shitposter.club/user/5381" + ] + end + + test "returns 
recipients when object not found" do + user = insert(:user) + object = insert(:note, user: user) + + activity = insert(:note_activity, user: user, note: object) + Pleroma.Repo.delete(object) + + assert Utils.maybe_notify_mentioned_recipients(["test-test"], activity) == [ + "test-test" + ] + end + end + + describe "attachments_from_ids_descs/2" do + test "returns [] when attachment ids is empty" do + assert Utils.attachments_from_ids_descs([], "{}") == [] + end + + test "returns list attachments with desc" do + object = insert(:note) + desc = Jason.encode!(%{object.id => "test-desc"}) + + assert Utils.attachments_from_ids_descs(["#{object.id}", "34"], desc) == [ + Map.merge(object.data, %{"name" => "test-desc"}) + ] + end + end + + describe "attachments_from_ids/1" do + test "returns attachments with descs" do + object = insert(:note) + desc = Jason.encode!(%{object.id => "test-desc"}) + + assert Utils.attachments_from_ids(%{ + "media_ids" => ["#{object.id}"], + "descriptions" => desc + }) == [ + Map.merge(object.data, %{"name" => "test-desc"}) + ] + end + + test "returns attachments without descs" do + object = insert(:note) + assert Utils.attachments_from_ids(%{"media_ids" => ["#{object.id}"]}) == [object.data] + end + + test "returns [] when not pass media_ids" do + assert Utils.attachments_from_ids(%{}) == [] + end + end + + describe "maybe_add_list_data/3" do + test "adds list params when found user list" do + user = insert(:user) + {:ok, %Pleroma.List{} = list} = Pleroma.List.create("title", user) + + assert Utils.maybe_add_list_data(%{additional: %{}, object: %{}}, user, {:list, list.id}) == + %{ + additional: %{"bcc" => [list.ap_id], "listMessage" => list.ap_id}, + object: %{"listMessage" => list.ap_id} + } + end + + test "returns original params when list not found" do + user = insert(:user) + {:ok, %Pleroma.List{} = list} = Pleroma.List.create("title", insert(:user)) + + assert Utils.maybe_add_list_data(%{additional: %{}, object: %{}}, user, {:list, list.id}) == + %{additional: %{}, object: %{}} + end + end + + describe "make_note_data/11" do + test "returns note data" do + user = insert(:user) + note = insert(:note) + user2 = insert(:user) + user3 = insert(:user) + + assert Utils.make_note_data( + user.ap_id, + [user2.ap_id], + "2hu", + "

This is :moominmamma: note", + [], + note.id, + [name: "jimm"], + "test summary", + [user3.ap_id], + false, + %{"custom_tag" => "test"} + ) == %{ + "actor" => user.ap_id, + "attachment" => [], + "cc" => [user3.ap_id], + "content" => "This is :moominmamma: note", + "context" => "2hu", + "sensitive" => false, + "summary" => "test summary", + "tag" => ["jimm"], + "to" => [user2.ap_id], + "type" => "Note", + "custom_tag" => "test" + } + end + end + + describe "maybe_add_attachments/3" do + test "returns parsed results when no_links is true" do + assert Utils.maybe_add_attachments( + {"test", [], ["tags"]}, + [], + true + ) == {"test", [], ["tags"]} + end + + test "adds attachments to parsed results" do + attachment = %{"url" => [%{"href" => "SakuraPM.png"}]} + + assert Utils.maybe_add_attachments( + {"test", [], ["tags"]}, + [attachment], + false + ) == { + "test
SakuraPM.png", + [], + ["tags"] + } + end + end end diff --git a/test/web/federator_test.exs b/test/web/federator_test.exs index ebe962da2..d3a28d50e 100644 --- a/test/web/federator_test.exs +++ b/test/web/federator_test.exs @@ -238,5 +238,21 @@ defmodule Pleroma.Web.FederatorTest do assert {:ok, job} = Federator.incoming_ap_doc(params) assert :error = ObanHelpers.perform(job) end + + test "it does not crash if MRF rejects the post" do + policies = Pleroma.Config.get([:instance, :rewrite_policy]) + mrf_keyword_policy = Pleroma.Config.get(:mrf_keyword) + Pleroma.Config.put([:mrf_keyword, :reject], ["lain"]) + Pleroma.Config.put([:instance, :rewrite_policy], Pleroma.Web.ActivityPub.MRF.KeywordPolicy) + + params = + File.read!("test/fixtures/mastodon-post-activity.json") + |> Poison.decode!() + + assert Federator.incoming_ap_doc(params) == :error + + Pleroma.Config.put([:instance, :rewrite_policy], policies) + Pleroma.Config.put(:mrf_keyword, mrf_keyword_policy) + end end end diff --git a/test/web/mastodon_api/mastodon_api_controller_test.exs b/test/web/mastodon_api/mastodon_api_controller_test.exs index 66016c886..e49c4cc22 100644 --- a/test/web/mastodon_api/mastodon_api_controller_test.exs +++ b/test/web/mastodon_api/mastodon_api_controller_test.exs @@ -1671,40 +1671,6 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIControllerTest do object = Repo.get(Object, media["id"]) assert object.data["actor"] == User.ap_id(conn.assigns[:user]) end - - test "returns proxied url when media proxy is enabled", %{conn: conn, image: image} do - Pleroma.Config.put([Pleroma.Upload, :base_url], "https://media.pleroma.social") - - proxy_url = "https://cache.pleroma.social" - Pleroma.Config.put([:media_proxy, :enabled], true) - Pleroma.Config.put([:media_proxy, :base_url], proxy_url) - - media = - conn - |> post("/api/v1/media", %{"file" => image}) - |> json_response(:ok) - - assert String.starts_with?(media["url"], proxy_url) - end - - test "returns media url when proxy is enabled but media url is whitelisted", %{ - conn: conn, - image: image - } do - media_url = "https://media.pleroma.social" - Pleroma.Config.put([Pleroma.Upload, :base_url], media_url) - - Pleroma.Config.put([:media_proxy, :enabled], true) - Pleroma.Config.put([:media_proxy, :base_url], "https://cache.pleroma.social") - Pleroma.Config.put([:media_proxy, :whitelist], ["media.pleroma.social"]) - - media = - conn - |> post("/api/v1/media", %{"file" => image}) - |> json_response(:ok) - - assert String.starts_with?(media["url"], media_url) - end end describe "locked accounts" do diff --git a/test/web/mastodon_api/search_controller_test.exs b/test/web/mastodon_api/search_controller_test.exs index 043b96c14..49c79ff0a 100644 --- a/test/web/mastodon_api/search_controller_test.exs +++ b/test/web/mastodon_api/search_controller_test.exs @@ -95,6 +95,18 @@ defmodule Pleroma.Web.MastodonAPI.SearchControllerTest do assert user_three.nickname in result_ids end + + test "returns account if query contains a space", %{conn: conn} do + user = insert(:user, %{nickname: "shp@shitposter.club"}) + + results = + conn + |> assign(:user, user) + |> get("/api/v1/accounts/search", %{"q" => "shp@shitposter.club xxx "}) + |> json_response(200) + + assert length(results) == 1 + end end describe ".search" do diff --git a/test/web/media_proxy/media_proxy_test.exs b/test/web/media_proxy/media_proxy_test.exs index edbbf9b66..0c94755df 100644 --- a/test/web/media_proxy/media_proxy_test.exs +++ b/test/web/media_proxy/media_proxy_test.exs @@ -171,21 +171,6 @@ defmodule 
Pleroma.Web.MediaProxyTest do encoded = url(url) assert decode_result(encoded) == url end - - test "does not change whitelisted urls" do - upload_config = Pleroma.Config.get([Pleroma.Upload]) - media_url = "https://media.pleroma.social" - Pleroma.Config.put([Pleroma.Upload, :base_url], media_url) - Pleroma.Config.put([:media_proxy, :whitelist], ["media.pleroma.social"]) - Pleroma.Config.put([:media_proxy, :base_url], "https://cache.pleroma.social") - - url = "#{media_url}/static/logo.png" - encoded = url(url) - - assert String.starts_with?(encoded, media_url) - - Pleroma.Config.put([Pleroma.Upload], upload_config) - end end describe "when disabled" do @@ -215,12 +200,43 @@ defmodule Pleroma.Web.MediaProxyTest do decoded end - test "mediaproxy whitelist" do - Pleroma.Config.put([:media_proxy, :enabled], true) - Pleroma.Config.put([:media_proxy, :whitelist], ["google.com", "feld.me"]) - url = "https://feld.me/foo.png" + describe "whitelist" do + setup do + Pleroma.Config.put([:media_proxy, :enabled], true) + :ok + end - unencoded = url(url) - assert unencoded == url + test "mediaproxy whitelist" do + Pleroma.Config.put([:media_proxy, :whitelist], ["google.com", "feld.me"]) + url = "https://feld.me/foo.png" + + unencoded = url(url) + assert unencoded == url + end + + test "does not change whitelisted urls" do + Pleroma.Config.put([:media_proxy, :whitelist], ["mycdn.akamai.com"]) + Pleroma.Config.put([:media_proxy, :base_url], "https://cache.pleroma.social") + + media_url = "https://mycdn.akamai.com" + + url = "#{media_url}/static/logo.png" + encoded = url(url) + + assert String.starts_with?(encoded, media_url) + end + + test "ensure Pleroma.Upload base_url is always whitelisted" do + upload_config = Pleroma.Config.get([Pleroma.Upload]) + media_url = "https://media.pleroma.social" + Pleroma.Config.put([Pleroma.Upload, :base_url], media_url) + + url = "#{media_url}/static/logo.png" + encoded = url(url) + + assert String.starts_with?(encoded, media_url) + + Pleroma.Config.put([Pleroma.Upload], upload_config) + end end end diff --git a/test/web/ostatus/ostatus_test.exs b/test/web/ostatus/ostatus_test.exs index f8d389020..803a97695 100644 --- a/test/web/ostatus/ostatus_test.exs +++ b/test/web/ostatus/ostatus_test.exs @@ -199,7 +199,7 @@ defmodule Pleroma.Web.OStatusTest do assert retweeted_activity.data["type"] == "Create" assert retweeted_activity.data["actor"] == user.ap_id assert retweeted_activity.local - assert retweeted_activity.data["object"]["announcement_count"] == 1 + assert Object.normalize(retweeted_activity).data["announcement_count"] == 1 end test "handle incoming retweets - Mastodon, salmon" do diff --git a/test/web/rich_media/parsers/twitter_card_test.exs b/test/web/rich_media/parsers/twitter_card_test.exs new file mode 100644 index 000000000..f8e1c9b40 --- /dev/null +++ b/test/web/rich_media/parsers/twitter_card_test.exs @@ -0,0 +1,69 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do + use ExUnit.Case, async: true + alias Pleroma.Web.RichMedia.Parsers.TwitterCard + + test "returns error when html not contains twitter card" do + assert TwitterCard.parse("", %{}) == {:error, "No twitter card metadata found"} + end + + test "parses twitter card with only name attributes" do + html = File.read!("test/fixtures/nypd-facial-recognition-children-teenagers3.html") + + assert TwitterCard.parse(html, %{}) == + {:ok, + %{ + "app:id:googleplay": 
"com.nytimes.android", + "app:name:googleplay": "NYTimes", + "app:url:googleplay": "nytimes://reader/id/100000006583622", + site: nil, + title: + "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database. - The New York Times" + }} + end + + test "parses twitter card with only property attributes" do + html = File.read!("test/fixtures/nypd-facial-recognition-children-teenagers2.html") + + assert TwitterCard.parse(html, %{}) == + {:ok, + %{ + card: "summary_large_image", + description: + "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.", + image: + "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg", + "image:alt": "", + title: + "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.", + url: + "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html" + }} + end + + test "parses twitter card with name & property attributes" do + html = File.read!("test/fixtures/nypd-facial-recognition-children-teenagers.html") + + assert TwitterCard.parse(html, %{}) == + {:ok, + %{ + "app:id:googleplay": "com.nytimes.android", + "app:name:googleplay": "NYTimes", + "app:url:googleplay": "nytimes://reader/id/100000006583622", + card: "summary_large_image", + description: + "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.", + image: + "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg", + "image:alt": "", + site: nil, + title: + "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.", + url: + "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html" + }} + end +end