Refactor code to comply with credo suggestions

Author: dtluna
Date: 2017-04-27 16:18:50 +03:00
Parent: fb5cebc1b5
Commit: 6cf7c13228
21 changed files with 318 additions and 157 deletions

.credo.exs (new file)

@ -0,0 +1,138 @@
# This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any config using `mix credo -C <name>`. If no config name is given
# "default" is used.
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
included: ["lib/", "src/", "web/", "apps/"],
excluded: [~r"/_build/", ~r"/deps/"]
},
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
requires: [],
#
# Credo automatically checks for updates, like e.g. Hex does.
# You can disable this behaviour below:
check_for_updates: true,
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
strict: false,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
{Credo.Check.Consistency.ExceptionNames},
{Credo.Check.Consistency.LineEndings},
{Credo.Check.Consistency.MultiAliasImportRequireUse},
{Credo.Check.Consistency.ParameterPatternMatching},
{Credo.Check.Consistency.SpaceAroundOperators},
{Credo.Check.Consistency.SpaceInParentheses},
{Credo.Check.Consistency.TabsOrSpaces},
# For some checks, like AliasUsage, you can only customize the priority
# Priority values are: `low, normal, high, higher`
{Credo.Check.Design.AliasUsage, priority: :low},
# For others you can set parameters
# If you don't want the `setup` and `test` macro calls in ExUnit tests
# or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
# set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
{Credo.Check.Design.DuplicatedCode, excluded_macros: []},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
{Credo.Check.Design.TagTODO, exit_status: 2},
{Credo.Check.Design.TagFIXME},
{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.LargeNumbers},
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 100},
{Credo.Check.Readability.ModuleAttributeNames},
{Credo.Check.Readability.ModuleDoc, false},
{Credo.Check.Readability.ModuleNames},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs},
{Credo.Check.Readability.ParenthesesInCondition},
{Credo.Check.Readability.PredicateFunctionNames},
{Credo.Check.Readability.PreferImplicitTry},
{Credo.Check.Readability.RedundantBlankLines},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.TrailingBlankLine},
{Credo.Check.Readability.TrailingWhiteSpace},
{Credo.Check.Readability.VariableNames},
{Credo.Check.Readability.Semicolons},
{Credo.Check.Readability.SpaceAfterCommas},
{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
{Credo.Check.Refactor.FunctionArity},
{Credo.Check.Refactor.MatchInCondition},
{Credo.Check.Refactor.NegatedConditionsInUnless},
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart},
{Credo.Check.Refactor.UnlessWithElse},
{Credo.Check.Warning.BoolOperationOnSameValues},
{Credo.Check.Warning.IExPry},
{Credo.Check.Warning.IoInspect},
{Credo.Check.Warning.LazyLogging},
{Credo.Check.Warning.OperationOnSameValues},
{Credo.Check.Warning.OperationWithConstantResult},
{Credo.Check.Warning.UnusedEnumOperation},
{Credo.Check.Warning.UnusedFileOperation},
{Credo.Check.Warning.UnusedKeywordOperation},
{Credo.Check.Warning.UnusedListOperation},
{Credo.Check.Warning.UnusedPathOperation},
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},
# Controversial and experimental checks (opt-in, just remove `, false`)
#
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
# Deprecated checks (these will be deleted after a grace period)
{Credo.Check.Readability.Specs, false},
{Credo.Check.Warning.NameRedeclarationByAssignment, false},
{Credo.Check.Warning.NameRedeclarationByCase, false},
{Credo.Check.Warning.NameRedeclarationByDef, false},
{Credo.Check.Warning.NameRedeclarationByFn, false},
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}
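
The comments in this config describe three forms a check entry can take: bare, disabled, and parameterized. For illustration, the three forms side by side, with entries taken from the list above:

# run a check with its defaults
{Credo.Check.Readability.TrailingWhiteSpace},
# disable a check entirely
{Credo.Check.Readability.ModuleDoc, false},
# tune a check's parameters
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 100},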

@ -1,4 +1,5 @@
defmodule Pleroma.Plugs.AuthenticationPlug do
alias Comeonin.Pbkdf2
import Plug.Conn
def init(options) do
@ -25,12 +26,12 @@ defp verify(%{id: id} = user, _password, id) do
end
defp verify(nil, _password, _user_id) do
Comeonin.Pbkdf2.dummy_checkpw
Pbkdf2.dummy_checkpw
:error
end
defp verify(user, password, _user_id) do
if Comeonin.Pbkdf2.checkpw(password, user.password_hash) do
if Pbkdf2.checkpw(password, user.password_hash) do
{:ok, user}
else
:error
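
This file adds alias Comeonin.Pbkdf2 and shortens the checkpw and dummy_checkpw calls accordingly, the pattern Credo.Check.Design.AliasUsage suggests for nested modules that are referenced repeatedly. A minimal sketch of the same pattern; the wrapper module and function names here are illustrative only:

defmodule PasswordCheck do
  alias Comeonin.Pbkdf2

  # before the alias, this read Comeonin.Pbkdf2.checkpw(password, hash)
  def valid?(password, hash), do: Pbkdf2.checkpw(password, hash)
end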

@ -1,6 +1,8 @@
defmodule Pleroma.Upload do
alias Ecto.UUID
alias Pleroma.Web
def store(%Plug.Upload{} = file) do
uuid = Ecto.UUID.generate
uuid = UUID.generate
upload_folder = Path.join(upload_path(), uuid)
File.mkdir_p!(upload_folder)
result_file = Path.join(upload_folder, file.filename)
@ -21,7 +23,7 @@ def store(%Plug.Upload{} = file) do
def store(%{"img" => "data:image/" <> image_data}) do
parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data)
data = Base.decode64!(parsed["data"])
uuid = Ecto.UUID.generate
uuid = UUID.generate
upload_folder = Path.join(upload_path(), uuid)
File.mkdir_p!(upload_folder)
filename = Base.encode16(:crypto.hash(:sha256, data)) <> ".#{parsed["filetype"]}"
@ -44,11 +46,11 @@ def store(%{"img" => "data:image/" <> image_data}) do
end
defp upload_path do
Application.get_env(:pleroma, Pleroma.Upload)
|> Keyword.fetch!(:uploads)
settings = Application.get_env(:pleroma, Pleroma.Upload)
Keyword.fetch!(settings, :uploads)
end
defp url_for(file) do
"#{Pleroma.Web.base_url()}/media/#{file}"
"#{Web.base_url()}/media/#{file}"
end
end
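
In upload_path/0 above, the pipeline that began with Application.get_env/2 is replaced by an intermediate variable and a direct call; Credo.Check.Refactor.PipeChainStart flags pipe chains whose first element is itself a function call. The before and after, as in the diff:

# flagged: the chain starts with a function call
Application.get_env(:pleroma, Pleroma.Upload)
|> Keyword.fetch!(:uploads)

# preferred: bind the value first, then call the function directly
settings = Application.get_env(:pleroma, Pleroma.Upload)
Keyword.fetch!(settings, :uploads)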

@ -1,8 +1,8 @@
defmodule Pleroma.User do
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
alias Pleroma.{Repo, User, Activity, Object}
import Ecto.{Changeset, Query}
alias Pleroma.{Repo, User, Object, Web}
alias Comeonin.Pbkdf2
schema "users" do
field :bio, :string
@ -27,7 +27,7 @@ def avatar_url(user) do
end
def ap_id(%User{nickname: nickname}) do
"#{Pleroma.Web.base_url}/users/#{nickname}"
"#{Web.base_url}/users/#{nickname}"
end
def ap_followers(%User{} = user) do
@ -66,7 +66,7 @@ def register_changeset(struct, params \\ %{}) do
|> validate_format(:nickname, ~r/^[a-zA-Z\d]+$/)
if changeset.valid? do
hashed = Comeonin.Pbkdf2.hashpwsalt(changeset.changes[:password])
hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
followers = User.ap_followers(%User{nickname: changeset.changes[:nickname]})
changeset
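
Pleroma.User now groups its Ecto imports into a single directive, the multi-alias and multi-import style whose consistent use Credo.Check.Consistency.MultiAliasImportRequireUse checks. Side by side:

# before: one directive per module
import Ecto.Changeset
import Ecto.Query

# after: grouped form, as in this commit
import Ecto.{Changeset, Query}
alias Pleroma.{Repo, User, Object, Web}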

@ -1,6 +1,6 @@
defmodule Pleroma.Web.ActivityPub.ActivityPub do
alias Pleroma.Repo
alias Pleroma.{Activity, Object, Upload, User}
alias Pleroma.{Activity, Repo, Object, Upload, User, Web}
alias Ecto.{Changeset, UUID}
import Ecto.Query
def insert(map) when is_map(map) do
@ -44,7 +44,7 @@ def like(%User{ap_id: ap_id} = user, %Object{data: %{ "id" => id}} = object) do
|> Map.put("like_count", length(likes))
|> Map.put("likes", likes)
changeset = Ecto.Changeset.change(object, data: new_data)
changeset = Changeset.change(object, data: new_data)
{:ok, object} = Repo.update(changeset)
update_object_in_activities(object)
@ -58,7 +58,7 @@ defp update_object_in_activities(%{data: %{"id" => id}} = object) do
relevant_activities = Activity.all_by_object_ap_id(id)
Enum.map(relevant_activities, fn (activity) ->
new_activity_data = activity.data |> Map.put("object", object.data)
changeset = Ecto.Changeset.change(activity, data: new_activity_data)
changeset = Changeset.change(activity, data: new_activity_data)
Repo.update(changeset)
end)
end
@ -79,7 +79,7 @@ def unlike(%User{ap_id: ap_id}, %Object{data: %{ "id" => id}} = object) do
|> Map.put("like_count", length(likes))
|> Map.put("likes", likes)
changeset = Ecto.Changeset.change(object, data: new_data)
changeset = Changeset.change(object, data: new_data)
{:ok, object} = Repo.update(changeset)
update_object_in_activities(object)
@ -103,7 +103,7 @@ def generate_object_id do
end
def generate_id(type) do
"#{Pleroma.Web.base_url()}/#{type}/#{Ecto.UUID.generate}"
"#{Web.base_url()}/#{type}/#{UUID.generate}"
end
def fetch_public_activities(opts \\ %{}) do
@ -140,8 +140,7 @@ def fetch_activities(recipients, opts \\ %{}) do
query
end
Repo.all(query)
|> Enum.reverse
Enum.reverse(Repo.all(query))
end
def announce(%User{ap_id: ap_id} = user, %Object{data: %{"id" => id}} = object) do
@ -160,7 +159,7 @@ def announce(%User{ap_id: ap_id} = user, %Object{data: %{"id" => id}} = object)
|> Map.put("announcement_count", length(announcements))
|> Map.put("announcements", announcements)
changeset = Ecto.Changeset.change(object, data: new_data)
changeset = Changeset.change(object, data: new_data)
{:ok, object} = Repo.update(changeset)
update_object_in_activities(object)

@ -8,7 +8,8 @@ def to_simple_form(user, activities, users) do
h = fn(str) -> [to_charlist(str)] end
entries = Enum.map(activities, fn(activity) ->
entries = activities
|> Enum.map(fn(activity) ->
{:entry, ActivityRepresenter.to_simple_form(activity, user)}
end)
|> Enum.filter(fn ({_, form}) -> form end)

@ -16,7 +16,8 @@ def feed(conn, %{"nickname" => nickname}) do
activities = query
|> Repo.all
response = FeedRepresenter.to_simple_form(user, activities, [user])
response = user
|> FeedRepresenter.to_simple_form(activities, [user])
|> :xmerl.export_simple(:xmerl_xml)
|> to_string
@ -25,7 +26,7 @@ def feed(conn, %{"nickname" => nickname}) do
|> send_resp(200, response)
end
def temp(conn, params) do
def temp(_conn, params) do
IO.inspect(params)
end
end
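
temp/2 never touches its first argument, so it is renamed to _conn; the Elixir compiler warns about unused variables unless their names begin with an underscore. In short:

# warns: variable "conn" is unused
#   def temp(conn, params), do: IO.inspect(params)

# silent: the underscore marks the argument as intentionally unused
def temp(_conn, params), do: IO.inspect(params)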

@ -1,7 +1,7 @@
defmodule Pleroma.Web.Router do
use Pleroma.Web, :router
alias Pleroma.{Repo, User}
alias Pleroma.{Repo, User, Web.Router}
def user_fetcher(username) do
{:ok, Repo.get_by(User, %{nickname: username})}
@ -10,13 +10,13 @@ def user_fetcher(username) do
pipeline :api do
plug :accepts, ["json"]
plug :fetch_session
plug Pleroma.Plugs.AuthenticationPlug, %{fetcher: &Pleroma.Web.Router.user_fetcher/1, optional: true}
plug Pleroma.Plugs.AuthenticationPlug, %{fetcher: &Router.user_fetcher/1, optional: true}
end
pipeline :authenticated_api do
plug :accepts, ["json"]
plug :fetch_session
plug Pleroma.Plugs.AuthenticationPlug, %{fetcher: &Pleroma.Web.Router.user_fetcher/1}
plug Pleroma.Plugs.AuthenticationPlug, %{fetcher: &Router.user_fetcher/1}
end
pipeline :well_known do

@ -10,7 +10,6 @@ def decode(salmon) do
{:xmlObj, :string, encoding} = :xmerl_xpath.string('string(//me:encoding[1])', doc)
{:xmlObj, :string, type} = :xmerl_xpath.string('string(//me:data[1]/@type)', doc)
{:ok, data} = Base.url_decode64(to_string(data), ignore: :whitespace)
{:ok, sig} = Base.url_decode64(to_string(sig), ignore: :whitespace)
alg = to_string(alg)

@ -1,17 +1,17 @@
defmodule Pleroma.Web.TwitterAPI.Representers.ActivityRepresenter do
use Pleroma.Web.TwitterAPI.Representers.BaseRepresenter
alias Pleroma.Web.TwitterAPI.Representers.{UserRepresenter, ObjectRepresenter}
alias Pleroma.Activity
alias Pleroma.{Activity, User}
alias Calendar.Strftime
defp user_by_ap_id(user_list, ap_id) do
Enum.find(user_list, fn (%{ap_id: user_id}) -> ap_id == user_id end)
end
def to_map(%Activity{data: %{"type" => "Announce", "actor" => actor}} = activity, %{users: users, announced_activity: announced_activity} = opts) do
def to_map(%Activity{data: %{"type" => "Announce", "actor" => actor, "published" => created_at}} = activity,
%{users: users, announced_activity: announced_activity} = opts) do
user = user_by_ap_id(users, actor)
created_at = get_in(activity.data, ["published"])
|> date_to_asctime
created_at = created_at |> date_to_asctime
text = "#{user.nickname} retweeted a status."
@ -30,16 +30,16 @@ def to_map(%Activity{data: %{"type" => "Announce", "actor" => actor}} = activity
}
end
def to_map(%Activity{data: %{"type" => "Like"}} = activity, %{user: user, liked_activity: liked_activity} = opts) do
created_at = get_in(activity.data, ["published"])
|> date_to_asctime
def to_map(%Activity{data: %{"type" => "Like", "published" => created_at}} = activity,
%{user: user, liked_activity: liked_activity} = opts) do
created_at = created_at |> date_to_asctime
text = "#{user.nickname} favorited a status."
%{
"id" => activity.id,
"user" => UserRepresenter.to_map(user, opts),
"statusnet_html" => text, # TODO: add summary
"statusnet_html" => text,
"text" => text,
"is_local" => true,
"is_post_verb" => false,
@ -49,16 +49,17 @@ def to_map(%Activity{data: %{"type" => "Like"}} = activity, %{user: user, liked_
}
end
def to_map(%Activity{data: %{"type" => "Follow"}} = activity, %{user: user} = opts) do
created_at = get_in(activity.data, ["published"])
|> date_to_asctime
def to_map(%Activity{data: %{"type" => "Follow", "published" => created_at, "object" => followed_id}} = activity, %{user: user} = opts) do
created_at = created_at |> date_to_asctime
followed = User.get_cached_by_ap_id(followed_id)
text = "#{user.nickname} started following #{followed.nickname}"
%{
"id" => activity.id,
"user" => UserRepresenter.to_map(user, opts),
"attentions" => [],
"statusnet_html" => "", # TODO: add summary
"text" => "",
"statusnet_html" => text,
"text" => text,
"is_local" => true,
"is_post_verb" => false,
"created_at" => created_at,
@ -66,14 +67,12 @@ def to_map(%Activity{data: %{"type" => "Follow"}} = activity, %{user: user} = op
}
end
def to_map(%Activity{} = activity, %{user: user} = opts) do
content = get_in(activity.data, ["object", "content"])
created_at = get_in(activity.data, ["object", "published"])
|> date_to_asctime
like_count = get_in(activity.data, ["object", "like_count"]) || 0
announcement_count = get_in(activity.data, ["object", "announcement_count"]) || 0
favorited = opts[:for] && opts[:for].ap_id in (activity.data["object"]["likes"] || [])
repeated = opts[:for] && opts[:for].ap_id in (activity.data["object"]["announcements"] || [])
def to_map(%Activity{data: %{"object" => %{"content" => content} = object}} = activity, %{user: user} = opts) do
created_at = object["published"] |> date_to_asctime
like_count = object["like_count"] || 0
announcement_count = object["announcement_count"] || 0
favorited = opts[:for] && opts[:for].ap_id in (object["likes"] || [])
repeated = opts[:for] && opts[:for].ap_id in (object["announcements"] || [])
mentions = opts[:mentioned] || []
@ -91,22 +90,34 @@ def to_map(%Activity{} = activity, %{user: user} = opts) do
"is_local" => true,
"is_post_verb" => true,
"created_at" => created_at,
"in_reply_to_status_id" => activity.data["object"]["inReplyToStatusId"],
"statusnet_conversation_id" => activity.data["object"]["statusnetConversationId"],
"attachments" => (activity.data["object"]["attachment"] || []) |> ObjectRepresenter.enum_to_list(opts),
"in_reply_to_status_id" => object["inReplyToStatusId"],
"statusnet_conversation_id" => object["statusnetConversationId"],
"attachments" => (object["attachment"] || []) |> ObjectRepresenter.enum_to_list(opts),
"attentions" => attentions,
"fave_num" => like_count,
"repeat_num" => announcement_count,
"favorited" => !!favorited,
"repeated" => !!repeated,
"favorited" => to_boolean(favorited),
"repeated" => to_boolean(repeated),
}
end
defp date_to_asctime(date) do
with {:ok, date, _offset} <- date |> DateTime.from_iso8601 do
Calendar.Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
else _e ->
""
end
end
defp to_boolean(false) do
false
end
defp to_boolean(nil) do
false
end
defp to_boolean(_) do
true
end
end
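
The double negations !!favorited and !!repeated become calls to the new to_boolean/1 clauses, satisfying Credo.Check.Refactor.DoubleBooleanNegation; both spellings collapse nil and false to false and anything else to true. For comparison, with the helper condensed to one-line clauses:

# flagged: double negation as a truthiness cast
!!nil          # => false
!!"anything"   # => true

# the helper introduced by this commit
defp to_boolean(false), do: false
defp to_boolean(nil), do: false
defp to_boolean(_), do: true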

@ -1,19 +1,19 @@
defmodule Pleroma.Web.TwitterAPI.TwitterAPI do
alias Ecto.Changeset
alias Pleroma.{User, Activity, Repo, Object}
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.{ActivityPub.ActivityPub, Websub, OStatus}
alias Pleroma.Web.TwitterAPI.Representers.{ActivityRepresenter, UserRepresenter}
import Ecto.Query
def create_status(user = %User{}, data = %{}) do
def create_status(%User{} = user, %{} = data) do
attachments = Enum.map(data["media_ids"] || [], fn (media_id) ->
Repo.get(Object, media_id).data
end)
context = ActivityPub.generate_context_id
content = HtmlSanitizeEx.strip_tags(data["status"])
|> String.replace("\n", "<br>")
content = data["status"] |> HtmlSanitizeEx.strip_tags |> String.replace("\n", "<br>")
mentions = parse_mentions(content)
@ -67,29 +67,29 @@ def create_status(user = %User{}, data = %{}) do
with {:ok, activity} <- ActivityPub.insert(activity) do
{:ok, activity} = add_conversation_id(activity)
Pleroma.Web.Websub.publish(Pleroma.Web.OStatus.feed_path(user), user, activity)
Websub.publish(OStatus.feed_path(user), user, activity)
{:ok, activity}
end
end
def fetch_friend_statuses(user, opts \\ %{}) do
ActivityPub.fetch_activities([user.ap_id | user.following], opts)
|> activities_to_statuses(%{for: user})
activities = ActivityPub.fetch_activities([user.ap_id | user.following], opts)
activities_to_statuses(activities, %{for: user})
end
def fetch_public_statuses(user, opts \\ %{}) do
ActivityPub.fetch_public_activities(opts)
|> activities_to_statuses(%{for: user})
activities = ActivityPub.fetch_public_activities(opts)
activities_to_statuses(activities, %{for: user})
end
def fetch_user_statuses(user, opts \\ %{}) do
ActivityPub.fetch_activities([], opts)
|> activities_to_statuses(%{for: user})
activities = ActivityPub.fetch_activities([], opts)
activities_to_statuses(activities, %{for: user})
end
def fetch_mentions(user, opts \\ %{}) do
ActivityPub.fetch_activities([user.ap_id], opts)
|> activities_to_statuses(%{for: user})
activities = ActivityPub.fetch_activities([user.ap_id], opts)
activities_to_statuses(activities, %{for: user})
end
def fetch_conversation(user, id) do
@ -215,15 +215,18 @@ def parse_mentions(text) do
# Modified from https://www.w3.org/TR/html5/forms.html#valid-e-mail-address
regex = ~r/@[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@?[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*/
Regex.scan(regex, text)
regex
|> Regex.scan(text)
|> List.flatten
|> Enum.uniq
|> Enum.map(fn ("@" <> match = full_match) -> {full_match, User.get_cached_by_nickname(match)} end)
|> Enum.map(fn ("@" <> match = full_match) ->
{full_match, User.get_cached_by_nickname(match)} end)
|> Enum.filter(fn ({_match, user}) -> user end)
end
def add_user_links(text, mentions) do
Enum.reduce(mentions, text, fn ({match, %User{ap_id: ap_id}}, text) -> String.replace(text, match, "<a href='#{ap_id}'>#{match}</a>") end)
Enum.reduce(mentions, text, fn ({match, %User{ap_id: ap_id}}, text) ->
String.replace(text, match, "<a href='#{ap_id}'>#{match}</a>") end)
end
defp add_conversation_id(activity) do
@ -236,10 +239,10 @@ defp add_conversation_id(activity) do
object = Object.get_by_ap_id(activity.data["object"]["id"])
changeset = Ecto.Changeset.change(object, data: data["object"])
changeset = Changeset.change(object, data: data["object"])
Repo.update(changeset)
changeset = Ecto.Changeset.change(activity, data: data)
changeset = Changeset.change(activity, data: data)
Repo.update(changeset)
end
end
@ -260,8 +263,7 @@ def register_user(params) do
{:ok, UserRepresenter.to_map(user)}
else
{:error, changeset} ->
errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
|> Poison.encode!
errors = Poison.encode!(Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end))
{:error, %{error: errors}}
end
end
@ -303,7 +305,8 @@ defp activity_to_status(%Activity{data: %{"type" => "Like"}} = activity, opts) d
user = User.get_cached_by_ap_id(actor)
[liked_activity] = Activity.all_by_object_ap_id(activity.data["object"])
ActivityRepresenter.to_map(activity, Map.merge(opts, %{user: user, liked_activity: liked_activity}))
ActivityRepresenter.to_map(activity,
Map.merge(opts, %{user: user, liked_activity: liked_activity}))
end
# For announces, fetch the announced activity and the user.
@ -313,7 +316,8 @@ defp activity_to_status(%Activity{data: %{"type" => "Announce"}} = activity, opt
[announced_activity] = Activity.all_by_object_ap_id(activity.data["object"])
announced_actor = User.get_cached_by_ap_id(announced_activity.data["actor"])
ActivityRepresenter.to_map(activity, Map.merge(opts, %{users: [user, announced_actor], announced_activity: announced_activity}))
ActivityRepresenter.to_map(activity,
Map.merge(opts, %{users: [user, announced_actor], announced_activity: announced_activity}))
end
defp activity_to_status(activity, opts) do
@ -323,7 +327,7 @@ defp activity_to_status(activity, opts) do
mentioned_users = Enum.map(activity.data["to"] || [], fn (ap_id) ->
User.get_cached_by_ap_id(ap_id)
end)
|> Enum.filter(&(&1))
mentioned_users = mentioned_users |> Enum.filter(&(&1))
ActivityRepresenter.to_map(activity, Map.merge(opts, %{user: user, mentioned: mentioned_users}))
end

@ -2,8 +2,9 @@ defmodule Pleroma.Web.TwitterAPI.Controller do
use Pleroma.Web, :controller
alias Pleroma.Web.TwitterAPI.TwitterAPI
alias Pleroma.Web.TwitterAPI.Representers.{UserRepresenter, ActivityRepresenter}
alias Pleroma.{Repo, Activity}
alias Pleroma.{Web, Repo, Activity}
alias Pleroma.Web.ActivityPub.ActivityPub
alias Ecto.Changeset
def verify_credentials(%{assigns: %{user: user}} = conn, _params) do
response = user |> UserRepresenter.to_json(%{for: user})
@ -89,7 +90,7 @@ def follow(%{assigns: %{user: user}} = conn, params) do
def unfollow(%{assigns: %{user: user}} = conn, params) do
case TwitterAPI.unfollow(user, params) do
{ :ok, user, unfollowed, } ->
{:ok, user, unfollowed} ->
response = unfollowed |> UserRepresenter.to_json(%{for: user})
conn
|> json_reply(200, response)
@ -98,7 +99,7 @@ def unfollow(%{assigns: %{user: user}} = conn, params) do
end
def fetch_status(%{assigns: %{user: user}} = conn, %{"id" => id}) do
response = TwitterAPI.fetch_status(user, id) |> Poison.encode!
response = Poison.encode!(TwitterAPI.fetch_status(user, id))
conn
|> json_reply(200, response)
@ -106,7 +107,7 @@ def fetch_status(%{assigns: %{user: user}} = conn, %{ "id" => id }) do
def fetch_conversation(%{assigns: %{user: user}} = conn, %{"id" => id}) do
id = String.to_integer(id)
response = TwitterAPI.fetch_conversation(user, id) |> Poison.encode!
response = Poison.encode!(TwitterAPI.fetch_conversation(user, id))
conn
|> json_reply(200, response)
@ -132,8 +133,8 @@ def upload_json(conn, %{"media" => media}) do
def config(conn, _params) do
response = %{
site: %{
name: Pleroma.Web.base_url,
server: Pleroma.Web.base_url,
name: Web.base_url,
server: Web.base_url,
textlimit: -1
}
}
@ -188,11 +189,10 @@ def register(conn, params) do
def update_avatar(%{assigns: %{user: user}} = conn, params) do
{:ok, object} = ActivityPub.upload(params)
change = Ecto.Changeset.change(user, %{avatar: object.data})
change = Changeset.change(user, %{avatar: object.data})
{:ok, user} = Repo.update(change)
response = UserRepresenter.to_map(user, %{for: user})
|> Poison.encode!
response = Poison.encode!(UserRepresenter.to_map(user, %{for: user}))
conn
|> json_reply(200, response)

@ -20,8 +20,7 @@ def controller do
quote do
use Phoenix.Controller, namespace: Pleroma.Web
import Plug.Conn
import Pleroma.Web.Router.Helpers
import Pleroma.Web.Gettext
import Pleroma.Web.{Gettext, Router.Helpers}
end
end
@ -33,9 +32,7 @@ def view do
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
import Pleroma.Web.Router.Helpers
import Pleroma.Web.ErrorHelpers
import Pleroma.Web.Gettext
import Pleroma.Web.{ErrorHelpers, Gettext, Router.Helpers}
end
end
@ -75,7 +72,8 @@ def base_url do
protocol = settings |> Keyword.fetch!(:protocol)
port_fragment = with {:ok, protocol_info} <- settings |> Keyword.fetch(String.to_atom(protocol)),
port_fragment = with {:ok, protocol_info} <- settings
|> Keyword.fetch(String.to_atom(protocol)),
{:ok, port} <- protocol_info |> Keyword.fetch(:port)
do
":#{port}"

@ -1,10 +1,9 @@
defmodule Pleroma.Web.WebFinger do
alias Pleroma.XmlBuilder
alias Pleroma.User
alias Pleroma.Web.OStatus
alias Pleroma.{User, XmlBuilder}
alias Pleroma.{Web, Web.OStatus}
def host_meta() do
base_url = Pleroma.Web.base_url
def host_meta do
base_url = Web.base_url
{
:XRD, %{xmlns: "http://docs.oasis-open.org/ns/xri/xrd-1.0"},
{
@ -15,7 +14,7 @@ def host_meta() do
end
def webfinger(resource) do
host = Pleroma.Web.host
host = Web.host
regex = ~r/acct:(?<username>\w+)@#{host}/
case Regex.named_captures(regex, resource) do
%{"username" => username} ->
@ -29,7 +28,7 @@ def represent_user(user) do
{
:XRD, %{xmlns: "http://docs.oasis-open.org/ns/xri/xrd-1.0"},
[
{:Subject, "acct:#{user.nickname}@#{Pleroma.Web.host}"},
{:Subject, "acct:#{user.nickname}@#{Web.host}"},
{:Alias, user.ap_id},
{:Link, %{rel: "http://schemas.google.com/g/2010#updates-from", type: "application/atom+xml", href: OStatus.feed_path(user)}}
]
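
In the first hunk of this file, host_meta drops its empty parentheses; Credo.Check.Readability.ParenthesesOnZeroArityDefs asks for zero-arity definitions to be written without them. In miniature, with the body condensed to a one-liner for illustration:

# flagged: empty parentheses on a zero-arity definition
#   def host_meta(), do: Web.base_url

# preferred, as in this commit
def host_meta, do: Web.base_url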

@ -12,7 +12,7 @@ def host_meta(conn, _params) do
end
def webfinger(conn, %{"resource" => resource}) do
{:ok, response} = Pleroma.Web.WebFinger.webfinger(resource)
{:ok, response} = WebFinger.webfinger(resource)
conn
|> put_resp_content_type("application/xrd+xml")

@ -1,4 +1,5 @@
defmodule Pleroma.Web.Websub do
alias Ecto.Changeset
alias Pleroma.Repo
alias Pleroma.Web.Websub.WebsubServerSubscription
alias Pleroma.Web.OStatus.FeedRepresenter
@ -10,7 +11,8 @@ defmodule Pleroma.Web.Websub do
def verify(subscription, getter \\ &HTTPoison.get/3) do
challenge = Base.encode16(:crypto.strong_rand_bytes(8))
lease_seconds = NaiveDateTime.diff(subscription.valid_until, subscription.updated_at) |> to_string
lease_seconds = NaiveDateTime.diff(subscription.valid_until, subscription.updated_at)
lease_seconds = lease_seconds |> to_string
params = %{
"hub.challenge": challenge,
@ -25,10 +27,10 @@ def verify(subscription, getter \\ &HTTPoison.get/3 ) do
with {:ok, response} <- getter.(url, [], [params: params]),
^challenge <- response.body
do
changeset = Ecto.Changeset.change(subscription, %{state: "active"})
changeset = Changeset.change(subscription, %{state: "active"})
Repo.update(changeset)
else _e ->
changeset = Ecto.Changeset.change(subscription, %{state: "rejected"})
changeset = Changeset.change(subscription, %{state: "rejected"})
{:ok, subscription} = Repo.update(changeset)
{:error, subscription}
end
@ -39,10 +41,11 @@ def publish(topic, user, activity) do
where: sub.topic == ^topic and sub.state == "active"
subscriptions = Repo.all(query)
Enum.each(subscriptions, fn(sub) ->
response = FeedRepresenter.to_simple_form(user, [activity], [user])
response = user
|> FeedRepresenter.to_simple_form([activity], [user])
|> :xmerl.export_simple(:xmerl_xml)
signature = :crypto.hmac(:sha, sub.secret, response) |> Base.encode16
signature = Base.encode16(:crypto.hmac(:sha, sub.secret, response))
HTTPoison.post(sub.callback, response, [
{"Content-Type", "application/atom+xml"},
@ -65,10 +68,11 @@ def incoming_subscription_request(user, %{"hub.mode" => "subscribe"} = params) d
callback: callback
}
change = Ecto.Changeset.change(subscription, data)
change = Changeset.change(subscription, data)
websub = Repo.insert_or_update!(change)
change = Ecto.Changeset.change(websub, %{valid_until: NaiveDateTime.add(websub.updated_at, lease_time)})
change = Changeset.change(websub, %{valid_until:
NaiveDateTime.add(websub.updated_at, lease_time)})
websub = Repo.update!(change)
# Just spawn that for now, maybe pool later.
@ -81,7 +85,8 @@ def incoming_subscription_request(user, %{"hub.mode" => "subscribe"} = params) d
end
defp get_subscription(topic, callback) do
Repo.get_by(WebsubServerSubscription, topic: topic, callback: callback) || %WebsubServerSubscription{}
Repo.get_by(WebsubServerSubscription, topic: topic, callback: callback) ||
%WebsubServerSubscription{}
end
defp lease_time(%{"hub.lease_seconds" => lease_seconds}) do

@ -30,13 +30,13 @@ def to_xml(%NaiveDateTime{} = time) do
NaiveDateTime.to_iso8601(time)
end
def to_doc(content), do: "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" <> to_xml(content)
def to_doc(content), do: ~s(<?xml version="1.0" encoding="UTF-8"?>) <> to_xml(content)
defp make_open_tag(tag, attributes) do
attributes_string = for {attribute, value} <- attributes do
"#{attribute}=\"#{value}\""
end |> Enum.join(" ")
Enum.join([tag, attributes_string], " ") |> String.strip
[tag, attributes_string] |> Enum.join(" ") |> String.strip
end
end
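
to_doc/1 trades a double-quoted string full of escaped quotes for the ~s sigil, which Credo.Check.Readability.StringSigils recommends once a literal needs several escaped quotes. Both literals produce the same string:

# before: every inner quote must be escaped
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"

# after: the ~s sigil takes the quotes literally
~s(<?xml version="1.0" encoding="UTF-8"?>)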

@ -41,6 +41,7 @@ defp deps do
{:cachex, "~> 2.1"},
{:httpoison, "~> 0.11.1"},
{:ex_machina, "~> 2.0", only: :test},
{:credo, "~> 0.7", only: [:dev, :test]},
{:mix_test_watch, "~> 0.2", only: :dev}]
end

@ -1,4 +1,5 @@
%{"cachex": {:hex, :cachex, "2.1.0", "fad49b4e78d11c6c314e75bd8c9408f5b78cb065c047442798caed10803ee3be", [:mix], [{:eternal, "~> 1.1", [hex: :eternal, optional: false]}]},
%{"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], []},
"cachex": {:hex, :cachex, "2.1.0", "fad49b4e78d11c6c314e75bd8c9408f5b78cb065c047442798caed10803ee3be", [:mix], [{:eternal, "~> 1.1", [hex: :eternal, optional: false]}]},
"calendar": {:hex, :calendar, "0.16.1", "782327ad8bae7c797b887840dc4ddb933f05ce6e333e5b04964d7a5d5f79bde3", [:mix], [{:tzdata, "~> 0.5.8 or ~> 0.1.201603", [hex: :tzdata, optional: false]}]},
"certifi": {:hex, :certifi, "1.0.0", "1c787a85b1855ba354f0b8920392c19aa1d06b0ee1362f9141279620a5be2039", [:rebar3], []},
"comeonin": {:hex, :comeonin, "3.0.2", "8b213268a6634bd2e31a8035a963e974681d13ccc1f73f2ae664b6ac4e993c96", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, optional: false]}]},
@ -6,6 +7,7 @@
"connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], []},
"cowboy": {:hex, :cowboy, "1.1.2", "61ac29ea970389a88eca5a65601460162d370a70018afe6f949a29dca91f3bb0", [:rebar3], [{:cowlib, "~> 1.0.2", [hex: :cowlib, optional: false]}, {:ranch, "~> 1.3.2", [hex: :ranch, optional: false]}]},
"cowlib": {:hex, :cowlib, "1.0.2", "9d769a1d062c9c3ac753096f868ca121e2730b9a377de23dec0f7e08b1df84ee", [:make], []},
"credo": {:hex, :credo, "0.7.3", "9827ab04002186af1aec014a811839a06f72aaae6cd5eed3919b248c8767dbf3", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, optional: false]}]},
"db_connection": {:hex, :db_connection, "1.1.2", "2865c2a4bae0714e2213a0ce60a1b12d76a6efba0c51fbda59c9ab8d1accc7a8", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, optional: true]}]},
"decimal": {:hex, :decimal, "1.3.1", "157b3cedb2bfcb5359372a7766dd7a41091ad34578296e951f58a946fcab49c6", [:mix], []},
"deppie": {:hex, :deppie, "1.1.0", "cfb6fcee7dfb64eb78cb8505537971a0805131899326ad469ef10df04520f451", [:mix], []},