forked from AkkomaGang/akkoma
Refactor code to comply with credo suggestions
This commit is contained in:
parent fb5cebc1b5, commit 6cf7c13228
21 changed files with 318 additions and 157 deletions
.credo.exs (new file, +138 lines)
@@ -0,0 +1,138 @@
+# This file contains the configuration for Credo and you are probably reading
+# this after creating it with `mix credo.gen.config`.
+#
+# If you find anything wrong or unclear in this file, please report an
+# issue on GitHub: https://github.com/rrrene/credo/issues
+#
+%{
+  #
+  # You can have as many configs as you like in the `configs:` field.
+  configs: [
+    %{
+      #
+      # Run any config using `mix credo -C <name>`. If no config name is given
+      # "default" is used.
+      name: "default",
+      #
+      # These are the files included in the analysis:
+      files: %{
+        #
+        # You can give explicit globs or simply directories.
+        # In the latter case `**/*.{ex,exs}` will be used.
+        included: ["lib/", "src/", "web/", "apps/"],
+        excluded: [~r"/_build/", ~r"/deps/"]
+      },
+      #
+      # If you create your own checks, you must specify the source files for
+      # them here, so they can be loaded by Credo before running the analysis.
+      requires: [],
+      #
+      # Credo automatically checks for updates, like e.g. Hex does.
+      # You can disable this behaviour below:
+      check_for_updates: true,
+      #
+      # If you want to enforce a style guide and need a more traditional linting
+      # experience, you can change `strict` to `true` below:
+      strict: false,
+      #
+      # If you want to use uncolored output by default, you can change `color`
+      # to `false` below:
+      color: true,
+      #
+      # You can customize the parameters of any check by adding a second element
+      # to the tuple.
+      #
+      # To disable a check put `false` as second element:
+      #
+      #     {Credo.Check.Design.DuplicatedCode, false}
+      #
+      checks: [
+        {Credo.Check.Consistency.ExceptionNames},
+        {Credo.Check.Consistency.LineEndings},
+        {Credo.Check.Consistency.MultiAliasImportRequireUse},
+        {Credo.Check.Consistency.ParameterPatternMatching},
+        {Credo.Check.Consistency.SpaceAroundOperators},
+        {Credo.Check.Consistency.SpaceInParentheses},
+        {Credo.Check.Consistency.TabsOrSpaces},
+
+        # For some checks, like AliasUsage, you can only customize the priority
+        # Priority values are: `low, normal, high, higher`
+        {Credo.Check.Design.AliasUsage, priority: :low},
+
+        # For others you can set parameters
+
+        # If you don't want the `setup` and `test` macro calls in ExUnit tests
+        # or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
+        # set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
+        {Credo.Check.Design.DuplicatedCode, excluded_macros: []},
+
+        # You can also customize the exit_status of each check.
+        # If you don't want TODO comments to cause `mix credo` to fail, just
+        # set this value to 0 (zero).
+        {Credo.Check.Design.TagTODO, exit_status: 2},
+        {Credo.Check.Design.TagFIXME},
+
+        {Credo.Check.Readability.FunctionNames},
+        {Credo.Check.Readability.LargeNumbers},
+        {Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 100},
+        {Credo.Check.Readability.ModuleAttributeNames},
+        {Credo.Check.Readability.ModuleDoc, false},
+        {Credo.Check.Readability.ModuleNames},
+        {Credo.Check.Readability.ParenthesesOnZeroArityDefs},
+        {Credo.Check.Readability.ParenthesesInCondition},
+        {Credo.Check.Readability.PredicateFunctionNames},
+        {Credo.Check.Readability.PreferImplicitTry},
+        {Credo.Check.Readability.RedundantBlankLines},
+        {Credo.Check.Readability.StringSigils},
+        {Credo.Check.Readability.TrailingBlankLine},
+        {Credo.Check.Readability.TrailingWhiteSpace},
+        {Credo.Check.Readability.VariableNames},
+        {Credo.Check.Readability.Semicolons},
+        {Credo.Check.Readability.SpaceAfterCommas},
+
+        {Credo.Check.Refactor.DoubleBooleanNegation},
+        {Credo.Check.Refactor.CondStatements},
+        {Credo.Check.Refactor.CyclomaticComplexity},
+        {Credo.Check.Refactor.FunctionArity},
+        {Credo.Check.Refactor.MatchInCondition},
+        {Credo.Check.Refactor.NegatedConditionsInUnless},
+        {Credo.Check.Refactor.NegatedConditionsWithElse},
+        {Credo.Check.Refactor.Nesting},
+        {Credo.Check.Refactor.PipeChainStart},
+        {Credo.Check.Refactor.UnlessWithElse},
+
+        {Credo.Check.Warning.BoolOperationOnSameValues},
+        {Credo.Check.Warning.IExPry},
+        {Credo.Check.Warning.IoInspect},
+        {Credo.Check.Warning.LazyLogging},
+        {Credo.Check.Warning.OperationOnSameValues},
+        {Credo.Check.Warning.OperationWithConstantResult},
+        {Credo.Check.Warning.UnusedEnumOperation},
+        {Credo.Check.Warning.UnusedFileOperation},
+        {Credo.Check.Warning.UnusedKeywordOperation},
+        {Credo.Check.Warning.UnusedListOperation},
+        {Credo.Check.Warning.UnusedPathOperation},
+        {Credo.Check.Warning.UnusedRegexOperation},
+        {Credo.Check.Warning.UnusedStringOperation},
+        {Credo.Check.Warning.UnusedTupleOperation},
+
+        # Controversial and experimental checks (opt-in, just remove `, false`)
+        #
+        {Credo.Check.Refactor.ABCSize, false},
+        {Credo.Check.Refactor.AppendSingleItem, false},
+        {Credo.Check.Refactor.VariableRebinding, false},
+        {Credo.Check.Warning.MapGetUnsafePass, false},
+
+        # Deprecated checks (these will be deleted after a grace period)
+        {Credo.Check.Readability.Specs, false},
+        {Credo.Check.Warning.NameRedeclarationByAssignment, false},
+        {Credo.Check.Warning.NameRedeclarationByCase, false},
+        {Credo.Check.Warning.NameRedeclarationByDef, false},
+        {Credo.Check.Warning.NameRedeclarationByFn, false},
+
+        # Custom checks can be created using `mix credo.gen.check`.
+        #
+      ]
+    }
+  ]
+}
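As the comments in the generated file note, any check can be disabled by passing `false` as the second element of its tuple, or tuned by passing parameters. A minimal, hypothetical .credo.exs illustrating both forms (the values here are illustrative, not the ones used above):

# Illustrative sketch only: disable one check, parameterize another.
%{
  configs: [
    %{
      name: "default",
      files: %{included: ["lib/"], excluded: []},
      checks: [
        {Credo.Check.Design.TagTODO, false},                      # disabled outright
        {Credo.Check.Readability.MaxLineLength, max_length: 120}  # parameterized
      ]
    }
  ]
}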
@@ -1,4 +1,5 @@
 defmodule Pleroma.Plugs.AuthenticationPlug do
+  alias Comeonin.Pbkdf2
   import Plug.Conn

   def init(options) do
@@ -25,12 +26,12 @@ defp verify(%{id: id} = user, _password, id) do
   end

   defp verify(nil, _password, _user_id) do
-    Comeonin.Pbkdf2.dummy_checkpw
+    Pbkdf2.dummy_checkpw
     :error
   end

   defp verify(user, password, _user_id) do
-    if Comeonin.Pbkdf2.checkpw(password, user.password_hash) do
+    if Pbkdf2.checkpw(password, user.password_hash) do
       {:ok, user}
     else
       :error
@@ -42,7 +43,7 @@ defp decode_header(conn) do
          {:ok, userinfo} <- Base.decode64(header),
          [username, password] <- String.split(userinfo, ":")
     do
-      { :ok, username, password }
+      {:ok, username, password}
     end
   end

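The Comeonin change above is the pattern Credo's Credo.Check.Design.AliasUsage nudges toward: alias a nested module once, then call through the short name. A minimal sketch under that assumption (the wrapping module is hypothetical):

defmodule AliasUsageExample do
  # Alias once at the top of the module...
  alias Comeonin.Pbkdf2

  # ...then call through the alias instead of the fully qualified name.
  def check_password(password, hash), do: Pbkdf2.checkpw(password, hash)
end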
@@ -1,6 +1,8 @@
 defmodule Pleroma.Upload do
+  alias Ecto.UUID
+  alias Pleroma.Web
   def store(%Plug.Upload{} = file) do
-    uuid = Ecto.UUID.generate
+    uuid = UUID.generate
     upload_folder = Path.join(upload_path(), uuid)
     File.mkdir_p!(upload_folder)
     result_file = Path.join(upload_folder, file.filename)
@@ -21,7 +23,7 @@ def store(%Plug.Upload{} = file) do
   def store(%{"img" => "data:image/" <> image_data}) do
     parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data)
     data = Base.decode64!(parsed["data"])
-    uuid = Ecto.UUID.generate
+    uuid = UUID.generate
     upload_folder = Path.join(upload_path(), uuid)
     File.mkdir_p!(upload_folder)
     filename = Base.encode16(:crypto.hash(:sha256, data)) <> ".#{parsed["filetype"]}"
@@ -44,11 +46,11 @@ def store(%{"img" => "data:image/" <> image_data}) do
   end

   defp upload_path do
-    Application.get_env(:pleroma, Pleroma.Upload)
-    |> Keyword.fetch!(:uploads)
+    settings = Application.get_env(:pleroma, Pleroma.Upload)
+    Keyword.fetch!(settings, :uploads)
   end

   defp url_for(file) do
-    "#{Pleroma.Web.base_url()}/media/#{file}"
+    "#{Web.base_url()}/media/#{file}"
   end
 end
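The upload_path/0 rewrite binds the environment lookup to a variable instead of piping straight out of a function call, which is what Credo.Check.Refactor.PipeChainStart flags. A small self-contained sketch of the same shape (the app and key names are illustrative):

defmodule PipeStartExample do
  # Flagged form: `Application.get_env(:my_app, :uploads) |> Keyword.fetch!(:path)`
  # Preferred form, as in the change above: bind first, then call.
  def upload_path do
    settings = Application.get_env(:my_app, :uploads, [])
    Keyword.get(settings, :path, "uploads")
  end
end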
@@ -1,8 +1,8 @@
 defmodule Pleroma.User do
   use Ecto.Schema
-  import Ecto.Changeset
-  import Ecto.Query
-  alias Pleroma.{Repo, User, Activity, Object}
+  import Ecto.{Changeset, Query}
+  alias Pleroma.{Repo, User, Object, Web}
+  alias Comeonin.Pbkdf2

   schema "users" do
     field :bio, :string
@@ -12,7 +12,7 @@ defmodule Pleroma.User do
     field :password_hash, :string
     field :password, :string, virtual: true
     field :password_confirmation, :string, virtual: true
-    field :following, { :array, :string }, default: []
+    field :following, {:array, :string}, default: []
     field :ap_id, :string
     field :avatar, :map

@@ -27,7 +27,7 @@ def avatar_url(user) do
   end

   def ap_id(%User{nickname: nickname}) do
-    "#{Pleroma.Web.base_url}/users/#{nickname}"
+    "#{Web.base_url}/users/#{nickname}"
   end

   def ap_followers(%User{} = user) do
@@ -66,7 +66,7 @@ def register_changeset(struct, params \\ %{}) do
     |> validate_format(:nickname, ~r/^[a-zA-Z\d]+$/)

     if changeset.valid? do
-      hashed = Comeonin.Pbkdf2.hashpwsalt(changeset.changes[:password])
+      hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
       ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
       followers = User.ap_followers(%User{nickname: changeset.changes[:nickname]})
       changeset
@@ -81,8 +81,8 @@ def register_changeset(struct, params \\ %{}) do
   def follow(%User{} = follower, %User{} = followed) do
     ap_followers = User.ap_followers(followed)
     if following?(follower, followed) do
-      { :error,
-        "Could not follow user: #{followed.nickname} is already on your list." }
+      {:error,
+       "Could not follow user: #{followed.nickname} is already on your list."}
     else
       following = [ap_followers | follower.following]
       |> Enum.uniq
@@ -103,7 +103,7 @@ def unfollow(%User{} = follower, %User{} = followed) do
       |> follow_changeset(%{following: following})
       |> Repo.update
     else
-      { :error, "Not subscribed!" }
+      {:error, "Not subscribed!"}
     end
   end

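Collapsing the separate import/alias lines above uses the curly-brace multi-alias syntax that Credo.Check.Consistency.MultiAliasImportRequireUse looks for; a minimal sketch of the idiom (assuming Ecto and the Pleroma modules are available, as in this codebase):

defmodule MultiAliasExample do
  # Instead of one `import`/`alias` line per module:
  #   import Ecto.Changeset
  #   import Ecto.Query
  #   alias Pleroma.Repo
  #   alias Pleroma.User
  # group them per namespace:
  import Ecto.{Changeset, Query}
  alias Pleroma.{Repo, User}
end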
@@ -1,6 +1,6 @@
 defmodule Pleroma.Web.ActivityPub.ActivityPub do
-  alias Pleroma.Repo
-  alias Pleroma.{Activity, Object, Upload, User}
+  alias Pleroma.{Activity, Repo, Object, Upload, User, Web}
+  alias Ecto.{Changeset, UUID}
   import Ecto.Query

   def insert(map) when is_map(map) do
@@ -19,7 +19,7 @@ def insert(map) when is_map(map) do
     Repo.insert(%Activity{data: map})
   end

-  def like(%User{ap_id: ap_id} = user, %Object{data: %{ "id" => id}} = object) do
+  def like(%User{ap_id: ap_id} = user, %Object{data: %{"id" => id}} = object) do
     cond do
       # There's already a like here, so return the original activity.
       ap_id in (object.data["likes"] || []) ->
@@ -44,7 +44,7 @@ def like(%User{ap_id: ap_id} = user, %Object{data: %{ "id" => id}} = object) do
       |> Map.put("like_count", length(likes))
       |> Map.put("likes", likes)

-      changeset = Ecto.Changeset.change(object, data: new_data)
+      changeset = Changeset.change(object, data: new_data)
       {:ok, object} = Repo.update(changeset)

       update_object_in_activities(object)
@@ -58,7 +58,7 @@ defp update_object_in_activities(%{data: %{"id" => id}} = object) do
     relevant_activities = Activity.all_by_object_ap_id(id)
     Enum.map(relevant_activities, fn (activity) ->
       new_activity_data = activity.data |> Map.put("object", object.data)
-      changeset = Ecto.Changeset.change(activity, data: new_activity_data)
+      changeset = Changeset.change(activity, data: new_activity_data)
       Repo.update(changeset)
     end)
   end
@@ -79,7 +79,7 @@ def unlike(%User{ap_id: ap_id}, %Object{data: %{ "id" => id}} = object) do
       |> Map.put("like_count", length(likes))
       |> Map.put("likes", likes)

-      changeset = Ecto.Changeset.change(object, data: new_data)
+      changeset = Changeset.change(object, data: new_data)
       {:ok, object} = Repo.update(changeset)

       update_object_in_activities(object)
@@ -103,7 +103,7 @@ def generate_object_id do
   end

   def generate_id(type) do
-    "#{Pleroma.Web.base_url()}/#{type}/#{Ecto.UUID.generate}"
+    "#{Web.base_url()}/#{type}/#{UUID.generate}"
   end

   def fetch_public_activities(opts \\ %{}) do
@@ -140,8 +140,7 @@ def fetch_activities(recipients, opts \\ %{}) do
       query
     end

-    Repo.all(query)
-    |> Enum.reverse
+    Enum.reverse(Repo.all(query))
   end

   def announce(%User{ap_id: ap_id} = user, %Object{data: %{"id" => id}} = object) do
@@ -160,7 +159,7 @@ def announce(%User{ap_id: ap_id} = user, %Object{data: %{"id" => id}} = object) do
       |> Map.put("announcement_count", length(announcements))
       |> Map.put("announcements", announcements)

-      changeset = Ecto.Changeset.change(object, data: new_data)
+      changeset = Changeset.change(object, data: new_data)
       {:ok, object} = Repo.update(changeset)

       update_object_in_activities(object)
@@ -23,5 +23,5 @@ def to_simple_form(%{data: %{"object" => %{"type" => "Note"}}} = activity, user)
     ] ++ attachments
   end

-  def to_simple_form(_,_), do: nil
+  def to_simple_form(_, _), do: nil
 end
@@ -8,7 +8,8 @@ def to_simple_form(user, activities, users) do

     h = fn(str) -> [to_charlist(str)] end

-    entries = Enum.map(activities, fn(activity) ->
+    entries = activities
+    |> Enum.map(fn(activity) ->
       {:entry, ActivityRepresenter.to_simple_form(activity, user)}
     end)
     |> Enum.filter(fn ({_, form}) -> form end)
@@ -16,7 +16,8 @@ def feed(conn, %{"nickname" => nickname}) do
     activities = query
     |> Repo.all

-    response = FeedRepresenter.to_simple_form(user, activities, [user])
+    response = user
+    |> FeedRepresenter.to_simple_form(activities, [user])
     |> :xmerl.export_simple(:xmerl_xml)
     |> to_string

@@ -25,7 +26,7 @@ def feed(conn, %{"nickname" => nickname}) do
     |> send_resp(200, response)
   end

-  def temp(conn, params) do
+  def temp(_conn, params) do
     IO.inspect(params)
   end
 end
@@ -7,14 +7,14 @@ def to_simple_form(user) do
     bio = to_charlist(user.bio)
     avatar_url = to_charlist(User.avatar_url(user))
     [
-      { :id, [ap_id] },
-      { :"activity:object", ['http://activitystrea.ms/schema/1.0/person'] },
-      { :uri, [ap_id] },
-      { :"poco:preferredUsername", [nickname] },
-      { :"poco:displayName", [name] },
-      { :"poco:note", [bio] },
-      { :name, [nickname] },
-      { :link, [rel: 'avatar', href: avatar_url], []}
+      {:id, [ap_id]},
+      {:"activity:object", ['http://activitystrea.ms/schema/1.0/person']},
+      {:uri, [ap_id]},
+      {:"poco:preferredUsername", [nickname]},
+      {:"poco:displayName", [name]},
+      {:"poco:note", [bio]},
+      {:name, [nickname]},
+      {:link, [rel: 'avatar', href: avatar_url], []}
     ]
   end
 end
@@ -1,7 +1,7 @@
 defmodule Pleroma.Web.Router do
   use Pleroma.Web, :router

-  alias Pleroma.{Repo, User}
+  alias Pleroma.{Repo, User, Web.Router}

   def user_fetcher(username) do
     {:ok, Repo.get_by(User, %{nickname: username})}
@@ -10,13 +10,13 @@ def user_fetcher(username) do
   pipeline :api do
     plug :accepts, ["json"]
     plug :fetch_session
-    plug Pleroma.Plugs.AuthenticationPlug, %{fetcher: &Pleroma.Web.Router.user_fetcher/1, optional: true}
+    plug Pleroma.Plugs.AuthenticationPlug, %{fetcher: &Router.user_fetcher/1, optional: true}
   end

   pipeline :authenticated_api do
     plug :accepts, ["json"]
     plug :fetch_session
-    plug Pleroma.Plugs.AuthenticationPlug, %{fetcher: &Pleroma.Web.Router.user_fetcher/1}
+    plug Pleroma.Plugs.AuthenticationPlug, %{fetcher: &Router.user_fetcher/1}
   end

   pipeline :well_known do
@@ -10,7 +10,6 @@ def decode(salmon) do
     {:xmlObj, :string, encoding} = :xmerl_xpath.string('string(//me:encoding[1])', doc)
     {:xmlObj, :string, type} = :xmerl_xpath.string('string(//me:data[1]/@type)', doc)

-
     {:ok, data} = Base.url_decode64(to_string(data), ignore: :whitespace)
     {:ok, sig} = Base.url_decode64(to_string(sig), ignore: :whitespace)
     alg = to_string(alg)
@@ -1,17 +1,17 @@
 defmodule Pleroma.Web.TwitterAPI.Representers.ActivityRepresenter do
   use Pleroma.Web.TwitterAPI.Representers.BaseRepresenter
   alias Pleroma.Web.TwitterAPI.Representers.{UserRepresenter, ObjectRepresenter}
-  alias Pleroma.Activity
+  alias Pleroma.{Activity, User}
+  alias Calendar.Strftime

   defp user_by_ap_id(user_list, ap_id) do
     Enum.find(user_list, fn (%{ap_id: user_id}) -> ap_id == user_id end)
   end

-  def to_map(%Activity{data: %{"type" => "Announce", "actor" => actor}} = activity, %{users: users, announced_activity: announced_activity} = opts) do
+  def to_map(%Activity{data: %{"type" => "Announce", "actor" => actor, "published" => created_at}} = activity,
+             %{users: users, announced_activity: announced_activity} = opts) do
     user = user_by_ap_id(users, actor)
-    created_at = get_in(activity.data, ["published"])
-    |> date_to_asctime
+    created_at = created_at |> date_to_asctime

     text = "#{user.nickname} retweeted a status."

@@ -30,16 +30,16 @@ def to_map(%Activity{data: %{"type" => "Announce", "actor" => actor}} = activity
     }
   end

-  def to_map(%Activity{data: %{"type" => "Like"}} = activity, %{user: user, liked_activity: liked_activity} = opts) do
-    created_at = get_in(activity.data, ["published"])
-    |> date_to_asctime
+  def to_map(%Activity{data: %{"type" => "Like", "published" => created_at}} = activity,
+             %{user: user, liked_activity: liked_activity} = opts) do
+    created_at = created_at |> date_to_asctime

     text = "#{user.nickname} favorited a status."

     %{
       "id" => activity.id,
       "user" => UserRepresenter.to_map(user, opts),
-      "statusnet_html" => text, # TODO: add summary
+      "statusnet_html" => text,
       "text" => text,
       "is_local" => true,
       "is_post_verb" => false,
@@ -49,16 +49,17 @@ def to_map(%Activity{data: %{"type" => "Like"}} = activity, %{user: user, liked_
     }
   end

-  def to_map(%Activity{data: %{"type" => "Follow"}} = activity, %{user: user} = opts) do
-    created_at = get_in(activity.data, ["published"])
-    |> date_to_asctime
+  def to_map(%Activity{data: %{"type" => "Follow", "published" => created_at, "object" => followed_id}} = activity, %{user: user} = opts) do
+    created_at = created_at |> date_to_asctime

+    followed = User.get_cached_by_ap_id(followed_id)
+    text = "#{user.nickname} started following #{followed.nickname}"
     %{
       "id" => activity.id,
       "user" => UserRepresenter.to_map(user, opts),
       "attentions" => [],
-      "statusnet_html" => "", # TODO: add summary
-      "text" => "",
+      "statusnet_html" => text,
+      "text" => text,
       "is_local" => true,
       "is_post_verb" => false,
       "created_at" => created_at,
@@ -66,14 +67,12 @@ def to_map(%Activity{data: %{"type" => "Follow"}} = activity, %{user: user} = op
     }
   end

-  def to_map(%Activity{} = activity, %{user: user} = opts) do
-    content = get_in(activity.data, ["object", "content"])
-    created_at = get_in(activity.data, ["object", "published"])
-    |> date_to_asctime
-    like_count = get_in(activity.data, ["object", "like_count"]) || 0
-    announcement_count = get_in(activity.data, ["object", "announcement_count"]) || 0
-    favorited = opts[:for] && opts[:for].ap_id in (activity.data["object"]["likes"] || [])
-    repeated = opts[:for] && opts[:for].ap_id in (activity.data["object"]["announcements"] || [])
+  def to_map(%Activity{data: %{"object" => %{"content" => content} = object}} = activity, %{user: user} = opts) do
+    created_at = object["published"] |> date_to_asctime
+    like_count = object["like_count"] || 0
+    announcement_count = object["announcement_count"] || 0
+    favorited = opts[:for] && opts[:for].ap_id in (object["likes"] || [])
+    repeated = opts[:for] && opts[:for].ap_id in (object["announcements"] || [])

     mentions = opts[:mentioned] || []

@@ -91,22 +90,34 @@ def to_map(%Activity{} = activity, %{user: user} = opts) do
       "is_local" => true,
       "is_post_verb" => true,
       "created_at" => created_at,
-      "in_reply_to_status_id" => activity.data["object"]["inReplyToStatusId"],
-      "statusnet_conversation_id" => activity.data["object"]["statusnetConversationId"],
-      "attachments" => (activity.data["object"]["attachment"] || []) |> ObjectRepresenter.enum_to_list(opts),
+      "in_reply_to_status_id" => object["inReplyToStatusId"],
+      "statusnet_conversation_id" => object["statusnetConversationId"],
+      "attachments" => (object["attachment"] || []) |> ObjectRepresenter.enum_to_list(opts),
       "attentions" => attentions,
       "fave_num" => like_count,
       "repeat_num" => announcement_count,
-      "favorited" => !!favorited,
-      "repeated" => !!repeated,
+      "favorited" => to_boolean(favorited),
+      "repeated" => to_boolean(repeated),
     }
   end

   defp date_to_asctime(date) do
     with {:ok, date, _offset} <- date |> DateTime.from_iso8601 do
-      Calendar.Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
+      Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
     else _e ->
       ""
     end
   end

+  defp to_boolean(false) do
+    false
+  end
+
+  defp to_boolean(nil) do
+    false
+  end
+
+  defp to_boolean(_) do
+    true
+  end
 end
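Replacing `!!favorited` with `to_boolean(favorited)` addresses Credo.Check.Refactor.DoubleBooleanNegation: the helper makes explicit that Elixir's falsy values (`nil` and `false`) map to `false` and everything else to `true`. A standalone sketch of the same idea:

defmodule BooleanExample do
  # Behaves like `!!value`, but spells out the intent, which is what the check asks for.
  def to_boolean(nil), do: false
  def to_boolean(false), do: false
  def to_boolean(_), do: true
end

# BooleanExample.to_boolean("anything")  # => true
# BooleanExample.to_boolean(nil)         # => false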
@@ -1,19 +1,19 @@
 defmodule Pleroma.Web.TwitterAPI.TwitterAPI do
+  alias Ecto.Changeset
   alias Pleroma.{User, Activity, Repo, Object}
-  alias Pleroma.Web.ActivityPub.ActivityPub
+  alias Pleroma.Web.{ActivityPub.ActivityPub, Websub, OStatus}
   alias Pleroma.Web.TwitterAPI.Representers.{ActivityRepresenter, UserRepresenter}

   import Ecto.Query

-  def create_status(user = %User{}, data = %{}) do
+  def create_status(%User{} = user, %{} = data) do
     attachments = Enum.map(data["media_ids"] || [], fn (media_id) ->
       Repo.get(Object, media_id).data
     end)

     context = ActivityPub.generate_context_id

-    content = HtmlSanitizeEx.strip_tags(data["status"])
-    |> String.replace("\n", "<br>")
+    content = data["status"] |> HtmlSanitizeEx.strip_tags |> String.replace("\n", "<br>")

     mentions = parse_mentions(content)

@@ -40,10 +40,10 @@ def create_status(user = %User{}, data = %{}) do
         "context" => context,
         "attachment" => attachments,
         "actor" => user.ap_id
       },
       "published" => date,
       "context" => context
     }

     # Wire up reply info.
     activity = with inReplyToId when not is_nil(inReplyToId) <- data["in_reply_to_status_id"],
@@ -67,34 +67,34 @@ def create_status(user = %User{}, data = %{}) do

     with {:ok, activity} <- ActivityPub.insert(activity) do
       {:ok, activity} = add_conversation_id(activity)
-      Pleroma.Web.Websub.publish(Pleroma.Web.OStatus.feed_path(user), user, activity)
+      Websub.publish(OStatus.feed_path(user), user, activity)
       {:ok, activity}
     end
   end

   def fetch_friend_statuses(user, opts \\ %{}) do
-    ActivityPub.fetch_activities([user.ap_id | user.following], opts)
-    |> activities_to_statuses(%{for: user})
+    activities = ActivityPub.fetch_activities([user.ap_id | user.following], opts)
+    activities_to_statuses(activities, %{for: user})
   end

   def fetch_public_statuses(user, opts \\ %{}) do
-    ActivityPub.fetch_public_activities(opts)
-    |> activities_to_statuses(%{for: user})
+    activities = ActivityPub.fetch_public_activities(opts)
+    activities_to_statuses(activities, %{for: user})
   end

   def fetch_user_statuses(user, opts \\ %{}) do
-    ActivityPub.fetch_activities([], opts)
-    |> activities_to_statuses(%{for: user})
+    activities = ActivityPub.fetch_activities([], opts)
+    activities_to_statuses(activities, %{for: user})
   end

   def fetch_mentions(user, opts \\ %{}) do
-    ActivityPub.fetch_activities([user.ap_id], opts)
-    |> activities_to_statuses(%{for: user})
+    activities = ActivityPub.fetch_activities([user.ap_id], opts)
+    activities_to_statuses(activities, %{for: user})
   end

   def fetch_conversation(user, id) do
     query = from activity in Activity,
-      where: fragment("? @> ?", activity.data, ^%{ statusnetConversationId: id}),
+      where: fragment("? @> ?", activity.data, ^%{statusnetConversationId: id}),
       limit: 1

     with %Activity{} = activity <- Repo.one(query),
@@ -116,26 +116,26 @@ def fetch_status(user, id) do
   end

   def follow(%User{} = follower, params) do
-    with { :ok, %User{} = followed } <- get_user(params),
-         { :ok, follower } <- User.follow(follower, followed),
-         { :ok, activity } <- ActivityPub.insert(%{
+    with {:ok, %User{} = followed} <- get_user(params),
+         {:ok, follower} <- User.follow(follower, followed),
+         {:ok, activity} <- ActivityPub.insert(%{
            "type" => "Follow",
            "actor" => follower.ap_id,
            "object" => followed.ap_id,
            "published" => make_date()
          })
     do
-      { :ok, follower, followed, activity }
+      {:ok, follower, followed, activity}
     else
       err -> err
     end
   end

   def unfollow(%User{} = follower, params) do
-    with { :ok, %User{} = unfollowed } <- get_user(params),
-         { :ok, follower } <- User.unfollow(follower, unfollowed)
+    with {:ok, %User{} = unfollowed} <- get_user(params),
+         {:ok, follower} <- User.unfollow(follower, unfollowed)
     do
-      { :ok, follower, unfollowed}
+      {:ok, follower, unfollowed}
     else
       err -> err
     end
@@ -207,7 +207,7 @@ def upload(%Plug.Upload{} = file, format \\ "xml") do
         media_id_string: "#{object.id}}",
         media_url: href,
         size: 0
       } |> Poison.encode!
     end
   end

@@ -215,15 +215,18 @@ def parse_mentions(text) do
     # Modified from https://www.w3.org/TR/html5/forms.html#valid-e-mail-address
     regex = ~r/@[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@?[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*/

-    Regex.scan(regex, text)
+    regex
+    |> Regex.scan(text)
     |> List.flatten
     |> Enum.uniq
-    |> Enum.map(fn ("@" <> match = full_match) -> {full_match, User.get_cached_by_nickname(match)} end)
+    |> Enum.map(fn ("@" <> match = full_match) ->
+      {full_match, User.get_cached_by_nickname(match)} end)
    |> Enum.filter(fn ({_match, user}) -> user end)
   end

   def add_user_links(text, mentions) do
-    Enum.reduce(mentions, text, fn ({match, %User{ap_id: ap_id}}, text) -> String.replace(text, match, "<a href='#{ap_id}'>#{match}</a>") end)
+    Enum.reduce(mentions, text, fn ({match, %User{ap_id: ap_id}}, text) ->
+      String.replace(text, match, "<a href='#{ap_id}'>#{match}</a>") end)
   end

   defp add_conversation_id(activity) do
@@ -236,10 +239,10 @@ defp add_conversation_id(activity) do

       object = Object.get_by_ap_id(activity.data["object"]["id"])

-      changeset = Ecto.Changeset.change(object, data: data["object"])
+      changeset = Changeset.change(object, data: data["object"])
       Repo.update(changeset)

-      changeset = Ecto.Changeset.change(activity, data: data)
+      changeset = Changeset.change(activity, data: data)
       Repo.update(changeset)
     end
   end
@@ -252,7 +255,7 @@ def register_user(params) do
       email: params["email"],
       password: params["password"],
       password_confirmation: params["confirm"]
     }

     changeset = User.register_changeset(%User{}, params)

@@ -260,22 +263,21 @@ def register_user(params) do
       {:ok, UserRepresenter.to_map(user)}
     else
       {:error, changeset} ->
-        errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
-        |> Poison.encode!
+        errors = Poison.encode!(Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end))
         {:error, %{error: errors}}
     end
   end

   def get_user(user \\ nil, params) do
     case params do
-      %{ "user_id" => user_id } ->
+      %{"user_id" => user_id} ->
         case target = Repo.get(User, user_id) do
           nil ->
             {:error, "No user with such user_id"}
           _ ->
             {:ok, target}
         end
-      %{ "screen_name" => nickname } ->
+      %{"screen_name" => nickname} ->
         case target = Repo.get_by(User, nickname: nickname) do
           nil ->
             {:error, "No user with such screen_name"}
@@ -303,7 +305,8 @@ defp activity_to_status(%Activity{data: %{"type" => "Like"}} = activity, opts) do
     user = User.get_cached_by_ap_id(actor)
     [liked_activity] = Activity.all_by_object_ap_id(activity.data["object"])

-    ActivityRepresenter.to_map(activity, Map.merge(opts, %{user: user, liked_activity: liked_activity}))
+    ActivityRepresenter.to_map(activity,
+      Map.merge(opts, %{user: user, liked_activity: liked_activity}))
   end

   # For announces, fetch the announced activity and the user.
@@ -313,7 +316,8 @@ defp activity_to_status(%Activity{data: %{"type" => "Announce"}} = activity, opts) do
     [announced_activity] = Activity.all_by_object_ap_id(activity.data["object"])
     announced_actor = User.get_cached_by_ap_id(announced_activity.data["actor"])

-    ActivityRepresenter.to_map(activity, Map.merge(opts, %{users: [user, announced_actor], announced_activity: announced_activity}))
+    ActivityRepresenter.to_map(activity,
+      Map.merge(opts, %{users: [user, announced_actor], announced_activity: announced_activity}))
   end

   defp activity_to_status(activity, opts) do
@@ -323,7 +327,7 @@ defp activity_to_status(activity, opts) do
     mentioned_users = Enum.map(activity.data["to"] || [], fn (ap_id) ->
       User.get_cached_by_ap_id(ap_id)
     end)
-    |> Enum.filter(&(&1))
+    mentioned_users = mentioned_users |> Enum.filter(&(&1))

     ActivityRepresenter.to_map(activity, Map.merge(opts, %{user: user, mentioned: mentioned_users}))
   end
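Rewriting `def create_status(user = %User{}, data = %{})` as `def create_status(%User{} = user, %{} = data)` follows Credo.Check.Consistency.ParameterPatternMatching, which wants the pattern and the bound variable placed consistently across the codebase; this commit standardizes on pattern-first. A small sketch of that convention (the struct here is hypothetical):

defmodule PatternExample do
  defstruct [:name]

  # Pattern on the left, variable on the right, as in the change above.
  def greet(%PatternExample{name: name} = _user, %{} = _opts) do
    "hello #{name}"
  end
end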
@@ -2,8 +2,9 @@ defmodule Pleroma.Web.TwitterAPI.Controller do
   use Pleroma.Web, :controller
   alias Pleroma.Web.TwitterAPI.TwitterAPI
   alias Pleroma.Web.TwitterAPI.Representers.{UserRepresenter, ActivityRepresenter}
-  alias Pleroma.{Repo, Activity}
+  alias Pleroma.{Web, Repo, Activity}
   alias Pleroma.Web.ActivityPub.ActivityPub
+  alias Ecto.Changeset

   def verify_credentials(%{assigns: %{user: user}} = conn, _params) do
     response = user |> UserRepresenter.to_json(%{for: user})
@@ -15,7 +16,7 @@ def verify_credentials(%{assigns: %{user: user}} = conn, _params) do
   def status_update(%{assigns: %{user: user}} = conn, %{"status" => status_text} = status_data) do
     if status_text |> String.trim |> String.length != 0 do
       media_ids = extract_media_ids(status_data)
-      {:ok, activity} = TwitterAPI.create_status(user, Map.put(status_data, "media_ids", media_ids ))
+      {:ok, activity} = TwitterAPI.create_status(user, Map.put(status_data, "media_ids", media_ids))
       conn
       |> json_reply(200, ActivityRepresenter.to_json(activity, %{user: user}))
     else
@@ -79,34 +80,34 @@ def mentions_timeline(%{assigns: %{user: user}} = conn, params) do

   def follow(%{assigns: %{user: user}} = conn, params) do
     case TwitterAPI.follow(user, params) do
-      { :ok, user, followed, _activity } ->
+      {:ok, user, followed, _activity} ->
        response = followed |> UserRepresenter.to_json(%{for: user})
        conn
        |> json_reply(200, response)
-      { :error, msg } -> forbidden_json_reply(conn, msg)
+      {:error, msg} -> forbidden_json_reply(conn, msg)
     end
   end

   def unfollow(%{assigns: %{user: user}} = conn, params) do
     case TwitterAPI.unfollow(user, params) do
-      { :ok, user, unfollowed, } ->
+      {:ok, user, unfollowed} ->
        response = unfollowed |> UserRepresenter.to_json(%{for: user})
        conn
        |> json_reply(200, response)
-      { :error, msg } -> forbidden_json_reply(conn, msg)
+      {:error, msg} -> forbidden_json_reply(conn, msg)
     end
   end

-  def fetch_status(%{assigns: %{user: user}} = conn, %{ "id" => id }) do
-    response = TwitterAPI.fetch_status(user, id) |> Poison.encode!
+  def fetch_status(%{assigns: %{user: user}} = conn, %{"id" => id}) do
+    response = Poison.encode!(TwitterAPI.fetch_status(user, id))

     conn
     |> json_reply(200, response)
   end

-  def fetch_conversation(%{assigns: %{user: user}} = conn, %{ "id" => id }) do
+  def fetch_conversation(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     id = String.to_integer(id)
-    response = TwitterAPI.fetch_conversation(user, id) |> Poison.encode!
+    response = Poison.encode!(TwitterAPI.fetch_conversation(user, id))

     conn
     |> json_reply(200, response)
@@ -132,8 +133,8 @@ def upload_json(conn, %{"media" => media}) do
   def config(conn, _params) do
     response = %{
       site: %{
-        name: Pleroma.Web.base_url,
-        server: Pleroma.Web.base_url,
+        name: Web.base_url,
+        server: Web.base_url,
         textlimit: -1
       }
     }
@@ -188,11 +189,10 @@ def register(conn, params) do

   def update_avatar(%{assigns: %{user: user}} = conn, params) do
     {:ok, object} = ActivityPub.upload(params)
-    change = Ecto.Changeset.change(user, %{avatar: object.data})
+    change = Changeset.change(user, %{avatar: object.data})
     {:ok, user} = Repo.update(change)

-    response = UserRepresenter.to_map(user, %{for: user})
-    |> Poison.encode!
+    response = Poison.encode!(UserRepresenter.to_map(user, %{for: user}))

     conn
     |> json_reply(200, response)
@@ -20,8 +20,7 @@ def controller do
     quote do
       use Phoenix.Controller, namespace: Pleroma.Web
       import Plug.Conn
-      import Pleroma.Web.Router.Helpers
-      import Pleroma.Web.Gettext
+      import Pleroma.Web.{Gettext, Router.Helpers}
     end
   end

@@ -33,9 +32,7 @@ def view do
       # Import convenience functions from controllers
       import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]

-      import Pleroma.Web.Router.Helpers
-      import Pleroma.Web.ErrorHelpers
-      import Pleroma.Web.Gettext
+      import Pleroma.Web.{ErrorHelpers, Gettext, Router.Helpers}
     end
   end

@@ -75,7 +72,8 @@ def base_url do

     protocol = settings |> Keyword.fetch!(:protocol)

-    port_fragment = with {:ok, protocol_info} <- settings |> Keyword.fetch(String.to_atom(protocol)),
+    port_fragment = with {:ok, protocol_info} <- settings
+                                                 |> Keyword.fetch(String.to_atom(protocol)),
                          {:ok, port} <- protocol_info |> Keyword.fetch(:port)
     do
       ":#{port}"
@@ -1,21 +1,20 @@
 defmodule Pleroma.Web.WebFinger do
-  alias Pleroma.XmlBuilder
-  alias Pleroma.User
-  alias Pleroma.Web.OStatus
+  alias Pleroma.{User, XmlBuilder}
+  alias Pleroma.{Web, Web.OStatus}

-  def host_meta() do
-    base_url = Pleroma.Web.base_url
+  def host_meta do
+    base_url = Web.base_url
     {
-      :XRD, %{ xmlns: "http://docs.oasis-open.org/ns/xri/xrd-1.0" },
+      :XRD, %{xmlns: "http://docs.oasis-open.org/ns/xri/xrd-1.0"},
       {
-        :Link, %{ rel: "lrdd", type: "application/xrd+xml", template: "#{base_url}/.well-known/webfinger?resource={uri}" }
+        :Link, %{rel: "lrdd", type: "application/xrd+xml", template: "#{base_url}/.well-known/webfinger?resource={uri}"}
       }
     }
     |> XmlBuilder.to_doc
   end

   def webfinger(resource) do
-    host = Pleroma.Web.host
+    host = Web.host
     regex = ~r/acct:(?<username>\w+)@#{host}/
     case Regex.named_captures(regex, resource) do
       %{"username" => username} ->
@@ -29,7 +28,7 @@ def represent_user(user) do
     {
       :XRD, %{xmlns: "http://docs.oasis-open.org/ns/xri/xrd-1.0"},
       [
-        {:Subject, "acct:#{user.nickname}@#{Pleroma.Web.host}"},
+        {:Subject, "acct:#{user.nickname}@#{Web.host}"},
         {:Alias, user.ap_id},
         {:Link, %{rel: "http://schemas.google.com/g/2010#updates-from", type: "application/atom+xml", href: OStatus.feed_path(user)}}
       ]
@@ -12,7 +12,7 @@ def host_meta(conn, _params) do
   end

   def webfinger(conn, %{"resource" => resource}) do
-    {:ok, response} = Pleroma.Web.WebFinger.webfinger(resource)
+    {:ok, response} = WebFinger.webfinger(resource)

     conn
     |> put_resp_content_type("application/xrd+xml")
@@ -1,4 +1,5 @@
 defmodule Pleroma.Web.Websub do
+  alias Ecto.Changeset
   alias Pleroma.Repo
   alias Pleroma.Web.Websub.WebsubServerSubscription
   alias Pleroma.Web.OStatus.FeedRepresenter
@@ -8,9 +9,10 @@ defmodule Pleroma.Web.Websub do

   @websub_verifier Application.get_env(:pleroma, :websub_verifier)

-  def verify(subscription, getter \\ &HTTPoison.get/3 ) do
+  def verify(subscription, getter \\ &HTTPoison.get/3) do
     challenge = Base.encode16(:crypto.strong_rand_bytes(8))
-    lease_seconds = NaiveDateTime.diff(subscription.valid_until, subscription.updated_at) |> to_string
+    lease_seconds = NaiveDateTime.diff(subscription.valid_until, subscription.updated_at)
+    lease_seconds = lease_seconds |> to_string

     params = %{
       "hub.challenge": challenge,
@@ -25,11 +27,11 @@ def verify(subscription, getter \\ &HTTPoison.get/3 ) do
     with {:ok, response} <- getter.(url, [], [params: params]),
          ^challenge <- response.body
     do
-      changeset = Ecto.Changeset.change(subscription, %{state: "active"})
+      changeset = Changeset.change(subscription, %{state: "active"})
       Repo.update(changeset)
     else _e ->
-      changeset = Ecto.Changeset.change(subscription, %{state: "rejected"})
-      {:ok, subscription } = Repo.update(changeset)
+      changeset = Changeset.change(subscription, %{state: "rejected"})
+      {:ok, subscription} = Repo.update(changeset)
       {:error, subscription}
     end
   end
@@ -39,10 +41,11 @@ def publish(topic, user, activity) do
       where: sub.topic == ^topic and sub.state == "active"
     subscriptions = Repo.all(query)
     Enum.each(subscriptions, fn(sub) ->
-      response = FeedRepresenter.to_simple_form(user, [activity], [user])
+      response = user
+      |> FeedRepresenter.to_simple_form([activity], [user])
       |> :xmerl.export_simple(:xmerl_xml)

-      signature = :crypto.hmac(:sha, sub.secret, response) |> Base.encode16
+      signature = Base.encode16(:crypto.hmac(:sha, sub.secret, response))

       HTTPoison.post(sub.callback, response, [
         {"Content-Type", "application/atom+xml"},
@@ -65,10 +68,11 @@ def incoming_subscription_request(user, %{"hub.mode" => "subscribe"} = params) do
       callback: callback
     }

-    change = Ecto.Changeset.change(subscription, data)
+    change = Changeset.change(subscription, data)
     websub = Repo.insert_or_update!(change)

-    change = Ecto.Changeset.change(websub, %{valid_until: NaiveDateTime.add(websub.updated_at, lease_time)})
+    change = Changeset.change(websub, %{valid_until:
+      NaiveDateTime.add(websub.updated_at, lease_time)})
     websub = Repo.update!(change)

     # Just spawn that for now, maybe pool later.
@@ -81,7 +85,8 @@ def incoming_subscription_request(user, %{"hub.mode" => "subscribe"} = params) do
   end

   defp get_subscription(topic, callback) do
-    Repo.get_by(WebsubServerSubscription, topic: topic, callback: callback) || %WebsubServerSubscription{}
+    Repo.get_by(WebsubServerSubscription, topic: topic, callback: callback) ||
+      %WebsubServerSubscription{}
   end

   defp lease_time(%{"hub.lease_seconds" => lease_seconds}) do
@@ -30,13 +30,13 @@ def to_xml(%NaiveDateTime{} = time) do
     NaiveDateTime.to_iso8601(time)
   end

-  def to_doc(content), do: "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" <> to_xml(content)
+  def to_doc(content), do: ~s(<?xml version="1.0" encoding="UTF-8"?>) <> to_xml(content)

   defp make_open_tag(tag, attributes) do
     attributes_string = for {attribute, value} <- attributes do
       "#{attribute}=\"#{value}\""
     end |> Enum.join(" ")

-    Enum.join([tag, attributes_string], " ") |> String.strip
+    [tag, attributes_string] |> Enum.join(" ") |> String.strip
   end
 end
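The to_doc/1 change swaps escaped double quotes for the ~s sigil, which is what Credo.Check.Readability.StringSigils suggests once a string accumulates several \" escapes. Both spellings build the same string, as this minimal sketch shows:

# Evaluate in iex: the two forms are equal, the sigil just avoids the escaping.
escaped = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
sigil   = ~s(<?xml version="1.0" encoding="UTF-8"?>)
escaped == sigil  # => true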
mix.exs (+1 line)
@@ -41,6 +41,7 @@ defp deps do
      {:cachex, "~> 2.1"},
      {:httpoison, "~> 0.11.1"},
      {:ex_machina, "~> 2.0", only: :test},
+     {:credo, "~> 0.7", only: [:dev, :test]},
      {:mix_test_watch, "~> 0.2", only: :dev}]
   end

mix.lock (+4 lines)
@@ -1,4 +1,5 @@
-%{"cachex": {:hex, :cachex, "2.1.0", "fad49b4e78d11c6c314e75bd8c9408f5b78cb065c047442798caed10803ee3be", [:mix], [{:eternal, "~> 1.1", [hex: :eternal, optional: false]}]},
+%{"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], []},
+  "cachex": {:hex, :cachex, "2.1.0", "fad49b4e78d11c6c314e75bd8c9408f5b78cb065c047442798caed10803ee3be", [:mix], [{:eternal, "~> 1.1", [hex: :eternal, optional: false]}]},
   "calendar": {:hex, :calendar, "0.16.1", "782327ad8bae7c797b887840dc4ddb933f05ce6e333e5b04964d7a5d5f79bde3", [:mix], [{:tzdata, "~> 0.5.8 or ~> 0.1.201603", [hex: :tzdata, optional: false]}]},
   "certifi": {:hex, :certifi, "1.0.0", "1c787a85b1855ba354f0b8920392c19aa1d06b0ee1362f9141279620a5be2039", [:rebar3], []},
   "comeonin": {:hex, :comeonin, "3.0.2", "8b213268a6634bd2e31a8035a963e974681d13ccc1f73f2ae664b6ac4e993c96", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, optional: false]}]},
@@ -6,6 +7,7 @@
   "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], []},
   "cowboy": {:hex, :cowboy, "1.1.2", "61ac29ea970389a88eca5a65601460162d370a70018afe6f949a29dca91f3bb0", [:rebar3], [{:cowlib, "~> 1.0.2", [hex: :cowlib, optional: false]}, {:ranch, "~> 1.3.2", [hex: :ranch, optional: false]}]},
   "cowlib": {:hex, :cowlib, "1.0.2", "9d769a1d062c9c3ac753096f868ca121e2730b9a377de23dec0f7e08b1df84ee", [:make], []},
+  "credo": {:hex, :credo, "0.7.3", "9827ab04002186af1aec014a811839a06f72aaae6cd5eed3919b248c8767dbf3", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, optional: false]}]},
   "db_connection": {:hex, :db_connection, "1.1.2", "2865c2a4bae0714e2213a0ce60a1b12d76a6efba0c51fbda59c9ab8d1accc7a8", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, optional: true]}]},
   "decimal": {:hex, :decimal, "1.3.1", "157b3cedb2bfcb5359372a7766dd7a41091ad34578296e951f58a946fcab49c6", [:mix], []},
   "deppie": {:hex, :deppie, "1.1.0", "cfb6fcee7dfb64eb78cb8505537971a0805131899326ad469ef10df04520f451", [:mix], []},