defmodule Pleroma.Web.PleromaAPI.EmojiAPIController do
  use Pleroma.Web, :controller

  require Logger

  def emoji_dir_path() do
    Path.join(
      Pleroma.Config.get!([:instance, :static_dir]),
      "emoji"
    )
  end

  @doc """
  Lists the packs available on the instance as JSON.

  The information is public and does not require authentication. The format is
  a map of "pack directory name" to pack.json contents.
  """
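  # Illustrative response shape (the pack and emoji names below are hypothetical):
  #
  #   %{
  #     "blobs" => %{
  #       "pack" => %{"share-files" => true, "can-download" => true, "download-sha256" => "..."},
  #       "files" => %{"blobcat" => "blobcat.png"}
  #     }
  #   }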
  def list_packs(conn, _params) do
    # Create the emoji pack directory if it does not exist. This is likely the first
    # request made with the API, so creating it here should be sufficient.
    with {:create_dir, :ok} <- {:create_dir, File.mkdir_p(emoji_dir_path())},
         {:ls, {:ok, results}} <- {:ls, File.ls(emoji_dir_path())} do
      pack_infos =
        results
        |> Enum.filter(&has_pack_json?/1)
        |> Enum.map(&load_pack/1)
        # Check if all the files are in place and can be sent
        |> Enum.map(&validate_pack/1)
        # Transform into a map of pack-name => pack-data
        |> Enum.into(%{})

      json(conn, pack_infos)
    else
      {:create_dir, {:error, e}} ->
        conn
        |> put_status(:internal_server_error)
        |> json(%{error: "Failed to create the emoji pack directory at #{emoji_dir_path()}: #{e}"})

      {:ls, {:error, e}} ->
        conn
        |> put_status(:internal_server_error)
        |> json(%{
          error:
            "Failed to get the contents of the emoji pack directory at #{emoji_dir_path()}: #{e}"
        })
    end
  end

  defp has_pack_json?(file) do
    dir_path = Path.join(emoji_dir_path(), file)

    # Only consider directories that contain a pack.json
    File.dir?(dir_path) and File.exists?(Path.join(dir_path, "pack.json"))
  end

  defp load_pack(pack_name) do
    pack_path = Path.join(emoji_dir_path(), pack_name)
    pack_file = Path.join(pack_path, "pack.json")

    {pack_name, Jason.decode!(File.read!(pack_file))}
  end

  defp validate_pack({name, pack}) do
    pack_path = Path.join(emoji_dir_path(), name)

    if can_download?(pack, pack_path) do
      archive_for_sha = make_archive(name, pack, pack_path)
      archive_sha = :crypto.hash(:sha256, archive_for_sha) |> Base.encode16()

      pack =
        pack
        |> put_in(["pack", "can-download"], true)
        |> put_in(["pack", "download-sha256"], archive_sha)

      {name, pack}
    else
      {name, put_in(pack, ["pack", "can-download"], false)}
    end
  end

  defp can_download?(pack, pack_path) do
    # If the pack is marked as shared, check that it can actually be downloaded,
    # i.e. that all of its files are present so the pack can be archived and sent
    # to the remote instance when asked. Otherwise, remotes have to download it
    # from the fallback source, if there is one.
    pack["pack"]["share-files"] &&
      Enum.all?(pack["files"], fn {_, path} ->
        File.exists?(Path.join(pack_path, path))
      end)
  end

  defp create_archive_and_cache(name, pack, pack_dir, md5) do
    files =
      ['pack.json'] ++
        (pack["files"] |> Enum.map(fn {_, path} -> to_charlist(path) end))

    {:ok, {_, zip_result}} = :zip.zip('#{name}.zip', files, [:memory, cwd: to_charlist(pack_dir)])

    cache_seconds_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file])
    cache_ms = :timer.seconds(cache_seconds_per_file * Enum.count(files))

    Cachex.put!(
      :emoji_packs_cache,
      name,
      # if pack.json MD5 changes, the cache is not valid anymore
      %{pack_file_md5: md5, pack_data: zip_result},
      # Add `shared_pack_cache_seconds_per_file` seconds of cache time for every file in the pack
      ttl: cache_ms
    )

    Logger.debug("Created an archive for the '#{name}' emoji pack, \
keeping it in cache for #{div(cache_ms, 1000)}s")

    zip_result
  end
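
  # Illustrative TTL math: with a hypothetical `shared_pack_cache_seconds_per_file` of 60
  # and a pack of 30 emoji files (plus pack.json), the archive above stays cached for
  # 31 * 60 = 1860 seconds, i.e. 31 minutes.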

  defp make_archive(name, pack, pack_dir) do
    # Having a different pack.json md5 invalidates cache
    pack_file_md5 = :crypto.hash(:md5, File.read!(Path.join(pack_dir, "pack.json")))

    case Cachex.get!(:emoji_packs_cache, name) do
      %{pack_file_md5: ^pack_file_md5, pack_data: zip_result} ->
        Logger.debug("Using cache for the '#{name}' shared emoji pack")
        zip_result

      _ ->
        create_archive_and_cache(name, pack, pack_dir, pack_file_md5)
    end
  end

  @doc """
  An endpoint for other instances (via admin UI) or users (via browser)
  to download packs that the instance shares.
  """
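  # As used in `download_from/2` below, remote instances fetch this endpoint at
  # `/api/pleroma/emoji/packs/download_shared/:name`; the response is the pack's
  # files zipped up as `:name.zip`.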
  def download_shared(conn, %{"name" => name}) do
    pack_dir = Path.join(emoji_dir_path(), name)
    pack_file = Path.join(pack_dir, "pack.json")

    with {_, true} <- {:exists?, File.exists?(pack_file)},
         pack = Jason.decode!(File.read!(pack_file)),
         {_, true} <- {:can_download?, can_download?(pack, pack_dir)} do
      zip_result = make_archive(name, pack, pack_dir)
      send_download(conn, {:binary, zip_result}, filename: "#{name}.zip")
    else
      {:can_download?, _} ->
        conn
        |> put_status(:forbidden)
        |> json(%{
          error:
            "Pack #{name} cannot be downloaded from this instance, either pack sharing " <>
              "was disabled for this pack or some files are missing"
        })

      {:exists?, _} ->
        conn
        |> put_status(:not_found)
        |> json(%{error: "Pack #{name} does not exist"})
    end
  end

  @doc """
  An admin endpoint to request downloading a pack named `pack_name` from the instance
  `instance_address`.

  If the requested instance's admin chose to share the pack, it will be downloaded
  from that instance, otherwise it will be downloaded from the fallback source, if there is one.
  """
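  # Illustrative request parameters (the instance and pack names are hypothetical):
  #
  #   %{
  #     "instance_address" => "https://example.social",
  #     "pack_name" => "blobs",
  #     # optional: store the pack locally under a different name
  #     "as" => "imported_blobs"
  #   }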
  def download_from(conn, %{"instance_address" => address, "pack_name" => name} = data) do
    shareable_packs_available =
      "#{address}/.well-known/nodeinfo"
      |> Tesla.get!()
      |> Map.get(:body)
      |> Jason.decode!()
      |> Map.get("links")
      |> List.last()
      |> Map.get("href")
      # Get the actual nodeinfo address and fetch it
      |> Tesla.get!()
      |> Map.get(:body)
      |> Jason.decode!()
      |> get_in(["metadata", "features"])
      |> Enum.member?("shareable_emoji_packs")

    if shareable_packs_available do
      full_pack =
        "#{address}/api/pleroma/emoji/packs/list"
        |> Tesla.get!()
        |> Map.get(:body)
        |> Jason.decode!()
        |> Map.get(name)

      pack_info_res =
        case full_pack["pack"] do
          %{"share-files" => true, "can-download" => true, "download-sha256" => sha} ->
            {:ok,
             %{
               sha: sha,
               uri: "#{address}/api/pleroma/emoji/packs/download_shared/#{name}"
             }}

          %{"fallback-src" => src, "fallback-src-sha256" => sha} when is_binary(src) ->
            {:ok,
             %{
               sha: sha,
               uri: src,
               fallback: true
             }}

          _ ->
            {:error,
             "The pack was not set as shared and there is no fallback src to download from"}
        end

      with {:ok, %{sha: sha, uri: uri} = pinfo} <- pack_info_res,
           %{body: emoji_archive} <- Tesla.get!(uri),
           {_, true} <- {:checksum, Base.decode16!(sha) == :crypto.hash(:sha256, emoji_archive)} do
        local_name = data["as"] || name
        pack_dir = Path.join(emoji_dir_path(), local_name)
        File.mkdir_p!(pack_dir)

        files = Enum.map(full_pack["files"], fn {_, path} -> to_charlist(path) end)
        # Fallback cannot contain a pack.json file
        files = if pinfo[:fallback], do: files, else: ['pack.json'] ++ files

        {:ok, _} = :zip.unzip(emoji_archive, cwd: to_charlist(pack_dir), file_list: files)

        # Fallback can't contain a pack.json file, since that would cause the fallback-src-sha256
        # in it to depend on itself
        if pinfo[:fallback] do
          pack_file_path = Path.join(pack_dir, "pack.json")

          File.write!(pack_file_path, Jason.encode!(full_pack, pretty: true))
        end

        json(conn, "ok")
      else
        {:error, e} ->
          conn |> put_status(:internal_server_error) |> json(%{error: e})

        {:checksum, _} ->
          conn
          |> put_status(:internal_server_error)
          |> json(%{error: "SHA256 for the pack doesn't match the one sent by the server"})
      end
    else
      conn
      |> put_status(:internal_server_error)
      |> json(%{error: "The requested instance does not support sharing emoji packs"})
    end
  end

  @doc """
  Creates an empty pack named `name` which then can be updated via the admin UI.
  """
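  # The new pack is written out as an empty pack.json, roughly:
  #
  #   {
  #     "pack": {},
  #     "files": {}
  #   }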
  def create(conn, %{"name" => name}) do
    pack_dir = Path.join(emoji_dir_path(), name)

    if not File.exists?(pack_dir) do
      File.mkdir_p!(pack_dir)

      pack_file_p = Path.join(pack_dir, "pack.json")

      File.write!(
        pack_file_p,
        Jason.encode!(%{pack: %{}, files: %{}}, pretty: true)
      )

      conn |> json("ok")
    else
      conn
      |> put_status(:conflict)
      |> json(%{error: "A pack named \"#{name}\" already exists"})
    end
  end

  @doc """
  Deletes the pack `name` and all its files.
  """
  def delete(conn, %{"name" => name}) do
    pack_dir = Path.join(emoji_dir_path(), name)

    case File.rm_rf(pack_dir) do
      {:ok, _} ->
        conn |> json("ok")

      {:error, _} ->
        conn
        |> put_status(:internal_server_error)
        |> json(%{error: "Couldn't delete the pack #{name}"})
    end
  end

  @doc """
  An endpoint to update `pack_name`'s metadata.

  `new_data` is the new metadata for the pack, which will replace the old metadata.
  """
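  # Illustrative `new_data` payload (the URL is hypothetical; `share-files` and
  # `fallback-src` are the keys this module acts on):
  #
  #   %{
  #     "share-files" => true,
  #     "fallback-src" => "https://example.com/packs/blobs.zip"
  #   }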
  def update_metadata(conn, %{"pack_name" => name, "new_data" => new_data}) do
    pack_file_p = Path.join([emoji_dir_path(), name, "pack.json"])

    full_pack = Jason.decode!(File.read!(pack_file_p))

    # The new data specifies a fallback-src and it differs from the old one
    should_update_fb_sha =
      not is_nil(new_data["fallback-src"]) and
        new_data["fallback-src"] != full_pack["pack"]["fallback-src"]

    with {_, true} <- {:should_update?, should_update_fb_sha},
         %{body: pack_arch} <- Tesla.get!(new_data["fallback-src"]),
         {:ok, flist} <- :zip.unzip(pack_arch, [:memory]),
         {_, true} <- {:has_all_files?, has_all_files?(full_pack, flist)} do
      fallback_sha = :crypto.hash(:sha256, pack_arch) |> Base.encode16()

      new_data = Map.put(new_data, "fallback-src-sha256", fallback_sha)
      update_metadata_and_send(conn, full_pack, new_data, pack_file_p)
    else
      {:should_update?, _} ->
        update_metadata_and_send(conn, full_pack, new_data, pack_file_p)

      {:has_all_files?, _} ->
        conn
        |> put_status(:bad_request)
        |> json(%{error: "The fallback archive does not have all files specified in pack.json"})
    end
  end

  # Check if all files from the pack.json are in the archive
  defp has_all_files?(%{"files" => files}, flist) do
    Enum.all?(files, fn {_, from_manifest} ->
      Enum.find(flist, fn {from_archive, _} ->
        to_string(from_archive) == from_manifest
      end)
    end)
  end

  defp update_metadata_and_send(conn, full_pack, new_data, pack_file_p) do
    full_pack = Map.put(full_pack, "pack", new_data)
    File.write!(pack_file_p, Jason.encode!(full_pack, pretty: true))

    # Send new data back with fallback sha filled
    json(conn, new_data)
  end

  defp get_filename(%{"filename" => filename}), do: filename

  defp get_filename(%{"file" => file}) do
    case file do
      %Plug.Upload{filename: filename} -> filename
      url when is_binary(url) -> Path.basename(url)
    end
  end

  defp empty?(str), do: String.trim(str) == ""

  defp update_file_and_send(conn, updated_full_pack, pack_file_p) do
    # Write the emoji pack file
    File.write!(pack_file_p, Jason.encode!(updated_full_pack, pretty: true))

    # Return the modified file list
    json(conn, updated_full_pack["files"])
  end

  @doc """
  Updates a file in a pack.

  Updating can mean three things:

  - `add` adds an emoji named `shortcode` to the pack `pack_name`,
    which means the emoji file needs to be uploaded with the request
    (thus requiring it to be a multipart request) and be named `file`.
    There can also be an optional `filename` that will be the new emoji file name
    (if it's not there, the name will be taken from the uploaded file).
  - `update` changes the emoji shortcode (from `shortcode` to `new_shortcode`) or moves
    the file (from the current filename to `new_filename`)
  - `remove` removes the emoji named `shortcode` and its associated file
  """
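  # Illustrative "add" request parameters (pack, shortcode and file names are hypothetical):
  #
  #   %{
  #     "pack_name" => "blobs",
  #     "action" => "add",
  #     "shortcode" => "blobcat",
  #     "filename" => "blobcat.png",
  #     "file" => %Plug.Upload{...} # or a URL string to download the file from
  #   }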
  # Add
  def update_file(
        conn,
        %{"pack_name" => pack_name, "action" => "add", "shortcode" => shortcode} = params
      ) do
    pack_dir = Path.join(emoji_dir_path(), pack_name)
    pack_file_p = Path.join(pack_dir, "pack.json")

    full_pack = Jason.decode!(File.read!(pack_file_p))

    with {_, false} <- {:has_shortcode, Map.has_key?(full_pack["files"], shortcode)},
         filename <- get_filename(params),
         false <- empty?(shortcode),
         false <- empty?(filename) do
      file_path = Path.join(pack_dir, filename)

      # If the name contains directories, create them
      if String.contains?(file_path, "/") do
        File.mkdir_p!(Path.dirname(file_path))
      end

      case params["file"] do
        %Plug.Upload{path: upload_path} ->
          # Copy the uploaded file from the temporary directory
          File.copy!(upload_path, file_path)

        url when is_binary(url) ->
          # Download and write the file
          file_contents = Tesla.get!(url).body
          File.write!(file_path, file_contents)
      end

      updated_full_pack = put_in(full_pack, ["files", shortcode], filename)
      update_file_and_send(conn, updated_full_pack, pack_file_p)
    else
      {:has_shortcode, _} ->
        conn
        |> put_status(:conflict)
        |> json(%{error: "An emoji with the \"#{shortcode}\" shortcode already exists"})

      true ->
        conn
        |> put_status(:bad_request)
        |> json(%{error: "shortcode or filename cannot be empty"})
    end
  end

  # Remove
  def update_file(conn, %{
        "pack_name" => pack_name,
        "action" => "remove",
        "shortcode" => shortcode
      }) do
    pack_dir = Path.join(emoji_dir_path(), pack_name)
    pack_file_p = Path.join(pack_dir, "pack.json")

    full_pack = Jason.decode!(File.read!(pack_file_p))

    if Map.has_key?(full_pack["files"], shortcode) do
      {emoji_file_path, updated_full_pack} = pop_in(full_pack, ["files", shortcode])

      emoji_file_path = Path.join(pack_dir, emoji_file_path)

      # Delete the emoji file
      File.rm!(emoji_file_path)

      # If the directory the emoji was in has no other files, remove it
      if String.contains?(emoji_file_path, "/") do
        dir = Path.dirname(emoji_file_path)

        if Enum.empty?(File.ls!(dir)) do
          File.rmdir!(dir)
        end
      end

      update_file_and_send(conn, updated_full_pack, pack_file_p)
    else
      conn
      |> put_status(:bad_request)
      |> json(%{error: "Emoji \"#{shortcode}\" does not exist"})
    end
  end
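
  # Illustrative "update" request parameters (all names hypothetical):
  #
  #   %{
  #     "pack_name" => "blobs",
  #     "action" => "update",
  #     "shortcode" => "blobcat",
  #     "new_shortcode" => "happy_blobcat",
  #     "new_filename" => "happy/blobcat.png"
  #   }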

  # Update
  def update_file(
        conn,
        %{"pack_name" => pack_name, "action" => "update", "shortcode" => shortcode} = params
      ) do
    pack_dir = Path.join(emoji_dir_path(), pack_name)
    pack_file_p = Path.join(pack_dir, "pack.json")

    full_pack = Jason.decode!(File.read!(pack_file_p))

    with {_, true} <- {:has_shortcode, Map.has_key?(full_pack["files"], shortcode)},
         %{"new_shortcode" => new_shortcode, "new_filename" => new_filename} <- params,
         false <- empty?(new_shortcode),
         false <- empty?(new_filename) do
      # First, remove the old shortcode, saving the old path
      {old_emoji_file_path, updated_full_pack} = pop_in(full_pack, ["files", shortcode])
      old_emoji_file_path = Path.join(pack_dir, old_emoji_file_path)
      new_emoji_file_path = Path.join(pack_dir, new_filename)

      # If the name contains directories, create them
      if String.contains?(new_emoji_file_path, "/") do
        File.mkdir_p!(Path.dirname(new_emoji_file_path))
      end

      # Move/Rename the old filename to the new filename
      # These are probably on the same filesystem, so just rename should work
      :ok = File.rename(old_emoji_file_path, new_emoji_file_path)

      # If the old directory has no more files, remove it
      if String.contains?(old_emoji_file_path, "/") do
        dir = Path.dirname(old_emoji_file_path)

        if Enum.empty?(File.ls!(dir)) do
          File.rmdir!(dir)
        end
      end

      # Then, put in the new shortcode with the new path
      updated_full_pack = put_in(updated_full_pack, ["files", new_shortcode], new_filename)
      update_file_and_send(conn, updated_full_pack, pack_file_p)
    else
      {:has_shortcode, _} ->
        conn
        |> put_status(:bad_request)
        |> json(%{error: "Emoji \"#{shortcode}\" does not exist"})

      true ->
        conn
        |> put_status(:bad_request)
        |> json(%{error: "new_shortcode or new_filename cannot be empty"})

      _ ->
        conn
        |> put_status(:bad_request)
        |> json(%{error: "new_shortcode or new_filename were not specified"})
    end
  end

  def update_file(conn, %{"action" => action}) do
    conn
    |> put_status(:bad_request)
    |> json(%{error: "Unknown action: #{action}"})
  end

  @doc """
  Imports emoji from the filesystem.

  Importing means checking all the directories under `$instance_static/emoji/`
  for directories which do not have a `pack.json`. If such a directory has an
  `emoji.txt` file, that file will be used to create a `pack.json` with its contents.
  If the directory has neither, all the files with the configured extensions will be
  assumed to be emojis and stored in the new `pack.json` file.
  """
  def import_from_fs(conn, _params) do
    with {:ok, results} <- File.ls(emoji_dir_path()) do
      imported_pack_names =
        results
        |> Enum.filter(fn file ->
          dir_path = Path.join(emoji_dir_path(), file)
          # Find the directories that do NOT have pack.json
          File.dir?(dir_path) and not File.exists?(Path.join(dir_path, "pack.json"))
        end)
        |> Enum.map(&write_pack_json_contents/1)

      json(conn, imported_pack_names)
    else
      {:error, _} ->
        conn
        |> put_status(:internal_server_error)
        |> json(%{error: "Error accessing emoji pack directory"})
    end
  end

  defp write_pack_json_contents(dir) do
    dir_path = Path.join(emoji_dir_path(), dir)
    emoji_txt_path = Path.join(dir_path, "emoji.txt")

    files_for_pack = files_for_pack(emoji_txt_path, dir_path)
    pack_json_contents = Jason.encode!(%{pack: %{}, files: files_for_pack})

    File.write!(Path.join(dir_path, "pack.json"), pack_json_contents)

    dir
  end

  defp files_for_pack(emoji_txt_path, dir_path) do
    if File.exists?(emoji_txt_path) do
      # There's an emoji.txt file; it's likely from a pack installed by the pack manager.
      # Make a pack.json file from the contents of that emoji.txt file

      # FIXME: Copy-pasted from Pleroma.Emoji/load_from_file_stream/2

      # Create a map of shortcodes to filenames from emoji.txt
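      # Illustrative emoji.txt line ("shortcode, filename", optionally followed by tags,
      # which the parser below ignores):
      #
      #   blobcat, blobcat.png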
      File.read!(emoji_txt_path)
      |> String.split("\n")
      |> Enum.map(&String.trim/1)
      |> Enum.map(fn line ->
        case String.split(line, ~r/,\s*/) do
          # This matches both strings with and without tags
          # and we don't care about tags here
          [name, file | _] -> {name, file}
          _ -> nil
        end
      end)
      |> Enum.filter(fn x -> not is_nil(x) end)
      |> Enum.into(%{})
    else
      # If there's no emoji.txt, assume all files
      # that are of certain extensions from the config are emojis and import them all
      pack_extensions = Pleroma.Config.get!([:emoji, :pack_extensions])

      Pleroma.Emoji.Loader.make_shortcode_to_file_map(dir_path, pack_extensions)
    end
  end
end