# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.Emoji do
  use Mix.Task

  @shortdoc "Manages emoji packs"
  @moduledoc """
  Manages emoji packs

  ## ls-packs

      mix pleroma.emoji ls-packs [OPTION...]

  Lists the emoji packs and metadata specified in the manifest.

  ### Options

  - `-m, --manifest PATH/URL` - path to a custom manifest, it can
    either be an URL starting with `http`, in that case the
    manifest will be fetched from that address, or a local path

  ## get-packs

      mix pleroma.emoji get-packs [OPTION...] PACKS

  Fetches, verifies and installs the specified PACKS from the
  manifest into the `STATIC-DIR/emoji/PACK-NAME`

  ### Options

  - `-m, --manifest PATH/URL` - same as ls-packs

  ## gen-pack

      mix pleroma.emoji gen-pack PACK-URL

  Creates a new manifest entry and a file list from the specified
  remote pack file. Currently, only .zip archives are recognized
  as remote pack files and packs are therefore assumed to be zip
  archives. This command is intended to run interactively and will
  first ask you some basic questions about the pack, then download
  the remote file and generate an SHA256 checksum for it, then
  generate an emoji file list for you.

  The manifest entry will either be written to a newly created
  `index.json` file or appended to the existing one, *replacing*
  the old pack with the same name if it was in the file previously.

  The file list will be written to the file specified previously,
  *replacing* that file. You _should_ check that the file list doesn't
  contain anything you don't need in the pack, that is, anything that is
  not an emoji (the whole pack is downloaded, but only emoji files
  are extracted).
  """

  # Prints every pack in the manifest together with its metadata.
  def run(["ls-packs" | args]) do
    Application.ensure_all_started(:hackney)

    {options, [], []} = parse_global_opts(args)

    manifest =
      fetch_manifest(if options[:manifest], do: options[:manifest], else: default_manifest())

    Enum.each(manifest, fn {name, info} ->
      to_print = [
        {"Name", name},
        {"Homepage", info["homepage"]},
        {"Description", info["description"]},
        {"License", info["license"]},
        {"Source", info["src"]}
      ]

      for {param, value} <- to_print do
        IO.puts(IO.ANSI.format([:bright, param, :normal, ": ", value]))
      end

      # A newline separating pack entries
      IO.puts("")
    end)
  end

  # Downloads the named packs, verifies their SHA256 against the manifest,
  # unpacks the listed emoji files into STATIC-DIR/emoji/PACK-NAME and
  # writes an emoji.txt shortcode index next to them.
  def run(["get-packs" | args]) do
    Application.ensure_all_started(:hackney)

    {options, pack_names, []} = parse_global_opts(args)

    manifest_url = if options[:manifest], do: options[:manifest], else: default_manifest()

    manifest = fetch_manifest(manifest_url)

    for pack_name <- pack_names do
      if Map.has_key?(manifest, pack_name) do
        pack = manifest[pack_name]
        src_url = pack["src"]

        IO.puts(
          IO.ANSI.format([
            "Downloading ",
            :bright,
            pack_name,
            :normal,
            " from ",
            :underline,
            src_url
          ])
        )

        binary_archive = Tesla.get!(client(), src_url).body
        archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()

        sha_status_text = ["SHA256 of ", :bright, pack_name, :normal, " source file is ", :bright]

        # Refuse to install a pack whose archive doesn't match the
        # checksum recorded in the manifest.
        if archive_sha == String.upcase(pack["src_sha256"]) do
          IO.puts(IO.ANSI.format(sha_status_text ++ [:green, "OK"]))
        else
          IO.puts(IO.ANSI.format(sha_status_text ++ [:red, "BAD"]))

          raise "Bad SHA256 for #{pack_name}"
        end

        # The url specified in files should be in the same directory
        files_url = Path.join(Path.dirname(manifest_url), pack["files"])

        IO.puts(
          IO.ANSI.format([
            "Fetching the file list for ",
            :bright,
            pack_name,
            :normal,
            " from ",
            :underline,
            files_url
          ])
        )

        files = Tesla.get!(client(), files_url).body |> Jason.decode!()

        IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))

        pack_path =
          Path.join([
            Pleroma.Config.get!([:instance, :static_dir]),
            "emoji",
            pack_name
          ])

        # :zip expects charlists, so convert each listed file name.
        files_to_unzip =
          Enum.map(
            files,
            fn {_, f} -> to_charlist(f) end
          )

        # Only the files named in the file list are extracted — anything
        # else in the archive is ignored.
        {:ok, _} =
          :zip.unzip(binary_archive,
            cwd: pack_path,
            file_list: files_to_unzip
          )

        IO.puts(IO.ANSI.format(["Writing emoji.txt for ", :bright, pack_name]))

        emoji_txt_str =
          Enum.map(
            files,
            fn {shortcode, path} ->
              emojo_path = Path.join("/emoji/#{pack_name}", path)
              "#{shortcode}, #{emojo_path}"
            end
          )
          |> Enum.join("\n")

        File.write!(Path.join(pack_path, "emoji.txt"), emoji_txt_str)
      else
        IO.puts(IO.ANSI.format([:bright, :red, "No pack named \"#{pack_name}\" found"]))
      end
    end
  end

  # Interactively builds a manifest entry and an emoji file list for a
  # remote .zip pack: prompts for metadata, downloads the archive,
  # records its SHA256 and writes/merges index.json.
  def run(["gen-pack", src]) do
    Application.ensure_all_started(:hackney)

    proposed_name = Path.basename(src) |> Path.rootname()
    name = String.trim(IO.gets("Pack name [#{proposed_name}]: "))
    # If there's no name, use the default one
    name = if String.length(name) > 0, do: name, else: proposed_name

    license = String.trim(IO.gets("License: "))
    homepage = String.trim(IO.gets("Homepage: "))
    description = String.trim(IO.gets("Description: "))

    proposed_files_name = "#{name}.json"
    files_name = String.trim(IO.gets("Save file list to [#{proposed_files_name}]: "))
    files_name = if String.length(files_name) > 0, do: files_name, else: proposed_files_name

    default_exts = [".png", ".gif"]
    default_exts_str = Enum.join(default_exts, " ")

    exts =
      String.trim(
        IO.gets("Emoji file extensions (separated with spaces) [#{default_exts_str}]: ")
      )

    exts =
      if String.length(exts) > 0 do
        String.split(exts, " ")
        |> Enum.filter(fn e -> e |> String.trim() |> String.length() > 0 end)
      else
        default_exts
      end

    IO.puts("Downloading the pack and generating SHA256")

    binary_archive = Tesla.get!(client(), src).body
    archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()

    IO.puts("SHA256 is #{archive_sha}")

    pack_json = %{
      name => %{
        license: license,
        homepage: homepage,
        description: description,
        src: src,
        src_sha256: archive_sha,
        files: files_name
      }
    }

    tmp_pack_dir = Path.join(System.tmp_dir!(), "emoji-pack-#{name}")

    {:ok, _} =
      :zip.unzip(
        binary_archive,
        cwd: tmp_pack_dir
      )

    emoji_map = Pleroma.Emoji.Loader.make_shortcode_to_file_map(tmp_pack_dir, exts)

    File.write!(files_name, Jason.encode!(emoji_map, pretty: true))

    IO.puts("""

    #{files_name} has been created and contains the list of all found emojis in the pack.
    Please review the files in the pack and remove those not needed.
    """)

    if File.exists?("index.json") do
      existing_data = File.read!("index.json") |> Jason.decode!()

      # Merging replaces any existing entry with the same pack name.
      File.write!(
        "index.json",
        Jason.encode!(
          Map.merge(
            existing_data,
            pack_json
          ),
          pretty: true
        )
      )

      IO.puts("index.json file has been updated with the #{name} pack")
    else
      File.write!("index.json", Jason.encode!(pack_json, pretty: true))

      IO.puts("index.json has been created with the #{name} pack")
    end
  end

  # Decodes the manifest JSON from either a local path or an http(s) URL.
  defp fetch_manifest(from) do
    Jason.decode!(
      if String.starts_with?(from, "http") do
        Tesla.get!(client(), from).body
      else
        File.read!(from)
      end
    )
  end

  # Shared option parsing for ls-packs / get-packs (-m/--manifest).
  defp parse_global_opts(args) do
    OptionParser.parse(
      args,
      strict: [
        manifest: :string
      ],
      aliases: [
        m: :manifest
      ]
    )
  end

  # Tesla client following up to 3 redirects — pack sources are commonly
  # served behind redirecting release/download links.
  defp client do
    middleware = [
      {Tesla.Middleware.FollowRedirects, [max_redirects: 3]}
    ]

    Tesla.client(middleware)
  end

  defp default_manifest, do: Pleroma.Config.get!([:emoji, :default_manifest])
end