
diff --git a/lib/lsg_irc/link_plugin.ex b/lib/lsg_irc/link_plugin.ex
index 97835e4..ea6df0c 100644
--- a/lib/lsg_irc/link_plugin.ex
+++ b/lib/lsg_irc/link_plugin.ex
@@ -1,303 +1,256 @@
defmodule LSG.IRC.LinkPlugin do
@moduledoc """
# Link Previewer
An extensible link previewer for IRC.
To add support for a site, create a new handler module implementing the callbacks.
See the `link_plugin/` directory for examples. The first handler in the list whose `match/2` callback returns true is used;
if the handler returns `:error` or crashes, the plugin falls back to the default preview.
Unsupported websites use the default preview: for HTML documents the page title, otherwise the MIME type and size.
## Configuration:
```
config :lsg, LSG.IRC.LinkPlugin,
handlers: [
LSG.IRC.LinkPlugin.Youtube: [
invidious: true
],
LSG.IRC.LinkPlugin.Twitter: [],
LSG.IRC.LinkPlugin.Imgur: [],
]
```
"""
@ircdoc """
# Link preview
Previews links (just post a link!).
Announces the real URL after redirections and provides extended support for YouTube, Twitter and Imgur.
"""
def short_irc_doc, do: false
def irc_doc, do: @ircdoc
require Logger
def start_link() do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
@callback match(uri :: URI.t, options :: Keyword.t) :: {true, params :: Map.t} | false
@callback expand(uri :: URI.t, params :: Map.t, options :: Keyword.t) :: {:ok, lines :: [] | String.t} | :error
+ @callback post_match(uri :: URI.t, content_type :: binary, headers :: [], opts :: Keyword.t) :: {:body | :file, params :: Map.t} | false
+ @callback post_expand(uri :: URI.t, body :: binary() | Path.t, params :: Map.t, options :: Keyword.t) :: {:ok, lines :: [] | String.t} | :error
+
+ @optional_callbacks [expand: 3, post_expand: 4]
defstruct [:client]
def init([]) do
{:ok, _} = Registry.register(IRC.PubSub, "message", [])
#{:ok, _} = Registry.register(IRC.PubSub, "message:telegram", [])
Logger.info("Link handler started")
{:ok, %__MODULE__{}}
end
def handle_info({:irc, :text, message = %{text: text}}, state) do
String.split(text)
|> Enum.map(fn(word) ->
if String.starts_with?(word, "http://") || String.starts_with?(word, "https://") do
uri = URI.parse(word)
if uri.scheme && uri.host do
spawn(fn() ->
+ :timer.kill_after(:timer.seconds(30))
case expand_link([uri]) do
{:ok, uris, text} ->
text = case uris do
[uri] -> text
- [uri | _] -> ["-> #{URI.to_string(uri)}", text]
+ [luri | _] ->
+ if luri.host == uri.host && luri.path == uri.path do
+ text
+ else
+ ["-> #{URI.to_string(luri)}", text]
+ end
end
- IO.inspect(text)
if is_list(text) do
for line <- text, do: message.replyfun.(line)
else
message.replyfun.(text)
end
_ -> nil
end
end)
end
end
end)
{:noreply, state}
end
def handle_info(msg, state) do
{:noreply, state}
end
def terminate(_reason, state) do
:ok
end
# 1. Match the first valid handler
# 2. Try to run the handler
# 3. If :error or crash, default link.
# If :skip, nothing
# 4. ?
# Over five redirections: cancel.
def expand_link(acc = [_, _, _, _, _ | _]) do
{:ok, acc, "link redirects more than five times"}
end
def expand_link(acc=[uri | _]) do
handlers = Keyword.get(Application.get_env(:lsg, __MODULE__, [handlers: []]), :handlers)
handler = Enum.reduce_while(handlers, nil, fn({module, opts}, acc) ->
module = Module.concat([module])
case module.match(uri, opts) do
{true, params} -> {:halt, {module, params, opts}}
false -> {:cont, acc}
end
end)
run_expand(acc, handler)
end
def run_expand(acc, nil) do
expand_default(acc)
end
def run_expand(acc=[uri|_], {module, params, opts}) do
case module.expand(uri, params, opts) do
{:ok, data} -> {:ok, acc, data}
:error -> expand_default(acc)
:skip -> nil
end
rescue
e ->
Logger.error(inspect(e))
expand_default(acc)
catch
e, b ->
Logger.error(inspect({b}))
expand_default(acc)
end
defp get(url, headers \\ [], options \\ []) do
get_req(url, :hackney.get(url, headers, <<>>, options))
end
defp get_req(_, {:error, reason}) do
{:error, reason}
end
defp get_req(url, {:ok, 200, headers, client}) do
headers = Enum.reduce(headers, %{}, fn({key, value}, acc) ->
Map.put(acc, String.downcase(key), value)
end)
content_type = Map.get(headers, "content-type", "application/octet-stream")
length = Map.get(headers, "content-length", "0")
{length, _} = Integer.parse(length)
+ handlers = Keyword.get(Application.get_env(:lsg, __MODULE__, [handlers: []]), :handlers)
+ handler = Enum.reduce_while(handlers, nil, fn({module, opts}, acc) ->
+ module = Module.concat([module])
+ try do
+ case module.post_match(url, content_type, headers, opts) do
+ {mode, params} when mode in [:body, :file] -> {:halt, {module, params, opts, mode}}
+ false -> {:cont, acc}
+ end
+ rescue
+ e ->
+ Logger.error(inspect(e))
+ {:cont, false}
+ catch
+ e, b ->
+ Logger.error(inspect({b}))
+ {:cont, false}
+ end
+ end)
+
cond do
- String.starts_with?(content_type, "text/html") && length <= 30_000_000 ->
- get_body(url, 30_000_000, client, <<>>)
+ handler != false and length <= 30_000_000 ->
+ case get_body(url, 30_000_000, client, handler, <<>>) do
+ {:ok, _} = ok -> ok
+ :error ->
+ {:ok, "file: #{content_type}, size: #{length} bytes"}
+ end
+ #String.starts_with?(content_type, "text/html") && length <= 30_000_000 ->
+ # get_body(url, 30_000_000, client, <<>>)
true ->
:hackney.close(client)
{:ok, "file: #{content_type}, size: #{length} bytes"}
end
end
defp get_req(_, {:ok, redirect, headers, client}) when redirect in 300..399 do
headers = Enum.reduce(headers, %{}, fn({key, value}, acc) ->
Map.put(acc, String.downcase(key), value)
end)
location = Map.get(headers, "location")
:hackney.close(client)
{:redirect, location}
end
defp get_req(_, {:ok, status, headers, client}) do
:hackney.close(client)
{:error, status, headers}
end
- defp get_body(url, len, client, acc) when len >= byte_size(acc) do
+ defp get_body(url, len, client, {handler, params, opts, mode} = h, acc) when len >= byte_size(acc) do
case :hackney.stream_body(client) do
{:ok, data} ->
- get_body(url, len, client, << acc::binary, data::binary >>)
+ get_body(url, len, client, h, << acc::binary, data::binary >>)
:done ->
- html = Floki.parse(acc)
- title = collect_title(html)
- opengraph = collect_open_graph(html)
- itemprops = collect_itemprops(html)
- Logger.debug("OG: #{inspect opengraph}")
- text = if Map.has_key?(opengraph, "title") && Map.has_key?(opengraph, "description") do
- sitename = if sn = Map.get(opengraph, "site_name") do
- "#{sn}"
- else
- ""
- end
- paywall? = if Map.get(opengraph, "article:content_tier", Map.get(itemprops, "article:content_tier", "free")) == "free" do
- ""
- else
- "[paywall] "
- end
- section = if section = Map.get(opengraph, "article:section", Map.get(itemprops, "article:section", nil)) do
- ": #{section}"
- else
- ""
- end
- date = case DateTime.from_iso8601(Map.get(opengraph, "article:published_time", Map.get(itemprops, "article:published_time", ""))) do
- {:ok, date, _} ->
- "#{Timex.format!(date, "%d/%m/%y", :strftime)}. "
- _ ->
- ""
- end
- uri = URI.parse(url)
-
- prefix = "#{paywall?}#{Map.get(opengraph, "site_name", uri.host)}#{section}"
- prefix = unless prefix == "" do
- "#{prefix} — "
- else
- ""
- end
- [clean_text("#{prefix}#{Map.get(opengraph, "title")}")] ++ IRC.splitlong(clean_text("#{date}#{Map.get(opengraph, "description")}"))
- else
- clean_text(title)
+ body = case mode do
+ :body -> acc
+ :file ->
+ {:ok, tmpfile} = Plug.Upload.random_file("linkplugin")
+ File.write!(tmpfile, acc)
+ tmpfile
end
- {:ok, text}
+ handler.post_expand(url, body, params, opts)
{:error, reason} ->
{:ok, "failed to fetch body: #{inspect reason}"}
end
end
- defp clean_text(text) do
- text
- |> String.replace("\n", " ")
- |> HtmlEntities.decode()
- end
-
- defp get_body(len, client, _acc) do
+ defp get_body(_, len, client, _, _acc) do
:hackney.close(client)
{:ok, "Error: file over 30"}
end
def expand_default(acc = [uri = %URI{scheme: scheme} | _]) when scheme in ["http", "https"] do
- headers = []
+ headers = [{"user-agent", "DmzBot (like TwitterBot)"}]
options = [follow_redirect: false, max_body_length: 30_000_000]
case get(URI.to_string(uri), headers, options) do
{:ok, text} ->
{:ok, acc, text}
{:redirect, link} ->
new_uri = URI.parse(link)
- new_uri = %URI{new_uri | scheme: scheme, authority: uri.authority, host: uri.host, port: uri.port}
+ #new_uri = %URI{new_uri | scheme: scheme, authority: uri.authority, host: uri.host, port: uri.port}
expand_link([new_uri | acc])
{:error, status, _headers} ->
text = Plug.Conn.Status.reason_phrase(status)
{:ok, acc, "Error: HTTP #{text} (#{status})"}
{:error, reason} ->
{:ok, acc, "Error: #{to_string(reason)}"}
end
end
# Unsupported scheme, came from a redirect.
def expand_default(acc = [uri | _]) do
{:ok, [uri], "-> #{URI.to_string(uri)}"}
end
- defp collect_title(html) do
- case Floki.find(html, "title") do
- [{"title", [], [title]} | _] ->
- String.trim(title)
- _ ->
- nil
- end
- end
-
- defp collect_open_graph(html) do
- Enum.reduce(Floki.find(html, "head meta"), %{}, fn(tag, acc) ->
- case tag do
- {"meta", values, []} ->
- name = List.keyfind(values, "property", 0, {nil, nil}) |> elem(1)
- content = List.keyfind(values, "content", 0, {nil, nil}) |> elem(1)
- case name do
- "og:" <> key ->
- Map.put(acc, key, content)
- "article:"<>_ ->
- Map.put(acc, name, content)
- _other -> acc
- end
- _other -> acc
- end
- end)
- end
-
- defp collect_itemprops(html) do
- Enum.reduce(Floki.find(html, "[itemprop]"), %{}, fn(tag, acc) ->
- case tag do
- {"meta", values, []} ->
- name = List.keyfind(values, "itemprop", 0, {nil, nil}) |> elem(1)
- content = List.keyfind(values, "content", 0, {nil, nil}) |> elem(1)
- case name do
- "article:" <> key ->
- Map.put(acc, name, content)
- _other -> acc
- end
- _other -> acc
- end
- end)
- end
-
end
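
For reference, the callbacks above compose like this. A minimal handler sketch follows; the module name, host and reply text are illustrative assumptions, not part of this patch:

```elixir
# Illustrative only: module name and matching rule are hypothetical.
defmodule LSG.IRC.LinkPlugin.Example do
  @behaviour LSG.IRC.LinkPlugin

  # Claim links on one host; params are passed on to expand/3.
  @impl true
  def match(%URI{host: "example.org", path: path}, _opts), do: {true, %{path: path}}
  def match(_, _), do: false

  # This handler never inspects the fetched body, so it never post-matches.
  @impl true
  def post_match(_url, _content_type, _headers, _opts), do: false

  # Return a line (or a list of lines) to send to the channel;
  # returning :error falls back to the default preview.
  @impl true
  def expand(_uri, %{path: path}, _opts), do: {:ok, "example.org — #{path}"}
end
```

Such a handler would then be enabled by listing it under `handlers:` in the configuration shown in the moduledoc.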
diff --git a/lib/lsg_irc/link_plugin/github.ex b/lib/lsg_irc/link_plugin/github.ex
index c7444c2..19be89b 100644
--- a/lib/lsg_irc/link_plugin/github.ex
+++ b/lib/lsg_irc/link_plugin/github.ex
@@ -1,44 +1,49 @@
defmodule LSG.IRC.LinkPlugin.Github do
@behaviour LSG.IRC.LinkPlugin
+ @impl true
def match(uri = %URI{host: "github.com", path: path}, _) do
case String.split(path, "/") do
["", user, repo] ->
{true, %{user: user, repo: repo, path: "#{user}/#{repo}"}}
_ ->
false
end
end
def match(_, _), do: false
+ @impl true
+ def post_match(_, _, _, _), do: false
+
+ @impl true
def expand(_uri, %{user: user, repo: repo}, _opts) do
case HTTPoison.get("https://api.github.com/repos/#{user}/#{repo}") do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, json} = Jason.decode(body)
src = json["source"]["full_name"]
disabled = if(json["disabled"], do: " (disabled)", else: "")
archived = if(json["archived"], do: " (archived)", else: "")
fork = if src && src != json["full_name"] do
" (⑂ #{json["source"]["full_name"]})"
else
""
end
start = "#{json["full_name"]}#{disabled}#{archived}#{fork} - #{json["description"]}"
tags = for(t <- json["topics"]||[], do: "##{t}") |> Enum.intersperse(", ") |> Enum.join("")
lang = if(json["language"], do: "#{json["language"]} - ", else: "")
issues = if(json["open_issues_count"], do: "#{json["open_issues_count"]} issues - ", else: "")
last_push = if at = json["pushed_at"] do
{:ok, date, _} = DateTime.from_iso8601(at)
" - last pushed #{DateTime.to_string(date)}"
else
""
end
network = "#{lang}#{issues}#{json["stargazers_count"]} stars - #{json["subscribers_count"]} watchers - #{json["forks_count"]} forks#{last_push}"
{:ok, [start, tags, network]}
other ->
:error
end
end
end
diff --git a/lib/lsg_irc/link_plugin/html.ex b/lib/lsg_irc/link_plugin/html.ex
new file mode 100644
index 0000000..e0e4229
--- /dev/null
+++ b/lib/lsg_irc/link_plugin/html.ex
@@ -0,0 +1,106 @@
+defmodule LSG.IRC.LinkPlugin.HTML do
+ @behaviour LSG.IRC.LinkPlugin
+
+ @impl true
+ def match(_, _), do: false
+
+ @impl true
+ def post_match(_url, "text/html"<>_, _header, _opts) do
+ {:body, nil}
+ end
+ def post_match(_, _, _, _), do: false
+
+ @impl true
+ def post_expand(url, body, _params, _opts) do
+ html = Floki.parse(body)
+ title = collect_title(html)
+ opengraph = collect_open_graph(html)
+ itemprops = collect_itemprops(html)
+ text = if Map.has_key?(opengraph, "title") && Map.has_key?(opengraph, "description") do
+ sitename = if sn = Map.get(opengraph, "site_name") do
+ "#{sn}"
+ else
+ ""
+ end
+ paywall? = if Map.get(opengraph, "article:content_tier", Map.get(itemprops, "article:content_tier", "free")) == "free" do
+ ""
+ else
+ "[paywall] "
+ end
+ section = if section = Map.get(opengraph, "article:section", Map.get(itemprops, "article:section", nil)) do
+ ": #{section}"
+ else
+ ""
+ end
+ date = case DateTime.from_iso8601(Map.get(opengraph, "article:published_time", Map.get(itemprops, "article:published_time", ""))) do
+ {:ok, date, _} ->
+ "#{Timex.format!(date, "%d/%m/%y", :strftime)}. "
+ _ ->
+ ""
+ end
+ uri = URI.parse(url)
+
+ prefix = "#{paywall?}#{Map.get(opengraph, "site_name", uri.host)}#{section}"
+ prefix = unless prefix == "" do
+ "#{prefix} — "
+ else
+ ""
+ end
+ [clean_text("#{prefix}#{Map.get(opengraph, "title")}")] ++ IRC.splitlong(clean_text("#{date}#{Map.get(opengraph, "description")}"))
+ else
+ clean_text(title)
+ end
+ {:ok, text}
+ end
+
+ defp collect_title(html) do
+ case Floki.find(html, "title") do
+ [{"title", [], [title]} | _] ->
+ String.trim(title)
+ _ ->
+ nil
+ end
+ end
+
+ defp collect_open_graph(html) do
+ Enum.reduce(Floki.find(html, "head meta"), %{}, fn(tag, acc) ->
+ case tag do
+ {"meta", values, []} ->
+ name = List.keyfind(values, "property", 0, {nil, nil}) |> elem(1)
+ content = List.keyfind(values, "content", 0, {nil, nil}) |> elem(1)
+ case name do
+ "og:" <> key ->
+ Map.put(acc, key, content)
+ "article:"<>_ ->
+ Map.put(acc, name, content)
+ _other -> acc
+ end
+ _other -> acc
+ end
+ end)
+ end
+
+ defp collect_itemprops(html) do
+ Enum.reduce(Floki.find(html, "[itemprop]"), %{}, fn(tag, acc) ->
+ case tag do
+ {"meta", values, []} ->
+ name = List.keyfind(values, "itemprop", 0, {nil, nil}) |> elem(1)
+ content = List.keyfind(values, "content", 0, {nil, nil}) |> elem(1)
+ case name do
+ "article:" <> key ->
+ Map.put(acc, name, content)
+ _other -> acc
+ end
+ _other -> acc
+ end
+ end)
+ end
+
+ defp clean_text(text) do
+ text
+ |> String.replace("\n", " ")
+ |> HtmlEntities.decode()
+ end
+
+
+end
diff --git a/lib/lsg_irc/link_plugin/imgur.ex b/lib/lsg_irc/link_plugin/imgur.ex
index 1b8173f..41b7e08 100644
--- a/lib/lsg_irc/link_plugin/imgur.ex
+++ b/lib/lsg_irc/link_plugin/imgur.ex
@@ -1,86 +1,90 @@
defmodule LSG.IRC.LinkPlugin.Imgur do
@behaviour LSG.IRC.LinkPlugin
@moduledoc """
# Imgur link preview
No options.
Needs an Imgur API key configured:
```
config :lsg, :imgur,
client_id: "xxxxxxxx",
client_secret: "xxxxxxxxxxxxxxxxxxxx"
```
"""
+ @impl true
def match(uri = %URI{host: "imgur.com", path: "/a/"<>album_id}, _) do
{true, %{album_id: album_id}}
end
def match(uri = %URI{host: "imgur.com", path: "/gallery/"<>album_id}, _) do
{true, %{album_id: album_id}}
end
def match(uri = %URI{host: "i.imgur.com", path: "/"<>image}, _) do
[hash, _] = String.split(image, ".", parts: 2)
{true, %{image_id: hash}}
end
def match(_, _), do: false
+ @impl true
+ def post_match(_, _, _, _), do: false
+
def expand(_uri, %{album_id: album_id}, opts) do
expand_imgur_album(album_id, opts)
end
def expand(_uri, %{image_id: image_id}, opts) do
expand_imgur_image(image_id, opts)
end
def expand_imgur_image(image_id, opts) do
client_id = Keyword.get(Application.get_env(:lsg, :imgur, []), :client_id, "42")
headers = [{"Authorization", "Client-ID #{client_id}"}]
options = []
case HTTPoison.get("https://api.imgur.com/3/image/#{image_id}", headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, json} = Jason.decode(body)
data = json["data"]
title = String.slice(data["title"] || data["description"], 0, 180)
nsfw = if data["nsfw"], do: "(NSFW) - ", else: " "
height = Map.get(data, "height")
width = Map.get(data, "width")
size = Map.get(data, "size")
{:ok, "image, #{width}x#{height}, #{size} bytes #{nsfw}#{title}"}
other ->
:error
end
end
def expand_imgur_album(album_id, opts) do
client_id = Keyword.get(Application.get_env(:lsg, :imgur, []), :client_id, "42")
headers = [{"Authorization", "Client-ID #{client_id}"}]
options = []
case HTTPoison.get("https://api.imgur.com/3/album/#{album_id}", headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, json} = Jason.decode(body)
data = json["data"]
title = data["title"]
nsfw = data["nsfw"]
nsfw = if nsfw, do: "(NSFW) - ", else: ""
if data["images_count"] == 1 do
[image] = data["images"]
title = if title || data["title"] do
title = [title, data["title"]] |> Enum.filter(fn(x) -> x end) |> Enum.uniq() |> Enum.join(" — ")
"#{title} — "
else
""
end
{:ok, "#{nsfw}#{title}#{image["link"]}"}
else
title = if title, do: title, else: "Untitled album"
{:ok, "#{nsfw}#{title} - #{data["images_count"]} images"}
end
other ->
:error
end
end
end
diff --git a/lib/lsg_irc/link_plugin/pdf.ex b/lib/lsg_irc/link_plugin/pdf.ex
new file mode 100644
index 0000000..8c4869c
--- /dev/null
+++ b/lib/lsg_irc/link_plugin/pdf.ex
@@ -0,0 +1,39 @@
+defmodule LSG.IRC.LinkPlugin.PDF do
+ require Logger
+ @behaviour LSG.IRC.LinkPlugin
+
+ @impl true
+ def match(_, _), do: false
+
+ @impl true
+ def post_match(_url, "application/pdf"<>_, _header, _opts) do
+ {:file, nil}
+ end
+
+ def post_match(_, _, _, _), do: false
+
+ @impl true
+ def post_expand(url, file, _, _) do
+ case System.cmd("pdftitle", ["-p", file]) do
+ {text, 0} ->
+ text = text
+ |> String.trim()
+
+ if text == "" do
+ :error
+ else
+ basename = Path.basename(url, ".pdf")
+ text = "[#{basename}] " <> text
+ |> String.split("\n")
+ {:ok, text}
+ end
+ {_, 127} ->
+ Logger.error("dependency `pdftitle` is missing, please install it: `pip3 install pdftitle`.")
+ :error
+ {error, code} ->
+ Logger.warn("command `pdftitle` exited with status code #{code}:\n#{inspect error}")
+ :error
+ end
+ end
+
+end
diff --git a/lib/lsg_irc/link_plugin/reddit_plugin.ex b/lib/lsg_irc/link_plugin/reddit.ex
similarity index 97%
rename from lib/lsg_irc/link_plugin/reddit_plugin.ex
rename to lib/lsg_irc/link_plugin/reddit.ex
index a7f5235..6fc1723 100644
--- a/lib/lsg_irc/link_plugin/reddit_plugin.ex
+++ b/lib/lsg_irc/link_plugin/reddit.ex
@@ -1,114 +1,119 @@
defmodule LSG.IRC.LinkPlugin.Reddit do
@behaviour LSG.IRC.LinkPlugin
+ @impl true
def match(uri = %URI{host: "reddit.com", path: path}, _) do
case String.split(path, "/") do
["", "r", sub, "comments", post_id, _slug] ->
{true, %{mode: :post, path: path, sub: sub, post_id: post_id}}
["", "r", sub, "comments", post_id, _slug, ""] ->
{true, %{mode: :post, path: path, sub: sub, post_id: post_id}}
["", "r", sub, ""] ->
{true, %{mode: :sub, path: path, sub: sub}}
["", "r", sub] ->
{true, %{mode: :sub, path: path, sub: sub}}
# ["", "u", user] ->
# {true, %{mode: :user, path: path, user: user}}
_ ->
false
end
end
def match(uri = %URI{host: host, path: path}, opts) do
if String.ends_with?(host, ".reddit.com") do
match(%URI{uri | host: "reddit.com"}, opts)
else
false
end
end
+ @impl true
+ def post_match(_, _, _, _), do: false
+
+ @impl true
def expand(_, %{mode: :sub, sub: sub}, _opts) do
url = "https://api.reddit.com/r/#{sub}/about"
case HTTPoison.get(url) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
sr = Jason.decode!(body)
|> Map.get("data")
|> IO.inspect(limit: :infinity)
description = Map.get(sr, "public_description")||Map.get(sr, "description", "")
|> String.split("\n")
|> List.first()
name = if title = Map.get(sr, "title") do
Map.get(sr, "display_name_prefixed") <> ": " <> title
else
Map.get(sr, "display_name_prefixed")
end
nsfw = if Map.get(sr, "over18") do
"[NSFW] "
else
""
end
quarantine = if Map.get(sr, "quarantine") do
"[Quarantined] "
else
""
end
count = "#{Map.get(sr, "subscribers")} subscribers, #{Map.get(sr, "active_user_count")} active"
preview = "#{quarantine}#{nsfw}#{name} — #{description} (#{count})"
{:ok, preview}
_ ->
:error
end
end
def expand(_uri, %{mode: :post, path: path, sub: sub, post_id: post_id}, _opts) do
case HTTPoison.get("https://api.reddit.com#{path}?sr_detail=true") do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
json = Jason.decode!(body)
op = List.first(json)
|> Map.get("data")
|> Map.get("children")
|> List.first()
|> Map.get("data")
|> IO.inspect(limit: :infinity)
sr = get_in(op, ["sr_detail", "display_name_prefixed"])
{self?, url} = if Map.get(op, "selftext") == "" do
{false, Map.get(op, "url")}
else
{true, nil}
end
self_str = if(self?, do: "text", else: url)
up = Map.get(op, "ups")
down = Map.get(op, "downs")
comments = Map.get(op, "num_comments")
nsfw = if Map.get(op, "over_18") do
"[NSFW] "
else
""
end
state = cond do
Map.get(op, "hidden") -> "hidden"
Map.get(op, "archived") -> "archived"
Map.get(op, "locked") -> "locked"
Map.get(op, "quarantine") -> "quarantined"
Map.get(op, "removed_by") || Map.get(op, "removed_by_category") -> "removed"
Map.get(op, "banned_by") -> "banned"
Map.get(op, "pinned") -> "pinned"
Map.get(op, "stickied") -> "stickied"
true -> nil
end
flair = if flair = Map.get(op, "link_flair_text") do
"[#{flair}] "
else
""
end
title = "#{nsfw}#{sr}: #{flair}#{Map.get(op, "title")}"
state_str = if(state, do: "#{state}, ")
content = "by u/#{Map.get(op, "author")} - #{state_str}#{up} up, #{down} down, #{comments} comments - #{self_str}"
{:ok, [title, content]}
err ->
:error
end
end
end
diff --git a/lib/lsg_irc/link_plugin/twitter.ex b/lib/lsg_irc/link_plugin/twitter.ex
index a6b6e29..e462384 100644
--- a/lib/lsg_irc/link_plugin/twitter.ex
+++ b/lib/lsg_irc/link_plugin/twitter.ex
@@ -1,102 +1,105 @@
defmodule LSG.IRC.LinkPlugin.Twitter do
@behaviour LSG.IRC.LinkPlugin
@moduledoc """
# Twitter Link Preview
Configuration:
needs an API key and auth tokens:
```
config :extwitter, :oauth, [
consumer_key: "zzzzz",
consumer_secret: "xxxxxxx",
access_token: "yyyyyy",
access_token_secret: "ssshhhhhh"
]
```
options:
* `expand_quoted`: Add the quoted tweet instead of its URL. Default: true.
"""
def match(uri = %URI{host: twitter, path: path}, _opts) when twitter in ["twitter.com", "m.twitter.com", "mobile.twitter.com"] do
case String.split(path, "/", parts: 4) do
["", _username, "status", status_id] ->
{status_id, _} = Integer.parse(status_id)
{true, %{status_id: status_id}}
_ -> false
end
end
def match(_, _), do: false
+ @impl true
+ def post_match(_, _, _, _), do: false
+
def expand(_uri, %{status_id: status_id}, opts) do
expand_tweet(ExTwitter.show(status_id, tweet_mode: "extended"), opts)
end
defp expand_tweet(nil, _opts) do
:error
end
defp expand_tweet(tweet, opts) do
text = expand_twitter_text(tweet)
text = if tweet.quoted_status do
quote_url = "https://twitter.com/#{tweet.quoted_status.user.screen_name}/status/#{tweet.quoted_status.id}"
String.replace(text, quote_url, "")
else
text
end
text = IRC.splitlong(text)
{:ok, at} = Timex.parse(tweet.created_at, "%a %b %e %H:%M:%S %z %Y", :strftime)
{:ok, format} = Timex.format(at, "{relative}", :relative)
replyto = if tweet.in_reply_to_status_id do
replyurl = "https://twitter.com/#{tweet.in_reply_to_screen_name}/status/#{tweet.in_reply_to_status_id}"
if tweet.in_reply_to_screen_name == tweet.user.screen_name do
"— continued from #{replyurl}"
else
"— replying to #{replyurl}"
end
else
""
end
quoted = if tweet.quoted_status do
quote_url = "https://twitter.com/#{tweet.quoted_status.user.screen_name}/status/#{tweet.quoted_status.id}"
full_text = expand_twitter_text(tweet.quoted_status)
|> IRC.splitlong_with_prefix(">")
["> #{tweet.quoted_status.user.name} (@#{tweet.quoted_status.user.screen_name}): #{quote_url}"] ++ full_text
else
[]
end
foot = "— #{format} - #{tweet.retweet_count} retweets - #{tweet.favorite_count} likes"
text = ["#{tweet.user.name} (@#{tweet.user.screen_name}):", replyto] ++ text ++ quoted ++ [foot]
{:ok, text}
end
defp expand_twitter_text(tweet) do
text = Enum.reduce(tweet.entities.urls, tweet.full_text, fn(entity, text) ->
String.replace(text, entity.url, entity.expanded_url)
end)
extended = tweet.extended_entities || %{media: []}
text = Enum.reduce(extended.media, text, fn(entity, text) ->
url = Enum.filter(extended.media, fn(e) -> entity.url == e.url end)
|> Enum.map(fn(e) ->
cond do
e.type == "video" -> e.expanded_url
true -> e.media_url_https
end
end)
|> Enum.join(" ")
String.replace(text, entity.url, url)
end)
end
end
diff --git a/lib/lsg_irc/link_plugin/youtube.ex b/lib/lsg_irc/link_plugin/youtube.ex
index ea4f213..b68a86f 100644
--- a/lib/lsg_irc/link_plugin/youtube.ex
+++ b/lib/lsg_irc/link_plugin/youtube.ex
@@ -1,69 +1,73 @@
defmodule LSG.IRC.LinkPlugin.YouTube do
@behaviour LSG.IRC.LinkPlugin
@moduledoc """
# YouTube link preview
needs an API key:
```
config :lsg, :youtube,
api_key: "xxxxxxxxxxxxx"
```
options:
* `invidious`: Add a link to invidio.us. Default: true.
"""
+ @impl true
def match(uri = %URI{host: yt, path: "/watch", query: "v="<>video_id}, _opts) when yt in ["youtube.com", "www.youtube.com"] do
{true, %{video_id: video_id}}
end
def match(%URI{host: "youtu.be", path: "/"<>video_id}, _opts) do
{true, %{video_id: video_id}}
end
def match(_, _), do: false
+ @impl true
+ def post_match(_, _, _, _), do: false
+ @impl true
def expand(uri, %{video_id: video_id}, opts) do
key = Application.get_env(:lsg, :youtube)[:api_key]
params = %{
"part" => "snippet,contentDetails,statistics",
"id" => video_id,
"key" => key
}
headers = []
options = [params: params]
case HTTPoison.get("https://www.googleapis.com/youtube/v3/videos", [], options) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
case Jason.decode(body) do
{:ok, json} ->
item = List.first(json["items"])
if item do
snippet = item["snippet"]
duration = item["contentDetails"]["duration"] |> String.replace("PT", "") |> String.downcase
date = snippet["publishedAt"]
|> DateTime.from_iso8601()
|> elem(1)
|> Timex.format("{relative}", :relative)
|> elem(1)
line = if Keyword.get(opts, :invidious, true) do
["-> https://invidio.us/watch?v=#{video_id}"]
else
[]
end
{:ok, line ++ ["#{snippet["title"]}", "— #{duration} — uploaded by #{snippet["channelTitle"]} — #{date}"
<> " — #{item["statistics"]["viewCount"]} views, #{item["statistics"]["likeCount"]} likes,"
<> " #{item["statistics"]["dislikeCount"]} dislikes"]}
else
:error
end
_ -> :error
end
end
end
end
