author     Jordan Bracco <href@random.sh>  2022-12-20 00:21:54 +0000
committer  Jordan Bracco <href@random.sh>  2022-12-20 19:29:41 +0100
commit     2d83df8b32bff7f0028923bb5b64dc0b55f20d03
tree       1207e67b5b15f540963db05e7be89f3ca950e724 /lib/nola_plugins/link_plugin
parent     Nola rename, the end. pt 6. Refs T77.

Nola rename: The Big Move, Refs T77
Diffstat (limited to 'lib/nola_plugins/link_plugin')
-rw-r--r--  lib/nola_plugins/link_plugin/github.ex    |  49
-rw-r--r--  lib/nola_plugins/link_plugin/html.ex      | 106
-rw-r--r--  lib/nola_plugins/link_plugin/imgur.ex     |  96
-rw-r--r--  lib/nola_plugins/link_plugin/pdf.ex       |  39
-rw-r--r--  lib/nola_plugins/link_plugin/redacted.ex  |  18
-rw-r--r--  lib/nola_plugins/link_plugin/reddit.ex    | 119
-rw-r--r--  lib/nola_plugins/link_plugin/twitter.ex   | 158
-rw-r--r--  lib/nola_plugins/link_plugin/youtube.ex   |  72
8 files changed, 657 insertions(+), 0 deletions(-)
diff --git a/lib/nola_plugins/link_plugin/github.ex b/lib/nola_plugins/link_plugin/github.ex
new file mode 100644
index 0000000..93e0892
--- /dev/null
+++ b/lib/nola_plugins/link_plugin/github.ex
@@ -0,0 +1,49 @@
+defmodule Nola.IRC.LinkPlugin.Github do
+ @behaviour Nola.IRC.LinkPlugin
+
+ @impl true
+  def match(%URI{host: "github.com", path: path}, _) do
+ case String.split(path, "/") do
+ ["", user, repo] ->
+ {true, %{user: user, repo: repo, path: "#{user}/#{repo}"}}
+ _ ->
+ false
+ end
+ end
+
+ def match(_, _), do: false
+
+ @impl true
+ def post_match(_, _, _, _), do: false
+
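+  # Fetch repository metadata from the GitHub REST API (unauthenticated request).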
+ @impl true
+ def expand(_uri, %{user: user, repo: repo}, _opts) do
+ case HTTPoison.get("https://api.github.com/repos/#{user}/#{repo}") do
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
+ {:ok, json} = Jason.decode(body)
+ src = json["source"]["full_name"]
+ disabled = if(json["disabled"], do: " (disabled)", else: "")
+ archived = if(json["archived"], do: " (archived)", else: "")
+ fork = if src && src != json["full_name"] do
+ " (⑂ #{json["source"]["full_name"]})"
+ else
+ ""
+ end
+ start = "#{json["full_name"]}#{disabled}#{archived}#{fork} - #{json["description"]}"
+        tags = for(t <- json["topics"] || [], do: "##{t}") |> Enum.join(", ")
+ lang = if(json["language"], do: "#{json["language"]} - ", else: "")
+ issues = if(json["open_issues_count"], do: "#{json["open_issues_count"]} issues - ", else: "")
+ last_push = if at = json["pushed_at"] do
+ {:ok, date, _} = DateTime.from_iso8601(at)
+ " - last pushed #{DateTime.to_string(date)}"
+ else
+ ""
+ end
+ network = "#{lang}#{issues}#{json["stargazers_count"]} stars - #{json["subscribers_count"]} watchers - #{json["forks_count"]} forks#{last_push}"
+ {:ok, [start, tags, network]}
+      _ ->
+ :error
+ end
+ end
+
+end
diff --git a/lib/nola_plugins/link_plugin/html.ex b/lib/nola_plugins/link_plugin/html.ex
new file mode 100644
index 0000000..56a8ceb
--- /dev/null
+++ b/lib/nola_plugins/link_plugin/html.ex
@@ -0,0 +1,106 @@
+defmodule Nola.IRC.LinkPlugin.HTML do
+ @behaviour Nola.IRC.LinkPlugin
+
+ @impl true
+ def match(_, _), do: false
+
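+  # HTML is handled after the HTTP fetch: post_match/4 accepts any text/html response and requests its body for post_expand/4.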
+ @impl true
+ def post_match(_url, "text/html"<>_, _header, _opts) do
+ {:body, nil}
+ end
+ def post_match(_, _, _, _), do: false
+
+ @impl true
+ def post_expand(url, body, _params, _opts) do
+    {:ok, html} = Floki.parse_document(body)
+ title = collect_title(html)
+ opengraph = collect_open_graph(html)
+ itemprops = collect_itemprops(html)
+ text = if Map.has_key?(opengraph, "title") && Map.has_key?(opengraph, "description") do
+ paywall? = if Map.get(opengraph, "article:content_tier", Map.get(itemprops, "article:content_tier", "free")) == "free" do
+ ""
+ else
+ "[paywall] "
+ end
+ section = if section = Map.get(opengraph, "article:section", Map.get(itemprops, "article:section", nil)) do
+ ": #{section}"
+ else
+ ""
+ end
+ date = case DateTime.from_iso8601(Map.get(opengraph, "article:published_time", Map.get(itemprops, "article:published_time", ""))) do
+ {:ok, date, _} ->
+ "#{Timex.format!(date, "%d/%m/%y", :strftime)}. "
+ _ ->
+ ""
+ end
+ uri = URI.parse(url)
+
+ prefix = "#{paywall?}#{Map.get(opengraph, "site_name", uri.host)}#{section}"
+ prefix = unless prefix == "" do
+ "#{prefix} — "
+ else
+ ""
+ end
+ [clean_text("#{prefix}#{Map.get(opengraph, "title")}")] ++ IRC.splitlong(clean_text("#{date}#{Map.get(opengraph, "description")}"))
+ else
+      clean_text(title || "")
+ end
+ {:ok, text}
+ end
+
+ defp collect_title(html) do
+ case Floki.find(html, "title") do
+ [{"title", [], [title]} | _] ->
+ String.trim(title)
+ _ ->
+ nil
+ end
+ end
+
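+  # Collect Open Graph (og:*) and article:* <meta> properties into a map.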
+ defp collect_open_graph(html) do
+ Enum.reduce(Floki.find(html, "head meta"), %{}, fn(tag, acc) ->
+ case tag do
+ {"meta", values, []} ->
+ name = List.keyfind(values, "property", 0, {nil, nil}) |> elem(1)
+ content = List.keyfind(values, "content", 0, {nil, nil}) |> elem(1)
+ case name do
+ "og:" <> key ->
+ Map.put(acc, key, content)
+ "article:"<>_ ->
+ Map.put(acc, name, content)
+ _other -> acc
+ end
+ _other -> acc
+ end
+ end)
+ end
+
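+  # Collect article:* itemprop <meta> tags, used as a fallback when Open Graph data is missing.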
+ defp collect_itemprops(html) do
+ Enum.reduce(Floki.find(html, "[itemprop]"), %{}, fn(tag, acc) ->
+ case tag do
+ {"meta", values, []} ->
+ name = List.keyfind(values, "itemprop", 0, {nil, nil}) |> elem(1)
+ content = List.keyfind(values, "content", 0, {nil, nil}) |> elem(1)
+ case name do
+ "article:" <> key ->
+ Map.put(acc, name, content)
+ _other -> acc
+ end
+ _other -> acc
+ end
+ end)
+ end
+
+ defp clean_text(text) do
+ text
+ |> String.replace("\n", " ")
+ |> HtmlEntities.decode()
+ end
+
+
+end
diff --git a/lib/nola_plugins/link_plugin/imgur.ex b/lib/nola_plugins/link_plugin/imgur.ex
new file mode 100644
index 0000000..5d74956
--- /dev/null
+++ b/lib/nola_plugins/link_plugin/imgur.ex
@@ -0,0 +1,96 @@
+defmodule Nola.IRC.LinkPlugin.Imgur do
+ @behaviour Nola.IRC.LinkPlugin
+
+ @moduledoc """
+ # Imgur link preview
+
+ No options.
+
+  Needs an Imgur API key configured:
+
+ ```
+ config :nola, :imgur,
+ client_id: "xxxxxxxx",
+ client_secret: "xxxxxxxxxxxxxxxxxxxx"
+ ```
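+
+  For reference, `match/2` only extracts the album or image id from the URL (made-up id below):
+
+  ```
+  iex> Nola.IRC.LinkPlugin.Imgur.match(URI.parse("https://imgur.com/a/x1y2z"), [])
+  {true, %{album_id: "x1y2z"}}
+  ```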
+ """
+
+ @impl true
+ def match(uri = %URI{host: "imgur.io"}, arg) do
+ match(%URI{uri | host: "imgur.com"}, arg)
+ end
+ def match(uri = %URI{host: "i.imgur.io"}, arg) do
+ match(%URI{uri | host: "i.imgur.com"}, arg)
+ end
+  def match(%URI{host: "imgur.com", path: "/a/" <> album_id}, _) do
+    {true, %{album_id: album_id}}
+  end
+  def match(%URI{host: "imgur.com", path: "/gallery/" <> album_id}, _) do
+    {true, %{album_id: album_id}}
+  end
+  def match(%URI{host: "i.imgur.com", path: "/" <> image}, _) do
+    [hash | _] = String.split(image, ".", parts: 2)
+    {true, %{image_id: hash}}
+  end
+ def match(_, _), do: false
+
+ @impl true
+ def post_match(_, _, _, _), do: false
+
+  @impl true
+  def expand(_uri, %{album_id: album_id}, opts) do
+ expand_imgur_album(album_id, opts)
+ end
+
+ def expand(_uri, %{image_id: image_id}, opts) do
+ expand_imgur_image(image_id, opts)
+ end
+
+  def expand_imgur_image(image_id, _opts) do
+ client_id = Keyword.get(Application.get_env(:nola, :imgur, []), :client_id, "42")
+ headers = [{"Authorization", "Client-ID #{client_id}"}]
+ options = []
+ case HTTPoison.get("https://api.imgur.com/3/image/#{image_id}", headers, options) do
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
+ {:ok, json} = Jason.decode(body)
+ data = json["data"]
+        title = String.slice(data["title"] || data["description"] || "", 0, 180)
+ nsfw = if data["nsfw"], do: "(NSFW) - ", else: " "
+ height = Map.get(data, "height")
+ width = Map.get(data, "width")
+ size = Map.get(data, "size")
+ {:ok, "image, #{width}x#{height}, #{size} bytes #{nsfw}#{title}"}
+      _ ->
+ :error
+ end
+ end
+
+  def expand_imgur_album(album_id, _opts) do
+ client_id = Keyword.get(Application.get_env(:nola, :imgur, []), :client_id, "42")
+ headers = [{"Authorization", "Client-ID #{client_id}"}]
+ options = []
+ case HTTPoison.get("https://api.imgur.com/3/album/#{album_id}", headers, options) do
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
+ {:ok, json} = Jason.decode(body)
+ data = json["data"]
+ title = data["title"]
+ nsfw = data["nsfw"]
+ nsfw = if nsfw, do: "(NSFW) - ", else: ""
+ if data["images_count"] == 1 do
+ [image] = data["images"]
+ title = if title || data["title"] do
+ title = [title, data["title"]] |> Enum.filter(fn(x) -> x end) |> Enum.uniq() |> Enum.join(" — ")
+ "#{title} — "
+ else
+ ""
+ end
+ {:ok, "#{nsfw}#{title}#{image["link"]}"}
+ else
+ title = if title, do: title, else: "Untitled album"
+ {:ok, "#{nsfw}#{title} - #{data["images_count"]} images"}
+ end
+      _ ->
+ :error
+ end
+ end
+
+end
diff --git a/lib/nola_plugins/link_plugin/pdf.ex b/lib/nola_plugins/link_plugin/pdf.ex
new file mode 100644
index 0000000..5f72ef5
--- /dev/null
+++ b/lib/nola_plugins/link_plugin/pdf.ex
@@ -0,0 +1,39 @@
+defmodule Nola.IRC.LinkPlugin.PDF do
+ require Logger
+ @behaviour Nola.IRC.LinkPlugin
+
+ @impl true
+ def match(_, _), do: false
+
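+  # PDF responses are matched by content-type; post_expand/4 shells out to the external pdftitle tool (pip3 install pdftitle).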
+ @impl true
+ def post_match(_url, "application/pdf"<>_, _header, _opts) do
+ {:file, nil}
+ end
+
+ def post_match(_, _, _, _), do: false
+
+ @impl true
+ def post_expand(url, file, _, _) do
+ case System.cmd("pdftitle", ["-p", file]) do
+ {text, 0} ->
+ text = text
+ |> String.trim()
+
+ if text == "" do
+ :error
+ else
+ basename = Path.basename(url, ".pdf")
+          text = String.split("[#{basename}] " <> text, "\n")
+ {:ok, text}
+ end
+ {_, 127} ->
+ Logger.error("dependency `pdftitle` is missing, please install it: `pip3 install pdftitle`.")
+ :error
+ {error, code} ->
+ Logger.warn("command `pdftitle` exited with status code #{code}:\n#{inspect error}")
+ :error
+ end
+ end
+
+end
diff --git a/lib/nola_plugins/link_plugin/redacted.ex b/lib/nola_plugins/link_plugin/redacted.ex
new file mode 100644
index 0000000..7a6229d
--- /dev/null
+++ b/lib/nola_plugins/link_plugin/redacted.ex
@@ -0,0 +1,18 @@
+defmodule Nola.IRC.LinkPlugin.Redacted do
+ @behaviour Nola.IRC.LinkPlugin
+
+ @impl true
+  def match(%URI{host: "redacted.ch", path: "/torrent.php", query: query = "id=" <> _}, _opts) do
+    %{"id" => id} = URI.decode_query(query)
+ {true, %{torrent: id}}
+ end
+
+ def match(_, _), do: false
+
+ @impl true
+ def post_match(_, _, _, _), do: false
+
+  def expand(_uri, %{torrent: _id}, _opts) do
+    # Expansion for redacted.ch torrent links is not implemented yet.
+    :error
+  end
+
+end
diff --git a/lib/nola_plugins/link_plugin/reddit.ex b/lib/nola_plugins/link_plugin/reddit.ex
new file mode 100644
index 0000000..79102e0
--- /dev/null
+++ b/lib/nola_plugins/link_plugin/reddit.ex
@@ -0,0 +1,119 @@
+defmodule Nola.IRC.LinkPlugin.Reddit do
+ @behaviour Nola.IRC.LinkPlugin
+
+ @impl true
+  def match(%URI{host: "reddit.com", path: path}, _) do
+ case String.split(path, "/") do
+ ["", "r", sub, "comments", post_id, _slug] ->
+ {true, %{mode: :post, path: path, sub: sub, post_id: post_id}}
+ ["", "r", sub, "comments", post_id, _slug, ""] ->
+ {true, %{mode: :post, path: path, sub: sub, post_id: post_id}}
+ ["", "r", sub, ""] ->
+ {true, %{mode: :sub, path: path, sub: sub}}
+ ["", "r", sub] ->
+ {true, %{mode: :sub, path: path, sub: sub}}
+# ["", "u", user] ->
+# {true, %{mode: :user, path: path, user: user}}
+ _ ->
+ false
+ end
+ end
+
+  def match(uri = %URI{host: host}, opts) do
+ if String.ends_with?(host, ".reddit.com") do
+ match(%URI{uri | host: "reddit.com"}, opts)
+ else
+ false
+ end
+ end
+
+ @impl true
+ def post_match(_, _, _, _), do: false
+
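+  # Subreddit links: build a one-line summary from the /r/<sub>/about JSON endpoint.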
+ @impl true
+ def expand(_, %{mode: :sub, sub: sub}, _opts) do
+ url = "https://api.reddit.com/r/#{sub}/about"
+ case HTTPoison.get(url) do
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
+        sr =
+          body
+          |> Jason.decode!()
+          |> Map.get("data")
+
+        description =
+          (Map.get(sr, "public_description") || Map.get(sr, "description", ""))
+          |> String.split("\n")
+          |> List.first()
+ name = if title = Map.get(sr, "title") do
+ Map.get(sr, "display_name_prefixed") <> ": " <> title
+ else
+ Map.get(sr, "display_name_prefixed")
+ end
+ nsfw = if Map.get(sr, "over18") do
+ "[NSFW] "
+ else
+ ""
+ end
+ quarantine = if Map.get(sr, "quarantine") do
+ "[Quarantined] "
+ else
+ ""
+ end
+ count = "#{Map.get(sr, "subscribers")} subscribers, #{Map.get(sr, "active_user_count")} active"
+ preview = "#{quarantine}#{nsfw}#{name} — #{description} (#{count})"
+ {:ok, preview}
+ _ ->
+ :error
+ end
+ end
+
+  def expand(_uri, %{mode: :post, path: path}, _opts) do
+ case HTTPoison.get("https://api.reddit.com#{path}?sr_detail=true") do
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
+ json = Jason.decode!(body)
+        op =
+          json
+          |> List.first()
+          |> Map.get("data")
+          |> Map.get("children")
+          |> List.first()
+          |> Map.get("data")
+ sr = get_in(op, ["sr_detail", "display_name_prefixed"])
+ {self?, url} = if Map.get(op, "selftext") == "" do
+ {false, Map.get(op, "url")}
+ else
+ {true, nil}
+ end
+
+ self_str = if(self?, do: "text", else: url)
+ up = Map.get(op, "ups")
+ down = Map.get(op, "downs")
+ comments = Map.get(op, "num_comments")
+ nsfw = if Map.get(op, "over_18") do
+ "[NSFW] "
+ else
+ ""
+ end
+ state = cond do
+ Map.get(op, "hidden") -> "hidden"
+ Map.get(op, "archived") -> "archived"
+ Map.get(op, "locked") -> "locked"
+ Map.get(op, "quarantine") -> "quarantined"
+ Map.get(op, "removed_by") || Map.get(op, "removed_by_category") -> "removed"
+ Map.get(op, "banned_by") -> "banned"
+ Map.get(op, "pinned") -> "pinned"
+ Map.get(op, "stickied") -> "stickied"
+ true -> nil
+ end
+ flair = if flair = Map.get(op, "link_flair_text") do
+ "[#{flair}] "
+ else
+ ""
+ end
+ title = "#{nsfw}#{sr}: #{flair}#{Map.get(op, "title")}"
+ state_str = if(state, do: "#{state}, ")
+ content = "by u/#{Map.get(op, "author")} - #{state_str}#{up} up, #{down} down, #{comments} comments - #{self_str}"
+
+ {:ok, [title, content]}
+      _ ->
+ :error
+ end
+ end
+
+end
diff --git a/lib/nola_plugins/link_plugin/twitter.ex b/lib/nola_plugins/link_plugin/twitter.ex
new file mode 100644
index 0000000..640b193
--- /dev/null
+++ b/lib/nola_plugins/link_plugin/twitter.ex
@@ -0,0 +1,158 @@
+defmodule Nola.IRC.LinkPlugin.Twitter do
+ @behaviour Nola.IRC.LinkPlugin
+
+ @moduledoc """
+ # Twitter Link Preview
+
+ Configuration:
+
+  Needs Twitter API credentials (consumer key/secret and access token):
+
+ ```
+ config :extwitter, :oauth, [
+ consumer_key: "zzzzz",
+ consumer_secret: "xxxxxxx",
+ access_token: "yyyyyy",
+ access_token_secret: "ssshhhhhh"
+ ]
+ ```
+
+ options:
+
+ * `expand_quoted`: Add the quoted tweet instead of its URL. Default: true.
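+  * `nitter`: optional Nitter hostname; when set, links to tweets are built against it instead of twitter.com.
+
+  For reference, `match/2` only extracts the numeric status id (made-up id below):
+
+  ```
+  iex> Nola.IRC.LinkPlugin.Twitter.match(URI.parse("https://twitter.com/user/status/123456"), [])
+  {true, %{status_id: 123456}}
+  ```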
+ """
+
+  @impl true
+  def match(%URI{host: twitter, path: path}, _opts) when twitter in ["twitter.com", "m.twitter.com", "mobile.twitter.com"] do
+ case String.split(path, "/", parts: 4) do
+ ["", _username, "status", status_id] ->
+ {status_id, _} = Integer.parse(status_id)
+ {true, %{status_id: status_id}}
+ _ -> false
+ end
+ end
+
+ def match(_, _), do: false
+
+ @impl true
+ def post_match(_, _, _, _), do: false
+
+  @impl true
+  def expand(_uri, %{status_id: status_id}, opts) do
+ expand_tweet(ExTwitter.show(status_id, tweet_mode: "extended"), opts)
+ end
+
+ defp expand_tweet(nil, _opts) do
+ :error
+ end
+
+ defp link_tweet(tweet_or_screen_id_tuple, opts, force_twitter_com \\ false)
+
+ defp link_tweet({screen_name, id}, opts, force_twitter_com) do
+ path = "/#{screen_name}/status/#{id}"
+ nitter = Keyword.get(opts, :nitter)
+ host = if !force_twitter_com && nitter, do: nitter, else: "twitter.com"
+    "https://#{host}#{path}"
+ end
+
+ defp link_tweet(tweet, opts, force_twitter_com) do
+ link_tweet({tweet.user.screen_name, tweet.id}, opts, force_twitter_com)
+ end
+
+ defp expand_tweet(tweet, opts) do
+ head = format_tweet_header(tweet, opts)
+
+ # Format tweet text
+ text = expand_twitter_text(tweet, opts)
+ text = if tweet.quoted_status do
+ quote_url = link_tweet(tweet.quoted_status, opts, true)
+ String.replace(text, quote_url, "")
+ else
+ text
+ end
+ text = IRC.splitlong(text)
+
+ reply_to = if tweet.in_reply_to_status_id do
+ reply_url = link_tweet({tweet.in_reply_to_screen_name, tweet.in_reply_to_status_id}, opts)
+      label = if tweet.in_reply_to_screen_name == tweet.user.screen_name, do: "continued from", else: "replying to"
+      <<3, 15, " ↪ ", label::binary, " ", reply_url::binary, 3>>
+ end
+
+ quoted = if tweet.quoted_status do
+ full_text = tweet.quoted_status
+ |> expand_twitter_text(opts)
+ |> IRC.splitlong_with_prefix(">")
+
+ head = format_tweet_header(tweet.quoted_status, opts, details: false, prefix: "↓ quoting")
+
+ [head | full_text]
+ else
+ []
+ end
+
+
+ text = [head, reply_to | text] ++ quoted
+ |> Enum.filter(& &1)
+ {:ok, text}
+ end
+
+ defp expand_twitter_text(tweet, _opts) do
+ text = Enum.reduce(tweet.entities.urls, tweet.full_text, fn(entity, text) ->
+ String.replace(text, entity.url, entity.expanded_url)
+ end)
+ extended = tweet.extended_entities || %{media: []}
+    Enum.reduce(extended.media, text, fn(entity, text) ->
+ url = Enum.filter(extended.media, fn(e) -> entity.url == e.url end)
+ |> Enum.map(fn(e) ->
+ cond do
+ e.type == "video" -> e.expanded_url
+ true -> e.media_url_https
+ end
+ end)
+ |> Enum.join(" ")
+ String.replace(text, entity.url, url)
+ end)
+ |> HtmlEntities.decode()
+ end
+
+ defp format_tweet_header(tweet, opts, format_opts \\ []) do
+ prefix = Keyword.get(format_opts, :prefix, nil)
+ details = Keyword.get(format_opts, :details, true)
+
+ padded_prefix = if prefix, do: "#{prefix} ", else: ""
+ author = <<padded_prefix::binary, 2, "#{tweet.user.name} (@#{tweet.user.screen_name})", 2>>
+
+ link = link_tweet(tweet, opts)
+
+ {:ok, at} = Timex.parse(tweet.created_at, "%a %b %e %H:%M:%S %z %Y", :strftime)
+ {:ok, formatted_time} = Timex.format(at, "{relative}", :relative)
+
+ nsfw = if tweet.possibly_sensitive, do: <<3, 52, "NSFW", 3>>
+
+ rts = if tweet.retweet_count && tweet.retweet_count > 0, do: "#{tweet.retweet_count} RT"
+ likes = if tweet.favorite_count && tweet.favorite_count > 0, do: "#{tweet.favorite_count} ❤︎"
+ qrts = if tweet.quote_count && tweet.quote_count > 0, do: "#{tweet.quote_count} QRT"
+ replies = if tweet.reply_count && tweet.reply_count > 0, do: "#{tweet.reply_count} Reps"
+
+ dmcad = if tweet.withheld_copyright, do: <<3, 52, "DMCA", 3>>
+ withheld_local = if tweet.withheld_in_countries && length(tweet.withheld_in_countries) > 0 do
+ "Withheld in #{length(tweet.withheld_in_countries)} countries"
+ end
+
+ verified = if tweet.user.verified, do: <<3, 51, "✔", 3>>
+
+ meta = if details do
+ [verified, nsfw, formatted_time, dmcad, withheld_local, rts, qrts, likes, replies]
+ else
+ [verified, nsfw, formatted_time, dmcad, withheld_local]
+ end
+
+ meta = meta
+ |> Enum.filter(& &1)
+ |> Enum.join(" - ")
+
+ meta = <<3, 15, meta::binary, " → #{link}", 3>>
+
+ <<author::binary, " — ", meta::binary>>
+ end
+
+end
diff --git a/lib/nola_plugins/link_plugin/youtube.ex b/lib/nola_plugins/link_plugin/youtube.ex
new file mode 100644
index 0000000..f7c7541
--- /dev/null
+++ b/lib/nola_plugins/link_plugin/youtube.ex
@@ -0,0 +1,72 @@
+defmodule Nola.IRC.LinkPlugin.YouTube do
+ @behaviour Nola.IRC.LinkPlugin
+
+ @moduledoc """
+ # YouTube link preview
+
+  Needs a YouTube Data API key:
+
+ ```
+ config :nola, :youtube,
+ api_key: "xxxxxxxxxxxxx"
+ ```
+
+ options:
+
+  * `invidious`: hostname of an Invidious instance; when set, an alternative watch link is added to the preview.
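+
+  For reference, `match/2` only extracts the video id (example URL below):
+
+  ```
+  iex> Nola.IRC.LinkPlugin.YouTube.match(URI.parse("https://www.youtube.com/watch?v=dQw4w9WgXcQ"), [])
+  {true, %{video_id: "dQw4w9WgXcQ"}}
+  ```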
+ """
+
+ @impl true
+  def match(%URI{host: yt, path: "/watch", query: "v=" <> video_id}, _opts) when yt in ["youtube.com", "www.youtube.com"] do
+ {true, %{video_id: video_id}}
+ end
+
+ def match(%URI{host: "youtu.be", path: "/"<>video_id}, _opts) do
+ {true, %{video_id: video_id}}
+ end
+
+ def match(_, _), do: false
+
+ @impl true
+ def post_match(_, _, _, _), do: false
+
+ @impl true
+  def expand(_uri, %{video_id: video_id}, opts) do
+ key = Application.get_env(:nola, :youtube)[:api_key]
+ params = %{
+ "part" => "snippet,contentDetails,statistics",
+ "id" => video_id,
+ "key" => key
+ }
+ headers = []
+ options = [params: params]
+    case HTTPoison.get("https://www.googleapis.com/youtube/v3/videos", headers, options) do
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
+ case Jason.decode(body) do
+ {:ok, json} ->
+ item = List.first(json["items"])
+ if item do
+ snippet = item["snippet"]
+ duration = item["contentDetails"]["duration"] |> String.replace("PT", "") |> String.downcase
+ date = snippet["publishedAt"]
+ |> DateTime.from_iso8601()
+ |> elem(1)
+ |> Timex.format("{relative}", :relative)
+ |> elem(1)
+
+ line = if host = Keyword.get(opts, :invidious) do
+ ["-> https://#{host}/watch?v=#{video_id}"]
+ else
+ []
+ end
+ {:ok, line ++ ["#{snippet["title"]}", "— #{duration} — uploaded by #{snippet["channelTitle"]} — #{date}"
+ <> " — #{item["statistics"]["viewCount"]} views, #{item["statistics"]["likeCount"]} likes"]}
+ else
+ :error
+ end
+ _ -> :error
+ end
+ end
+ end
+
+end