summaryrefslogtreecommitdiff
path: root/lib/lsg_irc/link_plugin.ex
diff options
context:
space:
mode:
author	href <href@random.sh>	2020-04-17 15:53:14 +0200
committer	href <href@random.sh>	2020-04-17 15:53:14 +0200
commit	919725a6941830ce82c835ed3288c1722ddd8c9f (patch)
tree	49a95b0ce716a24c7e056036d3353ceca1debe4a /lib/lsg_irc/link_plugin.ex
parent	welp (diff)
bleh
Diffstat (limited to 'lib/lsg_irc/link_plugin.ex')
-rw-r--r--	lib/lsg_irc/link_plugin.ex	97
1 file changed, 70 insertions(+), 27 deletions(-)
diff --git a/lib/lsg_irc/link_plugin.ex b/lib/lsg_irc/link_plugin.ex
index 61bdbf9..bc9764a 100644
--- a/lib/lsg_irc/link_plugin.ex
+++ b/lib/lsg_irc/link_plugin.ex
@@ -127,40 +127,83 @@ defmodule LSG.IRC.LinkPlugin do
expand_default(acc)
end
+ defp get(url, headers \\ [], options \\ []) do
+ get_req(:hackney.get(url, headers, <<>>, options))
+ end
+
+ defp get_req({:error, reason}) do
+ {:error, reason}
+ end
+
+ defp get_req({:ok, 200, headers, client}) do
+ headers = Enum.reduce(headers, %{}, fn({key, value}, acc) ->
+ Map.put(acc, String.downcase(key), value)
+ end)
+ content_type = Map.get(headers, "content-type", "application/octect-stream")
+ length = Map.get(headers, "content-length", "0")
+ {length, _} = Integer.parse(length)
+
+ cond do
+ String.starts_with?(content_type, "text/html") && length <= 30_000_000 ->
+ get_body(30_000_000, client, <<>>)
+ true ->
+ :hackney.close(client)
+ {:ok, "file: #{content_type}, size: #{length} bytes"}
+ end
+ end
+
+ defp get_req({:ok, redirect, headers, client}) when redirect in 300..399 do
+ headers = Enum.reduce(headers, %{}, fn({key, value}, acc) ->
+ Map.put(acc, String.downcase(key), value)
+ end)
+ location = Map.get(headers, "location")
+
+ :hackney.close(client)
+ {:redirect, location}
+ end
+
+ defp get_req({:ok, status, headers, client}) do
+ :hackney.close(client)
+ {:error, status, headers}
+ end
+
+ defp get_body(len, client, acc) when len >= byte_size(acc) do
+ case :hackney.stream_body(client) do
+ {:ok, data} ->
+ get_body(len, client, << acc::binary, data::binary >>)
+ :done ->
+ html = Floki.parse(acc)
+ title = case Floki.find(html, "title") do
+ [{"title", [], [title]} | _] ->
+ String.trim(title)
+ _ ->
+ nil
+ end
+ {:ok, title}
+ {:error, reason} ->
+ {:ok, "failed to fetch body: #{inspect reason}"}
+ end
+ end
+
+ defp get_body(len, client, _acc) do
+ :hackney.close(client)
+ {:ok, "mais il rentrera jamais en ram ce fichier !"}
+ end
+
def expand_default(acc = [uri = %URI{scheme: scheme} | _]) when scheme in ["http", "https"] do
headers = []
options = [follow_redirect: false, max_body_length: 30_000_000]
- case HTTPoison.get(URI.to_string(uri), headers, options) do
- {:ok, %HTTPoison.Response{status_code: 200, headers: headers, body: body}} ->
- headers = Enum.reduce(headers, %{}, fn({key, value}, acc) ->
- Map.put(acc, String.downcase(key), value)
- end)
- text = case Map.get(headers, "content-type") do
- "text/html"<>_ ->
- html = Floki.parse(body)
- case Floki.find(html, "title") do
- [{"title", [], [title]} | _] ->
- title
- _ ->
- nil
- end
- other ->
- "file: #{other}, size: #{Map.get(headers, "content-length", "?")} bytes"
- end
+ case get(URI.to_string(uri), headers, options) do
+ {:ok, text} ->
{:ok, acc, text}
- {:ok, resp = %HTTPoison.Response{headers: headers, status_code: redirect, body: body}} when redirect in 300..399 ->
- headers = Enum.reduce(headers, %{}, fn({key, value}, acc) ->
- Map.put(acc, String.downcase(key), value)
- end)
- link = Map.get(headers, "location")
+ {:redirect, link} ->
new_uri = URI.parse(link)
+ new_uri = %URI{new_uri | scheme: scheme, authority: uri.authority, host: uri.host, port: uri.port}
expand_link([new_uri | acc])
- {:ok, %HTTPoison.Response{status_code: code}} ->
- {:ok, acc, "Error #{code}"}
- {:error, %HTTPoison.Error{reason: reason}} ->
+ {:error, status, _headers} ->
+ {:ok, acc, "Error #{status}"}
+ {:error, reason} ->
{:ok, acc, "Error #{to_string(reason)}"}
- {:error, error} ->
- {:ok, acc, "Error #{inspect(error)}"}
end
end