diff --git a/lib/linkify.ex b/lib/linkify.ex
index 5a5e720..d15d01c 100644
--- a/lib/linkify.ex
+++ b/lib/linkify.ex
@@ -1,51 +1,60 @@
defmodule Linkify do
@moduledoc """
Create url links from text containing urls.
Turns an input string like `"Check out google.com"` into
`Check out "<a href=\"http://google.com\">google.com</a>"`
## Examples
iex> Linkify.link("google.com")
~s(<a href="http://google.com">google.com</a>)
iex> Linkify.link("google.com", new_window: true, rel: "noopener noreferrer")
~s(<a href="http://google.com" target="_blank" rel="noopener noreferrer">google.com</a>)
iex> Linkify.link("google.com", class: "linkified")
~s(<a href="http://google.com" class="linkified">google.com</a>)
"""
import Linkify.Parser
@doc """
Finds links and turns them into HTML `<a>` tags.
Options:
* `class` - specify the class to be added to the generated link.
* `rel` - specify the rel attribute.
* `new_window` - set to `true` to add `target="_blank"` attribute
* `truncate` - Set to a number to truncate urls longer than the number. Truncated urls will end in `...`
* `strip_prefix` - Strip the scheme prefix (default: `false`)
* `exclude_class` - Set to a class name when you don't want urls auto linked in the html of the given class (default: `false`)
* `exclude_id` - Set to an element id when you don't want urls auto linked in the html of the given element (default: `false`)
* `email` - link email links (default: `false`)
* `mention` - link @mentions (when `true`, requires `mention_prefix` or `mention_handler` options to be set) (default: `false`)
* `mention_prefix` - a prefix to build a link for a mention (example: `https://example.com/user/`, default: `nil`)
* `mention_handler` - a custom handler to validate and format a mention (default: `nil`)
* `hashtag: false` - link #hashtags (when `true`, requires `hashtag_prefix` or `hashtag_handler` options to be set)
* `hashtag_prefix: nil` - a prefix to build a link for a hashtag (example: `https://example.com/tag/`)
* `hashtag_handler: nil` - a custom handler to validate and format a hashtag
* `extra: false` - link urls with rarely used schemes (magnet, ipfs, irc, etc.)
* `validate_tld: true` - Set to false to disable TLD validation for urls/emails, also can be set to :no_scheme to validate TLDs only for urls without a scheme (e.g `example.com` will be validated, but `http://example.loki` won't)
+ * `iodata` - Set to `true` to return iodata as a result, or `:safe` for iodata with linkified anchor tags wrapped in Phoenix.HTML `:safe` tuples (removes need for further sanitization)
"""
def link(text, opts \\ []) do
parse(text, opts)
end
+ def link_to_iodata(text, opts \\ []) do
+ parse(text, Keyword.merge(opts, iodata: true))
+ end
+
+ def link_safe(text, opts \\ []) do
+ parse(text, Keyword.merge(opts, iodata: :safe))
+ end
+
def link_map(text, acc, opts \\ []) do
parse({text, acc}, opts)
end
end
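
For reference, a minimal usage sketch of the new entry points (illustrative only; the exact output shapes are pinned down in test/linkify_test.exs below):

iex> Linkify.link_to_iodata("google.com")
[["<a ", "href=\"http://google.com\"", ">", "google.com", "</a>"]]
iex> IO.iodata_to_binary(Linkify.link_to_iodata("google.com"))
"<a href=\"http://google.com\">google.com</a>"
iex> Linkify.link_safe("google.com")
[[{:safe, ["<a ", "href=\"http://google.com\"", ">"]}, "google.com", {:safe, "</a>"}]]
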
diff --git a/lib/linkify/builder.ex b/lib/linkify/builder.ex
index 385f6b3..8edf7e8 100644
--- a/lib/linkify/builder.ex
+++ b/lib/linkify/builder.ex
@@ -1,145 +1,162 @@
defmodule Linkify.Builder do
@moduledoc """
Module for building the auto generated link.
"""
@doc """
Create a link.
"""
def create_link(text, opts) do
url = add_scheme(text)
[]
|> build_attrs(url, opts, :rel)
|> build_attrs(url, opts, :target)
|> build_attrs(url, opts, :class)
|> build_attrs(url, opts, :href)
|> format_url(text, opts)
end
defp build_attrs(attrs, uri, %{rel: get_rel}, :rel) when is_function(get_rel, 1) do
case get_rel.(uri) do
nil -> attrs
rel -> [{:rel, rel} | attrs]
end
end
defp build_attrs(attrs, _, opts, :rel) do
case Map.get(opts, :rel) do
rel when is_binary(rel) -> [{:rel, rel} | attrs]
_ -> attrs
end
end
defp build_attrs(attrs, _, opts, :target) do
if Map.get(opts, :new_window), do: [{:target, :_blank} | attrs], else: attrs
end
defp build_attrs(attrs, _, opts, :class) do
case Map.get(opts, :class) do
cls when is_binary(cls) -> [{:class, cls} | attrs]
_ -> attrs
end
end
defp build_attrs(attrs, url, _opts, :href) do
[{:href, url} | attrs]
end
defp add_scheme("http://" <> _ = url), do: url
defp add_scheme("https://" <> _ = url), do: url
defp add_scheme(url), do: "http://" <> url
defp format_url(attrs, url, opts) do
url =
url
|> strip_prefix(Map.get(opts, :strip_prefix, false))
|> truncate(Map.get(opts, :truncate, false))
- attrs = format_attrs(attrs)
- "<a #{attrs}>#{url}</a>"
+ attrs
+ |> format_attrs()
+ |> format_tag(url, opts)
end
defp format_attrs(attrs) do
attrs
|> Enum.map(fn {key, value} -> ~s(#{key}="#{value}") end)
|> Enum.join(" ")
end
defp truncate(url, false), do: url
defp truncate(url, len) when len < 3, do: url
defp truncate(url, len) do
if String.length(url) > len, do: String.slice(url, 0, len - 2) <> "...", else: url
end
defp strip_prefix(url, true) do
url
|> String.replace(~r/^https?:\/\//, "")
|> String.replace(~r/^www\./, "")
end
defp strip_prefix(url, _), do: url
def create_mention_link("@" <> name, _buffer, opts) do
mention_prefix = opts[:mention_prefix]
url = mention_prefix <> name
[]
|> build_attrs(url, opts, :rel)
|> build_attrs(url, opts, :target)
|> build_attrs(url, opts, :class)
|> build_attrs(url, opts, :href)
|> format_mention(name, opts)
end
def create_hashtag_link("#" <> tag, _buffer, opts) do
hashtag_prefix = opts[:hashtag_prefix]
url = hashtag_prefix <> tag
[]
|> build_attrs(url, opts, :rel)
|> build_attrs(url, opts, :target)
|> build_attrs(url, opts, :class)
|> build_attrs(url, opts, :href)
|> format_hashtag(tag, opts)
end
def create_email_link(email, opts) do
[]
|> build_attrs(email, opts, :class)
|> build_attrs("mailto:#{email}", opts, :href)
|> format_email(email, opts)
end
def create_extra_link(uri, opts) do
[]
|> build_attrs(uri, opts, :class)
|> build_attrs(uri, opts, :rel)
|> build_attrs(uri, opts, :target)
|> build_attrs(uri, opts, :href)
|> format_extra(uri, opts)
end
- def format_mention(attrs, name, _opts) do
- attrs = format_attrs(attrs)
- "<a #{attrs}>@#{name}</a>"
+ def format_mention(attrs, name, opts) do
+ attrs
+ |> format_attrs()
+ |> format_tag("@#{name}", opts)
+ end
+
+ def format_hashtag(attrs, tag, opts) do
+ attrs
+ |> format_attrs()
+ |> format_tag("##{tag}", opts)
+ end
+
+ def format_email(attrs, email, opts) do
+ attrs
+ |> format_attrs()
+ |> format_tag(email, opts)
+ end
+
+ def format_extra(attrs, uri, opts) do
+ attrs
+ |> format_attrs()
+ |> format_tag(uri, opts)
end
- def format_hashtag(attrs, tag, _opts) do
- attrs = format_attrs(attrs)
- "<a #{attrs}>##{tag}</a>"
+ def format_tag(attrs, content, %{iodata: true}) do
+ ["<a ", attrs, ">", content, "</a>"]
end
- def format_email(attrs, email, _opts) do
- attrs = format_attrs(attrs)
- ~s(<a #{attrs}>#{email}</a>)
+ def format_tag(attrs, content, %{iodata: :safe}) do
+ [{:safe, ["<a ", attrs, ">"]}, content, {:safe, "</a>"}]
end
- def format_extra(attrs, uri, _opts) do
- attrs = format_attrs(attrs)
- ~s(<a #{attrs}>#{uri}</a>)
+ def format_tag(attrs, content, _opts) do
+ "<a #{attrs}>#{content}</a>"
end
end
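
For reference, the three format_tag/3 clauses produce these shapes (an illustrative sketch, assuming the attrs string has already been joined by format_attrs/1):

iex> attrs = "href=\"http://google.com\""
iex> Linkify.Builder.format_tag(attrs, "google.com", %{iodata: true})
["<a ", "href=\"http://google.com\"", ">", "google.com", "</a>"]
iex> Linkify.Builder.format_tag(attrs, "google.com", %{iodata: :safe})
[{:safe, ["<a ", "href=\"http://google.com\"", ">"]}, "google.com", {:safe, "</a>"}]
iex> Linkify.Builder.format_tag(attrs, "google.com", %{})
"<a href=\"http://google.com\">google.com</a>"
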
diff --git a/lib/linkify/parser.ex b/lib/linkify/parser.ex
index fe72668..9669844 100644
--- a/lib/linkify/parser.ex
+++ b/lib/linkify/parser.ex
@@ -1,335 +1,355 @@
defmodule Linkify.Parser do
@moduledoc """
Module to handle parsing the input string.
"""
alias Linkify.Builder
@invalid_url ~r/(\.\.+)|(^(\d+\.){1,2}\d+$)/
@match_url ~r{^(?:\W*)?(?<url>(?:https?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:\/?#[\]@!\$&'\(\)\*\+,;=.]+$)}u
@match_hostname ~r{^\W*(?<scheme>https?:\/\/)?(?:[^@\n]+\\w@)?(?<host>[^:#~\/\n?]+)}u
@match_ip ~r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"
# @user
# @user@example.com
@match_mention ~r"^@[a-zA-Z\d_-]+@[a-zA-Z0-9_-](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*|@[a-zA-Z\d_-]+"u
# https://www.w3.org/TR/html5/forms.html#valid-e-mail-address
@match_email ~r"^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$"u
@match_hashtag ~r/^(?<tag>\#[[:word:]_]*[[:alpha:]_·][[:word:]_·\p{M}]*)/u
@prefix_extra [
"magnet:?",
"dweb://",
"dat://",
"gopher://",
"ipfs://",
"ipns://",
"irc://",
"ircs://",
"irc6://",
"mumble://",
"ssb://"
]
@tlds "./priv/tlds.txt" |> File.read!() |> String.split("\n", trim: true) |> MapSet.new()
@default_opts %{
url: true,
validate_tld: true
}
@doc """
Parse the given string, identifying items to link.
Parses the string, replacing the matching urls with an html link.
## Examples
iex> Linkify.Parser.parse("Check out google.com")
~s{Check out <a href="http://google.com">google.com</a>}
"""
@types [:url, :email, :hashtag, :mention, :extra]
def parse(input, opts \\ %{})
def parse(input, opts) when is_binary(input), do: {input, %{}} |> parse(opts) |> elem(0)
def parse(input, list) when is_list(list), do: parse(input, Enum.into(list, %{}))
def parse(input, opts) do
opts = Map.merge(@default_opts, opts)
- opts_list = Map.to_list(opts)
-
- Enum.reduce(@types, input, fn
- type, input ->
- if {type, true} in opts_list do
- do_parse(input, opts, {"", "", :parsing}, type)
- else
- input
- end
- end)
+ acc = if opts[:iodata], do: [], else: ""
+ do_parse(input, opts, {"", acc, :parsing})
end
- defp do_parse({"", user_acc}, _opts, {"", acc, _}, _handler),
+ defp accumulate(acc, buffer) when is_list(acc),
+ do: [buffer | acc]
+
+ defp accumulate(acc, buffer) when is_binary(acc),
+ do: acc <> buffer
+
+ defp accumulate(acc, buffer, trailing) when is_list(acc),
+ do: [trailing, buffer | acc]
+
+ defp accumulate(acc, buffer, trailing) when is_binary(acc),
+ do: acc <> buffer <> trailing
+
+ defp do_parse({"", user_acc}, _opts, {"", acc, _}) when is_list(acc),
+ do: {Enum.reverse(acc), user_acc}
+
+ defp do_parse({"", user_acc}, _opts, {"", acc, _}) when is_binary(acc),
do: {acc, user_acc}
- defp do_parse({"@" <> text, user_acc}, opts, {buffer, acc, :skip}, type),
- do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "@", :skip}, type)
+ defp do_parse({"@" <> text, user_acc}, opts, {buffer, acc, :skip}),
+ do: do_parse({text, user_acc}, opts, {"", accumulate(acc, buffer, "@"), :skip})
- defp do_parse({"<a" <> text, user_acc}, opts, {buffer, acc, :parsing}, type),
- do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "<a", :skip}, type)
+ defp do_parse({"<a" <> text, user_acc}, opts, {buffer, acc, :parsing}),
+ do: do_parse({text, user_acc}, opts, {"", accumulate(acc, buffer, "<a"), :skip})
- defp do_parse({"<pre" <> text, user_acc}, opts, {buffer, acc, :parsing}, type),
- do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "<pre", :skip}, type)
+ defp do_parse({"<pre" <> text, user_acc}, opts, {buffer, acc, :parsing}),
+ do: do_parse({text, user_acc}, opts, {"", accumulate(acc, buffer, "<pre"), :skip})
- defp do_parse({"<code" <> text, user_acc}, opts, {buffer, acc, :parsing}, type),
- do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "<code", :skip}, type)
+ defp do_parse({"<code" <> text, user_acc}, opts, {buffer, acc, :parsing}),
+ do: do_parse({text, user_acc}, opts, {"", accumulate(acc, buffer, "<code"), :skip})
- defp do_parse({"</a>" <> text, user_acc}, opts, {buffer, acc, :skip}, type),
- do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "</a>", :parsing}, type)
+ defp do_parse({"</a>" <> text, user_acc}, opts, {buffer, acc, :skip}),
+ do: do_parse({text, user_acc}, opts, {"", accumulate(acc, buffer, "</a>"), :parsing})
- defp do_parse({"</pre>" <> text, user_acc}, opts, {buffer, acc, :skip}, type),
- do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "</pre>", :parsing}, type)
+ defp do_parse({"</pre>" <> text, user_acc}, opts, {buffer, acc, :skip}),
+ do: do_parse({text, user_acc}, opts, {"", accumulate(acc, buffer, "</pre>"), :parsing})
- defp do_parse({"</code>" <> text, user_acc}, opts, {buffer, acc, :skip}, type),
- do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "</code>", :parsing}, type)
+ defp do_parse({"</code>" <> text, user_acc}, opts, {buffer, acc, :skip}),
+ do: do_parse({text, user_acc}, opts, {"", accumulate(acc, buffer, "</code>"), :parsing})
- defp do_parse({"<" <> text, user_acc}, opts, {"", acc, :parsing}, type),
- do: do_parse({text, user_acc}, opts, {"<", acc, {:open, 1}}, type)
+ defp do_parse({"<" <> text, user_acc}, opts, {"", acc, :parsing}),
+ do: do_parse({text, user_acc}, opts, {"<", acc, {:open, 1}})
- defp do_parse({"<" <> text, user_acc}, opts, {"", acc, {:html, level}}, type) do
- do_parse({text, user_acc}, opts, {"<", acc, {:open, level + 1}}, type)
+ defp do_parse({"<" <> text, user_acc}, opts, {"", acc, {:html, level}}) do
+ do_parse({text, user_acc}, opts, {"<", acc, {:open, level + 1}})
end
- defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:attrs, level}}, type),
+ defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:attrs, level}}),
do:
do_parse(
{text, user_acc},
opts,
- {"", acc <> buffer <> ">", {:html, level}},
- type
+ {"", accumulate(acc, buffer, ">"), {:html, level}}
)
- defp do_parse({<<ch::8>> <> text, user_acc}, opts, {"", acc, {:attrs, level}}, type) do
- do_parse({text, user_acc}, opts, {"", acc <> <<ch::8>>, {:attrs, level}}, type)
+ defp do_parse({<<ch::8>> <> text, user_acc}, opts, {"", acc, {:attrs, level}}) do
+ do_parse({text, user_acc}, opts, {"", accumulate(acc, <<ch::8>>), {:attrs, level}})
end
- defp do_parse({"</" <> text, user_acc}, opts, {buffer, acc, {:html, level}}, type) do
- {buffer, user_acc} = link(type, buffer, opts, user_acc)
+ defp do_parse({"</" <> text, user_acc}, opts, {buffer, acc, {:html, level}}) do
+ {buffer, user_acc} = link(buffer, opts, user_acc)
do_parse(
{text, user_acc},
opts,
- {"", acc <> buffer <> "</", {:close, level}},
- type
+ {"", accumulate(acc, buffer, "</"), {:close, level}}
)
end
- defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:close, 1}}, type),
- do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> ">", :parsing}, type)
+ defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:close, 1}}),
+ do: do_parse({text, user_acc}, opts, {"", accumulate(acc, buffer, ">"), :parsing})
- defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:close, level}}, type),
+ defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:close, level}}),
do:
do_parse(
{text, user_acc},
opts,
- {"", acc <> buffer <> ">", {:html, level - 1}},
- type
+ {"", accumulate(acc, buffer, ">"), {:html, level - 1}}
)
- defp do_parse({text, user_acc}, opts, {buffer, acc, {:open, level}}, type) do
- do_parse({text, user_acc}, opts, {"", acc <> buffer, {:attrs, level}}, type)
+ defp do_parse({text, user_acc}, opts, {buffer, acc, {:open, level}}) do
+ do_parse({text, user_acc}, opts, {"", acc <> buffer, {:attrs, level}})
end
defp do_parse(
{<<char::bytes-size(1), text::binary>>, user_acc},
opts,
- {buffer, acc, state},
- type
+ {buffer, acc, state}
)
when char in [" ", "\r", "\n"] do
- {buffer, user_acc} = link(type, buffer, opts, user_acc)
+ {buffer, user_acc} = link(buffer, opts, user_acc)
do_parse(
{text, user_acc},
opts,
- {"", acc <> buffer <> char, state},
- type
+ {"", accumulate(acc, buffer, char), state}
)
end
- defp do_parse({<<ch::8>>, user_acc}, opts, {buffer, acc, state}, type) do
- {buffer, user_acc} = link(type, buffer <> <<ch::8>>, opts, user_acc)
+ defp do_parse({<<ch::8>>, user_acc}, opts, {buffer, acc, state}) do
+ {buffer, user_acc} = link(buffer <> <<ch::8>>, opts, user_acc)
do_parse(
{"", user_acc},
opts,
- {"", acc <> buffer, state},
- type
+ {"", accumulate(acc, buffer), state}
)
end
- defp do_parse({<<ch::8>> <> text, user_acc}, opts, {buffer, acc, state}, type),
- do: do_parse({text, user_acc}, opts, {buffer <> <<ch::8>>, acc, state}, type)
+ defp do_parse({<<ch::8>> <> text, user_acc}, opts, {buffer, acc, state}),
+ do: do_parse({text, user_acc}, opts, {buffer <> <<ch::8>>, acc, state})
def check_and_link(:url, buffer, opts, _user_acc) do
str = strip_parens(buffer)
if url?(str, opts) do
case @match_url |> Regex.run(str, capture: [:url]) |> hd() do
- ^buffer -> link_url(buffer, opts)
- url -> String.replace(buffer, url, link_url(url, opts))
+ ^buffer ->
+ link_url(buffer, opts)
+
+ url ->
+ buffer
+ |> String.split(url)
+ |> Enum.intersperse(link_url(url, opts))
+ |> (if opts[:iodata], do: & &1, else: & Enum.join(&1)).()
end
else
- buffer
+ :nomatch
end
end
def check_and_link(:email, buffer, opts, _user_acc) do
- if email?(buffer, opts), do: link_email(buffer, opts), else: buffer
+ if email?(buffer, opts), do: link_email(buffer, opts), else: :nomatch
end
def check_and_link(:mention, buffer, opts, user_acc) do
buffer
|> match_mention
|> link_mention(buffer, opts, user_acc)
end
def check_and_link(:hashtag, buffer, opts, user_acc) do
buffer
|> match_hashtag
|> link_hashtag(buffer, opts, user_acc)
end
def check_and_link(:extra, "xmpp:" <> handle, opts, _user_acc) do
if email?(handle, opts), do: link_extra("xmpp:" <> handle, opts), else: handle
end
def check_and_link(:extra, buffer, opts, _user_acc) do
- if String.starts_with?(buffer, @prefix_extra), do: link_extra(buffer, opts), else: buffer
+ if String.starts_with?(buffer, @prefix_extra), do: link_extra(buffer, opts), else: :nomatch
end
defp strip_parens("(" <> buffer) do
~r/[^\)]*/ |> Regex.run(buffer) |> hd()
end
defp strip_parens(buffer), do: buffer
def url?(buffer, opts) do
valid_url?(buffer) && Regex.match?(@match_url, buffer) && valid_tld?(buffer, opts)
end
def email?(buffer, opts) do
valid_url?(buffer) && Regex.match?(@match_email, buffer) && valid_tld?(buffer, opts)
end
defp valid_url?(url), do: !Regex.match?(@invalid_url, url)
@doc """
Validates a URL's TLD. Returns a boolean.
Will return `true` if the `:validate_tld` option is set to `false`.
Will skip validation and return `true` if `:validate_tld` is set to `:no_scheme` and the url has a scheme.
"""
def valid_tld?(url, opts) do
[scheme, host] = Regex.run(@match_hostname, url, capture: [:scheme, :host])
cond do
opts[:validate_tld] == false ->
true
ip?(host) ->
true
# don't validate if scheme is present
opts[:validate_tld] == :no_scheme and scheme != "" ->
true
true ->
tld = host |> String.split(".") |> List.last()
MapSet.member?(@tlds, tld)
end
end
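# Illustrative behaviour of the clauses above (a sketch, not part of this diff;
# assumes "com" is listed in priv/tlds.txt and "loki" is not):
#
#   iex> Linkify.Parser.valid_tld?("http://example.loki", %{validate_tld: :no_scheme})
#   true
#   iex> Linkify.Parser.valid_tld?("example.loki", %{validate_tld: :no_scheme})
#   false
#   iex> Linkify.Parser.valid_tld?("example.com", %{validate_tld: true})
#   true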
def ip?(buffer), do: Regex.match?(@match_ip, buffer)
def match_mention(buffer) do
case Regex.run(@match_mention, buffer) do
[mention] -> mention
_ -> nil
end
end
def match_hashtag(buffer) do
case Regex.run(@match_hashtag, buffer, capture: [:tag]) do
[hashtag] -> hashtag
_ -> nil
end
end
- def link_hashtag(nil, buffer, _, _user_acc), do: buffer
+ def link_hashtag(nil, _buffer, _, _user_acc), do: :nomatch
def link_hashtag(hashtag, buffer, %{hashtag_handler: hashtag_handler} = opts, user_acc) do
hashtag
|> hashtag_handler.(buffer, opts, user_acc)
|> maybe_update_buffer(hashtag, buffer)
end
def link_hashtag(hashtag, buffer, opts, _user_acc) do
hashtag
|> Builder.create_hashtag_link(buffer, opts)
|> maybe_update_buffer(hashtag, buffer)
end
- def link_mention(nil, buffer, _, user_acc), do: {buffer, user_acc}
+ def link_mention(nil, _buffer, _, _user_acc), do: :nomatch
def link_mention(mention, buffer, %{mention_handler: mention_handler} = opts, user_acc) do
mention
|> mention_handler.(buffer, opts, user_acc)
|> maybe_update_buffer(mention, buffer)
end
def link_mention(mention, buffer, opts, _user_acc) do
mention
|> Builder.create_mention_link(buffer, opts)
|> maybe_update_buffer(mention, buffer)
end
defp maybe_update_buffer(out, match, buffer) when is_binary(out) do
maybe_update_buffer({out, nil}, match, buffer)
end
defp maybe_update_buffer({out, user_acc}, match, buffer)
when match != buffer and out != buffer do
out = String.replace(buffer, match, out)
{out, user_acc}
end
defp maybe_update_buffer(out, _match, _buffer), do: out
@doc false
def link_url(buffer, opts) do
Builder.create_link(buffer, opts)
end
@doc false
def link_email(buffer, opts) do
Builder.create_email_link(buffer, opts)
end
def link_extra(buffer, opts) do
Builder.create_extra_link(buffer, opts)
end
- defp link(type, buffer, opts, user_acc) do
+ defp link(buffer, opts, user_acc) do
+ opts_list = Map.to_list(opts)
+
+ Enum.reduce_while @types, {buffer, user_acc}, fn type, _ ->
+ if {type, true} in opts_list do
+ check_and_link_reducer(type, buffer, opts, user_acc)
+ else
+ {:cont, {buffer, user_acc}}
+ end
+ end
+ end
+
+ defp check_and_link_reducer(type, buffer, opts, user_acc) do
case check_and_link(type, buffer, opts, user_acc) do
- {buffer, user_acc} -> {buffer, user_acc}
- buffer -> {buffer, user_acc}
+ :nomatch -> {:cont, {buffer, user_acc}}
+ {buffer, user_acc} -> {:halt, {buffer, user_acc}}
+ buffer -> {:halt, {buffer, user_acc}}
end
end
end
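
With an iodata accumulator the parser prepends each fragment and reverses once at the end, so linkified segments stay nested rather than being copied into one growing binary. A rough sketch of the resulting shape (illustrative; compare the iodata tests below):

iex> Linkify.Parser.parse("hello google.com", %{iodata: true})
["hello", " ", ["<a ", "href=\"http://google.com\"", ">", "google.com", "</a>"]]
iex> IO.iodata_to_binary(["hello", " ", ["<a ", "href=\"http://google.com\"", ">", "google.com", "</a>"]])
"hello <a href=\"http://google.com\">google.com</a>"
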
diff --git a/test/linkify_test.exs b/test/linkify_test.exs
index 0128bc2..3216b38 100644
--- a/test/linkify_test.exs
+++ b/test/linkify_test.exs
@@ -1,445 +1,487 @@
defmodule LinkifyTest do
use ExUnit.Case, async: true
doctest Linkify
test "default link" do
assert Linkify.link("google.com") ==
"<a href=\"http://google.com\">google.com</a>"
end
+ test "default link iodata" do
+ assert Linkify.link_to_iodata("google.com") ==
+ [["<a ", "href=\"http://google.com\"", ">", "google.com", "</a>"]]
+ end
+
+ test "default link safe iodata" do
+ assert Linkify.link_safe("google.com") ==
+ [[{:safe, ["<a ", "href=\"http://google.com\"", ">"]}, "google.com", {:safe, "</a>"}]]
+ end
+
test "does on link existing links" do
text = ~s(<a href="http://google.com">google.com</a>)
assert Linkify.link(text) == text
end
test "all kinds of links" do
text = "hello google.com https://ddg.com user@email.com irc:///mIRC"
expected =
"hello <a href=\"http://google.com\">google.com</a> <a href=\"https://ddg.com\">https://ddg.com</a> <a href=\"mailto:user@email.com\">user@email.com</a> <a href=\"irc:///mIRC\">irc:///mIRC</a>"
assert Linkify.link(text,
email: true,
extra: true
) == expected
end
+ test "all kinds of links iodata" do
+ text = "hello google.com https://ddg.com user@email.com irc:///mIRC"
+
+ expected =
+ ["hello", " ", ["<a ", "href=\"http://google.com\"", ">", "google.com", "</a>"], " ", ["<a ", "href=\"https://ddg.com\"", ">", "https://ddg.com", "</a>"], " ", ["<a ", "href=\"mailto:user@email.com\"", ">", "user@email.com", "</a>"], " ", ["<a ", "href=\"irc:///mIRC\"", ">", "irc:///mIRC", "</a>"]]
+
+ assert Linkify.link_to_iodata(text,
+ email: true,
+ extra: true
+ ) == expected
+ end
+
test "class attribute" do
assert Linkify.link("google.com", class: "linkified") ==
"<a href=\"http://google.com\" class=\"linkified\">google.com</a>"
end
+ test "class attribute iodata" do
+ assert Linkify.link_to_iodata("google.com", class: "linkified") ==
+ [["<a ", "href=\"http://google.com\" class=\"linkified\"", ">", "google.com", "</a>"]]
+ end
+
test "rel attribute" do
assert Linkify.link("google.com", rel: "noopener noreferrer") ==
"<a href=\"http://google.com\" rel=\"noopener noreferrer\">google.com</a>"
end
+ test "rel attribute iodata" do
+ assert Linkify.link_to_iodata("google.com", rel: "noopener noreferrer") ==
+ [["<a ", "href=\"http://google.com\" rel=\"noopener noreferrer\"", ">", "google.com", "</a>"]]
+ end
+
test "rel as function" do
text = "google.com"
expected = "<a href=\"http://google.com\" rel=\"com\">google.com</a>"
custom_rel = fn url ->
url |> String.split(".") |> List.last()
end
assert Linkify.link(text, rel: custom_rel) == expected
text = "google.com"
expected = "<a href=\"http://google.com\">google.com</a>"
custom_rel = fn _ -> nil end
assert Linkify.link(text, rel: custom_rel) == expected
end
+ test "strip parens" do
+ assert Linkify.link("(google.com)") ==
+ "(<a href=\"http://google.com\">google.com</a>)"
+ end
+
+ test "strip parens iodata" do
+ assert Linkify.link_to_iodata("(google.com)") ==
+ [["(", ["<a ", "href=\"http://google.com\"", ">", "google.com", "</a>"], ")"]]
+ end
+
test "link_map/2" do
assert Linkify.link_map("google.com", []) ==
{"<a href=\"http://google.com\">google.com</a>", []}
end
describe "custom handlers" do
test "mentions handler" do
text = "hello @user, @valid_user and @invalid_user"
valid_users = ["user", "valid_user"]
handler = fn "@" <> user = mention, buffer, _opts, acc ->
if Enum.member?(valid_users, user) do
link = ~s(<a href="https://example.com/user/#{user}" data-user="#{user}">#{mention}</a>)
{link, %{acc | mentions: MapSet.put(acc.mentions, {mention, user})}}
else
{buffer, acc}
end
end
{result_text, %{mentions: mentions}} =
Linkify.link_map(text, %{mentions: MapSet.new()},
mention: true,
mention_handler: handler
)
assert result_text ==
"hello <a href=\"https://example.com/user/user\" data-user=\"user\">@user</a>, <a href=\"https://example.com/user/valid_user\" data-user=\"valid_user\">@valid_user</a> and @invalid_user"
assert mentions |> MapSet.to_list() |> Enum.map(&elem(&1, 1)) == valid_users
end
test "hashtags handler" do
text = "#hello #world"
handler = fn hashtag, buffer, opts, acc ->
link = Linkify.Builder.create_hashtag_link(hashtag, buffer, opts)
{link, %{acc | tags: MapSet.put(acc.tags, hashtag)}}
end
{result_text, %{tags: tags}} =
Linkify.link_map(text, %{tags: MapSet.new()},
hashtag: true,
hashtag_handler: handler,
hashtag_prefix: "https://example.com/user/",
rel: false
)
assert result_text ==
"<a href=\"https://example.com/user/hello\">#hello</a> <a href=\"https://example.com/user/world\">#world</a>"
assert MapSet.to_list(tags) == ["#hello", "#world"]
end
test "mention handler and hashtag prefix" do
text =
"Hello again, @user.&lt;script&gt;&lt;/script&gt;\nThis is on another :moominmamma: line. #2hu #epic #phantasmagoric"
handler = fn "@" <> user = mention, _, _, _ ->
~s(<span class="h-card"><a href="#/user/#{user}">@<span>#{mention}</span></a></span>)
end
expected =
~s(Hello again, <span class="h-card"><a href="#/user/user">@<span>@user</span></a></span>.&lt;script&gt;&lt;/script&gt;\nThis is on another :moominmamma: line. <a href="/tag/2hu" target="_blank">#2hu</a> <a href="/tag/epic" target="_blank">#epic</a> <a href="/tag/phantasmagoric" target="_blank">#phantasmagoric</a>)
assert Linkify.link(text,
mention: true,
mention_handler: handler,
hashtag: true,
hashtag_prefix: "/tag/",
new_window: true
) == expected
end
test "mentions handler with hostname/@user links" do
text =
"hi @user, take a look at this post: https://example.com/@valid_user/posts/9w5AkQp956XIh74apc"
valid_users = ["user", "valid_user"]
handler = fn "@" <> user = mention, buffer, _opts, acc ->
if Enum.member?(valid_users, user) do
link = ~s(<a href="https://example.com/user/#{user}" data-user="#{user}">#{mention}</a>)
{link, %{acc | mentions: MapSet.put(acc.mentions, {mention, user})}}
else
{buffer, acc}
end
end
{result_text, %{mentions: mentions}} =
Linkify.link_map(text, %{mentions: MapSet.new()},
mention: true,
mention_handler: handler,
new_window: true
)
assert result_text ==
"hi <a href=\"https://example.com/user/user\" data-user=\"user\">@user</a>, take a look at this post: <a href=\"https://example.com/@valid_user/posts/9w5AkQp956XIh74apc\" target=\"_blank\">https://example.com/@valid_user/posts/9w5AkQp956XIh74apc</a>"
assert mentions |> MapSet.to_list() |> Enum.map(&elem(&1, 1)) == ["user"]
end
end
describe "mentions" do
test "simple mentions" do
expected =
~s{hello <a href="https://example.com/user/user" target="_blank">@user</a> and <a href="https://example.com/user/anotherUser" target="_blank">@anotherUser</a>.}
assert Linkify.link("hello @user and @anotherUser.",
mention: true,
mention_prefix: "https://example.com/user/",
new_window: true
) == expected
end
test "mentions inside html tags" do
text =
"<p><strong>hello world</strong></p>\n<p><`em>another @user__test and @user__test google.com paragraph</em></p>\n"
expected =
"<p><strong>hello world</strong></p>\n<p><`em>another <a href=\"u/user__test\">@user__test</a> and <a href=\"u/user__test\">@user__test</a> <a href=\"http://google.com\">google.com</a> paragraph</em></p>\n"
assert Linkify.link(text, mention: true, mention_prefix: "u/") == expected
end
test "metion @user@example.com" do
text = "hey @user@example.com"
expected =
"hey <a href=\"https://example.com/user/user@example.com\" target=\"_blank\">@user@example.com</a>"
assert Linkify.link(text,
mention: true,
mention_prefix: "https://example.com/user/",
new_window: true
) == expected
end
end
describe "hashtag links" do
test "hashtag" do
expected =
" one <a href=\"https://example.com/tag/2two\" target=\"_blank\">#2two</a> three <a href=\"https://example.com/tag/four\" target=\"_blank\">#four</a>."
assert Linkify.link(" one #2two three #four.",
hashtag: true,
hashtag_prefix: "https://example.com/tag/",
new_window: true
) == expected
end
test "must have non-numbers" do
expected = "<a href=\"/t/1ok\">#1ok</a> #42 #7"
assert Linkify.link("#1ok #42 #7",
hashtag: true,
hashtag_prefix: "/t/",
rel: false
) == expected
end
test "support French" do
text = "#administrateur·rice·s #ingénieur·e·s"
expected =
"<a href=\"/t/administrateur·rice·s\">#administrateur·rice·s</a> <a href=\"/t/ingénieur·e·s\">#ingénieur·e·s</a>"
assert Linkify.link(text,
hashtag: true,
hashtag_prefix: "/t/",
rel: false
) == expected
end
test "support Telugu" do
text = "#చక్రం #కకకకక్ #కకకకాక #కకకక్రకకకక"
expected =
"<a href=\"/t/చక్రం\">#చక్రం</a> <a href=\"/t/కకకకక్\">#కకకకక్</a> <a href=\"/t/కకకకాక\">#కకకకాక</a> <a href=\"/t/కకకక్రకకకక\">#కకకక్రకకకక</a>"
assert Linkify.link(text,
hashtag: true,
hashtag_prefix: "/t/",
rel: false
) == expected
end
test "do not turn urls with hashes into hashtags" do
text = "google.com#test #test google.com/#test #tag"
expected =
"<a href=\"http://google.com#test\">google.com#test</a> <a href=\"https://example.com/tag/test\">#test</a> <a href=\"http://google.com/#test\">google.com/#test</a> <a href=\"https://example.com/tag/tag\">#tag</a>"
assert Linkify.link(text,
hashtag: true,
rel: false,
hashtag_prefix: "https://example.com/tag/"
) == expected
end
test "works with non-latin characters" do
text = "#漢字 #は #тест #ทดสอบ"
expected =
"<a href=\"https://example.com/tag/漢字\">#漢字</a> <a href=\"https://example.com/tag/は\">#は</a> <a href=\"https://example.com/tag/тест\">#тест</a> <a href=\"https://example.com/tag/ทดสอบ\">#ทดสอบ</a>"
assert Linkify.link(text,
rel: false,
hashtag: true,
hashtag_prefix: "https://example.com/tag/"
) == expected
end
end
describe "links" do
test "turning urls into links" do
text = "Hey, check out http://www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla ."
expected =
"Hey, check out <a href=\"http://www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla\" target=\"_blank\">http://www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla</a> ."
assert Linkify.link(text, new_window: true) == expected
# no scheme
text = "Hey, check out www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla ."
expected =
"Hey, check out <a href=\"http://www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla\" target=\"_blank\">www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla</a> ."
assert Linkify.link(text, new_window: true) == expected
end
test "turn urls with schema into urls" do
text = "📌https://google.com"
expected = "📌<a href=\"https://google.com\">https://google.com</a>"
assert Linkify.link(text, rel: false) == expected
end
test "skip prefix" do
assert Linkify.link("http://google.com", strip_prefix: true) ==
"<a href=\"http://google.com\">google.com</a>"
assert Linkify.link("http://www.google.com", strip_prefix: true) ==
"<a href=\"http://www.google.com\">google.com</a>"
end
test "hostname/@user" do
text = "https://example.com/@user"
expected =
"<a href=\"https://example.com/@user\" target=\"_blank\">https://example.com/@user</a>"
assert Linkify.link(text, new_window: true) == expected
text = "https://example.com:4000/@user"
expected =
"<a href=\"https://example.com:4000/@user\" target=\"_blank\">https://example.com:4000/@user</a>"
assert Linkify.link(text, new_window: true) == expected
text = "https://example.com:4000/@user"
expected =
"<a href=\"https://example.com:4000/@user\" target=\"_blank\">https://example.com:4000/@user</a>"
assert Linkify.link(text, new_window: true) == expected
text = "@username"
expected = "@username"
assert Linkify.link(text, new_window: true) == expected
text = "http://www.cs.vu.nl/~ast/intel/"
expected = "<a href=\"http://www.cs.vu.nl/~ast/intel/\">http://www.cs.vu.nl/~ast/intel/</a>"
assert Linkify.link(text) == expected
text = "https://forum.zdoom.org/viewtopic.php?f=44&t=57087"
expected =
"<a href=\"https://forum.zdoom.org/viewtopic.php?f=44&t=57087\">https://forum.zdoom.org/viewtopic.php?f=44&t=57087</a>"
assert Linkify.link(text) == expected
text = "https://en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul"
expected =
"<a href=\"https://en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul\">https://en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul</a>"
assert Linkify.link(text) == expected
text = "https://en.wikipedia.org/wiki/Duff's_device"
expected =
"<a href=\"https://en.wikipedia.org/wiki/Duff's_device\">https://en.wikipedia.org/wiki/Duff's_device</a>"
assert Linkify.link(text) == expected
end
end
describe "non http links" do
test "xmpp" do
text = "xmpp:user@example.com"
expected = "<a href=\"xmpp:user@example.com\">xmpp:user@example.com</a>"
assert Linkify.link(text, extra: true) == expected
end
test "email" do
text = "user@example.com"
expected = "<a href=\"mailto:user@example.com\">user@example.com</a>"
assert Linkify.link(text, email: true) == expected
end
test "magnet" do
text =
"magnet:?xt=urn:btih:a4104a9d2f5615601c429fe8bab8177c47c05c84&dn=ubuntu-18.04.1.0-live-server-amd64.iso&tr=http%3A%2F%2Ftorrent.ubuntu.com%3A6969%2Fannounce&tr=http%3A%2F%2Fipv6.torrent.ubuntu.com%3A6969%2Fannounce"
expected =
"<a href=\"magnet:?xt=urn:btih:a4104a9d2f5615601c429fe8bab8177c47c05c84&dn=ubuntu-18.04.1.0-live-server-amd64.iso&tr=http%3A%2F%2Ftorrent.ubuntu.com%3A6969%2Fannounce&tr=http%3A%2F%2Fipv6.torrent.ubuntu.com%3A6969%2Fannounce\">magnet:?xt=urn:btih:a4104a9d2f5615601c429fe8bab8177c47c05c84&dn=ubuntu-18.04.1.0-live-server-amd64.iso&tr=http%3A%2F%2Ftorrent.ubuntu.com%3A6969%2Fannounce&tr=http%3A%2F%2Fipv6.torrent.ubuntu.com%3A6969%2Fannounce</a>"
assert Linkify.link(text, extra: true) == expected
end
test "dweb" do
text =
"dweb://584faa05d394190ab1a3f0240607f9bf2b7e2bd9968830a11cf77db0cea36a21+v1.0.0/path/to/file.txt"
expected =
"<a href=\"dweb://584faa05d394190ab1a3f0240607f9bf2b7e2bd9968830a11cf77db0cea36a21+v1.0.0/path/to/file.txt\">dweb://584faa05d394190ab1a3f0240607f9bf2b7e2bd9968830a11cf77db0cea36a21+v1.0.0/path/to/file.txt</a>"
assert Linkify.link(text, extra: true) == expected
end
end
describe "TLDs" do
test "parse with scheme" do
text = "https://google.com"
expected = "<a href=\"https://google.com\">https://google.com</a>"
assert Linkify.link(text) == expected
end
test "only existing TLDs with scheme" do
text = "this url https://google.foobar.blah11blah/ has invalid TLD"
expected = "this url https://google.foobar.blah11blah/ has invalid TLD"
assert Linkify.link(text) == expected
text = "this url https://google.foobar.com/ has valid TLD"
expected =
"this url <a href=\"https://google.foobar.com/\">https://google.foobar.com/</a> has valid TLD"
assert Linkify.link(text) == expected
end
test "only existing TLDs without scheme" do
text = "this url google.foobar.blah11blah/ has invalid TLD"
assert Linkify.link(text) == text
text = "this url google.foobar.com/ has valid TLD"
expected =
"this url <a href=\"http://google.foobar.com/\">google.foobar.com/</a> has valid TLD"
assert Linkify.link(text) == expected
end
test "only existing TLDs with and without scheme" do
text = "this url http://google.foobar.com/ has valid TLD"
expected =
"this url <a href=\"http://google.foobar.com/\">http://google.foobar.com/</a> has valid TLD"
assert Linkify.link(text) == expected
text = "this url google.foobar.com/ has valid TLD"
expected =
"this url <a href=\"http://google.foobar.com/\">google.foobar.com/</a> has valid TLD"
assert Linkify.link(text) == expected
end
end
end
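
A possible extra sanity check, not part of this diff: for plain (non-:safe) output, flattening the iodata result recovers exactly the binary result of Linkify.link/2.

iex> text = "hello google.com https://ddg.com user@email.com irc:///mIRC"
iex> opts = [email: true, extra: true]
iex> IO.iodata_to_binary(Linkify.link_to_iodata(text, opts)) == Linkify.link(text, opts)
true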
