Update elixir 1.11.0 and OTP 23

Simon Prévost 2020-10-09 07:46:58 -04:00
parent dd45a74f75
commit 6cd211ee83
20 changed files with 1417 additions and 81 deletions

View File

@ -32,7 +32,7 @@ RUN rebar3 compile
#
# Build the OTP binary
#
FROM hexpm/elixir:1.10.4-erlang-22.3.4.1-alpine-3.11.6 AS builder
FROM hexpm/elixir:1.11.0-erlang-23.1.1-alpine-3.12.0 AS builder
ENV MIX_ENV=prod
@ -40,7 +40,7 @@ WORKDIR /build
RUN apk --no-cache update && \
apk --no-cache upgrade && \
apk --no-cache add make g++ git openssl-dev python yaml-dev
apk --no-cache add make g++ git yaml-dev
RUN mix local.rebar --force && \
mix local.hex --force
@ -72,11 +72,11 @@ RUN mkdir -p /opt/build && \
#
# Build a lean runtime container
#
FROM alpine:3.11.6
FROM alpine:3.12.0
RUN apk --no-cache update && \
apk --no-cache upgrade && \
apk --no-cache add bash openssl erlang-crypto yaml-dev
apk --no-cache add bash yaml-dev
WORKDIR /opt/accent
COPY --from=builder /opt/build .

View File

@ -1,28 +1,22 @@
defmodule Accent do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec, warn: false
children = [
supervisor(Accent.Endpoint, []),
{Accent.Repo, []},
Accent.Endpoint,
Accent.Repo,
{Oban, oban_config()},
{Phoenix.PubSub, [name: Accent.PubSub, adapter: Phoenix.PubSub.PG2]}
]
{:ok, _} = Logger.add_backend(Sentry.LoggerBackend)
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Accent.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
Accent.Endpoint.config_change(changed, removed)
:ok
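For reference, the children above are plain child specs (a bare module name or a {module, argument} tuple), which replace the deprecated Supervisor.Spec helpers the old code imported. A minimal, self-contained sketch of the same style, using a stock Agent instead of the project's own modules:
children = [
  # {module, argument}; the supervisor expands this through Agent.child_spec/1
  {Agent, fn -> 0 end}
]
{:ok, _pid} = Supervisor.start_link(children, strategy: :one_for_one)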

View File

@ -1,6 +1,4 @@
**<%= @user.fullname %>** just synced a file: *<%= @document_path %>*
**Stats:**
<%= for %{"action" => action, "count" => count} <- @stats do %>
<%= action %>: *<%= count %>*
<% end %>
**Stats:**<%= for %{"action" => action, "count" => count} <- @stats do %>
<%= action %>: *<%= count %>*<% end %>

View File

@ -1,6 +1,4 @@
*<%= @user.fullname %>* just synced a file: _<%= @document_path %>_
*Stats:*
<%= for %{"action" => action, "count" => count} <- @stats do %>
<%= action %>: _<%= count %>_
<% end %>
*Stats:*<%= for %{"action" => action, "count" => count} <- @stats do %>
<%= action %>: _<%= count %>_<% end %>

View File

@ -9,27 +9,21 @@ defmodule Movement.TranslationComparer do
@renew "renew"
@remove "remove"
@doc """
@moduledoc """
Receives a translation marked to be removed
## Examples
iex> Movement.TranslationComparer.compare(%{marked_as_removed: true}, "test")
{"remove", nil}
"""
def compare(%{marked_as_removed: true}, _text), do: {@remove, nil}
@doc """
Receives a removed translation
## Examples
iex> Movement.TranslationComparer.compare(%{removed: true}, "test")
{"renew", "test"}
"""
def compare(%{removed: true}, text), do: {@renew, text}
@doc """
Receives a translation with a corrected text,
where the corrected text is not equal to text
and proposed text is equal to text
@ -38,12 +32,7 @@ defmodule Movement.TranslationComparer do
iex> Movement.TranslationComparer.compare(%{proposed_text: "Hello", corrected_text: "Hi"}, "Hello")
{"autocorrect", "Hi"}
"""
def compare(%{proposed_text: proposed, corrected_text: corrected}, text)
when proposed == text and corrected != text,
do: {@autocorrect, corrected}
@doc """
Receives a translation with a corrected text,
where the corrected text is equal to text
@ -51,12 +40,7 @@ defmodule Movement.TranslationComparer do
iex> Movement.TranslationComparer.compare(%{proposed_text: "Hi", corrected_text: "Hi"}, "Hi")
{"noop", "Hi"}
"""
def compare(%{corrected_text: corrected, proposed_text: proposed}, text)
when proposed == text and corrected == text,
do: {@noop, text}
@doc """
Receives a translation with a corrected text,
where the corrected text is equal to text
@ -64,12 +48,7 @@ defmodule Movement.TranslationComparer do
iex> Movement.TranslationComparer.compare(%{proposed_text: "Hello", corrected_text: "Hi"}, "Hi")
{"update_proposed", "Hi"}
"""
def compare(%{corrected_text: corrected}, text)
when corrected == text,
do: {@update_proposed, text}
@doc """
Receives a translation with no corrected text,
where the proposed text is not equal to text
@ -77,12 +56,7 @@ defmodule Movement.TranslationComparer do
iex> Movement.TranslationComparer.compare(%{proposed_text: "Hello", corrected_text: "Hello"}, "Hi")
{"conflict_on_proposed", "Hi"}
"""
def compare(%{proposed_text: proposed, corrected_text: corrected}, text)
when proposed != text and corrected == proposed,
do: {@conflict_on_proposed, text}
@doc """
Receives a translation with corrected text,
where the proposed text is not equal to text
and the corrected text is not equal to text
@ -91,22 +65,14 @@ defmodule Movement.TranslationComparer do
iex> Movement.TranslationComparer.compare(%{proposed_text: "Hello", corrected_text: "Hi"}, "Welcome")
{"conflict_on_corrected", "Welcome"}
"""
def compare(%{proposed_text: proposed, corrected_text: corrected}, text)
when proposed != text and corrected != text,
do: {@conflict_on_corrected, text}
@doc """
No condition matches
## Examples
iex> Movement.TranslationComparer.compare(%{}, "Welcome")
{"new", "Welcome"}
"""
def compare(%{}, text), do: {@new, text}
@doc """
Nil translation
## Examples
@ -114,5 +80,29 @@ defmodule Movement.TranslationComparer do
iex> Movement.TranslationComparer.compare(nil, "Welcome")
{"new", "Welcome"}
"""
def compare(nil, text), do: {@new, text}
def compare(%{marked_as_removed: true}, _text), do: {@remove, nil}
def compare(%{removed: true}, text), do: {@renew, text}
def compare(%{proposed_text: proposed, corrected_text: corrected}, text)
when proposed == text and corrected != text,
do: {@autocorrect, corrected}
def compare(%{corrected_text: corrected, proposed_text: proposed}, text)
when proposed == text and corrected == text,
do: {@noop, text}
def compare(%{corrected_text: corrected}, text)
when corrected == text,
do: {@update_proposed, text}
def compare(%{proposed_text: proposed, corrected_text: corrected}, text)
when proposed != text and corrected == proposed,
do: {@conflict_on_proposed, text}
def compare(%{proposed_text: proposed, corrected_text: corrected}, text)
when proposed != text and corrected != text,
do: {@conflict_on_corrected, text}
def compare(_, text), do: {@new, text}
end

View File

@ -6,6 +6,8 @@ defmodule Accent.Plugs.BotParamsInjector do
@doc """
If the current_user is the project's bot, we automatically add the project id to the params.
This makes the param not required in the URL when making calls such as `sync` or `merge`.
Falls back to doing nothing with the connection.
"""
def assign_project_id(conn = %{assigns: %{current_user: user = %{bot: true}}}, _) do
user.permissions
@ -28,8 +30,5 @@ defmodule Accent.Plugs.BotParamsInjector do
end
end
@doc """
Fallback to doing nothing with the connection
"""
def assign_project_id(conn, _), do: conn
end
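A minimal sketch of the fallback clause above, assuming no other clause in the elided hunk matches a conn whose current_user is not a bot: the connection comes back untouched.
conn = %Plug.Conn{assigns: %{current_user: %{bot: false}}}
conn == Accent.Plugs.BotParamsInjector.assign_project_id(conn, [])
#=> true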

View File

@ -13,6 +13,7 @@ defmodule Accent.Mixfile do
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
aliases: aliases(),
xref: [exclude: IEx],
deps: deps(),
releases: releases(),
erlc_paths: ["src", "gen"],
@ -28,8 +29,8 @@ defmodule Accent.Mixfile do
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
defp elixirc_paths(:test), do: ["lib", "web", "vendor", "test/support"]
defp elixirc_paths(_), do: ["lib", "web", "vendor"]
defp extra_compilers(:prod), do: [:phoenix]
defp extra_compilers(_), do: [:gleam, :phoenix]
@ -78,7 +79,6 @@ defmodule Accent.Mixfile do
{:jason, "~> 1.2", override: true},
{:erlsom, "~> 1.5"},
{:xml_builder, "~> 2.0"},
{:ex_minimatch, "~> 0.0.1"},
# Auth
{:oauth2, "~> 2.0", override: true},
@ -104,7 +104,7 @@ defmodule Accent.Mixfile do
# Gleam
{:mix_gleam, "~> 0.1", only: [:dev, :test]},
{:gleam_stdlib, "0.11.0", only: [:dev, :test]},
{:gleam_stdlib, "~> 0.11", only: [:dev, :test]},
# Dev
{:dialyxir, "~> 1.0", only: ~w(dev test)a, runtime: false},

View File

@ -12,7 +12,7 @@
"corsica": {:hex, :corsica, "1.1.3", "5f1de40bc9285753aa03afbdd10c364dac79b2ddbf2ba9c5c9c47b397ec06f40", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "8156b3a14a114a346262871333a931a1766b2597b56bf994fcfcb65443a348ad"},
"cowboy": {:hex, :cowboy, "2.8.0", "f3dc62e35797ecd9ac1b50db74611193c29815401e53bac9a5c0577bd7bc667d", [:rebar3], [{:cowlib, "~> 2.9.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.7.1", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "4643e4fba74ac96d4d152c75803de6fad0b3fa5df354c71afdd6cbeeb15fac8a"},
"cowlib": {:hex, :cowlib, "2.9.1", "61a6c7c50cf07fdd24b2f45b89500bb93b6686579b069a89f88cb211e1125c78", [:rebar3], [], "hexpm", "e4175dc240a70d996156160891e1c62238ede1729e45740bdd38064dad476170"},
"credo": {:hex, :credo, "1.4.0", "92339d4cbadd1e88b5ee43d427b639b68a11071b6f73854e33638e30a0ea11f5", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1fd3b70dce216574ce3c18bdf510b57e7c4c85c2ec9cad4bff854abaf7e58658"},
"credo": {:hex, :credo, "1.4.1", "16392f1edd2cdb1de9fe4004f5ab0ae612c92e230433968eab00aafd976282fc", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "155f8a2989ad77504de5d8291fa0d41320fdcaa6a1030472e9967f285f8c7692"},
"credo_envvar": {:hex, :credo_envvar, "0.1.4", "40817c10334e400f031012c0510bfa0d8725c19d867e4ae39cf14f2cbebc3b20", [:mix], [{:credo, "~> 1.0", [hex: :credo, repo: "hexpm", optional: false]}], "hexpm", "5055cdb4bcbaf7d423bc2bb3ac62b4e2d825e2b1e816884c468dee59d0363009"},
"csv": {:hex, :csv, "2.4.1", "50e32749953b6bf9818dbfed81cf1190e38cdf24f95891303108087486c5925e", [:mix], [{:parallel_stream, "~> 1.0.4", [hex: :parallel_stream, repo: "hexpm", optional: false]}], "hexpm", "54508938ac67e27966b10ef49606e3ad5995d665d7fc2688efb3eab1307c9079"},
"dataloader": {:hex, :dataloader, "1.0.8", "114294362db98a613f231589246aa5b0ce847412e8e75c4c94f31f204d272cbf", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "eaf3c2aa2bc9dbd2f1e960561d616b7f593396c4754185b75904f6d66c82a667"},
@ -23,8 +23,6 @@
"ecto_sql": {:hex, :ecto_sql, "3.4.5", "30161f81b167d561a9a2df4329c10ae05ff36eca7ccc84628f2c8b9fa1e43323", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "31990c6a3579b36a3c0841d34a94c275e727de8b84f58509da5f1b2032c98ac2"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"erlsom": {:hex, :erlsom, "1.5.0", "c5a5cdd0ee0e8dca62bcc4b13ff08da24fdefc16ccd8b25282a2fda2ba1be24a", [:rebar3], [], "hexpm", "55a9dbf9cfa77fcfc108bd8e2c4f9f784dea228a8f4b06ea10b684944946955a"},
"ex_brace_expansion": {:hex, :ex_brace_expansion, "0.0.2", "7574fd9497f3f045346dfd9517f10f237f4d39137bf42142b0fbdcd4bacbc6ed", [:mix], [], "hexpm", "d7470a00cffe4425f89e83d7288c24b641c3f6cbde136a08089e7420467cd237"},
"ex_minimatch": {:hex, :ex_minimatch, "0.0.1", "4b41726183c104ac227c5996f083ec370f97bd38c2232d74a847888c1bb715bc", [:mix], [{:ex_brace_expansion, "~> 0.0.1", [hex: :ex_brace_expansion, repo: "hexpm", optional: false]}], "hexpm", "3255bb8496635d3ef5d86ec6829958a3573ff730ca01534b0fead9c2e3af7de4"},
"excoveralls": {:hex, :excoveralls, "0.13.2", "5ca05099750c086f144fcf75842c363fc15d7d9c6faa7ad323d010294ced685e", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1e7ed75c158808a5a8f019d3ad63a5efe482994f2f8336c0a8c77d2f0ab152ce"},
"fast_yaml": {:git, "https://github.com/processone/fast_yaml.git", "e789f68895f71b7ad31057177810ca0161bf790e", [ref: "e789f68895f71b7ad31057177810ca0161bf790e"]},
"file_system": {:hex, :file_system, "0.2.9", "545b9c9d502e8bfa71a5315fac2a923bd060fd9acb797fe6595f54b0f975fd32", [:mix], [], "hexpm", "3cf87a377fe1d93043adeec4889feacf594957226b4f19d5897096d6f61345d8"},

View File

@ -8,5 +8,5 @@
{project_plugins, [rebar_gleam]}.
{deps, [
{gleam_stdlib, "0.9.0"}
{gleam_stdlib, "0.11.0"}
]}.

View File

@ -1,6 +1,6 @@
{"1.1.0",
[{<<"gleam_stdlib">>,{pkg,<<"gleam_stdlib">>,<<"0.9.0">>},0}]}.
[{<<"gleam_stdlib">>,{pkg,<<"gleam_stdlib">>,<<"0.11.0">>},0}]}.
[
{pkg_hash,[
{<<"gleam_stdlib">>, <<"D1323850E8C1481D8D6661EE572DD54D5F0F1C2CCEF208A0ADEBB72183B7B465">>}]}
{<<"gleam_stdlib">>, <<"9B1089739574CDF78A1C25A463D770A98F59E63A92324D17095AD67E867EE549">>}]}
].

View File

@ -35,13 +35,15 @@ defmodule AccentTest.Hook.Outbounds.Discord do
received_body =
Jason.encode!(%{
text: """
**Test** just synced a file: *foo.json*
text:
"""
**Test** just synced a file: *foo.json*
**Stats:**
new: *4*
conflict_on_proposed: *10*
"""
**Stats:**
new: *4*
conflict_on_proposed: *10*
"""
|> String.trim_trailing()
})
with_mock(HTTPoison, [post: fn ^received_url, ^received_body, ^received_headers -> {:ok, "done"} end], do: Discord.perform(%Oban.Job{args: context}))

View File

@ -41,13 +41,15 @@ defmodule AccentTest.Hook.Outbounds.Slack do
received_body =
Jason.encode!(%{
text: """
*Test* just synced a file: _foo.json_
text:
"""
*Test* just synced a file: _foo.json_
*Stats:*
new: _4_
conflict_on_proposed: _10_
"""
*Stats:*
new: _4_
conflict_on_proposed: _10_
"""
|> String.trim_trailing()
})
with_mock(HTTPoison, [post: fn ^received_url, ^received_body, ^received_headers -> {:ok, "done"} end], do: Slack.perform(%Oban.Job{args: context}))

View File

@ -0,0 +1,274 @@
defmodule ExBraceExpansion do
@esc_slash "\0SLASH#{:random.uniform}\0"
@esc_open "\0OPEN#{:random.uniform}\0"
@esc_close "\0CLOSE#{:random.uniform}\0"
@esc_comma "\0COMMA#{:random.uniform}\0"
@esc_period "\0PERIOD#{:random.uniform}\0"
@moduledoc """
[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), as known from sh/bash, in Elixir. This is a port of the [brace-expansion](https://github.com/juliangruber/brace-expansion) JavaScript project.
"""
import ExBraceExpansion.BalancedMatch
import ExBraceExpansion.ConcatMap
@doc ~S"""
expands the `str` into a list of patterns
## Examples
iex> import ExBraceExpansion
nil
iex> expand("file-{a,b,c}.jpg")
["file-a.jpg", "file-b.jpg", "file-c.jpg"]
iex> expand("-v{,,}")
["-v", "-v", "-v"]
iex> expand("file{0..2}.jpg")
["file0.jpg", "file1.jpg", "file2.jpg"]
iex> expand("file-{a..c}.jpg")
["file-a.jpg", "file-b.jpg", "file-c.jpg"]
iex> expand("file{2..0}.jpg")
["file2.jpg", "file1.jpg", "file0.jpg"]
iex> expand("file{0..4..2}.jpg")
["file0.jpg", "file2.jpg", "file4.jpg"]
iex> expand("file-{a..e..2}.jpg")
["file-a.jpg", "file-c.jpg", "file-e.jpg"]
iex> expand("file{00..10..5}.jpg")
["file00.jpg", "file05.jpg", "file10.jpg"]
iex> expand("{{A..C},{a..c}}")
["A", "B", "C", "a", "b", "c"]
iex> expand("ppp{,config,oe{,conf}}")
["ppp", "pppconfig", "pppoe", "pppoeconf"]
"""
def expand(str) do
if str == nil do
[]
else
_expand(escape_braces(str), true)
|> Enum.map(fn val -> unescape_braces(val) end)
end
end
defp _expand(str, is_top) do
m = balanced("{", "}", str)
done = m == nil || m.pre =~ ~r/\$$/
is_numeric_sequence = if m, do: m.body =~ ~r/^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/, else: false
is_alpha_sequence = if m, do: m.body =~ ~r/^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/, else: false
is_sequence = is_numeric_sequence || is_alpha_sequence
is_options = if m, do: m.body =~ ~r/^(.*,)+(.+)?$/, else: false
is_comma_and_brace = if m, do: m.body =~ ~r/,.*}/, else: false
state = %{
str: str,
m: m,
is_top: is_top,
is_numeric_sequence: is_numeric_sequence,
is_alpha_sequence: is_alpha_sequence,
is_sequence: is_sequence,
is_options: is_options,
is_comma_and_brace: is_comma_and_brace,
done: done,
value: (if done, do: [str], else: nil)
}
state
|> expand_step1
|> expand_step2
|> expand_step3
|> expand_step4
|> expand_step5
end
defp expand_step1(%{done: done} = state) when done, do: state
defp expand_step1(%{is_sequence: is_sequence, is_options: is_options, is_comma_and_brace: is_comma_and_brace, m: m} = state) when not is_sequence and not is_options and is_comma_and_brace do
state
|> put_in([:done], true)
|> put_in([:value], _expand(m.pre <> "{" <> m.body <> @esc_close <> m.post, false))
end
defp expand_step1(%{is_sequence: is_sequence, is_options: is_options, str: str} = state) when not is_sequence and not is_options do
state
|> put_in([:done], true)
|> put_in([:value], [str])
end
defp expand_step1(%{is_sequence: is_sequence, m: m} = state) when is_sequence do
state
|> put_in([:n], Regex.split(~r/\.\./, m.body))
end
defp expand_step1(%{m: m} = state) do
state
|> put_in([:n], parse_comma_parts(m.body))
end
defp expand_step2(%{done: done} = state) when done, do: state
defp expand_step2(%{is_sequence: is_sequence, n: n} = state) when not is_sequence and length(n) == 1 do
state
|> put_in([:n], _expand(hd(n), false) |> Enum.map(fn val -> embrace(val) end))
|> expand_step2a
end
defp expand_step2(state), do: state
defp expand_step2a(%{n: n, m: m} = state) when length(n) == 1 do
post = if String.length(m.post) > 0, do: _expand(m.post, false), else: [""]
value = Enum.map post, fn p ->
m.pre <> hd(n) <> p
end
state
|> put_in([:done], true)
|> put_in([:value], value)
end
defp expand_step2a(state), do: state
defp expand_step3(%{done: done} = state) when done, do: state
defp expand_step3(%{m: m} = state) do
state
|> put_in([:pre], m.pre)
|> put_in([:post], (if String.length(m.post) > 0, do: _expand(m.post, false), else: [""]))
end
defp expand_step4(%{done: done} = state) when done, do: state
defp expand_step4(%{is_sequence: is_sequence, n: n, is_alpha_sequence: is_alpha_sequence} = state) when is_sequence do
n_0 = Enum.at(n, 0)
n_1 = Enum.at(n, 1)
n_2 = Enum.at(n, 2)
x = numeric(n_0)
y = numeric(n_1)
incr = if length(n) == 3, do: abs(numeric(n_2)), else: 1
nn = for i <- x..y, rem(i - x, incr) == 0 do
if is_alpha_sequence do
get_alpha_character(i)
else
get_numeric_character(i, n, n_0, n_1)
end
end
state
|> put_in([:nn], nn)
end
defp expand_step4(%{n: n} = state) do
state
|> put_in([:nn], concat_map(n, fn val -> _expand(val, false) end))
end
defp expand_step5(%{done: done, value: value}) when done, do: value
defp expand_step5(%{nn: nn, post: post, pre: pre, is_top: is_top, is_sequence: is_sequence}) do
for x <- nn, y <- post do
expansion = pre <> x <> y
if not is_top or is_sequence or expansion do
expansion
else
nil
end
end
end
# helpers
defp escape_braces(str) do
str
|> String.split("\\\\") |> Enum.join(@esc_slash)
|> String.split("\\{") |> Enum.join(@esc_open)
|> String.split("\\}") |> Enum.join(@esc_close)
|> String.split("\\,") |> Enum.join(@esc_comma)
|> String.split("\\.") |> Enum.join(@esc_period)
end
defp unescape_braces(str) do
str
|> String.split(@esc_slash) |> Enum.join("\\")
|> String.split(@esc_open) |> Enum.join("{")
|> String.split(@esc_close) |> Enum.join("}")
|> String.split(@esc_comma) |> Enum.join(",")
|> String.split(@esc_period) |> Enum.join(".")
end
defp parse_comma_parts(str) when str == nil or str == "", do: [""]
defp parse_comma_parts(str) do
m = balanced("{", "}", str)
if m == nil do
str |> String.split(",")
else
p = m.pre |> String.split(",")
p = List.update_at(p, length(p) - 1, fn val -> val <> "{" <> m.body <> "}" end)
post_parts = parse_comma_parts(m.post)
p = if m.post != "" do
[post_parts_hd | post_parts_tail] = post_parts
p = List.update_at(p, length(p) - 1, fn val -> val <> post_parts_hd end)
p ++ post_parts_tail
else
p
end
[] ++ p
end
end
defp numeric(val) do
try do
String.to_integer(val)
rescue
_ -> hd(to_charlist(val))
end
end
defp embrace(str) do
"{" <> str <> "}"
end
defp get_alpha_character(i) do
c = to_string([i])
if c == "\\", do: "", else: c
end
defp get_numeric_character(i, n, n_0, n_1) do
c = to_string(i)
pad = Enum.any? n, fn val -> val =~ ~r/^-?0\d/ end
c = if pad do
width = Enum.max([String.length(n_0), String.length(n_1)])
need = width - String.length(c)
if need > 0 do
front_padding = Enum.join(Enum.map(0..(need-1), fn _ -> "0" end), "")
if i < 0 do
"-" <> front_padding <> String.slice(c, 1, String.length(c))
else
front_padding <> c
end
end
else
c
end
c
end
end
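One behaviour the doctests above do not cover: escape_braces/1 swaps backslash-escaped metacharacters for unique sentinel strings before expansion and unescape_braces/1 restores them afterwards, so escaped braces survive as literals. A small sketch, with the result inferred from the implementation rather than taken from upstream docs:
iex> ExBraceExpansion.expand("a\\{b,c\\}d")
["a{b,c}d"]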

View File

@ -0,0 +1,137 @@
defmodule ExBraceExpansion.BalancedMatch do
def balanced(a, b, str), do: _balanced(a, b, str) |> format
defp _balanced(a, b, str) do
a_len = len(a)
b_len = len(b)
str_len = len(str)
%{
str: str,
a: a,
b: b,
ended: false,
i: 0,
i_to_a_len: slice(str, 0, a_len),
i_to_b_len: slice(str, 0, b_len),
a_len: a_len,
b_len: b_len,
str_len: str_len, # need this for guard
bal: 0,
start: nil,
finish: nil,
segments: %{
pre: "",
body: "",
post: ""
}
}
|> get_segments
|> deal_with_inbalance
end
defp get_segments(%{i: i, str_len: str_len} = state) when i == str_len do
state
end
defp get_segments(%{i_to_a_len: i_to_a_len, a: a, start: start, bal: bal, i: i} = state) when i_to_a_len == a and start == nil do
state
|> move_to_next_i
|> put_in([:bal], bal + 1)
|> put_in([:start], i)
|> get_segments
end
defp get_segments(%{i_to_a_len: i_to_a_len, a: a, bal: bal} = state) when i_to_a_len == a do
state
|> move_to_next_i
|> put_in([:bal], bal + 1)
|> get_segments
end
defp get_segments(%{i_to_b_len: i_to_b_len, b: b, start: start, bal: bal} = state) when i_to_b_len == b and start != nil and bal - 1 == 0 do
%{i: i, str: str, start: start, a_len: a_len, b_len: b_len, str_len: str_len} = state
segments = %{
pre: slice(str, 0, start),
body: (if i - start > 1, do: slice(str, (a_len + start)..i), else: ""),
post: slice(str, (i + b_len)..str_len)
}
state
|> put_in([:ended], true)
|> put_in([:bal], bal - 1)
|> put_in([:finish], i)
|> put_in([:segments], segments)
end
defp get_segments(%{i_to_b_len: i_to_b_len, b: b, start: start, bal: bal} = state) when i_to_b_len == b and start != nil do
state
|> move_to_next_i
|> put_in([:ended], true)
|> put_in([:bal], bal - 1)
|> get_segments
end
defp get_segments(state) do
state
|> move_to_next_i
|> get_segments
end
defp deal_with_inbalance(%{bal: bal, ended: ended} = state) when bal != 0 and ended do
%{a: a, b: b, str: str, a_len: a_len, str_len: str_len, start: start} = state
_balanced(a, b, slice(str, start + a_len, str_len))
|> reconstitute(str, start + a_len)
end
defp deal_with_inbalance(%{start: start, finish: finish}) when start == nil and finish == nil do
nil
end
defp deal_with_inbalance(state) do
state
end
defp reconstitute(state, _prev_str, _prev_start) when state == nil do
nil
end
defp reconstitute(state, prev_str, prev_start) do
%{start: start, finish: finish, segments: %{pre: pre}} = state
state
|> put_in([:start], start + prev_start)
|> put_in([:finish], finish + prev_start)
|> put_in([:segments, :pre], slice(prev_str, 0..prev_start) <> pre)
end
defp format(nil), do: nil
defp format(%{start: start, finish: finish, segments: %{pre: pre, body: body, post: post}}) do
%{
start: start,
finish: finish,
pre: pre,
body: body,
post: post
}
end
# helpers
defp move_to_next_i(%{i: i, str: str, a_len: a_len, b_len: b_len} = state) do
state
|> put_in([:i], i + 1)
|> put_in([:i_to_a_len], slice(str, i + 1, a_len))
|> put_in([:i_to_b_len], slice(str, i + 1, b_len))
end
defp len(str), do: String.length(str)
defp slice(str, start, len), do: String.slice(str, start, len)
defp slice(str, start..finish), do: String.slice(str, start..(finish-1))
end
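The vendored BalancedMatch module ships without docs, so a quick usage sketch may help: balanced/3 finds the first balanced pair of delimiters and returns the surrounding segments with their offsets. The result below is inferred from the implementation above:
iex> ExBraceExpansion.BalancedMatch.balanced("{", "}", "pre{in{nest}}post")
%{body: "in{nest}", finish: 12, post: "post", pre: "pre", start: 3}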

View File

@ -0,0 +1,9 @@
defmodule ExBraceExpansion.ConcatMap do
def concat_map(coll, func) do
coll
|> Enum.map(fn val -> func.(val) end)
|> List.flatten
end
end

207
vendor/ex_minimatch/ex_minimatch.ex vendored Normal file
View File

@ -0,0 +1,207 @@
defmodule ExMinimatch do
@moduledoc """
ExMinimatch
===========
Globbing paths without walking the tree! Elixir and Erlang provide `wildcard`
functions in the stdlib. But these will walk the directory tree. If you simply
want to test whether a file path matches a glob, ExMinimatch is for you.
Quick examples:
iex> import ExMinimatch
nil
iex> match("**/*{1..2}{a,b}.{png,jpg}", "asdf/pic2a.jpg")
true
iex> match("*.+(bar|foo)", "bar.foo")
true
iex> ["me.jpg", "images/me.png", "images/you.svg"] |> filter("**/*.{png,jpg}")
["me.jpg", "images/me.png"]
The compiled forms below allow us to cache the %ExMinimatcher{} struct when it is
used repeatedly against a large number of files.
iex> compile("**/*{1..2}{a,b}.{png,jpg}") |> match("asdf/pic2a.jpg")
true
iex> ["me.jpg", "images/me.png", "images/you.svg"] |> filter(compile("**/*.{png,jpg}"))
["me.jpg", "images/me.png"]
ExMinimatch is a port of the [minimatch](https://github.com/isaacs/minimatch)
JavaScript project. It is a close port but not exactly the same. See
"Comparison to minimatch.js" section below.
## Glob Patterns
Supports these glob features:
- [Brace Expansion](https://github.com/gniquil/ex_brace_expansion)
- Extended glob matching
- "Globstar" ** matching
See:
- man sh
- man bash
- man 3 fnmatch
- man 5 gitignore
## Options
`compile`, `match`, and `filter` all have forms that take an options argument (as a map %{}).
The following are the explanations. By default, all of these are false.
### log
Possible values are `:info` and `:debug`. If set, information is dumped to the
REPL; `:debug` dumps more.
### nobrace
Do not expand `{a,b}` and `{1..3}` brace sets.
### noglobstar
Disable `**` matching against multiple folder names.
### dot
Allow patterns to match filenames starting with a period, even if the pattern
does not explicitly have a period in that spot.
Note that by default, `a/**/b` will not match `a/.d/b`, unless dot is set, e.g.
`match("a/**/b", "a/.d/b", %{dot: true})`
### noext
Disable "extglob" style patterns like `+(a|b)`.
### nocase
Perform a case-insensitive match.
### match_base
If set, then patterns without slashes will be matched against the basename of
the path if it contains slashes. For example, `a?b` would match the path
`/xyz/123/acb`, but not `/xyz/acb/123`.
### nocomment
Suppress the behavior of treating `#` at the start of a pattern as a comment.
### nonegate
Suppress the behavior of treating leading `!` character as negation.
## Comparison to minimatch.js
`minimatch.js` converts a glob into a list of regular expressions. However, when
it comes to matching, one can choose to use `minimatch.match` or use the complete
regular expression generated by `minimatch.makeRe` to test it against a file
pattern. Unfortunately, the two approaches are __inconsistent__. Notably, the full
regular-expression approach has a few important pieces missing. Therefore, here
we implement the first approach. For details, take a look at `ExMinimatch.Matcher`.
"""
@doc """
Returns a compiled %ExMinimatcher{} struct, which can be used in conjunction
with `match/2`, `match/3`, or `filter/2` to match files.
The purpose of this function is to save time by precompiling the glob
pattern.
For possible glob patterns and available options, please refer to the moduledoc.
"""
def compile(glob), do: compile(glob, %{})
def compile(glob, options) do
options = %{
dot: false,
nocase: false,
match_base: false,
nonegate: false,
noext: false,
noglobstar: false,
nocomment: false,
nobrace: false,
log: nil
} |> Map.merge(options)
ExMinimatch.Compiler.compile_matcher(glob, options)
end
@doc ~S"""
Returns true when the file matches the compiled %ExMinimatcher{} struct.
This is intended to be used with `compile`.
## Examples
iex> compile("**/*.{png,jpg}") |> match("me.jpg")
true
iex> compile("**/*.{png,jpg}") |> match("images/me.png")
true
iex> compile("**/*.{png,jpg}") |> match("images/you.svg")
false
iex> ["me.jpg", "images/me.png", "images/you.svg"] |> filter(compile("**/*.{png,jpg}"))
["me.jpg", "images/me.png"]
Returns true if the file matches the glob. This is a convenience function that
is literally `glob |> compile(options) |> match(file)`.
Use this for one-off matching, as the glob is recompiled every time this is
called.
For possible glob patterns and available options, please refer to the moduledoc.
## Examples
iex> match("**/*.png", "qwer.png")
true
iex> match("**/*.png", "qwer/qwer.png")
true
"""
def match(%ExMinimatcher{pattern: pattern}, file) when pattern == [] and file == "", do: true
def match(%ExMinimatcher{pattern: pattern}, _file) when pattern == [], do: false
def match(%ExMinimatcher{} = matcher, file), do: ExMinimatch.Matcher.match_file(file, matcher)
def match(glob, file) when is_binary(glob), do: match(glob, file, %{})
def match(glob, file, options) when is_binary(glob), do: glob |> compile(options) |> match(file)
@doc """
Returns a list of files filtered by the compiled %ExMinimatcher{} struct.
Note that the collection argument comes first, unlike `match`. This is
more suitable for piping collections.
## Examples
iex> ["me.jpg", "images/me.png", "images/you.svg"] |> filter(compile("**/*.{png,jpg}"))
["me.jpg", "images/me.png"]
Returns a list of files that match the given pattern. This is a convenience
function.
For possible glob patterns and available options, please refer to the moduledoc.
## Examples
iex> filter(["qwer.png", "asdf/qwer.png"], "**/*.png")
["qwer.png", "asdf/qwer.png"]
iex> filter(["qwer/pic1a.png", "qwer/asdf/pic2a.png", "asdf/pic2c.jpg"], "**/*{1..2}{a,b}.{png,jpg}")
["qwer/pic1a.png", "qwer/asdf/pic2a.png"]
"""
def filter(files, %ExMinimatcher{} = matcher), do: files |> Enum.filter(&match(matcher, &1))
def filter(files, pattern) when is_binary(pattern), do: filter(files, pattern, %{})
def filter(files, pattern, options) when is_binary(pattern), do: files |> filter(compile(pattern, options))
end
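A short illustration of the dot option described in the moduledoc, written out with its results (the values follow from the matcher code rather than from an official doctest):
iex> ExMinimatch.match("a/**/b", "a/.d/b")
false
iex> ExMinimatch.match("a/**/b", "a/.d/b", %{dot: true})
true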

View File

@ -0,0 +1,533 @@
defmodule ExMinimatch.Compiler do
import Map, only: [merge: 2]
import ExBraceExpansion
import ExMinimatch.Helper
@qmark ExMinimatcher.qmark
@globstar ExMinimatcher.globstar
@star ExMinimatcher.star
@re_specials ExMinimatcher.re_specials
@slash_split ExMinimatcher.slash_split
def compile_matcher(glob, options) do
{regex_parts_set, negate} = if short_circuit_comments(glob, options) do
{[], false}
else
make_re(glob, options)
end
%ExMinimatcher{
glob: glob,
pattern: regex_parts_set,
negate: negate,
options: options
}
end
def short_circuit_comments(pattern, options) do
(not options[:nocomment] and String.first(pattern) == "#") or pattern == ""
end
def make_re(pattern, options) do
debug {"make_re", pattern, options}, options
# step 1 figure out negate
{negate, pattern} = parse_negate(pattern, options)
debug {"make_re step 1", negate, pattern}, options
# step 2 expand braces
expanded_pattern_set = expand_braces(pattern, options)
debug {"make_re step 2", expanded_pattern_set}, options
# step 3: now we have a set, so turn each one into a series of path-portion
# matching patterns.
# These will be regexps, except in the case of "**", which is
# set to the GLOBSTAR object for globstar behavior,
# and will not contain any / characters
# step 3a split slashes
glob_parts_set = Enum.map expanded_pattern_set, fn (expanded_pattern) ->
Regex.split(@slash_split, expanded_pattern)
end
debug {"make_re step 3a", glob_parts_set}, options
# step 3b glob -> regex
regex_parts_set = Enum.map glob_parts_set, fn glob_parts ->
Enum.map glob_parts, fn glob_part ->
parse_glob_to_re(glob_part, options)
end
end
debug {"make_re step 3b", regex_parts_set}, options
# step 4 filter out everything that didn't compile properly.
regex_parts_set = Enum.filter regex_parts_set, fn regex_parts ->
Enum.all? regex_parts, fn regexp -> regexp != false end
end
debug {"make_re step 4", regex_parts_set}, options
{regex_parts_set, negate}
end
def expand_braces(pattern, options) do
if options[:nobrace] or not (pattern =~ ~r/\{.*\}/) do
[pattern]
else
expand(pattern)
end
end
def parse_negate(pattern, %{nonegate: nonegate}) when nonegate, do: {false, pattern}
def parse_negate(pattern, _options) do
{_, negate, negateOffset} = Enum.reduce 0..(len(pattern) - 1), {true, false, 0}, fn i, {previous_negate, negate, negateOffset} ->
cond do
not previous_negate ->
{previous_negate, negate, negateOffset}
at(pattern, i) == "!" ->
{previous_negate, not negate, negateOffset + 1}
true ->
{false, negate, negateOffset}
end
end
if (negateOffset > 0) do
{negate, slice(pattern, negateOffset, len(pattern))}
else
{negate, pattern}
end
end
# parse a component of the expanded set.
# At this point, no pattern may contain "/" in it
# so we're going to return a 2d array, where each entry is the full
# pattern, split on '/', and then turned into a regular expression.
# A regexp is made at the end which joins each array with an
# escaped /, and another full one which joins each regexp with |.
#
# Following the lead of Bash 4.1, note that "**" only has special meaning
# when it is the *only* thing in a path portion. Otherwise, any series
# of * is equivalent to a single *.
def parse_glob_to_re(glob_pattern, %{noglobstar: noglobstar}) when glob_pattern == "**" and not noglobstar, do: @globstar
def parse_glob_to_re(glob_pattern, _options) when glob_pattern == "", do: ""
def parse_glob_to_re(glob_pattern, options, is_sub \\ false) do
%{
pattern: glob_pattern,
pattern_len: len(glob_pattern),
i: 0,
c: at(glob_pattern, 0),
re: "",
has_magic: options[:nocase],
escaping: false,
pattern_list_stack: [],
pl_type: nil,
state_char: "",
in_class: false,
re_class_start: -1,
class_start: -1,
pattern_start: (if String.first(glob_pattern) == ".", do: "", else: (if options[:dot], do: "(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))", else: "(?!\\.)")),
is_sub: is_sub,
options: options,
failed: false
}
|> parse
|> tap(fn state -> debug {"parse", state}, state[:options] end)
|> handle_open_class
|> tap(fn state -> debug {"handle_open_class", state}, state[:options] end)
|> handle_weird_end
|> tap(fn state -> debug {"handle_weird_end", state}, state[:options] end)
|> handle_trailing_things
|> tap(fn state -> debug {"handle_trailing_things", state}, state[:options] end)
|> handle_dot_start
|> tap(fn state -> debug {"handle_dot_start", state}, state[:options] end)
|> finish_parse
end
# terminal, should return something rather than recurse
def parse(%{failed: failed} = state) when failed, do: state
def parse(%{i: i, pattern_len: pattern_len} = state) when i == pattern_len, do: state
def parse(%{c: c, re: re, escaping: escaping} = state) when escaping and c in @re_specials do
state
|> merge(%{
re: re <> "\\" <> c,
escaping: false
})
|> continue
end
def parse(%{c: "/"} = state), do: merge(state, %{failed: true})
def parse(%{c: "\\"} = state) do
state
|> clear_state_char()
|> merge(%{escaping: true})
|> continue
end
def parse(%{c: c, in_class: in_class, i: i, class_start: class_start, re: re} = state) when c in ["?", "*", "+", "@", "!"] and in_class do
c = if c == "!" and i == class_start + 1, do: "^", else: c
state
|> merge(%{re: re <> c})
|> continue
end
def parse(%{c: c, options: %{noext: noext}} = state) when c in ["?", "*", "+", "@", "!"] do
state
|> clear_state_char()
|> merge(%{state_char: c})
|> transform(fn state -> if noext, do: clear_state_char(state), else: state end)
|> continue
end
def parse(%{c: c, in_class: in_class, re: re} = state) when c == "(" and in_class do
state
|> merge(%{re: re <> "("})
|> continue
end
def parse(%{c: c, state_char: state_char, re: re} = state) when c == "(" and state_char == "" do
state
|> merge(%{re: re <> "\\("})
|> continue
end
def parse(%{c: c, state_char: state_char, pattern_list_stack: pattern_list_stack, re: re, i: i} = state) when c == "(" do
state
|> merge(%{
pl_type: state_char,
pattern_list_stack: [%{type: state_char, start: i - 1, re_start: len(re)} | pattern_list_stack],
re: re <> (if state_char == "!", do: "(?:(?!", else: "(?:"),
state_char: ""
})
|> continue
end
def parse(%{c: c, in_class: in_class, pattern_list_stack: pattern_list_stack, re: re} = state) when c == ")" and (in_class or length(pattern_list_stack) == 0) do
state
|> merge(%{re: re <> "\\)"})
|> continue
end
def parse(%{c: c} = state) when c == ")" do
state
|> clear_state_char()
|> merge(%{has_magic: true})
|> transform(fn %{pattern_list_stack: pattern_list_stack, re: re} = state ->
new_re = re <> ")"
[pattern_list_stack_hd | new_pattern_list_stack] = pattern_list_stack
new_pl_type = pattern_list_stack_hd[:type]
new_re = cond do
new_pl_type == "!" ->
new_re <> "[^/]*?)"
new_pl_type in ["*", "?", "+"] ->
new_re <> new_pl_type
true ->
new_re
end
state
|> merge(%{
re: new_re,
pl_type: new_pl_type,
pattern_list_stack: new_pattern_list_stack
})
end)
|> continue
end
def parse(%{c: c, in_class: in_class, pattern_list_stack: pattern_list_stack, escaping: escaping, re: re} = state) when c == "|" and (in_class or length(pattern_list_stack) == 0 or escaping) do
state
|> merge(%{
re: re <> "\\|",
escaping: false
})
|> continue
end
def parse(%{c: c} = state) when c == "|" do
state
|> clear_state_char()
|> transform(fn %{re: re} = state ->
state
|> merge(%{re: re <> "|"})
end)
|> continue
end
def parse(%{c: c, in_class: in_class} = state) when c == "[" and in_class do
state
|> clear_state_char()
|> transform(fn %{re: re} = state ->
state
|> merge(%{re: re <> "\\" <> c})
end)
|> continue
end
def parse(%{c: c, i: i} = state) when c == "[" do
state
|> clear_state_char()
|> transform(fn %{re: re} = state ->
state
|> merge(%{
in_class: true,
class_start: i,
re_class_start: len(re),
re: re <> c
})
end)
|> continue
end
def parse(%{c: c, re: re, in_class: in_class, i: i, class_start: class_start} = state) when c == "]" and (i == class_start + 1 or not in_class) do
state
|> merge(%{
re: re <> "\\" <> c,
escaping: false
})
|> continue
end
def parse(%{c: c, re: re, in_class: in_class} = state) when c == "]" and in_class do
%{
pattern: pattern,
class_start: class_start,
i: i,
re_class_start: re_class_start,
has_magic: has_magic
} = state
cs = slice(pattern, (class_start + 1)..(i - 1))
state_changes = case Regex.compile("[" <> cs <> "]") do
{:error, _} ->
{sub_re, sub_has_magic} = parse_glob_to_re(cs, state[:options], true)
%{
re: slice(re, 0, re_class_start) <> "\\[" <> sub_re <> "\\]",
has_magic: has_magic or sub_has_magic,
in_class: false
}
_ ->
%{
re: re <> c,
has_magic: true,
in_class: false
}
end
state
|> merge(state_changes)
|> continue
end
def parse(%{escaping: escaping, c: c} = state) when escaping do
state
|> clear_state_char()
|> transform(fn %{re: re} = state ->
state
|> merge(%{
escaping: false,
re: re <> c
})
end)
|> continue
end
def parse(%{c: c, in_class: in_class} = state) when c in @re_specials and not (c == "^" and in_class) do
state
|> clear_state_char()
|> transform(fn %{re: re} = state ->
state
|> merge(%{
re: re <> "\\" <> c
})
end)
|> continue
end
def parse(%{c: c} = state) do
state
|> clear_state_char()
|> transform(fn %{re: re} = state ->
state
|> merge(%{
re: re <> c
})
end)
|> continue
end
# handle the case where we left a class open.
# "[abc" is valid, equivalent to "\[abc"
def handle_open_class(%{failed: failed} = state) when failed, do: state
def handle_open_class(%{in_class: in_class} = state) when in_class do
%{
pattern: pattern,
re: re,
class_start: class_start,
re_class_start: re_class_start,
has_magic: has_magic
} = state
cs = slice(pattern, class_start + 1, len(pattern))
{sub_re, sub_has_magic} = parse_glob_to_re(cs, state[:options], true)
state
|> merge(%{
re: slice(re, 0, re_class_start) <> "\\[" <> sub_re,
has_magic: has_magic or sub_has_magic
})
end
def handle_open_class(state), do: state
# handle the case where we had a +( thing at the *end*
# of the pattern.
# each pattern list stack adds 3 chars, and we need to go through
# and escape any | chars that were passed through as-is for the regexp.
# Go through and escape them, taking care not to double-escape any
# | chars that were already escaped.
def handle_weird_end(%{failed: failed} = state) when failed, do: state
def handle_weird_end(%{pattern_list_stack: pattern_list_stack, re: re} = state) when length(pattern_list_stack) > 0 do
debug {"handle_weird_end", pattern_list_stack}, state[:options]
[pl | new_pattern_list_stack] = pattern_list_stack
tail = slice(re, pl[:re_start] + 3, len(re))
tail = Regex.replace ~r/((?:\\{2})*)(\\?)\|/, tail, fn (_, a, b) ->
if b == "" do
a <> a <> "\\" <> "|"
else
a <> a <> b <> "|"
end
end
t = case pl[:type] do
"*" -> @star
"?" -> @qmark
_ -> "\\" <> pl[:type]
end
state
|> merge(%{
has_magic: true,
re: slice(re, 0, pl[:re_start]) <> t <> "\\(" <> tail,
pattern_list_stack: new_pattern_list_stack
})
|> handle_weird_end
end
def handle_weird_end(state), do: state
def handle_trailing_things(%{failed: failed} = state) when failed, do: state
def handle_trailing_things(%{escaping: escaping, re: re} = state) when escaping do
debug {"handle_trailing_things", escaping, re}, state[:options]
state
|> clear_state_char()
|> merge(%{
re: re <> "\\\\"
})
end
def handle_trailing_things(state), do: clear_state_char(state)
def handle_dot_start(%{failed: failed} = state) when failed, do: state
def handle_dot_start(%{re: re, has_magic: has_magic, pattern_start: pattern_start} = state) do
debug {"handle_dot_start", re, has_magic, pattern_start}, state[:options]
add_pattern_start = String.first(re) in [".", "[", "("]
new_re = if re != "" and has_magic, do: "(?=.)" <> re, else: re
new_re = if add_pattern_start, do: pattern_start <> new_re, else: new_re
state
|> merge(%{ re: new_re })
end
def finish_parse(%{failed: failed}) when failed, do: false
def finish_parse(%{is_sub: is_sub, re: re, has_magic: has_magic}) when is_sub, do: {re, has_magic}
# skip the regexp for non-magical patterns
# unescape anything in it, though, so that it'll be
# an exact match against a file etc.
def finish_parse(%{has_magic: has_magic, pattern: pattern}) when not has_magic, do: glob_unescape(pattern)
def finish_parse(%{options: options, re: re}) do
flags = if options[:nocase], do: "i", else: ""
case Regex.compile("^#{re}$", flags) do
{:ok, result} ->
# {result, re}
result
_ ->
false
end
end
def glob_unescape(s) do
Regex.replace(~r/\\(.)/, s, fn _, a -> a end)
end
def regex_escape(s) do
Regex.replace(~r/[-[\]{}()*+?.,\\^$|#\s]/, s, "\\\\\\0")
end
def clear_state_char(%{state_char: state_char} = state) when state_char == "", do: state
def clear_state_char(%{state_char: state_char, re: re} = state) do
state
|> merge(case state_char do
"*" ->
%{
re: re <> @star,
has_magic: true,
state_char: ""
}
"?" ->
%{
re: re <> @qmark,
has_magic: true,
state_char: ""
}
_ ->
%{
re: re <> "\\" <> state_char,
state_char: ""
}
end)
end
def move_to_next(%{i: i, pattern: pattern} = state) do
state
|> merge(%{
i: i + 1,
c: at(pattern, i + 1)
})
end
def continue(state) do
state
|> tap(fn %{options: options} = state -> info({"continue", state}, options) end)
|> move_to_next
|> parse
end
end
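The parse_negate/2 step above strips leading ! characters and records a negate flag that the matcher uses to flip the final answer. A small sketch of the observable effect through the public API, with results inferred from the code and default options assumed:
iex> ExMinimatch.match("!*.png", "photo.png")
false
iex> ExMinimatch.match("!*.png", "notes.txt")
true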

View File

@ -0,0 +1,32 @@
defmodule ExMinimatch.Helper do
def debug(obj, options) do
if options[:log] in [:debug], do: IO.inspect(obj)
end
def info(obj, options) do
if options[:log] in [:info, :debug], do: IO.inspect(obj)
end
# preserves the state
def tap(state, sideback) do
sideback.(state)
state
end
def transform(state, callback) do
callback.(state)
end
def len(a) when is_binary(a), do: String.length(a)
def len(a), do: length(a)
def at(a, i) when is_binary(a), do: String.at(a, i)
def at(a, i), do: Enum.at(a, i)
def slice(a, rng) when is_binary(a), do: String.slice(a, rng)
def slice(a, rng), do: Enum.slice(a, rng)
def slice(a, i, l) when is_binary(a), do: String.slice(a, i, l)
def slice(a, i, l), do: Enum.slice(a, i, l)
end

View File

@ -0,0 +1,119 @@
defmodule ExMinimatch.Matcher do
import Map, only: [merge: 2]
import ExMinimatch.Helper
@globstar ExMinimatcher.globstar
@slash_split ExMinimatcher.slash_split
def match_file(file, %ExMinimatcher{pattern: regex_parts_set, negate: negate, options: options}) do
info {"match_file", file, regex_parts_set, negate, options}, options
split_file_parts = Regex.split(@slash_split, file)
basename = Path.basename(file)
found = Enum.any? regex_parts_set, fn regex_parts ->
file_parts = if options[:match_base] and length(regex_parts) == 1, do: [basename], else: split_file_parts
match_regex_parts(regex_parts, file_parts, options)
end
if found, do: not negate, else: negate
end
def match_regex_parts(regex_parts, file_parts, options) do
debug {"match_regex_parts", file_parts, regex_parts, options}, options
%{
file_parts: file_parts,
regex_parts: regex_parts,
fi: 0,
ri: 0,
fl: len(file_parts),
rl: len(regex_parts),
f: at(file_parts, 0),
r: at(regex_parts, 0),
options: options
}
|> match_regex_parts
end
# ran out of regex and file parts at the same time, which is a match
def match_regex_parts(%{fi: fi, ri: ri, fl: fl, rl: rl}) when fi == fl and ri == rl, do: true
# ran out of file parts but still regex left, no match
def match_regex_parts(%{fi: fi, fl: fl}) when fi == fl, do: false
# ran out of pattern but still file parts left
def match_regex_parts(%{fi: fi, ri: ri, fl: fl, rl: rl} = state) when ri == rl do
# is match only if the file part is the last one and it is ""
fi == fl - 1 && at(state[:file_parts], fi) == ""
end
# current regex is a **, but it's also the last regex, and since ** matches
# everything, true unless dots are found (except if dot: true is requested)
def match_regex_parts(%{r: r, ri: ri, rl: rl, fi: fi, fl: fl, file_parts: file_parts, options: options}) when r == @globstar and ri + 1 == rl do
dot_found = Enum.find fi..(fl-1), fn i ->
file_part_i = at(file_parts, i)
file_part_i in [".", ".."] or (not options[:dot] and String.first(file_part_i) == ".")
end
dot_found == nil
end
# current regex is a **, and not the last regex, then try swallow file parts
# match on the next pattern
def match_regex_parts(%{r: r, fi: fi} = state) when r == @globstar do
swallow_and_match_next_regex_part(state, fi)
end
def match_regex_parts(%{f: f, r: r, fi: fi, ri: ri, file_parts: file_parts, regex_parts: regex_parts, options: options} = state) do
hit = if is_binary(r) do
if options[:nocase], do: String.downcase(f) == String.downcase(r), else: f == r
else
Regex.match?(r, f)
end
if not hit do
false
else
state
|> merge(%{
fi: fi + 1,
ri: ri + 1,
f: at(file_parts, fi + 1),
r: at(regex_parts, ri + 1)
})
|> match_regex_parts
end
end
def swallow_and_match_next_regex_part(%{fl: fl} = state, fr) when fr < fl do
%{
ri: ri,
rl: rl,
file_parts: file_parts,
regex_parts: regex_parts,
options: %{
dot: dot
} = options
} = state
rest_of_regex_parts_from_next = slice(regex_parts, ri + 1, rl)
rest_of_file_parts = slice(file_parts, fr, fl)
swallowee = at(file_parts, fr)
cond do
match_regex_parts(rest_of_regex_parts_from_next, rest_of_file_parts, options) ->
true
swallowee in [".", ".."] or (String.starts_with?(swallowee, ".") and not dot) ->
swallow_and_match_next_regex_part(state, fl) # recurse to terminate
true ->
swallow_and_match_next_regex_part(state, fr + 1) # recurse to next file part
end
end
def swallow_and_match_next_regex_part(%{fl: fl}, fr) when fl == fr, do: false
end
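match_file/2 above implements the match_base option from the moduledoc: a slash-less pattern is matched against the basename when the path contains slashes. The moduledoc's own example written out as a call, with the result inferred from the code:
iex> ExMinimatch.match("a?b", "/xyz/123/acb", %{match_base: true})
true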

44
vendor/ex_minimatch/ex_minimatcher.ex vendored Normal file
View File

@ -0,0 +1,44 @@
defmodule ExMinimatcher do
defstruct glob: "",
pattern: [],
negate: false,
options: %{
dot: false,
nocase: false,
match_base: false,
nonegate: false,
noext: false,
noglobstar: false,
nocomment: false,
nobrace: false,
log: nil
}
@qmark "[^/]"
def qmark, do: @qmark
@globstar :globstar
def globstar, do: @globstar
# * => any number of characters
@star "#{@qmark}*?"
def star, do: @star
# ** when dots are allowed. Anything goes, except .. and .
# not (^ or / followed by one or two dots followed by $ or /),
# followed by anything, any number of times.
@two_star_dot "(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?"
def two_star_dot, do: @two_star_dot
# not a ^ or / followed by a dot,
# followed by anything, any number of times.
@two_star_no_dot "(?:(?!(?:\\\/|^)\\.).)*?"
def two_star_no_dot, do: @two_star_no_dot
# characters that need to be escaped in RegExp.
@re_specials [ "(", ")", ".", "*", "{", "}", "+", "?", "[", "]", "^", "$", "\\", "!" ]
def re_specials, do: @re_specials
@slash_split ~r/\/+/
def slash_split, do: @slash_split
end