defmodule Finitomata.Mermaid do
@moduledoc false
import NimbleParsec
alias Finitomata.Transition
use Boundary, deps: [Finitomata], exports: []
@alphanumeric [?a..?z, ?A..?Z, ?0..?9, ?_]
blankspace = ignore(ascii_string([?\s], min: 1))
semicolon = ignore(string(";"))
transition_op = string("-->")
identifier =
ascii_char([?a..?z])
|> optional(ascii_string(@alphanumeric, min: 1))
|> reduce({IO, :iodata_to_binary, []})
state = identifier
event = ignore(string("|")) |> concat(identifier) |> ignore(string("|"))
mermaid_line =
optional(blankspace)
|> concat(state)
|> ignore(blankspace)
|> ignore(transition_op)
|> ignore(blankspace)
|> concat(event)
|> ignore(blankspace)
|> concat(state)
|> optional(blankspace)
|> optional(semicolon)
|> ignore(choice([times(string("\n"), min: 1), eos()]))
|> tag(:transition)
malformed =
optional(utf8_string([not: ?\n], min: 1))
|> string("\n")
|> pre_traverse(:abort)
@type parse_error ::
{:error, String.t(), binary(), map(), {pos_integer(), pos_integer()}, pos_integer()}
@doc ~S"""
iex> {:ok, result, _, _, _, _} = Finitomata.Mermaid.transition("state1 --> |succeeded| state2")
iex> result
[transition: ["state1", "succeeded", "state2"]]
iex> {:error, message, _, _, _, _} = Finitomata.Mermaid.transition("state1 --> |succeeded| State2")
iex> String.slice(message, 0..13)
"expected ASCII"
"""
defparsec(:transition, mermaid_line)
@doc ~S"""
iex> {:ok, result, _, _, _, _} = Finitomata.Mermaid.fsm("s1 --> |ok| s2;\ns2 --> |ko| s3")
iex> result
[transition: ["s1", "ok", "s2"], transition: ["s2", "ko", "s3"]]
"""
defparsec(:fsm, times(choice([mermaid_line, malformed]), min: 1))
@doc ~S"""
iex> {:ok, result, _, _, _, _} = Finitomata.Mermaid.fsm("s1 --> |ok| s2\ns2 --> |ko| s3")
...> Finitomata.Mermaid.validate(result)
{:ok,
[
%Finitomata.Transition{event: :__start__, from: :*, to: :s1},
%Finitomata.Transition{event: :ok, from: :s1, to: :s2},
%Finitomata.Transition{event: :ko, from: :s2, to: :s3},
%Finitomata.Transition{event: :__end__, from: :s3, to: :*}
]}
"""
@spec validate([{:transition, [binary()]}]) ::
{:ok, [Transition.t()]} | {:error, Finitomata.validation_error()}
def validate(parsed) do
parsed =
Enum.map(parsed, fn {:transition, [from, event, to]} -> {:transition, [from, to, event]} end)
from_states = parsed |> Enum.map(fn {:transition, [from, _, _]} -> from end) |> Enum.uniq()
to_states = parsed |> Enum.map(fn {:transition, [_, to, _]} -> to end) |> Enum.uniq()
start_states =
Enum.map(from_states -- to_states, fn from -> {:transition, ["[*]", from, "__start__"]} end)
final_states =
Enum.map(to_states -- from_states, fn to -> {:transition, [to, "[*]", "__end__"]} end)
amended = start_states ++ parsed ++ final_states
Finitomata.validate(amended)
end
@doc ~S"""
iex> Finitomata.Mermaid.parse("s1 --> |ok| s2\ns2 --> |ko| s3")
{:ok,
[
%Finitomata.Transition{event: :__start__, from: :*, to: :s1},
%Finitomata.Transition{event: :ok, from: :s1, to: :s2},
%Finitomata.Transition{event: :ko, from: :s2, to: :s3},
%Finitomata.Transition{event: :__end__, from: :s3, to: :*}
]}
"""
@spec parse(binary()) ::
{:ok, [Transition.t()]} | {:error, Finitomata.validation_error()} | parse_error()
def parse(input) do
case fsm(input) do
{:ok, result, _, _, _, _} ->
validate(result)
{:error, "[line: " <> _ = msg, _rest, context, _, _} ->
[numbers, msg] = String.split(msg, "|||")
{numbers, []} = Code.eval_string(numbers)
{:error, msg, numbers[:rest], context, {numbers[:line], numbers[:column]},
numbers[:offset]}
error ->
error
end
end
@spec lint(binary()) :: binary()
def lint(input) do
input = input |> String.split("\n", trim: true) |> Enum.map_join("\n", &(" " <> &1))
"graph TD\n" <> input
end
@spec abort(
String.t(),
[String.t()],
map(),
{non_neg_integer, non_neg_integer},
non_neg_integer
) :: {:error, binary()}
defp abort(rest, content, _context, {line, column}, offset) do
rest = content |> Enum.reverse() |> Enum.join() |> Kernel.<>(rest)
meta = inspect(line: line, column: column, offset: offset, rest: rest)
{:error, meta <> "|||malformed FSM transition, expected `from --> |event| to`"}
end
end

# Source file: lib/finitomata/parsers/mermaid.ex
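# A minimal usage sketch of the parser above (input strings are illustrative;
# the result shape follows the doctests in this module):
#
#   {:ok, transitions} = Finitomata.Mermaid.parse("s1 --> |ok| s2\ns2 --> |ko| s3")
#   # transitions is a list of %Finitomata.Transition{} structs, including the
#   # synthetic :__start__ / :__end__ transitions added by validate/1.
#
#   Finitomata.Mermaid.lint("s1 --> |ok| s2")
#   # prepends "graph TD\n" and indents each line, yielding a complete Mermaid graph.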
defmodule Membrane.RTP.VP8.Frame do
@moduledoc """
Module responsible for accumulating data from RTP packets into VP8 frames.
Loosely implements the algorithm described here: https://tools.ietf.org/html/rfc7741#section-4.5
"""
alias Membrane.Buffer
alias Membrane.RTP.VP8.PayloadDescriptor
alias Membrane.RTP.VP8.Depayloader
@type t :: %__MODULE__{
fragments: [binary()],
last_seq_num: nil | Depayloader.sequence_number(),
last_timestamp: nil | non_neg_integer()
}
defstruct [:last_seq_num, :last_timestamp, fragments: []]
defguardp is_next(last_seq_num, next_seq_num) when rem(last_seq_num + 1, 65_536) == next_seq_num
defguardp equal_timestamp(last_timestamp, next_timestamp) when last_timestamp == next_timestamp
@spec parse(Buffer.t(), t()) ::
{:ok, binary(), t()}
| {:ok, :incomplete, t()}
| {:error,
:packet_malformed
| :invalid_first_packet
| :not_rtp_buffer
| :missing_packet
| :timestamps_not_equal}
def parse(rtp_buffer, acc) do
with %Buffer{
payload: payload,
metadata: %{rtp: %{timestamp: timestamp, sequence_number: sequence_number}}
} <- rtp_buffer,
{:ok, {payload_descriptor, payload}} <-
PayloadDescriptor.parse_payload_descriptor(payload) do
do_parse(payload_descriptor, payload, timestamp, sequence_number, acc)
else
{:error, reason} -> {:error, reason}
_not_rtp_buffer -> {:error, :not_rtp_buffer}
end
end
@spec flush(__MODULE__.t()) :: {binary(), __MODULE__.t()}
def flush(acc) do
accumulated_frame = acc.fragments |> Enum.reverse() |> Enum.join()
{accumulated_frame, %__MODULE__{}}
end
@spec do_parse(
PayloadDescriptor.t(),
binary(),
non_neg_integer(),
Depayloader.sequence_number(),
t()
) ::
{:ok, binary(), t()}
| {:ok, :incomplete, t()}
| {:error, :invalid_first_packet | :missing_packet | :timestamps_not_equal}
defp do_parse(payload_descriptor, payload, timestamp, sequence_number, acc)
# when s bit is 1 and partition_index is 0 it means that it is first packet of new frame
defp do_parse(
%PayloadDescriptor{s: 1, partition_index: 0},
payload,
timestamp,
sequence_number,
%__MODULE__{fragments: []} = acc
) do
{:ok, :incomplete,
%{acc | last_seq_num: sequence_number, last_timestamp: timestamp, fragments: [payload]}}
end
defp do_parse(
%PayloadDescriptor{s: 1, partition_index: 0},
payload,
timestamp,
sequence_number,
acc
) do
{frame, acc} = flush(acc)
{:ok, frame,
%{acc | last_seq_num: sequence_number, last_timestamp: timestamp, fragments: [payload]}}
end
# when payload descriptor indicates that it is not a first packet but accumulator is empty
# it means that first packet is invalid
defp do_parse(
_payload_descriptor,
_payload,
_timestamp,
_sequence_number,
%__MODULE__{fragments: []}
) do
{:error, :invalid_first_packet}
end
# payload is fragment of currently accumulated frame
defp do_parse(
_payload_descriptor,
payload,
timestamp,
sequence_number,
%__MODULE__{last_seq_num: last_seq_num, last_timestamp: last_timestamp} = acc
)
when is_next(last_seq_num, sequence_number) and equal_timestamp(last_timestamp, timestamp) do
{:ok, :incomplete,
%{acc | last_seq_num: sequence_number, fragments: [payload | acc.fragments]}}
end
# either timestamps are not equal or packet is missing
defp do_parse(_payload_descriptor, _payload, timestamp, _sequence_number, %__MODULE__{
last_timestamp: last_timestamp
})
when not equal_timestamp(last_timestamp, timestamp),
do: {:error, :timestamps_not_equal}
defp do_parse(_payload_descriptor, _payload, _timestamp, sequence_number, %__MODULE__{
last_seq_num: last_seq_num
})
when not is_next(last_seq_num, sequence_number),
do: {:error, :missing_packet}
end

# Source file: lib/frame.ex
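# A hedged sketch of driving the accumulator above: feed depayloaded RTP buffers
# in arrival order, and a completed frame is emitted when the next frame's first
# packet arrives (the buffers are illustrative %Membrane.Buffer{} structs whose
# payloads must start with a valid VP8 payload descriptor):
#
#   alias Membrane.RTP.VP8.Frame
#
#   acc = %Frame{}
#   {:ok, :incomplete, acc} = Frame.parse(first_buffer, acc)
#   {:ok, :incomplete, acc} = Frame.parse(middle_buffer, acc)
#   # the first packet of the following frame flushes the accumulated one:
#   {:ok, complete_frame, acc} = Frame.parse(next_frame_first_buffer, acc)
#   # at end of stream, flush/1 returns whatever is accumulated:
#   {last_frame, %Frame{}} = Frame.flush(acc)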
defmodule Cased.Event do
@moduledoc """
Data modeling a Cased audit event.
"""
import Norm
defstruct [:audit_trail, :id, :url, :data, :published_at, :processed_at]
@type t :: %__MODULE__{
audit_trail: Cased.AuditTrail.t(),
id: String.t(),
url: String.t(),
published_at: DateTime.t(),
processed_at: DateTime.t(),
data: %{String.t() => any()}
}
@default_audit_trail :default
@type get_opts :: [get_opt()]
@type get_opt ::
{:audit_trail, String.t()}
| {:key, String.t()}
@default_get_opts [
audit_trail: @default_audit_trail
]
@spec get(
client :: Cased.Client.t(),
event_id :: String.t(),
opts :: get_opts()
) :: Cased.Request.t() | no_return()
@doc """
Build a request to retrieve an event.
## Options
All optional:
- `:audit_trail` — The audit trail, used to ensure the event comes from the
given audit trail.
- `:key` — A Cased policy key allowing access to events.
If `:key` is omitted:
- If an `:audit_trail` is provided, the key configured on the client for that
audit trail will be used.
- If an `:audit_trail` is **not** provided, the key configured on the client
for the `:default` audit trail will be used.
If `:audit_trail` is omitted, the `#{inspect(Keyword.fetch!(@default_get_opts, :audit_trail))}` audit trail is assumed.
"""
def get(client, event_id, opts \\ []) do
opts =
@default_get_opts
|> Keyword.merge(opts)
with {:ok, options} <- validate_get_opts(opts, client) do
audit_trail = Map.get(options, :audit_trail)
key = Map.get_lazy(options, :key, fn -> Map.fetch!(client.keys, audit_trail) end)
%Cased.Request{
client: client,
id: :audit_trail_event,
method: :get,
path: "/audit-trails/#{audit_trail}/events/#{event_id}",
key: key
}
else
{:error, details} ->
raise %Cased.RequestError{details: details}
end
end
@spec validate_get_opts(opts :: keyword(), client :: Cased.Client.t()) ::
{:ok, map()} | {:error, list()}
defp validate_get_opts(opts, client) do
conform(Map.new(opts), get_opts_schema(client))
end
# Option schema for `get/3`.
@spec get_opts_schema(client :: Cased.Client.t()) :: struct()
defp get_opts_schema(client) do
schema(%{
audit_trail: spec(is_atom() and (&Map.has_key?(client.keys, &1))),
key: spec(is_binary())
})
end
@type query_opts :: [query_opt()]
@type query_opt ::
{:phrase, String.t()}
| {:key, String.t()}
| {:variables, keyword()}
| {:per_page, pos_integer()}
| {:page, pos_integer()}
@default_query_opts [
page: 1,
per_page: 25
]
@doc """
Build a request to retrieve events from an audit trail.
## Options
- `:phrase` — The search phrase.
- `:audit_trail` — The audit trail.
- `:key` — A Cased policy key allowing access to events.
- `:variables` — Cased Policy variables.
- `:per_page` — Number of results per page (default: `#{
inspect(Keyword.fetch!(@default_query_opts, :per_page))
}`).
- `:page` — Requested page (default: `#{inspect(Keyword.fetch!(@default_query_opts, :page))}`).
If `:key` is omitted:
- If an `:audit_trail` is provided, the key configured on the client for that
audit trail will be used.
- If an `:audit_trail` is **not** provided, the key configured on the client
for the `:default` audit trail will be used.
"""
@spec query(client :: Cased.Client.t(), opts :: query_opts()) ::
Cased.Request.t() | no_return()
def query(client, opts \\ []) do
opts =
@default_query_opts
|> Keyword.merge(opts)
with {:ok, options} <- validate_query_opts(opts, client) do
{options, query} =
options
|> Map.split([:audit_trail, :key])
{id, path, key} =
if Map.get(options, :audit_trail) do
{:audit_trail_events, "/audit-trails/#{options.audit_trail}/events",
Map.get_lazy(options, :key, fn -> Map.fetch!(client.keys, options.audit_trail) end)}
else
{:events, "/events", Map.get(options, :key, client.keys.default)}
end
%Cased.Request{
client: client,
id: id,
method: :get,
path: path,
key: key,
query: query
}
else
{:error, details} ->
raise %Cased.RequestError{details: details}
end
end
@spec validate_query_opts(opts :: keyword(), client :: Cased.Client.t()) ::
{:ok, map()} | {:error, list()}
defp validate_query_opts(opts, client) do
conform(Map.new(opts), query_opts_schema(client))
end
# Option schema for `query/2`.
@spec query_opts_schema(client :: Cased.Client.t()) :: struct()
defp query_opts_schema(client) do
schema(%{
phrase: spec(is_binary()),
variables: spec(&Keyword.keyword?/1),
per_page: spec(&Enum.member?(1..100, &1)),
page: spec(is_integer() and (&(&1 > 0))),
audit_trail: spec(is_atom() and (&Map.has_key?(client.keys, &1)))
})
end
@doc false
@spec from_json!(map()) :: t()
def from_json!(event) do
{:ok, published_at, _} = DateTime.from_iso8601(event["published_at"])
{:ok, processed_at, _} = DateTime.from_iso8601(event["processed_at"])
%__MODULE__{
id: event["id"],
audit_trail: Cased.AuditTrail.from_json(event["audit_trail"]),
url: event["url"],
published_at: published_at,
processed_at: processed_at,
data: event["event"]
}
end
end

# Source file: lib/cased/event.ex
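# A hedged usage sketch of the request builders above (client construction and
# ids are illustrative; the returned %Cased.Request{} is executed elsewhere):
#
#   request = Cased.Event.query(client, audit_trail: :default, per_page: 10)
#   request = Cased.Event.get(client, "event_123", audit_trail: :default)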
defmodule Earmark do
if Version.compare(System.version, "1.12.0") == :lt do
IO.puts(:stderr, "DEPRECATION WARNING: versions < 1.12.0 of Elixir are not tested anymore and will not be supported in Earmark v1.5")
end
@type ast_meta :: map()
@type ast_tag :: binary()
@type ast_attribute_name :: binary()
@type ast_attribute_value :: binary()
@type ast_attribute :: {ast_attribute_name(), ast_attribute_value()}
@type ast_attributes :: list(ast_attribute())
@type ast_tuple :: {ast_tag(), ast_attributes(), ast(), ast_meta()}
@type ast_node :: binary() | ast_tuple()
@type ast :: list(ast_node())
@moduledoc """
## Earmark
### Abstract Syntax Tree and Rendering
The AST generation has now been moved out to [`EarmarkParser`](https://github.com/robertdober/earmark_parser)
which is installed as a dependency.
This brings some changes to this documentation and also deprecates the usage of `Earmark.as_ast`
Earmark takes care of rendering the AST to HTML, exposing some AST Transformation Tools and providing a CLI as escript.
Therefore you will not find a detailed description of the supported Markdown here anymore as this is done in
[here](https://hexdocs.pm/earmark_parser/EarmarkParser.html)
#### Earmark.as_ast
WARNING: This is just a proxy towards `EarmarkParser.as_ast` and is deprecated, it will be removed in version 1.5!
Replace your calls to `Earmark.as_ast` with `EarmarkParse.as_ast` as soon as possible.
**N.B.** If all you use is `Earmark.as_ast` consider _only_ using `EarmarkParser`.
Also please refer yourself to the documentation of [`EarmarkParser`](https://hexdocs.pm/earmark_parser/EarmarkParser.html)
The function is described below and the other two API functions `as_html` and `as_html!` are now based upon
the structure of the result of `as_ast`.
{:ok, ast, []} = EarmarkParser.as_ast(markdown)
{:ok, ast, deprecation_messages} = EarmarkParser.as_ast(markdown)
{:error, ast, error_messages} = EarmarkParser.as_ast(markdown)
#### Earmark.as_html
{:ok, html_doc, []} = Earmark.as_html(markdown)
{:ok, html_doc, deprecation_messages} = Earmark.as_html(markdown)
{:error, html_doc, error_messages} = Earmark.as_html(markdown)
#### Earmark.as_html!
html_doc = Earmark.as_html!(markdown, options)
Formats the error_messages returned by `as_html` and adds the filename to each.
Then prints them to stderr and just returns the html_doc
#### Options
Options can be passed to `as_html/2` or `as_html!/2` according to the documentation.
A keyword list with legal options (c.f. `Earmark.Options`) or an `Earmark.Options` struct are accepted.
{status, html_doc, errors} = Earmark.as_html(markdown, options)
html_doc = Earmark.as_html!(markdown, options)
{status, ast, errors} = EarmarkParser.as_ast(markdown, options)
### Rendering
All options passed through to `EarmarkParser.as_ast` are defined therein, however some options concern only
the rendering of the returned AST.
These are:
* `compact_output:` defaults to `false`
Normally `Earmark` aims to produce _Human Readable_ output.
This will give results like these:
iex(1)> markdown = "# Hello\\nWorld"
...(1)> Earmark.as_html!(markdown, compact_output: false)
"<h1>\\nHello</h1>\\n<p>\\nWorld</p>\\n"
But sometimes whitespace is not desired:
iex(2)> markdown = "# Hello\\nWorld"
...(2)> Earmark.as_html!(markdown, compact_output: true)
"<h1>Hello</h1><p>World</p>"
Be cautious though when using this option, lines will become very long.
#### `escape:` defaulting to `true`
If set HTML will be properly escaped
iex(3)> markdown = "Hello<br />World"
...(3)> Earmark.as_html!(markdown)
"<p>\\nHello<br />World</p>\\n"
However disabling `escape:` gives you maximum control of the created document, which in some
cases (e.g. inside tables) might even be necessary
iex(4)> markdown = "Hello<br />World"
...(4)> Earmark.as_html!(markdown, escape: false)
"<p>\\nHello<br />World</p>\\n"
#### `inner_html:` defaulting to `false`
This is especially useful inside templates, when a block element will disturb the layout as
in this case
```html
<span><%= Earmark.as_html!(....)%></span>
<span><%= Earmark.as_html!(....)%></span>
```
By means of the `inner_html` option the disturbing paragraph can be removed from `as_html!`'s
output
iex(5)> markdown = "Hello<br />World"
...(5)> Earmark.as_html!(markdown, escape: false, inner_html: true)
"Hello<br />World\\n"
**N.B.** that this applies only to top level paragraphs, as can be seen here
iex(6)> markdown = "- Item\\n\\nPara"
...(6)> Earmark.as_html!(markdown, inner_html: true)
"<ul>\\n <li>\\nItem </li>\\n</ul>\\nPara\\n"
* `postprocessor:` defaults to nil
Before rendering, the AST is transformed by a postprocessor.
For details see the description of `Earmark.Transform.map_ast` below, which accepts the same postprocessor; as
a matter of fact, specifying `postprocessor: fun` is conceptually the same as
```elixir
markdown
|> EarmarkParser.as_ast
|> Earmark.Transform.map_ast(fun)
|> Earmark.Transform.transform
```
with all the necessary bookkeeping for options and messages
* `renderer:` defaults to `Earmark.HtmlRenderer`
The module used to render the final document.
#### `smartypants:` defaulting to `true`
If set the following replacements will be made during rendering of inline text
"---" → "—"
"--" → "–"
"' → "’"
?" → "”"
"..." → "…"
### Command line
```sh
$ mix escript.build
$ ./earmark file.md
```
Some options defined in the `Earmark.Options` struct can be specified as command line switches.
Use
```sh
$ ./earmark --help
```
to find out more, but here is a short example
```sh
$ ./earmark --smartypants false --code-class-prefix "a- b-" file.md
```
will call
```sh
Earmark.as_html!( ..., %Earmark.Options{smartypants: false, code_class_prefix: "a- b-"})
```
### Timeouts
By default, that is if the `timeout` option is not set, Earmark uses parallel mapping as implemented in `Earmark.pmap/2`,
which uses `Task.await` with its default timeout of 5000ms.
In rare cases that might not be enough.
By indicating a longer `timeout` option in milliseconds Earmark will use parallel mapping as implemented in `Earmark.pmap/3`,
which will pass `timeout` to `Task.await`.
In both cases one can override the mapper function with either the `mapper` option (used if and only if `timeout` is nil) or the
`mapper_with_timeout` function (used otherwise).
For the escript only the `timeout` command line argument can be used.
### Security
Please be aware that Markdown is not a secure format. Earmark produces
HTML from Markdown and passes embedded HTML through. It is your job to sanitize
and/or filter the output of `Earmark.as_html` if you cannot trust the input
and are going to serve the produced HTML on the Web.
"""
alias Earmark.{Internal, Options, Transform}
alias Earmark.EarmarkParserProxy, as: Proxy
defdelegate as_ast!(markdown, options \\ []), to: Internal
defdelegate as_html(lines, options \\ []), to: Internal
defdelegate as_html!(lines, options \\ []), to: Internal
@doc """
DEPRECATED call `EarmarkParser.as_ast` instead
"""
def as_ast(lines, options \\ %Options{}) do
{status, ast, messages} = _as_ast(lines, options)
message =
{:warning, 0,
"DEPRECATION: Earmark.as_ast will be removed in version 1.5, please use EarmarkParser.as_ast, which is of the same type"}
messages1 = [message | messages]
{status, ast, messages1}
end
@doc """
A convenience method that *always* returns an HTML representation of the markdown document passed in.
In case of the presence of any error messages they are printed to stderr.
Otherwise it behaves exactly as `as_html`.
"""
defdelegate from_file!(filename, options \\ []), to: Internal
@default_timeout_in_ms 5000
defdelegate pmap(collection, func, timeout \\ @default_timeout_in_ms), to: Internal
defdelegate transform(ast, options \\ []), to: Transform
@doc """
Accesses current hex version of the `Earmark` application. Convenience for
`iex` usage.
"""
def version() do
with {:ok, version} = :application.get_key(:earmark, :vsn),
do: to_string(version)
end
defp _as_ast(lines, options)
defp _as_ast(lines, %Options{} = options) do
Proxy.as_ast(lines, options |> Map.delete(:__struct__) |> Enum.into([]))
end
defp _as_ast(lines, options) do
Proxy.as_ast(lines, options)
end
end
# SPDX-License-Identifier: Apache-2.0

# Source file: lib/earmark.ex
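# A hedged sketch of the `postprocessor:` option described in the moduledoc
# above. The node shape is the 4-tuple AST documented for EarmarkParser; the
# transformer function below is illustrative:
#
#   add_class = fn
#     {"p", atts, children, meta} -> {"p", [{"class", "prose"} | atts], children, meta}
#     node -> node
#   end
#
#   Earmark.as_html!("Hello *World*", postprocessor: add_class)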
defmodule TextDelta.Iterator do
@moduledoc """
Iterator iterates over two sets of operations at the same time, ensuring next
elements in the resulting stream are of equal length.
"""
alias TextDelta.Operation
@typedoc """
Individual set of operations.
"""
@type set :: [Operation.t()]
@typedoc """
Two sets of operations to iterate.
"""
@type sets :: {set, set}
@typedoc """
A type which is not to be sliced when iterating. Can be `:insert`, `:delete`
or nil
"""
@type skip_type :: :insert | :delete | nil
@typedoc """
A tuple representing the new head and tail operations of the two operation
sets being iterated over.
"""
@type cycle :: {set_split, set_split}
@typedoc """
A set's next scanned full or partial operation, and its resulting tail set.
"""
@type set_split :: {Operation.t() | nil, set}
@doc """
Generates next cycle by iterating over given sets of operations.
"""
@spec next(sets, skip_type) :: cycle
def next(sets, skip_type \\ nil)
def next({[], []}, _) do
{{nil, []}, {nil, []}}
end
def next({[], [head_b | tail_b]}, _) do
{{nil, []}, {head_b, tail_b}}
end
def next({[head_a | tail_a], []}, _) do
{{head_a, tail_a}, {nil, []}}
end
def next({[head_a | _], [head_b | _]} = sets, skip_type) do
skip = Operation.type(head_a) == skip_type
len_a = Operation.length(head_a)
len_b = Operation.length(head_b)
cond do
len_a > len_b -> do_next(sets, :gt, len_b, skip)
len_a < len_b -> do_next(sets, :lt, len_a, skip)
true -> do_next(sets, :eq, 0, skip)
end
end
defp do_next({[head_a | tail_a], [head_b | tail_b]}, :gt, len, false) do
{head_a, remainder_a} = Operation.slice(head_a, len)
{{head_a, [remainder_a | tail_a]}, {head_b, tail_b}}
end
defp do_next({[head_a | tail_a], [head_b | tail_b]}, :lt, len, _) do
{head_b, remainder_b} = Operation.slice(head_b, len)
{{head_a, tail_a}, {head_b, [remainder_b | tail_b]}}
end
defp do_next({[head_a | tail_a], [head_b | tail_b]}, _, _, _) do
{{head_a, tail_a}, {head_b, tail_b}}
end
end

# Source file: lib/text_delta/iterator.ex
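# A hedged example of one iteration cycle (the operation shapes assume the
# usual TextDelta maps such as %{insert: "..."} and %{retain: n}):
#
#   a = [%{insert: "abcd"}]
#   b = [%{retain: 2}, %{insert: "x"}]
#   TextDelta.Iterator.next({a, b})
#   # the longer head is sliced so both next elements have equal length:
#   #=> {{%{insert: "ab"}, [%{insert: "cd"}]}, {%{retain: 2}, [%{insert: "x"}]}}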
defmodule AttributeRepository.Resource do
@moduledoc """
Convenience macro to create resource module that makes it fancier to use an
attribute repository
## Usage
Create a module that uses this module:
```elixir
defmodule Asteroid.Subject do
use AttributeRepository.Resource, otp_app: :asteroid
end
```
It expects one option: `otp_app` used to retrieve configuration.
## Example
```elixir
iex(13)> alias Asteroid.Subject
Asteroid.Subject
iex> {:ok, s} = Subject.load("uid=john,ou=People,dc=example,dc=org")
{:ok,
%Asteroid.Subject{
attrs: %{
"cn" => ["<NAME>"],
"displayName" => "<NAME>",
"givenName" => ["John"],
"mail" => ["<EMAIL>"],
"manager" => ["uid=toto,ou=People,dc=example,dc=org"],
"sn" => ["Doe"]
},
id: "uid=john,ou=People,dc=example,dc=org",
modifications: [],
newly_created: false
}}
iex> s = s
...> |> Subject.add("initials", "JD")
...> |> Subject.add("mail", "<EMAIL>")
...> |> Subject.remove("manager")
%Asteroid.Subject{
attrs: %{
"cn" => ["<NAME>"],
"displayName" => "<NAME>",
"givenName" => ["John"],
"initials" => "JD",
"mail" => ["<EMAIL>", "<EMAIL>"],
"sn" => ["Doe"]
},
id: "uid=john,ou=People,dc=example,dc=org",
modifications: [
{:add, "initials", "JD"},
{:add, "mail", "<EMAIL>"},
{:delete, "manager"}
],
newly_created: false
}
iex> Subject.store(s)
:ok
```
## Configuration
Configuration is retrieved by the `config/1` function which requests the configuration by
calling:
```elixir
Application.get_env(otp_app, :attribute_repositories)[instance]
```
where :
- `otp_app` is the option passed to the module
- `instance` is the lowercased atom of the last item of the `Module.split/1` call of the
module. For example, the `instance` for `MyApp.Resource.User` is `:user`
The `config/1` function is overridable.
## Generating ids
The `id()` is generated by `gen_new_id/1` using the following implementation:
```elixir
def gen_new_id(_gen_new_opts) do
16
|> :crypto.strong_rand_bytes()
|> Base.url_encode64(padding: false)
end
```
16 bytes of randomness is the same as for UUIDv4.
This function is overridable in case one need different identifiers:
- prefixing / suffixing
- generating LDAP DNs
- etc.
Example:
```elixir
defmodule Asteroid.Subject do
use AttributeRepository.Resource, otp_app: :asteroid
def gen_new_id(opts) do
"sub-" <> super(opts)
end
end
```
will generate:
```elixir
iex> Asteroid.Subject.gen_new()
%Asteroid.Subject{
attrs: %{},
id: "sub-PhXdYtqDHuBxNcIfog7J1w",
modifications: [],
newly_created: true
}
```
"""
defmodule NotUniqueAttributeError do
@moduledoc """
Error returned when a search on a attribute that should be unique returns more than one
result, i.e. the attribute is actually not unique
"""
defexception message: "Attribute is not unique"
end
@type config :: Keyword.t()
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
additional_doc = """
## Functions
There are two types of functions:
- those interacting with the backend attribute repository:
- `load/2`
- `load_from_unique_attribute/3`
- `store/2`
- `delete/2`
- those for manipulating the #{__MODULE__}.t() resource:
- `gen_new/1`
- `gen_new_id/1`
- `add/3`
- `remove/3`
"""
# FIXME: does not work, additional doc not appended
{line_number, existing_moduledoc} =
Module.delete_attribute(__MODULE__, :moduledoc) || {0, ""}
Module.put_attribute(
__MODULE__,
:moduledoc,
{line_number, existing_moduledoc <> additional_doc})
@enforce_keys [:id]
defstruct id: "", attrs: %{}, modifications: [], newly_created: false
@type id :: String.t()
@type t :: %__MODULE__{
id: id(),
attrs: AttributeRepository.resource(),
modifications: [AttributeRepository.Write.modify_op()],
newly_created: boolean()
}
@doc """
Add a value to a `#{__MODULE__}.t()`
## Rules
- If the attribute already exists:
- if the attribute is a list: add the new value to the list
- otherwise set or replace the value of the target attribute
Note that if you want to make a new attribute multi-valued, you shall add a list as
its first value (be it a list with only one element).
"""
@spec add(t(),
AttributeRepository.attribute_name(),
AttributeRepository.attribute_data_type()) :: t()
def add(%__MODULE__{attrs: attrs} = resource, attribute_name, attribute_value) do
new_value =
case attrs[attribute_name] do
nil ->
attribute_value
[_ | _] = list ->
[attribute_value] ++ list
_ ->
attribute_value
end
%{resource |
attrs: Map.put(resource.attrs, attribute_name, new_value),
modifications: resource.modifications ++ [{:add, attribute_name, attribute_value}]
}
end
@doc """
Removes a value from a `#{__MODULE__}.t()`
This functions behaves differently depending on its 3rd argument:
- `:no_value`: the attribute is simply removed
- Otherwise:
- if the attribute is a list, removes all the occurrences of the attribute value from
the list
- otherwise doesn't do anything and returns the `#{__MODULE__}.t()` as-is
"""
@spec remove(t(),
AttributeRepository.attribute_name(),
AttributeRepository.attribute_data_type() | :no_value) :: t()
def remove(resource, attribute_name, value \\ :no_value)
def remove(resource, attribute_name, :no_value) do
%{resource |
attrs: Map.delete(resource.attrs, attribute_name),
modifications: resource.modifications ++ [{:delete, attribute_name}]
}
end
def remove(%__MODULE__{attrs: attrs} = resource, attribute_name, attribute_value) do
case attrs[attribute_name] do
[_ | _] = list ->
new_value = Enum.filter(list, fn elt -> elt != attribute_value end)
%{resource |
attrs: Map.put(attrs, attribute_name, new_value),
modifications: resource.modifications ++ [{:delete, attribute_name, attribute_value}]
}
_ ->
resource
end
end
@doc """
Generates a new `#{__MODULE__}.t()`
Uses `gen_new_id/1` to generate the identifier.
## Options
- `:id`: the id of the client
"""
@spec gen_new(Keyword.t()) :: t()
def gen_new(gen_new_opts \\ []) do
%__MODULE__{
id: gen_new_opts[:id] || gen_new_id(gen_new_opts),
newly_created: true
}
end
@doc """
Generates a new id
"""
@spec gen_new_id(Keyword.t()) :: String.t()
def gen_new_id(_gen_new_opts) do
16
|> :crypto.strong_rand_bytes()
|> Base.url_encode64(padding: false)
end
defoverridable gen_new_id: 1
@doc """
Loads a resource given its id
## Load options
- `:attributes`: list of attributes to load (`[AttributeRepository.attribute_name()]`).
Takes precedence over the default loaded attributes as set in the configuration
## Example
```elixir
iex> Asteroid.Subject.load("uid=john,ou=People,dc=example,dc=org")
{:ok,
%Asteroid.Subject{
attrs: %{
"cn" => ["<NAME>"],
"displayName" => "<NAME>",
"gecos" => "<NAME>",
"gidNumber" => 5000,
"givenName" => ["John"],
"homeDirectory" => "/home/john",
"loginShell" => "/bin/bash",
"mail" => ["<EMAIL>"],
"manager" => ["uid=toto,ou=People,dc=example,dc=org"],
"objectClass" => ["inetOrgPerson", "posixAccount", "shadowAccount"],
"sn" => ["Doe"],
"uid" => ["john"],
"uidNumber" => 10000
},
id: "uid=john,ou=People,dc=example,dc=org"
}}
```
"""
@spec load(AttributeRepository.resource_id(), Keyword.t()) ::
{:ok, t()}
| {:error, %AttributeRepository.ReadError{}}
| {:error, %AttributeRepository.Read.NotFoundError{}}
def load(resource_id, load_opts \\ []) do
attribute_repository_conf = config(context: load_opts[:context])
case attribute_repository_conf[:module].get(
resource_id,
load_opts[:attributes] || attribute_repository_conf[:default_loaded_attributes] || :all,
attribute_repository_conf[:run_opts]
) do
{:ok, resource} ->
{:ok, %__MODULE__{id: resource_id, attrs: resource}}
{:error, _} = error ->
error
end
end
@doc """
Loads a resource given one of its attributes
Note that:
- the configured module must support the search behaviour (`AttributeRepository.Search`)
- the attribute value must be unique (otherwise
`{:error, %AttributeRepository.Resource.NotUniqueAttributeError{}})` error will be returned
## Load options
- `:attributes`: list of attributes to load (`[AttributeRepository.attribute_name()]`).
Takes precedence over the default loaded attributes as set in the configuration
## Example
```elixir
iex> Asteroid.Subject.load_from_unique_attribute("mail", "<EMAIL>")
{:ok,
%Asteroid.Subject{
attrs: %{
"cn" => ["<NAME>"],
"displayName" => "<NAME>",
"gecos" => "<NAME>",
"gidNumber" => 5000,
"givenName" => ["John"],
"homeDirectory" => "/home/john",
"loginShell" => "/bin/bash",
"mail" => ["<EMAIL>"],
"manager" => ["uid=toto,ou=People,dc=example,dc=org"],
"objectClass" => ["inetOrgPerson", "posixAccount", "shadowAccount"],
"sn" => ["Doe"],
"uid" => ["john"],
"uidNumber" => 10000
},
id: "uid=john,ou=People,dc=example,dc=org"
}}
iex> Asteroid.Subject.load_from_unique_attribute("sn", "Doe")
{:error,
%AttributeRepository.Resource.NotUniqueAttributeError{
message: "Multiple entries returned"
}}
```
"""
@spec load_from_unique_attribute(AttributeRepository.attribute_name(),
AttributeRepository.attribute_data_type(),
Keyword.t()) ::
{:ok, t()}
| {:error, %AttributeRepository.ReadError{}}
| {:error, %AttributeRepository.Read.NotFoundError{}}
| {:error, %AttributeRepository.UnsupportedError{}}
| {:error, %AttributeRepository.Resource.NotUniqueAttributeError{}}
def load_from_unique_attribute(attribute_name, attribute_value, load_opts \\ []) do
attribute_repository_conf = config(context: load_opts[:context])
# generated by AttributeRepository.Search.Filter.parse/1
filter = {:attrExp,
{:eq,
%AttributeRepository.Search.AttributePath{
attribute: attribute_name,
sub_attribute: nil,
uri: nil
}, attribute_value}}
case attribute_repository_conf[:module].search(
filter,
load_opts[:attributes] || attribute_repository_conf[:default_loaded_attributes] || :all,
attribute_repository_conf[:run_opts]
) do
{:ok, []} ->
{:error, AttributeRepository.Read.NotFoundError.exception("Entry not found")}
{:ok, [{resource_id, resource}]} ->
{:ok, %__MODULE__{id: resource_id, attrs: resource}}
{:ok, _} ->
{:error, AttributeRepository.Resource.NotUniqueAttributeError.exception(
"Multiple entries returned")}
{:error, _} = error ->
error
end
end
@doc """
Fetches the listed attributes if not present and returns the resource
Returns the object with the added attributes, or raises:
- `AttributeRepository.ReadError`
- `AttributeRepository.Read.NotFoundError`
"""
@spec fetch_attributes(t(), [AttributeRepository.attribute_name()]) ::
t()
| no_return()
def fetch_attributes(resource, requested_attributes) do
existing_attributes =
resource.attrs
|> Map.keys()
|> MapSet.new()
requested_attributes = MapSet.new(requested_attributes)
case MapSet.to_list(MapSet.difference(requested_attributes, existing_attributes)) do
[] -> # all attributes are already loaded
resource
needed_attributes ->
case load(resource.id, attributes: needed_attributes) do
{:ok, new_resource} ->
%{resource | attrs: Map.merge(resource.attrs, new_resource.attrs)}
{:error, %AttributeRepository.ReadError{}} ->
raise AttributeRepository.ReadError, message: "Read error"
{:error, %AttributeRepository.Read.NotFoundError{}} ->
raise AttributeRepository.Read.NotFoundError, message: "Not found"
end
end
end
@doc """
Persists a `#{__MODULE__}.t()` in the attribute store
The attribute store must support the write behaviour (`AttributeRepository`)
Unless the `#{__MODULE__}.t()` has been newly generated (using `#{__MODULE__}.gen_new()/1`),
this function will first try to modify the resource first (using the
`c:AttributeRepository.modify/3` callback) and only then, the resource doesn't exist, it
attempts to create it (using the `c:AttributeRepository.put/3` callback).
"""
@spec store(t(), Keyword.t()) ::
:ok
| {:error, %AttributeRepository.WriteError{}}
| {:error, %AttributeRepository.ReadError{}}
| {:error, %AttributeRepository.UnsupportedError{}}
def store(resource, store_opts \\ [])
def store(%__MODULE__{newly_created: false} = resource, store_opts) do
attribute_repository_conf = config(context: store_opts[:context])
case attribute_repository_conf[:module].modify(
resource.id,
resource.modifications,
attribute_repository_conf[:run_opts]
) do
:ok ->
:ok
{:error, %AttributeRepository.Read.NotFoundError{}} ->
attribute_repository_conf[:module].put(
resource.id,
resource.attrs,
attribute_repository_conf[:run_opts]
)
{:error, _} = error ->
error
end
end
def store(%__MODULE__{newly_created: true} = resource, store_opts) do
attribute_repository_conf = config(context: store_opts[:context])
case attribute_repository_conf[:module].put(
resource.id,
resource.attrs,
attribute_repository_conf[:run_opts]
) do
{:ok, _} ->
:ok
{:error, _} = error ->
error
end
end
@doc """
Deletes a `#{__MODULE__}.t()`
Returns `:ok` if the object was successfuly deleted (whether it existed or not),
`{:error, any()}` otherwise.
"""
@spec delete(t(), Keyword.t()) ::
:ok
| {:error, %AttributeRepository.WriteError{}}
def delete(resource, delete_opts \\ [])
def delete(%__MODULE__{newly_created: false} = resource, delete_opts) do
attribute_repository_conf = config(context: delete_opts[:context])
case attribute_repository_conf[:module].delete(
resource.id,
attribute_repository_conf[:run_opts]
) do
:ok ->
:ok
{:error, %AttributeRepository.Read.NotFoundError{}} ->
:ok
{:error, _} = error ->
error
end
end
def delete(%__MODULE__{newly_created: true}, _delete_opts) do
:ok
end
instance =
__MODULE__
|> Module.split()
|> List.last()
|> String.downcase()
|> String.to_atom()
@doc """
Returns the configuration to request the attribute repository at runtime
"""
@spec config(Keyword.t()) :: AttributeRepository.Resource.config()
def config(_config_opts) do
case Application.get_env(unquote(opts[:otp_app]), :attribute_repositories)[unquote(instance)] do
nil ->
raise "Could not find instance `#{unquote(instance)}` in #{unquote(opts[:otp_app])} "
<> "application configuration"
conf ->
conf
end
end
defoverridable config: 1
end
end
end

# Source file: lib/attribute_repository/resource.ex
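# A hedged configuration sketch for the `config/1` lookup described above.
# The backend module and option keys below are illustrative; the instance key
# (:user) is derived from the last segment of the using module's name:
#
#   config :my_app, :attribute_repositories,
#     user: [
#       module: MyApp.AttributeRepositoryBackend,
#       run_opts: [instance: :default],
#       default_loaded_attributes: ["cn", "mail"]
#     ]
#
#   defmodule MyApp.User do
#     use AttributeRepository.Resource, otp_app: :my_app
#   end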
defmodule Crutches.List do
@moduledoc ~s"""
Convenience functions for lists.
This module provides several convenience functions operating on lists.
Simply call any function (with any options if applicable) to make use of it.
"""
@doc ~S"""
Returns the tail of the `collection` from `position`.
## Examples
iex> List.from(["a", "b", "c", "d"], 0)
["a", "b", "c", "d"]
iex> List.from(["a", "b", "c", "d"], 2)
["c", "d"]
iex> List.from(["a", "b", "c", "d"], 10)
[]
iex> List.from([], 0)
[]
iex> List.from(["a", "b", "c", "d"], -2)
["c", "d"]
iex> List.from(["a", "b", "c", "d"], -10)
[]
"""
@spec from(list(any), integer) :: list(any)
def from(collection, position) do
Enum.slice(collection, position, length(collection))
end
@doc ~S"""
Shorten a `list` by a given `amount`.
When the list is shorter than the amount given, this function returns `nil`.
## Examples
iex> List.shorten(["one", "two", "three"], 2)
{:ok, ["one"]}
iex> List.shorten([5, 6], 2)
{:ok, []}
iex> List.shorten([5, 6, 7, 8], 5)
{:error, "Amount to shorten by is greater than the length of the list"}
"""
@spec shorten(list(any), integer) :: list(any)
def shorten(list, amount \\ 1) do
shorten(list, amount, length(list))
end
defp shorten(_, amount, len) when len < amount,
do: {:error, "Amount to shorten by is greater than the length of the list"}
defp shorten(list, amount, len) do
shortened_list = Enum.take(list, len - amount)
{:ok, shortened_list}
end
@doc ~S"""
Returns a copy of the List from the beginning to the required index.
## Examples
iex> List.to(["a", "b", "c"], 0)
["a"]
iex> List.to(["a", "b", "c"], 1)
["a", "b"]
iex> List.to(["a", "b", "c"], 20)
["a", "b", "c"]
iex> List.to(["a", "b", "c"], -1)
[]
"""
@spec to(list(any), integer) :: list(any)
def to(collection, position) do
if position >= 0, do: Enum.take(collection, position + 1), else: []
end
@doc ~S"""
Split a `collection` by an element or by a function (`x`)
The function removes elements when they are equal to the given element, or;
When passing in a function, an element gets removed if the function returns
`true` for that element.
## Parameters
`collection` - The collection to do the split on.
`x` - Function predicate or element to split on.
## Examples
iex> List.split(["a", "b", "c", "d", "c", "e"], "c")
[["a", "b"], ["d"], ["e"]]
iex> List.split(["c", "a", "b"], "c")
[[], ["a", "b"]]
iex> List.split([], 1)
[[]]
iex> List.split([1, 2, 3, 4, 5, 6, 7, 8], fn(x) -> rem(x, 2) == 0 end)
[[1], [3], [5], [7], []]
iex> List.split(Enum.to_list(1..15), &(rem(&1,3) == 0))
[[1, 2], [4, 5], [7, 8], [10, 11], [13, 14], []]
"""
@spec split(list(any), any) :: list(any)
def split([], _), do: [[]]
def split(collection, predicate) when not is_function(predicate) do
split(collection, &(&1 == predicate))
end
def split(collection, predicate) do
{head, tail} = List.foldr collection, {[], []}, fn elem, {head, acc} ->
case predicate.(elem) do
true -> {[], [head | acc]}
false -> {[elem | head], acc}
end
end
[head] ++ tail
end
@doc ~S"""
Splits the `collection` into `number` groups, padding any remaining
slots with `elem` unless it is `false`.
## Examples
iex> List.in_groups(~w(1 2 3 4 5 6 7 8 9 10), 3)
[["1", "2", "3", "4"], ["5", "6", "7", nil], ["8", "9", "10", nil]]
iex> List.in_groups(~w(1 2 3 4 5 6 7 8 9 10), 3, false, fn(x) -> Enum.join(x, ",") end)
["1,2,3,4", "5,6,7", "8,9,10"]
iex> List.in_groups(~w(1 2 3 4 5 6 7 8 9 10), 3, false)
[["1", "2", "3", "4"], ["5", "6", "7"], ["8", "9", "10"]]
"""
@spec in_groups(list(any), integer, any, (any -> any)) :: list(any)
def in_groups(collection, number, elem, fun) do
in_groups(collection, number, elem)
|> Enum.map(fun)
end
@doc ~S"""
`List.in_groups/3` accepts either an element or a function as the `elem` parameter.
When `elem` is not a function, it will be used to fill the empty
slots in the groups. When `elem` is a function, it will be used to map every *group* created.
When `elem` is `false`, the groups will not be padded.
## Examples
iex> List.in_groups(~w(1 2 3 4 5 6 7 8), 3, "a")
[["1", "2", "3"], ["4", "5", "6"], ["7", "8", "a"]]
"""
def in_groups(collection, number, elem \\ nil)
def in_groups(collection, number, elem) when is_function(elem) do
in_groups(collection, number, nil, elem)
end
def in_groups(collection, number, elem) do
coll_size = length(collection)
group_min = div(coll_size, number)
group_rem = rem(coll_size, number)
{result, _} =
Enum.to_list(1..number)
|> Enum.reduce({[], collection}, fn(x, acc) ->
{list, kollection} = acc
if x <= group_rem do
{[Enum.take(kollection, group_min + 1) | list], Enum.drop(kollection, group_min + 1)}
else
case group_rem do
0 ->
{[Enum.take(kollection, group_min) | list], Enum.drop(kollection, group_min)}
_ ->
case elem do
false ->
{[Enum.take(kollection, group_min) | list], Enum.drop(kollection, group_min)}
_ ->
{[(Enum.take(kollection, group_min) |> Enum.concat([elem])) | list], Enum.drop(kollection, group_min)}
end
end
end
end)
Enum.reverse(result)
end
end

# Source file: lib/crutches/list.ex
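# A small combined sketch of the helpers above:
#
#   alias Crutches.List
#
#   1..10 |> Enum.to_list() |> List.in_groups(3, 0) |> Enum.map(&Enum.sum/1)
#   #=> [10, 18, 27]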
defmodule Stripe.Token do
@moduledoc """
Work with Stripe token objects.
You can:
- Create a token for a Connect customer with a card
- Create a token with all options - Only for Unit Tests with Stripe
- Retrieve a token
Does not yet render lists or take options.
Stripe API reference: https://stripe.com/docs/api#token
"""
@type t :: %__MODULE__{}
defstruct [
:id, :object,
:card, :client_ip, :created, :livemode, :type, :used
]
@plural_endpoint "tokens"
@schema %{
bank_account: [:create, :retrieve],
card: [:create, :retrieve],
client_ip: [:retrieve],
created: [:retrieve],
customer: [:create],
id: [:retrieve],
livemode: [:retrieve],
object: [:retrieve],
pii: %{
personal_id_number: [:create]
},
type: [:retrieve],
used: [:retrieve]
}
@doc """
Create a token for a Connect customer with a card belonging to the
platform customer.
You must pass in the account number for the Stripe Connect account
in `opts`.
"""
@spec create_on_connect_account(String.t, String.t, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def create_on_connect_account(customer_id, customer_card_id, opts = [connect_account: _]) do
body = %{
card: customer_card_id,
customer: customer_id
}
Stripe.Request.create(@plural_endpoint, body, @schema, opts)
end
@doc """
Create a token for a Connect customer using the default card.
You must pass in the account number for the Stripe Connect account
in `opts`.
"""
@spec create_with_default_card(String.t, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def create_with_default_card(customer_id, opts \\ []) do
body = %{
customer: customer_id
}
Stripe.Request.create(@plural_endpoint, body, @schema, opts)
end
@doc """
Create a token.
WARNING: This function is mainly for testing purposes only; you should not use
it on a production server unless you are able to transfer and store credit card
data on your server in a PCI-compliant way.
Use the Stripe.js library on the client device instead.
"""
@spec create(map, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def create(changes, opts \\ []) do
Stripe.Request.create(@plural_endpoint, changes, @schema, opts)
end
@doc """
Retrieve a token.
"""
@spec retrieve(binary, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def retrieve(id, opts \\ []) do
endpoint = @plural_endpoint <> "/" <> id
Stripe.Request.retrieve(endpoint, opts)
end
end

# Source file: lib/stripe/token.ex
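# A hedged usage sketch (the ids and the Connect account are illustrative):
#
#   {:ok, token} = Stripe.Token.retrieve("tok_123")
#   {:ok, token} =
#     Stripe.Token.create_with_default_card("cus_123", connect_account: "acct_123")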
defmodule Timex.Format.Time.Formatters.Humanized do
@moduledoc """
Handles formatting timestamp values as human readable strings.
For formatting timestamps as points in time rather than intervals,
use `DateFormat`
"""
use Timex.Format.Time.Formatter
@minute 60
@hour @minute * 60
@day @hour * 24
@week @day * 7
@month @day * 30
@year @day * 365
@doc """
Return a human readable string representing the time interval.
## Examples
iex> {1435, 180354, 590264} |> #{__MODULE__}.format
"45 years, 6 months, 5 days, 21 hours, 12 minutes, 34 seconds, 590.264 milliseconds"
iex> {0, 65, 0} |> #{__MODULE__}.format
"1 minutes, 5 seconds"
"""
@spec format(Date.timestamp) :: String.t
def format({_,_,_} = timestamp), do: timestamp |> deconstruct |> do_format
defp do_format(components), do: do_format(components, <<>>)
defp do_format([], str), do: str
defp do_format([{unit, value}|rest], str) do
case str do
<<>> -> do_format(rest, "#{value} #{Atom.to_string(unit)}")
_ -> do_format(rest, str <> ", #{value} #{Atom.to_string(unit)}")
end
end
defp deconstruct({_, _, micro} = ts), do: deconstruct({ts |> Time.to_secs |> trunc, micro}, [])
defp deconstruct({0, 0}, components), do: components |> Enum.reverse
defp deconstruct({seconds, us}, components) when seconds > 0 do
cond do
seconds >= @year -> deconstruct({rem(seconds, @year), us}, [{:years, div(seconds, @year)} | components])
seconds >= @month -> deconstruct({rem(seconds, @month), us}, [{:months, div(seconds, @month)} | components])
seconds >= @week -> deconstruct({rem(seconds, @week), us}, [{:weeks, div(seconds, @week)} | components])
seconds >= @day -> deconstruct({rem(seconds, @day), us}, [{:days, div(seconds, @day)} | components])
seconds >= @hour -> deconstruct({rem(seconds, @hour), us}, [{:hours, div(seconds, @hour)} | components])
seconds >= @minute -> deconstruct({rem(seconds, @minute), us}, [{:minutes, div(seconds, @minute)} | components])
true -> deconstruct({0, us}, [{:seconds, seconds} | components])
end
end
defp deconstruct({seconds, micro}, components) when seconds < 0, do: deconstruct({seconds * -1, micro}, components)
defp deconstruct({0, micro}, components) when micro > 0 do
msecs = {0, 0, micro} |> Time.abs |> Time.to_msecs
cond do
msecs >= 1.0 -> deconstruct({0, 0}, [{:milliseconds, msecs} | components])
true -> deconstruct({0, 0}, [{:microseconds, micro} | components])
end
end
end

# Source file: lib/format/time/formatters/humanized.ex
defmodule Guardian.Token.Jwt do
@moduledoc """
Deals with all things JWT.
This module should not be used directly.
It is intended to be used by Guardian on behalf of your implementation
as it's token module.
Token types are encoded in the `typ` field.
### Configuration
Configuration should be added to the implementation module
in either the configuration file or as options to `use Guardian`
#### Required
* `issuer` - The issuer of the token. Your application name/id
* `secret_key` - The secret key to use for the implementation module.
This may be any resolvable value for `Guardian.Config`
#### Optional
* `token_verify_module` - default `Guardian.Token.Jwt.Verify`. The module that verifies the claims
* `allowed_algos` - The allowed algos to use for encoding and decoding.
See JOSE for available. Default ["HS512"]
* `ttl` - The default time to live for all tokens. See the type in Guardian.ttl
* `token_ttl` - a map of `token_type` to `ttl`. Set specific ttls for specific types of tokens
* `allowed_drift` - The drift that is allowed when decoding/verifying a token in milliseconds
* `verify_issuer` - Verify that the token was issued by the configured issuer. Default false
* `secret_fetcher` - A module used to fetch the secret. Default: `Guardian.Token.Jwt.SecretFetcher`
Options:
These options are available to encoding and decoding:
* `secret` The secret key to use for signing
* `headers` The Jose headers that should be used
* `allowed_algos`
* `token_type` - Override the default token type
* `ttl` - The time to live. See `Guardian.Token.ttl` type
#### Example
```elixir
# encode a simple token
{:ok, token, claims} =
MyApp.Tokens.encode_and_sign(resource)
# encode a token with custom claims
{:ok, token, claims} =
MyApp.Tokens.encode_and_sign(resource, %{some: "claim"})
# encode a token with a custom type
{:ok, token, claims} =
MyApp.Tokens.encode_and_sign(resource, %{}, token_type: "refresh")
# encode a token with custom options
{:ok, token, claims} =
MyApp.Tokens.encode_and_sign(
resource,
%{},
secret: {MyModule, :get_my_secret, ["some", "args"]},
ttl: {4, :weeks},
token_type: "refresh"
)
# decode a token
{:ok, claims} =
MyApp.Tokens.decode_and_verify(token)
# decode a token and check literal claims
{:ok, claims} =
MyApp.Tokens.decode_and_verify(token, %{"typ" => "refresh"})
# decode a token and check literal claims with options
{:ok, claims} =
MyApp.Tokens.decode_and_verify(token, %{"typ" => "refresh"}, secret: {MyModule, :get_my_secret, ["some", "args"]})
# exchange a token
{:ok, {old_token, old_claims}, {new_token, new_claims}} =
MyApp.Tokens.exchange(old_token, ["access", "refresh"], "access")
# exchange a token with options
{:ok, {old_token, old_claims}, {new_token, new_claims}} =
MyApp.Tokens.exchange(old_token, ["access", "refresh"], "access" secret: {MyModule, :get_my_secret, ["some", "args"]}, ttl: {1, :hour})
# refresh a token using defaults
{:ok, {old_token, old_claims}, {new_token, new_claims}} = MyApp.Tokens.refresh(old_token)
# refresh a token using options
{:ok, {old_token, old_claims}, {new_token, new_claims}} =
MyApp.Tokens.refresh(old_token, ttl: {1, :week}, secret: {MyMod, :get_secret, ["some", "args"})
```
### Token verify module
The token verify module by default is `Guardian.Token.Jwt.Verify`.
This module implements the `Guardian.Token.Verify` behaviour.
To customize your token validation you have 2 options.
1. Implement the `verify_claims` callback on your implementation
2. `use Guardian.Token.Verify` in your own module and use that.
To create your own verify module use `Guardian.Token.Verify` and configure
your implementation to use it either through config files or when you setup your implementation.
```elixir
defmodule MyApp.Tokens do
use Guardian, otp_app: :my_app,
token_verify_module: MyVerifyModule
# ... snip
end
```
### SecretFetcher
When you need dynamic secret verification, you should use a custom
`Guardian.Token.Jwt.SecretFetcher` module.
This will allow you to use the header values to determine dynamically the
key that should be used.
```
defmodule MyCustomSecretFetcher do
use Guardian.Token.Jwt.SecretFetcher
def fetch_signing_secret(impl_module, opts) do
# fetch the secret for sigining
end
def fetch_verifying_secret(impl_module, token_headers, opts) do
# fetch the secret for verifying the token
end
end
```
"""
@behaviour Guardian.Token
alias Guardian.{Config, Token.Jwt.Verify, Token.Jwt.SecretFetcher.SecretFetcherDefaultImpl}
alias JOSE.{JWT, JWS, JWK}
import Guardian, only: [stringify_keys: 1]
@default_algos ["HS512"]
@default_token_type "access"
@type_key "typ"
@default_ttl {4, :weeks}
defmodule SecretFetcher do
@moduledoc """
Provides a behaviour that specifies how to fetch the secret for the token
`use Guardian.Token.JWT.SecretFetcher` to provide default implementations of each function
"""
@doc """
fetch_signing_secret fetches the secret used to sign tokens.
"""
@callback fetch_signing_secret(module, opts :: Guardian.options()) ::
{:ok, term} | {:error, :secret_not_found}
@doc """
fetch_verifying_secret fetches the secret to verify a token.
It is provided with the tokens headers in order to lookup the secret.
"""
@callback fetch_verifying_secret(module, token_headers :: map, opts :: Guardian.options()) ::
{:ok, term} | {:error, :secret_not_found}
defmacro __using__(_opts \\ []) do
quote do
alias Guardian.Token.Jwt.SecretFetcher.SecretFetcherDefaultImpl, as: DI
def fetch_signing_secret(mod, opts), do: DI.fetch_signing_secret(mod, opts)
def fetch_verifying_secret(mod, token_headers, opts),
do: DI.fetch_verifying_secret(mod, token_headers, opts)
defoverridable fetch_signing_secret: 2, fetch_verifying_secret: 3
end
end
end
defmodule SecretFetcher.SecretFetcherDefaultImpl do
@moduledoc false
use Guardian.Token.Jwt.SecretFetcher
def fetch_signing_secret(mod, opts) do
secret = Keyword.get(opts, :secret)
secret = Config.resolve_value(secret) || apply(mod, :config, [:secret_key])
case secret do
nil -> {:error, :secret_not_found}
val -> {:ok, val}
end
end
def fetch_verifying_secret(mod, _token_headers, opts) do
secret = Keyword.get(opts, :secret)
secret = Config.resolve_value(secret) || mod.config(:secret_key)
case secret do
nil -> {:error, :secret_not_found}
val -> {:ok, val}
end
end
end
@doc """
Inspect the JWT without any validation or signature checking.
Return an map with keys: `headers` and `claims`
"""
def peek(_mod, nil), do: nil
def peek(_mod, token) do
%{headers: JWT.peek_protected(token).fields, claims: JWT.peek_payload(token).fields}
end
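# For example (a sketch; the header and claim values are illustrative):
#
#   Guardian.Token.Jwt.peek(MyApp.Tokens, token)
#   #=> %{headers: %{"alg" => "HS512", "typ" => "JWT"},
#   #     claims: %{"sub" => "user:1", "typ" => "access"}}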
@doc """
Generate unique token id
"""
def token_id, do: Guardian.UUID.generate()
@doc """
Create a token. Uses the claims, encodes and signs the token.
The signing secret will be found first from the options.
If not specified the secret key from the configuration will be used.
Configuration:
* `secret_key` The secret key to use for signing
Options:
* `secret` The secret key to use for signing
* `headers` The Jose headers that should be used
* `allowed_algos`
The secret may be in the form of any resolved value from `Guardian.Config`
"""
def create_token(mod, claims, options \\ []) do
with {:ok, secret_fetcher} <- fetch_secret_fetcher(mod),
{:ok, secret} <- secret_fetcher.fetch_signing_secret(mod, options) do
{_, token} =
secret
|> jose_jwk()
|> JWT.sign(jose_jws(mod, options), claims)
|> JWS.compact()
{:ok, token}
end
end
@doc """
Builds the default claims for all JWT tokens.
Note:
* `aud` is set to the configured `issuer` unless `aud` is set
Options:
Options may override the defaults found in the configuration.
* `token_type` - Override the default token type
* `ttl` - The time to live. See `Guardian.Token.ttl` type
"""
# credo:disable-for-next-line /\.Warning\./
def build_claims(mod, _resource, sub, claims \\ %{}, options \\ []) do
claims =
claims
|> stringify_keys()
|> set_jti()
|> set_iat()
|> set_iss(mod, options)
|> set_aud(mod, options)
|> set_type(mod, options)
|> set_sub(mod, sub, options)
|> set_ttl(mod, options)
{:ok, claims}
end
@doc """
Decodes the token and validates the signature.
Options:
* `secret` - Override the configured secret. `Guardian.Config.config_value` is valid
* `allowed_algos` - a list of allowable algos
"""
def decode_token(mod, token, options \\ []) do
with {:ok, secret_fetcher} <- fetch_secret_fetcher(mod),
%{headers: headers} <- peek(mod, token),
{:ok, raw_secret} <- secret_fetcher.fetch_verifying_secret(mod, headers, options),
secret <- jose_jwk(raw_secret),
algos = fetch_allowed_algos(mod, options) do
verify_result = JWT.verify_strict(secret, algos, token)
case verify_result do
{true, jose_jwt, _} -> {:ok, jose_jwt.fields}
{false, _, _} -> {:error, :invalid_token}
end
end
end
@doc """
Verifies the claims.
Configuration:
* `token_verify_module` Default `Guardian.Token.Jwt.Verify` the module to use to verify the claims
"""
def verify_claims(mod, claims, options) do
result =
mod
|> apply(:config, [:token_verify_module, Verify])
|> apply(:verify_claims, [mod, claims, options])
case result do
{:ok, claims} -> apply(mod, :verify_claims, [claims, options])
err -> err
end
end
@doc """
Revoking a JWT by default does not do anything.
You'll need to track the token in storage in some way
and revoke in your implementation callbacks.
See `GuardianDb` for an example.
"""
def revoke(_mod, claims, _token, _options), do: {:ok, claims}
@doc """
Refresh the token
Options:
* `secret` - Override the configured secret. `Guardian.Config.config_value` is valid
* `allowed_algos` - a list of allowable algos
* `ttl` - The time to live. See `Guardian.Token.ttl` type
"""
def refresh(mod, old_token, options) do
with {:ok, old_claims} <- apply(mod, :decode_and_verify, [old_token, %{}, options]),
{:ok, claims} <- refresh_claims(mod, old_claims, options),
{:ok, token} <- create_token(mod, claims, options) do
{:ok, {old_token, old_claims}, {token, claims}}
else
{:error, _} = err -> err
err -> {:error, err}
end
end
@doc """
Exchange a token of one type to another.
Type is encoded in the `typ` field.
Options:
* `secret` - Override the configured secret. `Guardian.Config.config_value` is valid
* `allowed_algos` - a list of allowable algos
* `ttl` - The time to live. See `Guardian.Token.ttl` type
"""
def exchange(mod, old_token, from_type, to_type, options) do
with {:ok, old_claims} <- apply(mod, :decode_and_verify, [old_token, %{}, options]),
{:ok, claims} <- exchange_claims(mod, old_claims, from_type, to_type, options),
{:ok, token} <- create_token(mod, claims, options) do
{:ok, {old_token, old_claims}, {token, claims}}
else
{:error, _} = err -> err
err -> {:error, err}
end
end
defp jose_jws(mod, opts) do
algos = fetch_allowed_algos(mod, opts) || @default_algos
headers = Keyword.get(opts, :headers, %{})
Map.merge(%{"alg" => hd(algos)}, headers)
end
defp jose_jwk(%JWK{} = the_secret), do: the_secret
defp jose_jwk(the_secret) when is_binary(the_secret), do: JWK.from_oct(the_secret)
defp jose_jwk(the_secret) when is_map(the_secret), do: JWK.from_map(the_secret)
defp jose_jwk(value), do: Config.resolve_value(value)
defp fetch_allowed_algos(mod, opts) do
opts
|> Keyword.get(:allowed_algos)
|> Config.resolve_value() || apply(mod, :config, [:allowed_algos, @default_algos])
end
defp set_type(%{"typ" => typ} = claims, _mod, _opts) when not is_nil(typ), do: claims
defp set_type(claims, mod, opts) do
defaults = apply(mod, :default_token_type, [])
typ = Keyword.get(opts, :token_type, defaults)
Map.put(claims, @type_key, to_string(typ || @default_token_type))
end
defp set_sub(claims, _mod, subject, _opts), do: Map.put(claims, "sub", subject)
defp set_iat(claims) do
ts = Guardian.timestamp()
claims |> Map.put("iat", ts) |> Map.put("nbf", ts - 1)
end
defp set_ttl(%{"exp" => exp} = claims, _mod, _opts) when not is_nil(exp), do: claims
defp set_ttl(%{"typ" => token_typ} = claims, mod, opts) do
ttl = Keyword.get(opts, :ttl)
if ttl do
set_ttl(claims, ttl)
else
token_typ = to_string(token_typ)
token_ttl = apply(mod, :config, [:token_ttl, %{}])
fallback_ttl = apply(mod, :config, [:ttl, @default_ttl])
ttl = Map.get(token_ttl, token_typ, fallback_ttl)
set_ttl(claims, ttl)
end
end
defp set_ttl(the_claims, {num, period}) when is_binary(num),
do: set_ttl(the_claims, {String.to_integer(num), period})
defp set_ttl(the_claims, {num, period}) when is_binary(period),
do: set_ttl(the_claims, {num, String.to_existing_atom(period)})
defp set_ttl(%{"iat" => iat_v} = the_claims, requested_ttl),
do: assign_exp_from_ttl(the_claims, {iat_v, requested_ttl})
# catch all for when the issued at iat is not yet set
defp set_ttl(claims, requested_ttl), do: claims |> set_iat() |> set_ttl(requested_ttl)
defp assign_exp_from_ttl(the_claims, {iat_v, {seconds, unit}}) when unit in [:second, :seconds],
do: Map.put(the_claims, "exp", iat_v + seconds)
defp assign_exp_from_ttl(the_claims, {iat_v, {minutes, unit}}) when unit in [:minute, :minutes],
do: Map.put(the_claims, "exp", iat_v + minutes * 60)
defp assign_exp_from_ttl(the_claims, {iat_v, {hours, unit}}) when unit in [:hour, :hours],
do: Map.put(the_claims, "exp", iat_v + hours * 60 * 60)
defp assign_exp_from_ttl(the_claims, {iat_v, {days, unit}}) when unit in [:day, :days],
do: Map.put(the_claims, "exp", iat_v + days * 24 * 60 * 60)
defp assign_exp_from_ttl(the_claims, {iat_v, {weeks, unit}}) when unit in [:week, :weeks],
do: Map.put(the_claims, "exp", iat_v + weeks * 7 * 24 * 60 * 60)
defp assign_exp_from_ttl(_, {_iat_v, {_, units}}), do: raise("Unknown Units: #{units}")
defp set_iss(claims, mod, _opts) do
issuer = mod |> apply(:config, [:issuer]) |> to_string()
Map.put(claims, "iss", issuer)
end
defp set_aud(%{"aud" => aud} = claims, _mod, _opts) when not is_nil(aud), do: claims
defp set_aud(claims, mod, _opts) do
issuer = mod |> apply(:config, [:issuer]) |> to_string()
Map.put(claims, "aud", issuer)
end
defp set_jti(claims), do: Map.put(claims, "jti", token_id())
defp refresh_claims(mod, claims, options), do: {:ok, reset_claims(mod, claims, options)}
defp exchange_claims(mod, old_claims, from_type, to_type, options) when is_list(from_type) do
from_type = Enum.map(from_type, &to_string(&1))
if Enum.member?(from_type, old_claims["typ"]) do
exchange_claims(mod, old_claims, old_claims["typ"], to_type, options)
else
{:error, :incorrect_token_type}
end
end
defp exchange_claims(mod, old_claims, from_type, to_type, options) do
if old_claims["typ"] == to_string(from_type) do
new_type = to_string(to_type)
# set the type first because the ttl can depend on the type
claims = Map.put(old_claims, "typ", new_type)
claims = reset_claims(mod, claims, options)
{:ok, claims}
else
{:error, :incorrect_token_type}
end
end
defp reset_claims(mod, claims, options) do
claims
|> Map.drop(["jti", "iss", "iat", "nbf", "exp"])
|> set_jti()
|> set_iat()
|> set_iss(mod, options)
|> set_ttl(mod, options)
end
defp fetch_secret_fetcher(mod) do
{:ok, mod.config(:secret_fetcher, SecretFetcherDefaultImpl)}
end
end # lib/guardian/token/jwt.ex
defmodule Drab.Live do
@moduledoc """
Drab Module to provide a live access and update of assigns of the template, which is currently rendered and displayed
in the browser.
The idea is to reuse your Phoenix templates and let them live: to make it possible to update assigns
on the rendered page, from Elixir, without re-rendering the whole html. But because Drab tries to update
the smallest possible amount of the html, there are some limitations; for example, when updating a nested
block it does not know the local variables used before. Please check out `Drab.Live.EExEngine` for a more
detailed description.
Use `peek/2` to get the assign value, and `poke/2` to modify it directly in the DOM tree.
Drab.Live uses the modified EEx Engine (`Drab.Live.EExEngine`) to compile the template and indicate where assigns
were rendered. To enable it, rename the template you want to go live from extension `.eex` to `.drab`. Then,
add Drab Engine to the template engines in `config.exs`:
config :phoenix, :template_engines,
drab: Drab.Live.Engine
### Update Behaviours
There are different behaviours of `Drab.Live`, depending on where the expression with the updated assign lives.
For example, if the expression defines a tag attribute, like `<span class="<%= @class %>">`, we don't want to
re-render the whole tag, as it might override changes you made with another Drab module, or even with JavaScript.
Because of this, Drab finds the tag and updates only the required attributes.
#### Plain Text
If the expression in the template is given in any tag body, Drab will try to find the surrounding tag and mark
it with the attribute called `drab-ampere`. The attribute value is a hash of the previous buffer and the expression
itself.
Consider the template, with the assign `@chapter_no` having an initial value of `1` (given in the render
function in the controller, as usual):
<p>Chapter <%= @chapter_no %>.</p>
which renders to:
<p drab-ampere="someid">Chapter 1.</p>
This `drab-ampere` attribute is injected automatically by `Drab.Live.EExEngine`. Updating the `@chapter_no`
assign in the Drab Commander, by using `poke/2`:
chapter = peek(socket, :chapter_no) # get the current value of `@chapter_no`
poke(socket, chapter_no: chapter + 1) # push the new value to the browser
will change the `innerHTML` of the `<p drab-ampere="someid">` to "Chapter 2." by executing the following JS
on the browser:
document.querySelector('[drab-ampere=someid]').innerHTML = "Chapter 2."
This is possible because during the compile phase, Drab stores the `drab-ampere` and the corresponding pattern in
the cache DETS file (located in `priv/`).
#### Injecting `<span>`
When Drab can't find the parent tag, it injects a `<span>` into the generated html. For example, a template
like:
Chapter <%= @chapter_no %>.
renders to:
Chapter <span drab-ampere="someid">1</span>.
#### Attributes
When the expression defines an attribute of the tag, the behaviour is different. Let's assume there is
a template with following html, rendered in the Controller with value of `@button` set to string `"btn-danger"`.
<button class="btn <%= @button %>">
It renders to:
<button drab-ampere="someid" class="btn btn-danger">
Again, you can see injected `drab-ampere` attribute. This allows Drab to indicate where to update the attribute.
Pushing the changes to the browser with:
poke socket, button: "btn btn-info"
will result in an updated `class` attribute on the given tag. This is accomplished by running
`node.setAttribute("class", "btn btn-info")` on the browser.
Notice that the pattern in which your expression lives is preserved: you may update only parts of the
attribute value string.
##### Updating `value` attribute for `<input>` and `<textarea>`
There is a special case for `<input>` and `<textarea>`: when poking attribute of `value`, Drab updates
the corresponding `value` property as well.
#### Properties
Nowadays we deal more with node properties than attributes. This is why `Drab.Live` introduces a special
syntax: when using the `@` sign at the beginning of the attribute name, it will be treated as a property.
<button @hidden=<%= @hidden %>>
Updating `@hidden` in the Drab Commander with `poke/2` will change the value of the `hidden` property
(without the leading `@`), by sending the update javascript: `node['hidden'] = false`.
You may also dig deeper into the node properties, using a dot - like in JavaScript - to bind the expression
to a specific property. A good example is setting up `.style`:
<button @style.backgroundColor=<%= @color %>>
Additionally, Drab sets up all the properties defined that way when the page loads. Thanks to this, you
don't have to worry about the initial value.
Notice that `@property=<%= expression %>` *is the only available syntax*: you cannot use a string pattern or
give more than one expression. The property must be bound directly to a single expression.
The expression bound to the property *must be encodable to JSON*, so, for example, tuples are not allowed here.
Please refer to `Jason` for more information about JSON encoding.
#### Scripts
When the assign we want to change is inside the `<script></script>` tag, Drab will re-evaluate the whole
script after the assign changes. Let's say you don't want to use the `@property=<%= expression %>` syntax
to define the object property. You may want to render the javascript:
<script>
document.querySelectorAll("button").hidden = <%= @button_state %>
</script>
If you render the template in the Controller with `@button_state` set to `false`, the initial html will look like:
<script drab-ampere="someid">
document.querySelectorAll("button").hidden = false
</script>
Again, Drab injects some ID to know where to find its victim. After you `poke/2` the new value of `@button_state`,
Drab will re-render the whole script with a new value and will send a request to re-evaluate the script.
The browser will run something like: `eval("document.querySelectorAll(\"button\").hidden = true")`.
Please notice this behaviour is disabled by default for safety. To enable it, use the following in your
`config.exs`:
config :drab, enable_live_scripts: true
### Avoiding using Drab
If there is no need to use Drab with some expression, you may mark it with the `nodrab/1` function. Such expressions
will be treated as a "normal" Phoenix expressions and will not be updatable by `poke/2`.
<p>Chapter <%= nodrab(@chapter_no) %>.</p>
With Elixir 1.6, you may use the special marker "/", which does exactly the same as `nodrab`:
<p>Chapter <%/ @chapter_no %>.</p>
#### The `@conn` case
The `@conn` assign is often used in Phoenix templates. Drab considers it read-only; you cannot update it
with `poke/2`. And, because it is often quite huge, it may significantly increase the amount of data sent to
the browser. This is why Drab treats all expressions with only one assign, which happens to be `@conn`, as
a `nodrab` assign.
### Partials
The functions `poke/2` and `peek/2` work on the default template - the one rendered with the Controller. In case
there are some child templates, rendered inside the main one, you need to specify the template name as the second
argument of the `poke/3` and `peek/3` functions.
In case the template is not under the current (main) view, use `poke/4` and `peek/4` to specify the external
view name.
Assigns are anchored within their partials. Manipulating an assign outside the template it lives in will raise
`ArgumentError`. *Partials are not hierarchical*, e.g. modifying an assign in the main partial will not update
assigns in the child partials, even if they exist there.
#### Rendering partial templates at runtime
It is possible to add a partial to the DOM tree at runtime, using the `render_to_string/2` helper:
poke socket, live_partial1: render_to_string("partial1.html", color: "#aaaabb")
But remember that assigns are anchored to their partials, so after adding one to the page, any manipulation
must be done within the added partial:
poke socket, "partial1.html", color: "red"
### Evaluating expressions
When the assign change is poked back to the browser, Drab needs to re-evaluate all the expressions from the
template which contain the given assign. These expressions are stored, along with the pattern, in the cache DETS
file. Because the expressions must be evaluated in the Phoenix environment, Drab performs some `import`s and
`use`s first. For example, it does `use Phoenix.HTML` and `import Phoenix.View`. It also imports the following
modules from your application:
import YourApplication.Router.Helpers
import YourApplication.ErrorHelpers
import YourApplication.Gettext
If you renamed any of those modules in your application, you must tell Drab where to find it by adding the following
entry to the `config.exs` file:
config :drab, live_helper_modules: [Router.Helpers, ErrorHelpers, Gettext]
Notice that the application name is derived automatically. Please check `Drab.Config.get/1` for more information
on Drab setup.
### Limitations
Because Drab must interpret the template, inject its IDs etc., it assumes that the template HTML is valid.
There are also some limits for defining attributes, properties, local variables, etc. See `Drab.Live.EExEngine`
for a full description.
"""
@type result :: Phoenix.Socket.t() | Drab.Core.result() | no_return
import Drab.Core
require IEx
use DrabModule
@impl true
def js_templates(), do: ["drab.live.js"]
@doc """
Returns the current value of the assign from the current (main) partial.
iex> peek(socket, :count)
42
iex> peek(socket, :nonexistent)
** (ArgumentError) Assign @nonexistent not found in Drab EEx template
Notice that this is the value of the assign, not the value of any node property or attribute. An assign
gets its value only while rendering the page or via `poke`. After changing the value of a node attribute
or property on the client side, the assign value will remain the same.
"""
# TODO: think if it is needed to sign/encrypt
@spec peek(Phoenix.Socket.t(), atom) :: term | no_return
def peek(socket, assign), do: peek(socket, nil, nil, assign)
@doc """
Like `peek/2`, but takes a partial name and returns the assign from that specified partial.
The partial is taken from the current view.
iex> peek(socket, "users.html", :count)
42
"""
# TODO: think if it is needed to sign/encrypt
@spec peek(Phoenix.Socket.t(), String.t(), atom) :: term | no_return
def peek(socket, partial, assign), do: peek(socket, nil, partial, assign)
@doc """
Like `peek/3`, but takes a view and a partial name and returns the assign from that specified view/partial.
iex> peek(socket, MyApp.UserView, "users.html", :count)
42
"""
@spec peek(Phoenix.Socket.t(), atom | nil, String.t() | nil, atom | String.t()) :: term | no_return
def peek(socket, view, partial, assign) when is_binary(assign) do
view = view || Drab.get_view(socket)
hash = if partial, do: partial_hash(view, partial), else: index(socket)
current_assigns = assign_data_for_partial(socket, hash, partial)
current_assigns_keys = current_assigns |> Map.keys() |> Enum.map(&String.to_existing_atom/1)
case current_assigns |> Map.fetch(assign) do
# |> Drab.Live.Crypto.decode64()
{:ok, val} ->
val
:error ->
raise_assign_not_found(assign, current_assigns_keys)
end
end
def peek(socket, view, partial, assign) when is_atom(assign) do
peek(socket, view, partial, Atom.to_string(assign))
end
@doc """
Updates the current page in the browser with the new assign value.
Raises `ArgumentError` when assign is not found within the partial.
Returns the untouched socket, or a `{:error, description}` or `{:timeout, description}` tuple.
iex> poke(socket, count: 42)
%Phoenix.Socket{ ...
"""
@spec poke(Phoenix.Socket.t(), Keyword.t()) :: result
def poke(socket, assigns) do
do_poke(socket, nil, nil, assigns, &Drab.Core.exec_js/2)
end
@doc """
Like `poke/2`, but limited only to the given partial name.
iex> poke(socket, "user.html", name: "Bożywój")
%Phoenix.Socket{ ...
"""
@spec poke(Phoenix.Socket.t(), String.t(), Keyword.t()) :: result
def poke(socket, partial, assigns) do
do_poke(socket, nil, partial, assigns, &Drab.Core.exec_js/2)
end
@doc """
Like `poke/3`, but searches for the partial within the given view.
iex> poke(socket, MyApp.UserView, "user.html", name: "Bożywój")
%Phoenix.Socket{ ...
"""
@spec poke(Phoenix.Socket.t(), atom, String.t(), Keyword.t()) :: result
def poke(socket, view, partial, assigns) do
do_poke(socket, view, partial, assigns, &Drab.Core.exec_js/2)
end
@spec do_poke(Phoenix.Socket.t(), atom | nil, String.t() | nil, Keyword.t(), function) :: result
defp do_poke(socket, view, partial_name, assigns, function) do
if Enum.member?(Keyword.keys(assigns), :conn) do
raise ArgumentError,
message: """
assign @conn is read only.
"""
end
view = view || Drab.get_view(socket)
partial = if partial_name, do: partial_hash(view, partial_name), else: index(socket)
current_assigns = assign_data_for_partial(socket, partial, partial_name)
current_assigns_keys = current_assigns |> Map.keys() |> Enum.map(&String.to_existing_atom/1)
assigns_to_update = Enum.into(assigns, %{})
assigns_to_update_keys = Map.keys(assigns_to_update)
for as <- assigns_to_update_keys do
unless Enum.find(current_assigns_keys, fn key -> key === as end) do
raise_assign_not_found(as, current_assigns_keys)
end
end
updated_assigns =
current_assigns
|> Enum.map(fn {k, v} -> {String.to_existing_atom(k), v} end)
|> Keyword.merge(assigns)
modules = {
Drab.get_view(socket),
Drab.Config.get(:live_helper_modules)
}
amperes_to_update =
for {assign, _} <- assigns do
Drab.Live.Cache.get({partial, assign})
end
|> List.flatten()
|> Enum.uniq()
# construct the javascripts for update of amperes
# TODO: group updates on one node
update_javascripts =
for ampere <- amperes_to_update,
{gender, tag, prop_or_attr, expr, _, parent_assigns} <- Drab.Live.Cache.get({partial, ampere}) || [],
!is_a_child?(parent_assigns, assigns_to_update_keys) do
case gender do
:html ->
safe = eval_expr(expr, modules, updated_assigns, gender)
# |> Drab.Live.HTML.remove_drab_marks()
new_value = safe |> safe_to_string()
case {tag, Drab.Config.get(:enable_live_scripts)} do
{"script", false} ->
nil
{_, _} ->
"Drab.update_tag(#{encode_js(tag)}, #{encode_js(ampere)}, #{encode_js(new_value)})"
end
:attr ->
new_value = eval_expr(expr, modules, updated_assigns, gender) |> safe_to_string()
"Drab.update_attribute(#{encode_js(ampere)}, #{encode_js(prop_or_attr)}, #{encode_js(new_value)})"
:prop ->
new_value = eval_expr(expr, modules, updated_assigns, gender) |> safe_to_string()
"Drab.update_property(#{encode_js(ampere)}, #{encode_js(prop_or_attr)}, #{new_value})"
end
end
assign_updates = assign_updates_js(assigns_to_update, partial)
all_javascripts = (assign_updates ++ update_javascripts) |> Enum.uniq()
# IO.inspect(all_javascripts)
case function.(socket, all_javascripts |> Enum.join(";")) do
{:ok, _} ->
# Save updated assigns in the Drab Server
assigns_to_update =
for {k, v} <- assigns_to_update, into: %{} do
{Atom.to_string(k), v}
end
updated_assigns =
for {k, v} <- Map.merge(current_assigns, assigns_to_update), into: %{} do
{k, Drab.Live.Crypto.encode64(v)}
end
priv = socket |> Drab.pid() |> Drab.get_priv()
partial_assigns_updated = %{priv.__ampere_assigns | partial => updated_assigns}
socket |> Drab.pid() |> Drab.set_priv(%{priv | __ampere_assigns: partial_assigns_updated})
socket
other ->
other
end
end
# the case when the expression is inside another expression
# and we update assigns of the parent expression as well
defp is_a_child?(list1, list2) do
not Enum.empty?(list1) &&
Enum.all?(list1, fn element ->
element in list2
end)
end
@doc """
Returns a list of the assigns for the main partial.
Examples:
iex> Drab.Live.assigns(socket)
[:welcome_text]
"""
@spec assigns(Phoenix.Socket.t()) :: list
def assigns(socket) do
assigns(socket, nil, nil)
end
@doc """
Like `assigns/1` but will return the assigns for a given `partial` instead of the main partial.
Examples:
iex> assigns(socket, "user.html")
[:name, :age, :email]
"""
@spec assigns(Phoenix.Socket.t(), String.t() | nil) :: list
def assigns(socket, partial) do
assigns(socket, nil, partial)
end
@doc """
Like `assigns/2`, but returns the assigns for a given combination of a `view` and a `partial`.
iex> assigns(socket, MyApp.UserView, "user.html")
[:name, :age, :email]
"""
@spec assigns(Phoenix.Socket.t(), atom | nil, String.t() | nil) :: list
def assigns(socket, view, partial) do
view = view || Drab.get_view(socket)
partial_hash = if partial, do: partial_hash(view, partial), else: index(socket)
assigns =
socket
|> ampere_assigns()
|> Map.get(partial_hash, [])
for {assign, _} <- assigns do
assign |> String.to_existing_atom()
end
end
@spec eval_expr(Macro.t(), {atom, list}, Keyword.t(), atom) :: term | no_return
defp eval_expr(expr, modules, updated_assigns, :prop) do
eval_expr(Drab.Live.EExEngine.encoded_expr(expr), modules, updated_assigns)
end
defp eval_expr(expr, modules, updated_assigns, _) do
eval_expr(expr, modules, updated_assigns)
end
@spec eval_expr(Macro.t(), {atom, list}, Keyword.t()) :: term | no_return
defp eval_expr(expr, modules, updated_assigns) do
e = expr_with_imports(expr, modules)
try do
{safe, _assigns} = Code.eval_quoted(e, assigns: updated_assigns)
safe
rescue
# TODO: to be removed after solving #71
e in CompileError ->
msg =
if String.contains?(e.description, "undefined function") do
"""
#{e.description}
Using local variables defined in external blocks is prohibited in Drab.
Please check the following documentation page for more details:
https://hexdocs.pm/drab/Drab.Live.EExEngine.html#module-limitations
"""
else
e.description
end
stacktrace = System.stacktrace()
reraise CompileError, [description: msg], stacktrace
end
end
@spec expr_with_imports(Macro.t(), {atom, list}) :: Macro.t()
defp expr_with_imports(expr, {view, modules}) do
quote do
import Phoenix.View
import unquote(view)
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
use Phoenix.HTML
unquote do
for module <- modules do
quote do
import unquote(module)
end
end
end
unquote(expr)
end
end
@spec assign_updates_js(map, String.t()) :: [String.t()]
defp assign_updates_js(assigns, partial) do
Enum.map(assigns, fn {k, v} ->
"__drab.assigns[#{Drab.Core.encode_js(partial)}][#{Drab.Core.encode_js(k)}] = '#{Drab.Live.Crypto.encode64(v)}'"
end)
end
# defp safe_to_encoded_js(safe), do: safe |> safe_to_string() |> encode_js()
@spec safe_to_string(Phoenix.HTML.safe() | [Phoenix.HTML.safe()]) :: String.t()
defp safe_to_string(list) when is_list(list), do: list |> Enum.map(&safe_to_string/1) |> Enum.join("")
defp safe_to_string({:safe, _} = safe), do: Phoenix.HTML.safe_to_string(safe)
defp safe_to_string(safe), do: to_string(safe)
@spec assign_data_for_partial(Phoenix.Socket.t(), String.t() | atom, String.t() | atom) :: map | no_return
defp assign_data_for_partial(socket, partial, partial_name) do
assigns =
case socket
|> ampere_assigns()
|> Map.fetch(partial) do
{:ok, val} ->
val
:error ->
raise ArgumentError,
message: """
Drab is unable to find a partial #{partial_name || "main"}.
Please check the path or specify the View.
"""
end
for {name, value} <- assigns, into: %{} do
{name, Drab.Live.Crypto.decode64(value)}
end
end
@spec ampere_assigns(Phoenix.Socket.t()) :: map
defp ampere_assigns(socket) do
socket
|> Drab.pid()
|> Drab.get_priv()
|> Map.get(:__ampere_assigns, %{})
end
@spec index(Phoenix.Socket.t()) :: String.t()
defp index(socket) do
socket
|> Drab.pid()
|> Drab.get_priv()
|> Map.get(:__index)
end
@spec partial_hash(atom, String.t()) :: String.t() | no_return
defp partial_hash(view, partial_name) do
# Drab.Live.Cache.get({:partial, partial_path(view, partial_name)})
path = partial_path(view, partial_name)
case Drab.Live.Cache.get(path) do
{hash, _assigns} -> hash
_ -> raise_partial_not_found(path)
end
end
@spec partial_path(atom, String.t()) :: String.t()
defp partial_path(view, partial_name) do
templates_path(view) <> partial_name <> Drab.Config.drab_extension()
end
@spec templates_path(atom) :: String.t()
defp templates_path(view) do
{path, _, _} = view.__templates__()
path <> "/"
end
@spec raise_assign_not_found(atom, list) :: no_return
defp raise_assign_not_found(assign, current_keys) do
raise ArgumentError,
message: """
assign @#{assign} not found in Drab EEx template.
Please make sure all proper assigns have been set. If this
is a child template, ensure assigns are given explicitly by
the parent template as they are not automatically forwarded.
Available assigns:
#{inspect(current_keys)}
"""
end
@spec raise_partial_not_found(String.t()) :: no_return
defp raise_partial_not_found(path) do
raise ArgumentError,
message: """
template `#{path}` not found.
Please make sure this partial exists and has been compiled
by Drab (has *.drab extension).
If you want to poke assign to the partial which belong to
the other view, you need to specify the view name in `poke/4`.
"""
end
end # lib/drab/live.ex
defmodule Exqlite.Pragma do
@moduledoc """
Handles parsing extra options for the SQLite connection
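## Example
A sketch of how these helpers translate keyword options into `PRAGMA` values,
derived from the clauses below:
    iex> Exqlite.Pragma.journal_mode(journal_mode: :wal)
    "WAL"
    iex> Exqlite.Pragma.synchronous(synchronous: :off)
    0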
"""
def journal_mode(nil), do: journal_mode([])
def journal_mode(options) do
case Keyword.get(options, :journal_mode, :delete) do
:delete -> "DELETE"
:memory -> "MEMORY"
:off -> "OFF"
:persist -> "PERSIST"
:truncate -> "TRUNCATE"
:wal -> "WAL"
_ -> raise ArgumentError, ":journal_mode can only be :delete, :truncate, :persist, :memory, :wal, or :off"
end
end
def temp_store(nil), do: temp_store([])
def temp_store(options) do
case Keyword.get(options, :temp_store, :default) do
:file -> 1
:memory -> 2
:default -> 0
_ -> raise ArgumentError, ":temp_store can only be :memory, :file, or :default"
end
end
def synchronous(nil), do: synchronous([])
def synchronous(options) do
case Keyword.get(options, :synchronous, :normal) do
:extra -> 3
:full -> 2
:normal -> 1
:off -> 0
_ -> raise ArgumentError, "synchronous can only be :off, :full, :extra, or :normal"
end
end
def foreign_keys(nil), do: foreign_keys([])
def foreign_keys(options) do
case Keyword.get(options, :foreign_keys, :on) do
:off -> 0
:on -> 1
_ -> raise ArgumentError, ":foreign_keys can only be :on or :off"
end
end
def cache_size(nil), do: cache_size([])
def cache_size(options) do
Keyword.get(options, :cache_size, -2000)
end
def cache_spill(nil), do: cache_spill([])
def cache_spill(options) do
case Keyword.get(options, :cache_spill, :on) do
:off -> 0
:on -> 1
_ -> raise ArgumentError, ":cache_spill can only be :on or :off"
end
end
def case_sensitive_like(nil), do: case_sensitive_like([])
def case_sensitive_like(options) do
case Keyword.get(options, :case_sensitive_like, :off) do
:off -> 0
:on -> 1
_ -> raise ArgumentError, ":case_sensitive_like can only be :on or :off"
end
end
def auto_vacuum(nil), do: auto_vacuum([])
def auto_vacuum(options) do
case Keyword.get(options, :auto_vacuum, :none) do
:none -> 0
:full -> 1
:incremental -> 2
_ -> raise ArgumentError, ":auto_vacuum can only be :none, :full, or :incremental"
end
end
def locking_mode(nil), do: locking_mode([])
def locking_mode(options) do
case Keyword.get(options, :locking_mode, :normal) do
:normal -> "NORMAL"
:exclusive -> "EXCLUSIVE"
_ -> raise ArgumentError, ":locking_mode can only be :normal or :exclusive"
end
end
def secure_delete(nil), do: secure_delete([])
def secure_delete(options) do
case Keyword.get(options, :secure_delete, :off) do
:off -> 0
:on -> 1
_ -> raise ArgumentError, ":secure_delete can only be :on or :off"
end
end
def wal_auto_check_point(nil), do: wal_auto_check_point([])
def wal_auto_check_point(options) do
Keyword.get(options, :wal_auto_check_point, 1000)
end
end # lib/exqlite/pragma.ex
defmodule TelemetryMetricsStatsd do
@moduledoc """
`Telemetry.Metrics` reporter for StatsD-compatible metric servers.
To use it, start the reporter with the `start_link/1` function, providing it a list of
`Telemetry.Metrics` metric definitions:
import Telemetry.Metrics
TelemetryMetricsStatsd.start_link(
metrics: [
counter("http.request.count"),
sum("http.request.payload_size"),
last_value("vm.memory.total")
]
)
> Note that in a real project the reporter should be started under a supervisor, e.g. the main
> supervisor of your application.
By default the reporter sends metrics to 127.0.0.1:8125 - both hostname and port number can be
configured using the `:host` and `:port` options.
Note that the reporter doesn't aggregate metrics in-process - it sends metric updates to StatsD
whenever a relevant Telemetry event is emitted.
## Translation between Telemetry.Metrics and StatsD
In this section we walk through how the Telemetry.Metrics metric definitions are mapped to StatsD
metrics and their types at runtime.
Telemetry.Metrics metric names are translated as follows:
* if the metric name was provided as a string, e.g. `"http.request.count"`, it is sent to
StatsD server as-is
* if the metric name was provided as a list of atoms, e.g. `[:http, :request, :count]`, it is
first converted to a string by joining the segments with dots. In this example, the StatsD
metric name would be `"http.request.count"` as well
Since there are multiple implementations of StatsD and each of them provides a slightly different
set of features, other aspects of metric translation are controlled by the formatters.
The formatter can be selected using the `:formatter` option. Currently only two formats are
supported - `:standard` and `:datadog`.
The following table shows how `Telemetry.Metrics` metrics map to StatsD metrics:
| Telemetry.Metrics | StatsD |
|-------------------|--------|
| `last_value` | `gauge`, always set to an absolute value |
| `counter` | `counter`, always increased by 1 |
| `sum` | `gauge`, increased and decreased by the provided value |
| `summary` | `timer` recording individual measurement |
| `histogram` | Reported as histogram if DataDog formatter is used |
### The standard StatsD formatter
The `:standard` formatter is compatible with the
[Etsy implementation](https://github.com/statsd/statsd/blob/master/docs/metric_types.md) of StatsD.
Since this particular implementation doesn't support explicit tags, tag values are appended as
consecutive segments of the metric name. For example, given the definition
counter("db.query.count", tags: [:table, :operation])
and the event
:telemetry.execute([:db, :query], %{}, %{table: "users", operation: "select"})
the StatsD metric name would be `"db.query.count.users.select"`. Note that the tag values are
appended to the base metric name in the order they were declared in the metric definition.
Another important aspect of the standard formatter is that all measurements are converted to
integers, i.e. no floats are ever sent to the StatsD daemon.
Now to the metric types!
#### Counter
Telemetry.Metrics counter is simply represented as a StatsD counter. Each event the metric is
based on increments the counter by 1. To be more concrete, given the metric definition
counter("http.request.count")
and the event
:telemetry.execute([:http, :request], %{duration: 120})
the following line would be sent to StatsD
"http.request.count:1|c"
Note that the counter was bumped by 1, regardless of the measurements included in the event
(a careful reader will notice that the `:count` measurement we chose for the metric wasn't present
in the map of measurements at all!). Such behaviour conforms to the specification of counter as
defined by `Telemetry.Metrics` package - a counter should be incremented by 1 every time a given
event is dispatched.
#### Last value
Last value metric is represented as a StatsD gauge, whose values are always set to the value
of the measurement from the most recent event. With the following metric definition
last_value("vm.memory.total")
and the event
:telemetry.execute([:vm, :memory], %{total: 1024})
the following metric update would be sent to StatsD
"vm.memory.total:1024|g"
#### Sum
Sum metric is also represented as a gauge - the difference is that it always changes relatively
and is never set to an absolute value. Given metric definition below
sum("http.request.payload_size")
and the event
:telemetry.execute([:http, :request], %{payload_size: 1076})
the following line would be sent to StatsD
"http.request.payload_size:+1076|g"
When the measurement is negative, the StatsD gauge is decreased accordingly.
#### Summary
The summary is simply represented as a StatsD timer, since it should generate statistics about
gathered measurements. Given the metric definition below
summary("http.request.duration")
and the event
:telemetry.execute([:http, :request], %{duration: 120})
the following line would be send to StatsD
"http.request.duration:120|ms"
#### Distribution
There is no metric in the original StatsD implementation equivalent to the Telemetry.Metrics distribution.
However, histograms can be enabled for selected timer metrics in the
[StatsD daemon configuration](https://github.com/statsd/statsd/blob/master/docs/metric_types.md#timing).
Because of that, the distribution is also reported as a timer. For example, given the following metric
definition
distribution("http.request.duration", buckets: [0])
and the event
:telemetry.execute([:http, :request], %{duration: 120})
the following line would be sent to StatsD
"http.request.duration:120|ms"
Since histograms are configured on the StatsD server side, the `:buckets` option has no effect
when used with this reporter.
### The DataDog formatter
The DataDog formatter is compatible with [DogStatsD](https://docs.datadoghq.com/developers/dogstatsd/),
the DataDog StatsD service bundled with its agent.
#### Tags
The main difference from the standard formatter is that DataDog supports explicit tagging in its
protocol. Using the same example as with the standard formatter, given the following definition
counter("db.query.count", tags: [:table, :operation])
and the event
:telemetry.execute([:db, :query], %{}, %{table: "users", operation: "select"})
the metric update packet sent to StatsD would be `db.query.count:1|c|#table:users,operation:select`.
#### Metric types
The only difference between DataDog and standard StatsD metric types is that DataDog provides
a dedicated histogram metric. That's why Telemetry.Metrics distribution is translated to DataDog
histogram.
Also note that DataDog allows measurements to be floats, that's why no rounding is performed when
formatting the metric.
## Global tags
The library provides an option to specify a set of global tag values, which are available to all
metrics running under the reporter.
For example, if you're running your application in multiple deployment environments (staging, production,
etc.), you might set the environment as a global tag:
TelemetryMetricsStatsd.start_link(
metrics: [
counter("http.request.count", tags: [:env])
],
global_tags: [env: "prod"]
)
Note that if the global tag is to be sent with the metric, the metric needs to have it listed under the
`:tags` option, just like any other tag.
Also, if the same key is configured as a global tag and emitted as a part of event metadata or returned
by the `:tag_values` function, the metadata/`:tag_values` take precedence and override the global tag
value.
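For example, with `global_tags: [env: "prod"]` and the counter defined above, executing
    :telemetry.execute([:http, :request], %{}, %{env: "staging"})
would report the metric with the `env` tag set to `"staging"`, not `"prod"`.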
## Prefixing metric names
Sometimes it's convenient to prefix all metric names with a particular value, to group them by the
name of the service, the host, or something else. You can use the `:prefix` option to provide a prefix
which will be prepended to all metrics published by the reporter (regardless of the formatter used).
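For example, a sketch assuming `metrics` is your list of metric definitions:
    TelemetryMetricsStatsd.start_link(
      metrics: metrics,
      prefix: "my-service"
    )
With this prefix in place, a metric named `"http.request.count"` would be published
as `"my-service.http.request.count"`.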
## Maximum datagram size
Metrics are sent to StatsD over UDP, so it's important that the size of the datagram does not
exceed the Maximum Transmission Unit, or MTU, of the link, so that no data is lost on the way.
By default the reporter will break up the datagrams at 512 bytes, but this is configurable via
the `:mtu` option.
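For example, a sketch raising the limit for a link with a 1500-byte Ethernet MTU
(1432 leaves room for the IP and UDP headers):
    TelemetryMetricsStatsd.start_link(
      metrics: metrics,
      mtu: 1432
    )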
"""
use GenServer
require Logger
alias Telemetry.Metrics
alias TelemetryMetricsStatsd.{EventHandler, UDP}
@type prefix :: String.t() | nil
@type host :: String.t() | :inet.ip_address()
@type option ::
{:port, :inet.port_number()}
| {:host, host()}
| {:metrics, [Metrics.t()]}
| {:mtu, non_neg_integer()}
| {:prefix, prefix()}
| {:formatter, :standard | :datadog}
| {:global_tags, Keyword.t()}
@type options :: [option]
@default_port 8125
@default_mtu 512
@default_formatter :standard
@doc """
Reporter's child spec.
This function allows you to start the reporter under a supervisor like this:
children = [
{TelemetryMetricsStatsd, options}
]
See `start_link/1` for a list of available options.
"""
@spec child_spec(options) :: Supervisor.child_spec()
def child_spec(options) do
%{id: __MODULE__, start: {__MODULE__, :start_link, [options]}}
end
@doc """
Starts a reporter and links it to the calling process.
The available options are:
* `:metrics` - a list of Telemetry.Metrics metric definitions which will be published by the
reporter
* `:host` - hostname or IP address of the StatsD server. Defaults to `{127, 0, 0, 1}`. Keep
in mind Erlang's UDP implementation looks up the hostname each time it sends a packet.
Furthermore, telemetry handlers are blocking. For latency-critical applications, it is best
to use an IP here (or resolve it on startup).
* `:port` - port number of the StatsD server. Defaults to `8125`.
* `:formatter` - determines the format of the metrics sent to the target server. Can be either
`:standard` or `:datadog`. Defaults to `:standard`.
* `:prefix` - a prefix prepended to the name of each metric published by the reporter. Defaults
to `nil`.
* `:mtu` - Maximum Transmission Unit of the link between your application and the StatsD server in
bytes. This value should not be greater than the actual MTU since this could lead to data loss
when the metrics are published. Defaults to `512`.
* `:global_tags` - Additional default tag values to be sent along with every published metric. These
can be overridden by tags sent via the `:telemetry.execute` call.
You can read more about all the options in the `TelemetryMetricsStatsd` module documentation.
## Example
import Telemetry.Metrics
TelemetryMetricsStatsd.start_link(
metrics: [
counter("http.request.count"),
sum("http.request.payload_size"),
last_value("vm.memory.total")
],
prefix: "my-service"
)
"""
@spec start_link(options) :: GenServer.on_start()
def start_link(options) do
config =
options
|> Enum.into(%{})
|> Map.put_new(:host, {127, 0, 0, 1})
|> Map.update!(:host, fn host ->
if(is_binary(host), do: to_charlist(host), else: host)
end)
|> Map.put_new(:port, @default_port)
|> Map.put_new(:mtu, @default_mtu)
|> Map.put_new(:prefix, nil)
|> Map.put_new(:formatter, @default_formatter)
|> Map.update!(:formatter, &validate_and_translate_formatter/1)
|> Map.put_new(:global_tags, Keyword.new())
GenServer.start_link(__MODULE__, config)
end
@doc false
@spec get_udp(pid()) :: UDP.t()
def get_udp(reporter) do
GenServer.call(reporter, :get_udp)
end
@doc false
@spec udp_error(pid(), UDP.t(), reason :: term) :: :ok
def udp_error(reporter, udp, reason) do
GenServer.cast(reporter, {:udp_error, udp, reason})
end
@impl true
def init(config) do
metrics = Map.fetch!(config, :metrics)
case UDP.open(config.host, config.port) do
{:ok, udp} ->
Process.flag(:trap_exit, true)
handler_ids =
EventHandler.attach(
metrics,
self(),
config.mtu,
config.prefix,
config.formatter,
config.global_tags
)
{:ok, %{udp: udp, handler_ids: handler_ids, host: config.host, port: config.port}}
{:error, reason} ->
{:error, {:udp_open_failed, reason}}
end
end
@impl true
def handle_call(:get_udp, _from, state) do
{:reply, state.udp, state}
end
@impl true
def handle_cast({:udp_error, udp, reason}, %{udp: udp} = state) do
Logger.error("Failed to publish metrics over UDP: #{inspect(reason)}")
case UDP.open(state.host, state.port) do
{:ok, udp} ->
{:noreply, %{state | udp: udp}}
{:error, reason} ->
Logger.error("Failed to reopen UDP socket: #{inspect(reason)}")
{:stop, {:udp_open_failed, reason}, state}
end
end
def handle_cast({:udp_error, _, _}, state) do
{:noreply, state}
end
@impl true
def handle_info({:EXIT, _pid, reason}, state) do
{:stop, reason, state}
end
@impl true
def terminate(_reason, state) do
EventHandler.detach(state.handler_ids)
:ok
end
defp validate_and_translate_formatter(:standard), do: TelemetryMetricsStatsd.Formatter.Standard
defp validate_and_translate_formatter(:datadog), do: TelemetryMetricsStatsd.Formatter.Datadog
defp validate_and_translate_formatter(_),
do: raise(ArgumentError, ":formatter needs to be either :standard or :datadog")
end # lib/telemetry_metrics_statsd.ex
defmodule Mix.Tasks.SyncGmailInbox do
use Mix.Task
@shortdoc "Script to test the upcoming Gmail inbox sync feature"
@moduledoc """
Example:
```
$ mix sync_gmail_inbox [ACCOUNT_ID]
$ mix sync_gmail_inbox [ACCOUNT_ID] [HISTORY_ID]
$ mix sync_gmail_inbox [ACCOUNT_ID] [HISTORY_ID] [LABEL_ID]
```
"""
require Logger
alias ChatApi.{Conversations, Customers, Google, Messages, Users}
alias ChatApi.Google.{Gmail, GmailConversationThread, GoogleAuthorization}
@spec run([binary()]) :: :ok
def run(args) do
Application.ensure_all_started(:chat_api)
case args do
[account_id] ->
sync_messages(account_id)
[account_id, history_id] ->
sync_messages(account_id, history_id)
[account_id, history_id, label_id] ->
sync_messages_by_label(account_id, history_id, label_id)
end
end
@spec sync_messages(binary()) :: :ok
def sync_messages(account_id) do
with %GoogleAuthorization{
refresh_token: refresh_token,
metadata: %{"next_history_id" => start_history_id}
} = authorization <-
Google.get_authorization_by_account(account_id, %{client: "gmail", type: "support"}),
{:ok, %{body: %{"emailAddress" => email}}} <- Gmail.get_profile(refresh_token),
{:ok, %{body: %{"historyId" => next_history_id, "history" => [_ | _] = history}}} <-
Gmail.list_history(refresh_token,
start_history_id: start_history_id,
history_types: "messageAdded"
) do
Logger.info("Authenticated email: #{inspect(email)}")
sync(history, authorization)
{:ok, _auth} =
Google.update_google_authorization(authorization, %{
metadata: %{next_history_id: next_history_id}
})
:ok
else
error -> Logger.info("Unable to sync Gmail messages: #{inspect(error)}")
end
end
@spec sync_messages(binary(), binary()) :: :ok
def sync_messages(account_id, start_history_id) do
with %GoogleAuthorization{refresh_token: refresh_token} = authorization <-
Google.get_authorization_by_account(account_id, %{client: "gmail", type: "support"}),
{:ok, %{body: %{"emailAddress" => email}}} <- Gmail.get_profile(refresh_token),
{:ok, %{body: %{"historyId" => next_history_id, "history" => [_ | _] = history}}} <-
Gmail.list_history(refresh_token,
start_history_id: start_history_id,
history_types: "messageAdded"
) do
Logger.info("Authenticated email: #{inspect(email)}")
sync(history, authorization)
{:ok, _auth} =
Google.update_google_authorization(authorization, %{
metadata: %{next_history_id: next_history_id}
})
:ok
else
error ->
Logger.info("Unable to sync Gmail messages: #{inspect(error)}")
end
end
@spec sync_messages_by_label(binary(), binary(), binary()) :: :ok | :error
def sync_messages_by_label(account_id, start_history_id, label_id) do
with %GoogleAuthorization{refresh_token: refresh_token} = authorization <-
Google.get_authorization_by_account(account_id, %{client: "gmail", type: "support"}),
{:ok, %{body: %{"emailAddress" => email}}} <- Gmail.get_profile(refresh_token),
{:ok, %{body: %{"historyId" => next_history_id, "history" => [_ | _] = history}}} <-
Gmail.list_history(refresh_token,
start_history_id: start_history_id,
label_id: label_id
) do
Logger.info("Authenticated email: #{inspect(email)}")
sync(history, authorization, "labelsAdded")
{:ok, _auth} =
Google.update_google_authorization(authorization, %{
metadata: %{next_history_id: next_history_id}
})
:ok
else
error ->
Logger.info("Unable to sync Gmail messages: #{inspect(error)}")
end
end
@spec sync(list(), GoogleAuthorization.t(), binary()) :: :ok
def sync(
history,
%GoogleAuthorization{refresh_token: refresh_token} = authorization,
event \\ "messagesAdded"
) do
# TODO: handle case where history results exist on next page token
history
|> Enum.flat_map(fn h ->
h |> Map.get(event, []) |> Enum.map(fn m -> m["message"] end)
end)
|> Enum.uniq_by(fn %{"threadId" => thread_id} -> thread_id end)
|> Enum.map(fn %{"threadId" => thread_id} ->
case Gmail.get_thread(refresh_token, thread_id) do
{:ok, %{body: thread}} ->
Gmail.format_thread(thread, exclude_labels: ["SPAM", "DRAFT", "CATEGORY_PROMOTIONS"])
_ ->
nil
end
end)
|> Enum.reject(&skip_processing_thread?/1)
|> Enum.each(fn thread ->
process_thread(thread, authorization)
# Sleep 1s between each thread
Process.sleep(1000)
end)
end
@spec skip_processing_thread?(Gmail.GmailThread.t() | nil) :: boolean
def skip_processing_thread?(nil), do: true
def skip_processing_thread?(%Gmail.GmailThread{} = thread) do
case thread do
%{messages: []} ->
true
%{messages: [_ | _] = messages} ->
Enum.all?(messages, fn msg ->
Enum.all?(msg.label_ids, fn label ->
Enum.member?(["CATEGORY_FORUM", "CATEGORY_UPDATES", "CATEGORY_SOCIAL"], label)
end)
end)
_ ->
false
end
end
@spec process_thread(Gmail.GmailThread.t(), GoogleAuthorization.t()) :: [Messages.Message.t()]
def process_thread(
%Gmail.GmailThread{thread_id: gmail_thread_id} = thread,
%GoogleAuthorization{} = authorization
) do
Logger.info("Processing thread: #{inspect(thread)}")
case Google.find_gmail_conversation_thread(%{gmail_thread_id: gmail_thread_id}) do
nil ->
handle_new_thread(thread, authorization)
gmail_conversation_thread ->
handle_existing_thread(thread, authorization, gmail_conversation_thread)
end
end
@spec handle_existing_thread(
Gmail.GmailThread.t(),
GoogleAuthorization.t(),
GmailConversationThread.t()
) :: [Messages.Message.t()]
def handle_existing_thread(
%Gmail.GmailThread{messages: [_ | _] = messages} = _thread,
%GoogleAuthorization{} = authorization,
%GmailConversationThread{conversation_id: conversation_id} = gmail_conversation_thread
) do
existing_gmail_ids =
conversation_id
|> Conversations.get_conversation!()
|> Map.get(:messages, [])
|> Enum.map(fn
%{metadata: %{"gmail_id" => gmail_id}} -> gmail_id
_ -> nil
end)
|> MapSet.new()
messages
|> Enum.reject(fn message ->
MapSet.member?(existing_gmail_ids, message.id)
end)
|> Enum.map(fn message ->
process_new_message(message, authorization, gmail_conversation_thread)
end)
end
@spec handle_new_thread(
Gmail.GmailThread.t(),
GoogleAuthorization.t()
) :: [Messages.Message.t()]
def handle_new_thread(
%Gmail.GmailThread{thread_id: gmail_thread_id, messages: [_ | _] = messages} = _thread,
%GoogleAuthorization{
account_id: account_id,
inbox_id: inbox_id,
user_id: authorization_user_id
} = authorization
) do
initial_message = List.first(messages)
was_proactively_sent = initial_message |> Map.get(:label_ids, []) |> Enum.member?("SENT")
[user_email, customer_email] =
if was_proactively_sent do
[initial_message.from, initial_message.to] |> Enum.map(&Gmail.extract_email_address/1)
else
[initial_message.to, initial_message.from] |> Enum.map(&Gmail.extract_email_address/1)
end
{:ok, customer} = Customers.find_or_create_by_email(customer_email, account_id)
assignee_id =
case Users.find_user_by_email(user_email, account_id) do
nil -> authorization_user_id
result -> result.id
end
{:ok, conversation} =
Conversations.create_conversation(%{
account_id: account_id,
inbox_id: inbox_id,
customer_id: customer.id,
assignee_id: assignee_id,
subject: initial_message.subject,
source: "email"
})
conversation
|> Conversations.Notification.broadcast_new_conversation_to_admin!()
|> Conversations.Notification.notify(:webhooks, event: "conversation:created")
{:ok, gmail_conversation_thread} =
Google.create_gmail_conversation_thread(%{
gmail_thread_id: gmail_thread_id,
gmail_initial_subject: initial_message.subject,
conversation_id: conversation.id,
account_id: account_id
})
Enum.map(messages, fn message ->
process_new_message(message, authorization, gmail_conversation_thread)
end)
end
@spec process_new_message(
Gmail.GmailMessage.t(),
GoogleAuthorization.t(),
GmailConversationThread.t()
) :: Messages.Message.t()
def process_new_message(
%Gmail.GmailMessage{} = message,
%GoogleAuthorization{
account_id: account_id,
user_id: authorization_user_id
},
%GmailConversationThread{conversation_id: conversation_id}
) do
sender_email = Gmail.extract_email_address(message.from)
admin_user = Users.find_user_by_email(sender_email, account_id)
is_sent = message |> Map.get(:label_ids, []) |> Enum.member?("SENT")
sender_params =
case {admin_user, is_sent} do
{%Users.User{id: user_id}, _} ->
%{user_id: user_id}
{_, true} ->
%{user_id: authorization_user_id}
{_, false} ->
{:ok, customer} = Customers.find_or_create_by_email(sender_email, account_id)
%{customer_id: customer.id}
end
sender_params
|> Map.merge(%{
body: message.formatted_text,
conversation_id: conversation_id,
account_id: account_id,
source: "email",
metadata: Gmail.format_message_metadata(message),
sent_at:
with {unix, _} <- Integer.parse(message.ts),
{:ok, datetime} <- DateTime.from_unix(unix, :millisecond) do
datetime
else
_ -> DateTime.utc_now()
end
})
|> Messages.create_and_fetch!()
|> Messages.Notification.notify(:webhooks)
|> Messages.Helpers.handle_post_creation_hooks()
end
end # lib/mix/tasks/sync_gmail_inbox.ex
defmodule Nebulex.Adapter.Persistence do
@moduledoc ~S"""
Specifies the adapter persistence API.
## Default implementation
This module provides a default implementation that uses `File` and `Stream`
under the hood. For dumping a cache to a file, the entries are streamed from
the cache and written in chunks (one chunk per line), with each chunk
containing N entries. For loading the entries from a file, the file is read
and streamed line-by-line, so that the entries collected on each line are
inserted in streaming fashion as well.
The default implementation accepts the following options only for the `dump`
operation (there are no options for `load`):
* `objects_per_line` - The number of objects to be written per line in the
file. Defaults to `10`.
* `compression` - The compression level. The values are the same as for
`:erlang.term_to_binary/2`. Defaults to `6`.
See `c:Nebulex.Cache.dump/2` and `c:Nebulex.Cache.load/2` for more
information.
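## Example
A sketch assuming `MyCache` is a cache whose adapter uses this default implementation:
    :ok = MyCache.dump("/tmp/my_cache", objects_per_line: 50, compression: 9)
    :ok = MyCache.load("/tmp/my_cache")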
"""
@doc false
defmacro __using__(_opts) do
quote do
@behaviour Nebulex.Adapter.Persistence
alias Nebulex.Object
@impl true
def dump(cache, path, opts) do
path
|> File.open([:read, :write], fn io_dev ->
nil
|> cache.stream(return: :object)
|> Stream.filter(&(not Object.expired?(&1)))
|> Stream.chunk_every(Keyword.get(opts, :objects_per_line, 10))
|> Enum.each(fn objs ->
bin = Object.encode(objs, get_compression(opts))
:ok = IO.puts(io_dev, bin)
end)
end)
|> handle_response()
end
@impl true
def load(cache, path, _opts) do
path
|> File.open([:read], fn io_dev ->
io_dev
|> IO.stream(:line)
|> Stream.map(&String.trim/1)
|> Enum.each(fn line ->
objs = Object.decode(line, [:safe])
cache.__adapter__.set_many(cache, objs, [])
end)
end)
|> handle_response()
end
## Helpers
defp handle_response({:ok, _}), do: :ok
defp handle_response({:error, _} = error), do: error
defp get_compression(opts) do
case Keyword.get(opts, :compression) do
value when is_integer(value) and value >= 0 and value < 10 ->
[compressed: value]
_ ->
[:compressed]
end
end
end
end
@doc """
Dumps a cache to the given file `path`.
Returns `:ok` if successful, or `{:error, reason}` if an error occurs.
See `c:Nebulex.Cache.dump/2`.
"""
@callback dump(
cache :: Nebulex.Cache.t(),
path :: Path.t(),
opts :: Nebulex.Cache.opts()
) :: :ok | {:error, term}
@doc """
Loads a dumped cache from the given `path`.
Returns `:ok` if successful, or `{:error, reason}` if an error occurs.
See `c:Nebulex.Cache.load/2`.
"""
@callback load(
cache :: Nebulex.Cache.t(),
path :: Path.t(),
opts :: Nebulex.Cache.opts()
) :: :ok | {:error, term}
end # lib/nebulex/adapter/persistence.ex
defmodule Bureaucrat.SwaggerSlateMarkdownWriter do
@moduledoc """
This markdown writer integrates swagger information and outputs in a slate-friendly markdown format.
It requires that the decoded swagger data be available via Application.get_env(:bureaucrat, :swagger),
eg by passing it as an option to the Bureaucrat.start/1 function.
"""
alias Bureaucrat.JSON
alias Plug.Conn
# pipeline-able puts
defp puts(file, string) do
IO.puts(file, string)
file
end
@doc """
Writes a list of Plug.Conn records to the given file path.
Each Conn should have request and response data populated,
and the private.phoenix_controller, private.phoenix_action values set for linking to swagger.
"""
def write(records, path) do
{:ok, file} = File.open(path, [:write, :utf8])
swagger = Application.get_env(:bureaucrat, :swagger)
file
|> write_overview(swagger)
|> write_authentication(swagger)
|> write_models(swagger)
records
|> tag_records(swagger)
|> group_records()
|> Enum.each(fn {tag, records_by_operation_id} ->
write_operations_for_tag(file, tag, records_by_operation_id, swagger)
end)
end
@doc """
Writes the document title and api summary description.
This corresponds to the info section of the swagger document.
"""
def write_overview(file, swagger) do
info = swagger["info"]
file
|> puts("""
---
title: #{info["title"]}
search: true
---
# #{info["title"]}
#{info["description"]}
""")
end
@doc """
Writes the authentication details to the given file.
This corresponds to the securityDefinitions section of the swagger document.
"""
def write_authentication(file, %{"security" => security} = swagger) do
file
|> puts("# Authentication\n")
# TODO: Document token based security
Enum.each(security, fn securityRequirement ->
name = Map.keys(securityRequirement) |> List.first()
definition = swagger["securityDefinitions"][name]
file
|> puts("## #{definition["type"]}\n")
|> puts("#{definition["description"]}\n")
end)
file
end
def write_authentication(file, _), do: file
@doc """
Writes the API request/response model schemas to the given file.
This corresponds to the definitions section of the swagger document.
Each top level definition will be written as a table.
Nested objects are flattened out to reduce the number of tables being produced.
"""
def write_models(file, swagger) do
puts(file, "# Models\n")
Enum.each(swagger["definitions"], fn definition ->
write_model(file, swagger, definition)
end)
file
end
@doc """
Writes a single API model schema to the given file.
Most of the work is delegated to the recursive `write_model_properties/4` function.
The example JSON is output before the table just so Slate will align them.
"""
def write_model(file, swagger, {name, model_schema}) do
file
|> puts("## #{name}\n")
|> puts("#{model_schema["description"]}")
|> write_model_example(model_schema)
|> puts("|Property|Description|Type|Required|")
|> puts("|--------|-----------|----|--------|")
|> write_model_properties(swagger, model_schema)
|> puts("")
end
def write_model_example(file, %{"example" => example}) do
json = JSON.encode!(example, pretty: true)
file
|> puts("\n```json")
|> puts(json)
|> puts("```\n")
end
def write_model_example(file, _) do
puts(file, "")
end
@doc """
Writes the fields of the given model to file.
prefix is output before each property name to enable nested objects to be flattened.
"""
def write_model_properties(file, swagger, model_schema, prefix \\ "") do
{objects, primitives} =
model_schema["properties"]
|> Enum.split_with(fn {_key, schema} -> schema["type"] == "object" end)
ordered = Enum.concat(primitives, objects)
Enum.each(ordered, fn {property, property_details} ->
{property_details, type} = resolve_type(swagger, property_details)
required? = is_required(property, model_schema)
write_model_property(file, swagger, "#{prefix}#{property}", property_details, type, required?)
end)
file
end
def resolve_type(swagger, %{"$ref" => schema_ref}) do
schema_name = String.replace_prefix(schema_ref, "#/definitions/", "")
property_details = swagger["definitions"][schema_name]
type = schema_ref_to_link(schema_ref)
{property_details, type}
end
def resolve_type(_swagger, property_details) do
{property_details, property_details["type"]}
end
def write_model_property(file, swagger, property, property_details, "object", _required?) do
write_model_properties(file, swagger, property_details, "#{property}.")
end
def write_model_property(file, swagger, property, property_details, "array", required?) do
schema = property_details["items"]
# TODO: handle arrays with inline schema
schema_ref = if schema != nil, do: schema["$ref"], else: nil
type = if schema_ref != nil, do: "array(#{schema_ref_to_link(schema_ref)})", else: "array(any)"
write_model_property(file, swagger, property, property_details, type, required?)
end
def write_model_property(file, _swagger, property, property_details, type, required?) do
puts(file, "|#{property}|#{property_details["description"]}|#{type}|#{required?}|")
end
defp is_required(property, %{"required" => required}), do: property in required
defp is_required(_property, _schema), do: false
# Converts a schema reference, e.g. "#/definitions/User", to a markdown link: "[User](#user)"
def schema_ref_to_link("#/definitions/" <> type) do
"[#{type}](##{String.downcase(type)})"
end
@doc """
Populate each test record with private.swagger_tag and private.operation_id from swagger.
"""
def tag_records(records, swagger) do
tags_by_operation_id =
for {_path, actions} <- swagger["paths"],
{_action, details} <- actions do
[first_tag | _] = details["tags"]
{details["operationId"], first_tag}
end
|> Enum.into(%{})
Enum.map(records, &tag_record(&1, tags_by_operation_id))
end
@doc """
Tag a single record with swagger tag and operation_id.
"""
def tag_record(conn, tags_by_operation_id) do
operation_id = conn.assigns.bureaucrat_opts[:operation_id]
Conn.put_private(conn, :swagger_tag, tags_by_operation_id[operation_id])
end
@doc """
Group a list of tagged records, first by tag, then by operation_id.
"""
def group_records(records) do
by_tag = Enum.group_by(records, & &1.private.swagger_tag)
Enum.map(by_tag, fn {tag, records_with_tag} ->
by_operation_id = Enum.group_by(records_with_tag, & &1.assigns.bureaucrat_opts[:operation_id])
{tag, by_operation_id}
end)
end
@doc """
Writes the API details and examples for operations having the given tag.
`tag` roughly corresponds to a Phoenix controller, e.g. "Users".
`records_by_operation_id` are the examples collected during tests, grouped by operationId (Controller.action).
"""
def write_operations_for_tag(file, tag, records_by_operation_id, swagger) do
tag_details = swagger["tags"] |> Enum.find(&(&1["name"] == tag))
file
|> puts("# #{tag}\n")
|> puts("#{tag_details["description"]}\n")
Enum.each(records_by_operation_id, fn {operation_id, records} ->
write_action(file, operation_id, records, swagger)
end)
file
end
@doc """
Writes all examples of a given operation (Controller action) to file.
"""
def write_action(file, operation_id, records, swagger) do
details = find_operation_by_id(swagger, operation_id)
puts(file, "## #{details["summary"]}\n")
# write examples before params/schemas to get correct alignment in slate
Enum.each(records, &write_example(file, &1))
file
|> puts("#{details["description"]}\n")
|> write_parameters(details)
|> write_responses(details)
end
@doc """
Find the details of an API operation in swagger by operationId
"""
def find_operation_by_id(swagger, operation_id) do
Enum.flat_map(swagger["paths"], fn {_path, actions} ->
Enum.map(actions, fn {_action, details} -> details end)
end)
|> Enum.find(fn details ->
details["operationId"] == operation_id
end)
end
@doc """
Writes the parameters table for given swagger operation to file.
Uses the vendor extension "x-example" to provide example of each parameter.
TODO: detailed schema validation rules aren't shown yet (min/max/regex/etc...)
"""
def write_parameters(file, _ = %{"parameters" => params}) when length(params) > 0 or map_size(params) > 0 do
file
|> puts("#### Parameters\n")
|> puts("| Parameter | Description | In |Type | Required | Default | Example |")
|> puts("|-------------|-------------|----|----------|----------|---------|---------|")
Enum.each(params, fn param ->
content =
["name", "description", "in", "type", "required", "default", "x-example"]
|> Enum.map(¶m[&1])
|> Enum.map(&encode_parameter_table_cell/1)
|> Enum.join("|")
puts(file, "|#{content}|")
end)
puts(file, "")
end
def write_parameters(file, _), do: file
# Encode parameter table cell values as strings, using the JSON library to convert lists/maps
defp encode_parameter_table_cell(param) when is_map(param) or is_list(param), do: JSON.encode!(param)
defp encode_parameter_table_cell(param), do: to_string(param)
@doc """
Writes the responses table for given swagger operation to file.
Swagger only allows a single description per status code, which can be limiting
when trying to describe all possible error responses. To work around this, add
markdown links into the description.
"""
def write_responses(file, swagger_operation) do
file
|> puts("#### Responses\n")
|> puts("| Status | Description | Schema |")
|> puts("|--------|-------------|--------|")
Enum.each(swagger_operation["responses"], fn {status, response} ->
ref = get_in(response, ["schema", "$ref"])
schema = if ref, do: schema_ref_to_link(ref), else: ""
puts(file, "|#{status} | #{response["description"]} | #{schema}|")
end)
end
@doc """
Writes a single request/response example to file
"""
def write_example(file, record) do
path =
case record.query_string do
"" -> record.request_path
str -> "#{record.request_path}?#{str}"
end
# Request with path and headers
file
|> puts("> #{record.assigns.bureaucrat_desc}\n")
|> puts("```plaintext")
|> puts("#{record.method} #{path}")
|> write_headers(record.req_headers)
|> puts("```\n")
# Request Body if applicable
unless record.body_params == %{} do
file
|> puts("```json")
|> puts("#{JSON.encode!(record.body_params, pretty: true)}")
|> puts("```\n")
end
# Response with status and headers
file
|> puts("> Response\n")
|> puts("```plaintext")
|> puts("#{record.status}")
|> write_headers(record.resp_headers)
|> puts("```\n")
# Response body
file
|> puts("```json")
|> puts("#{format_resp_body(record.resp_body)}")
|> puts("```\n")
end
@doc """
Write the list of request/response headers
"""
def write_headers(file, headers) do
Enum.each(headers, fn {header, value} ->
puts(file, "#{header}: #{value}")
end)
file
end
@doc """
Pretty-prints a JSON response body, returning an empty string when the body is empty.
"""
def format_resp_body(string) do
case string do
"" -> ""
_ -> string |> JSON.decode!() |> JSON.encode!(pretty: true)
end
end
end | lib/bureaucrat/swagger_slate_markdown_writer.ex | 0.677154 | 0.475544 | swagger_slate_markdown_writer.ex | starcoder |
defmodule ApaDiv do
@moduledoc """
APA : Arbitrary Precision Arithmetic - Division - ApaDiv.
"""
# Used in division to prevent infinite loops (e.g. with repeating decimals)
@precision_default Application.get_env(:apa, :precision_default, -1)
@precision_limit if @precision_default == -1, do: 28, else: @precision_default
# Nearly correct cutoff: :math.log10/1 works on floats, which overflow around
# 1.0e308, so sufficiently large integers must take the string-length path in
# digits_length/1 below.
@max_erlang_math_log 123_456_789_012_345_678_901_234_567_890_123_456_789_012_345_678_901_234_567_890_123_456_789_012_345_678_901_234_567_890_123_456_789_012_345_678_901_234_567_890_123_456_789_012_345_678_901_234_567_890_123_456_789_012_345_678_901_234_567_890_123_456_789_012_345_678_901_234_567_890_123_456_789_012_345_678_901_234_567_890_123_456_789_012_345_678_901_234_567_890_123_456_789_012_345_678_901_234_567_890_123_456_789
@doc """
Division - internal function - please call Apa.div(left, right)
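
## Example

Illustrative sketch; the exact string depends on `Apa.to_string/3` and the
configured precision/scale:

    # 1/3 to 5 significant digits:
    # ApaDiv.bc_div("1", "3", 5, -1)
    # expected: "0.33333"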
"""
@spec bc_div(term(), term(), integer(), integer()) :: String.t()
def bc_div(left, right, precision, scale) do
{left_int, left_exp} = Apa.new(left)
{right_int, right_exp} = Apa.new(right)
Apa.to_string(
bc_div_apa_number({left_int, left_exp}, {right_int, right_exp}, precision),
precision,
scale
)
end
# bc_div_apa_number/2
@spec bc_div_apa_number({integer(), integer()}, {integer(), integer()}) ::
{integer(), integer()}
def bc_div_apa_number({left_int, left_exp}, {right_int, right_exp}) do
bc_div_apa_number({left_int, left_exp}, {right_int, right_exp}, @precision_limit)
end
# bc_div_apa_number/3
@spec bc_div_apa_number({integer(), integer()}, {integer(), integer()}, integer()) ::
{integer(), integer()}
def bc_div_apa_number({0, _left_exp}, {0, _right_exp}, _precision) do
raise(ArgumentError, "Impossible operation - division by zero - 0 / 0 - see doc.")
end
def bc_div_apa_number({_left_int, _left_exp}, {0, _right_exp}, _precision) do
raise(ArgumentError, "Impossible operation - division by zero - divisor == 0 - see doc.")
end
def bc_div_apa_number({left_int, left_exp}, {right_int, right_exp}, precision) do
precision_limit = if precision < 0, do: @precision_limit, else: precision
bc_div_apa_number(
{left_int, left_exp},
{right_int, right_exp},
Kernel.rem(left_int, right_int),
0,
precision_limit
)
end
# bc_div_apa_number/5
defp bc_div_apa_number({left_int, left_exp}, {right_int, right_exp}, 0, _acc, _precision_limit) do
{Kernel.div(left_int, right_int), left_exp - right_exp}
end
defp bc_div_apa_number(
{left_int, left_exp},
{right_int, right_exp},
_rem,
_acc,
precision_limit
) do
left_digits = digits_length(abs(left_int))
right_digits = digits_length(abs(right_int))
diff_precision = right_digits - left_digits + precision_limit
{new_left_int, new_left_exp} = shift({left_int, left_exp}, diff_precision)
{Kernel.div(new_left_int, right_int), new_left_exp - right_exp}
end
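# Scales the coefficient up by 10^pow_value while lowering the exponent by the
# same amount, so the represented value is unchanged,
# e.g. shift({1, 0}, 3) == {1000, -3}.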
defp shift({int_value, exp}, pow_value) when pow_value > 0 do
{int_value * ApaNumber.pow10(pow_value), exp - pow_value}
end
defp shift({int_value, exp}, pow_value) when pow_value <= 0 do
{int_value, exp}
end
defp digits_length(0), do: 1
defp digits_length(int_value) when int_value < @max_erlang_math_log do
trunc(:math.log10(int_value)) + 1
end
defp digits_length(int_value) do
int_value
|> Integer.to_string()
|> Kernel.byte_size()
end
end | lib/apa_div.ex | 0.753104 | 0.418222 | apa_div.ex | starcoder |
defmodule CanvasAPI.CanvasController do
use CanvasAPI.Web, :controller
alias CanvasAPI.CanvasService
plug CanvasAPI.CurrentAccountPlug when not action in [:show]
plug CanvasAPI.CurrentAccountPlug, [permit_none: true] when action in [:show]
plug :ensure_team when not action in [:show]
plug :ensure_user when not action in [:show]
plug :ensure_canvas when action in [:update]
@md_extensions ~w(markdown md mdown text txt)
@spec create(Plug.Conn.t, Plug.Conn.params) :: Plug.Conn.t
def create(conn, params) do
%{current_user: current_user, current_team: current_team} = conn.private
case CanvasService.create(
get_in(params, ~w(data attributes)),
creator: current_user,
team: current_team,
template: get_in(params, ~w(data relationships template data)),
notify: current_team.slack_id && current_user) do
{:ok, canvas} ->
conn
|> put_status(:created)
|> render("show.json", canvas: canvas)
{:error, changeset} ->
unprocessable_entity(conn, changeset)
end
end
@spec index(Plug.Conn.t, Plug.Conn.params) :: Plug.Conn.t
def index(conn, _params) do
canvases = CanvasService.list(user: conn.private.current_user)
render(conn, "index.json", canvases: canvases)
end
@spec index_templates(Plug.Conn.t, Plug.Conn.params) :: Plug.Conn.t
def index_templates(conn, _params) do
templates =
CanvasService.list(user: conn.private.current_user, only_templates: true)
render(conn, "index.json", canvases: templates)
end
@spec show(Plug.Conn.t, Plug.Conn.params) :: Plug.Conn.t
def show(conn, params = %{"id" => id, "team_id" => team_id}) do
case CanvasService.show(id,
account: conn.private.current_account,
team_id: team_id) do
{:ok, canvas} ->
render_show(conn, canvas, params["trailing_format"])
{:error, :not_found} ->
not_found(conn)
end
end
@spec update(Plug.Conn.t, Plug.Conn.params) :: Plug.Conn.t
def update(conn, params) do
%{current_user: current_user, current_team: current_team} = conn.private
case CanvasService.update(
conn.private.canvas,
get_in(params, ~w(data attributes)),
template: get_in(params, ~w(data relationships template data)),
notify: current_team.slack_id && current_user) do
{:ok, canvas} ->
render_show(conn, canvas)
{:error, changeset} ->
unprocessable_entity(conn, changeset)
end
end
@spec delete(Plug.Conn.t, Plug.Conn.params) :: Plug.Conn.t
def delete(conn, %{"id" => id}) do
account = conn.private.current_account
case CanvasService.delete(id, account: account) do
{:ok, _} ->
no_content(conn)
{:error, changeset} ->
unprocessable_entity(conn, changeset)
nil ->
not_found(conn)
end
end
@spec ensure_canvas(Plug.Conn.t, map) :: Plug.Conn.t
defp ensure_canvas(conn, _opts) do
CanvasService.get(conn.params["id"],
account: conn.private.current_account)
|> case do
{:ok, canvas} -> put_private(conn, :canvas, canvas)
{:error, :not_found} -> not_found(conn, halt: true)
end
end
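# Renders the canvas according to the requested trailing format
# (illustrative paths; actual routes live in the router):
#   .../canvases/:id.canvas -> application/octet-stream JSON payload
#   .../canvases/:id.md     -> text/plain markdown
#   .../canvases/:id        -> JSON "show" representation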
@spec render_show(Plug.Conn.t, CanvasAPI.Canvas.t, String.t) :: Plug.Conn.t
defp render_show(conn, canvas, format \\ "json")
defp render_show(conn, canvas, "canvas") do
conn
|> put_resp_content_type("application/octet-stream")
|> render("canvas.json", canvas: canvas, json_api: false)
end
defp render_show(conn, canvas, format) when format in @md_extensions do
conn
|> put_resp_content_type("text/plain")
|> render("canvas.md", canvas: canvas)
end
defp render_show(conn, canvas, _) do
render(conn, "show.json", canvas: canvas)
end
end | web/controllers/canvas_controller.ex | 0.675444 | 0.432123 | canvas_controller.ex | starcoder |
defmodule Protox.Message do
@moduledoc """
This module provides functions to work with messages.
"""
@doc """
Merges message `from` into message `msg`.

Singular fields of `msg` will be overwritten, if specified in `from`, except for
embedded messages, which will be merged. Repeated fields will be concatenated.

Note that "specified" has a different meaning in protobuf 2 and 3:
- protobuf 2: if the singular field from `from` is nil, the value from `msg` is kept
- protobuf 3: if the singular field from `from` is set to the default value, the value
  from `msg` is kept. This behaviour matches the C++ reference implementation.

The arguments must satisfy one of the following:
- `msg` and `from` are of the same type; or
- either `msg` or `from` is `nil`: the non-nil message is returned; or
- both are `nil`: `nil` is returned
# Example
iex> r1 = %Protobuf2{a: 0, s: :ONE}
iex> r2 = %Protobuf2{a: nil, s: :TWO}
iex> Protox.Message.merge(r1, r2)
%Protobuf2{a: 0, s: :TWO}
iex> Protox.Message.merge(r2, r1)
%Protobuf2{a: 0, s: :ONE}
"""
@spec merge(struct | nil, struct | nil) :: struct | nil
def merge(nil, from), do: from
def merge(msg, nil), do: msg
def merge(msg, from) do
Map.merge(msg, from, fn name, v1, v2 ->
if name == :__struct__ or name == msg.__struct__.unknown_fields_name() do
v1
else
merge_field(msg, name, v1, v2)
end
end)
end
defp merge_field(msg, name, v1, v2) do
defs = msg.__struct__.defs_by_name()
syntax = msg.__struct__.syntax()
case defs[name] do
{_, :packed, _} ->
v1 ++ v2
{_, :unpacked, _} ->
v1 ++ v2
{_, {:default, _}, {:message, _}} ->
merge(v1, v2)
{_, {:default, _}, _} ->
{:ok, default} = msg.__struct__.default(name)
merge_scalar(syntax, v1, v2, default)
nil ->
merge_oneof(v1, v2, defs)
{_, :map, {_, {:message, _}}} ->
Map.merge(v1, v2, fn _key, w1, w2 -> merge(w1, w2) end)
{_, :map, _} ->
Map.merge(v1, v2)
end
end
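# "Specified" semantics differ by syntax: proto2 keeps the existing value when
# `from`'s field is unset (nil); proto3 keeps it when `from`'s field equals the
# default value, matching the C++ reference implementation.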
defp merge_scalar(:proto2, v1, nil, _default), do: v1
defp merge_scalar(:proto3, v1, v2, default) when v2 == default, do: v1
defp merge_scalar(_syntax, _v1, v2, _default), do: v2
defp merge_oneof({v1_field, v1_value}, v2 = {v2_field, v2_value}, defs)
when v1_field == v2_field do
case {defs[v1_field], defs[v2_field]} do
{{_, {:oneof, _}, {:message, _}}, {_, {:oneof, _}, {:message, _}}} ->
{v1_field, merge(v1_value, v2_value)}
_ ->
v2
end
end
defp merge_oneof(v1, nil, _defs), do: v1
defp merge_oneof(_v1, v2, _defs), do: v2
end | lib/protox/message.ex | 0.867204 | 0.650842 | message.ex | starcoder |
defmodule Sanbase.Signal.SqlQuery do
@table "signals"
@metadata_table "signal_metadata"
@moduledoc ~s"""
Defines the SQL queries used to access the signals in ClickHouse.
The signals are stored in the '#{@table}' ClickHouse table.
"""
use Ecto.Schema
import Sanbase.DateTimeUtils, only: [str_to_sec: 1]
import Sanbase.Metric.SqlQuery.Helper, only: [aggregation: 3, asset_id_filter: 2]
alias Sanbase.Signal.FileHandler
@name_to_signal_map FileHandler.name_to_signal_map()
schema @table do
field(:datetime, :utc_datetime, source: :dt)
field(:value, :float)
field(:asset_id, :integer)
field(:signal_id, :integer)
field(:computed_at, :utc_datetime)
end
def available_signals_query(slug) do
query = """
SELECT name
FROM #{@metadata_table}
PREWHERE signal_id in (
SELECT DISTINCT(signal_id)
FROM #{@table}
INNER JOIN (
SELECT * FROM asset_metadata FINAL PREWHERE name = ?1
) using(asset_id)
)
"""
args = [
slug
]
{query, args}
end
def available_slugs_query(signal) do
query = """
SELECT DISTINCT(name)
FROM asset_metadata
PREWHERE asset_id in (
SELECT DISTINCT(asset_id)
FROM #{@table}
INNER JOIN (
SELECT * FROM #{@metadata_table} PREWHERE name = ?1
) USING(signal_id))
"""
args = [
Map.get(@name_to_signal_map, signal)
]
{query, args}
end
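# ClickHouse's `argMax(signal_id, version)` picks the signal_id of the row with
# the highest version, i.e. the latest definition registered for a given signal
# name.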
def first_datetime_query(signal, slug) do
query = """
SELECT toUnixTimestamp(toDateTime(min(dt)))
FROM #{@table}
PREWHERE
signal_id = ( SELECT argMax(signal_id, version) FROM signal_metadata FINAL PREWHERE name = ?1 GROUP BY name LIMIT 1) AND
asset_id = ( select asset_id from asset_metadata FINAL PREWHERE name = ?2 LIMIT 1 )
"""
args = [
Map.get(@name_to_signal_map, signal),
slug
]
{query, args}
end
def raw_data_query(signals, from, to) do
# ClickHouse does not support multiple joins with USING, so there's an extra
# level of nesting just for that
query = """
SELECT
dt, signal, slug, value, metadata
FROM(
SELECT
toUnixTimestamp(dt) AS dt, signal, asset_id, value, metadata, signal_id AS signal_id2
FROM signals FINAL
ANY LEFT JOIN (
SELECT argMax(signal_id, version) AS signal_id2, name AS signal FROM signal_metadata FINAL GROUP BY name
) USING signal_id2
PREWHERE
#{maybe_filter_signals(signals, argument_position: 3, trailing_and: true)}
dt >= toDateTime(?1) AND
dt < toDateTime(?2) AND
isNotNull(value) AND NOT isNaN(value)
)
ANY LEFT JOIN (
SELECT asset_id, name AS slug FROM asset_metadata FINAL
) USING asset_id
"""
args = [from |> DateTime.to_unix(), to |> DateTime.to_unix()]
args =
case signals do
:all -> args
[_ | _] -> args ++ [signals]
end
{query, args}
end
def timeseries_data_query(signal, slug_or_slugs, from, to, _interval, :none) do
query = """
SELECT
toUnixTimestamp(dt) AS dt,
value,
metadata
FROM #{@table} FINAL
PREWHERE
dt >= toDateTime(?1) AND
dt < toDateTime(?2) AND
isNotNull(value) AND NOT isNaN(value) AND
signal_id = ( SELECT argMax(signal_id, version) FROM #{@metadata_table} FINAL PREWHERE name = ?3 GROUP BY name LIMIT 1 ) AND
#{asset_id_filter(slug_or_slugs, argument_position: 4)}
ORDER BY dt
"""
args = [
from |> DateTime.to_unix(),
to |> DateTime.to_unix(),
Map.get(@name_to_signal_map, signal),
slug_or_slugs
]
{query, args}
end
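# `intDiv(toUInt32(dt), interval) * interval` rounds each timestamp down to the
# nearest multiple of `interval` seconds, bucketing rows into fixed windows
# (e.g. a "1h" interval maps 12:34:56 to 12:00:00) before the aggregation runs
# per bucket.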
def timeseries_data_query(signal, slug_or_slugs, from, to, interval, aggregation) do
query = """
SELECT
toUnixTimestamp(intDiv(toUInt32(toDateTime(dt)), ?1) * ?1) AS t,
#{aggregation(aggregation, "value", "dt")},
groupArray(metadata) AS metadata
FROM(
SELECT
asset_id,
dt,
value,
metadata
FROM #{@table} FINAL
PREWHERE
dt >= toDateTime(?2) AND
dt < toDateTime(?3) AND
isNotNull(value) AND NOT isNaN(value) AND
#{asset_id_filter(slug_or_slugs, argument_position: 5)} AND
signal_id = ( SELECT argMax(signal_id, version) FROM #{@metadata_table} FINAL PREWHERE name = ?4 GROUP BY name LIMIT 1 )
)
GROUP BY t
ORDER BY t
"""
args = [
str_to_sec(interval),
from |> DateTime.to_unix(),
to |> DateTime.to_unix(),
Map.get(@name_to_signal_map, signal),
slug_or_slugs
]
{query, args}
end
def aggregated_timeseries_data_query(signal, slug_or_slugs, from, to, aggregation) do
query = """
SELECT
name as slug,
toFloat32(#{aggregation(aggregation, "value", "dt")}) as value
FROM(
SELECT
dt,
asset_id,
value
FROM #{@table}
PREWHERE
dt >= toDateTime(?2) AND
dt < toDateTime(?3) AND
#{asset_id_filter(slug_or_slugs, argument_position: 4)} AND
signal_id = ( SELECT argMax(signal_id, version) FROM #{@metadata_table} FINAL PREWHERE name = ?1 GROUP BY name LIMIT 1 )
)
INNER JOIN (
SELECT asset_id, name
FROM asset_metadata FINAL
PREWHERE name IN (?4)
) USING (asset_id)
GROUP BY slug
"""
args = [
Map.get(@name_to_signal_map, signal),
from |> DateTime.to_unix(),
to |> DateTime.to_unix(),
slug_or_slugs
]
{query, args}
end
defp maybe_filter_signals(:all, _opts), do: ""
defp maybe_filter_signals([_ | _], opts) do
argument_position = Keyword.fetch!(opts, :argument_position)
trailing_and = if Keyword.get(opts, :trailing_and), do: " AND", else: ""
"""
signal_id IN (
SELECT argMax(signal_id, version)
FROM signal_metadata FINAL
PREWHERE name in (?#{argument_position})
GROUP BY name
)
""" <> trailing_and
end
end | lib/sanbase/signal/sql_query/signal_sql_query.ex | 0.594787 | 0.450541 | signal_sql_query.ex | starcoder |
defmodule VintageNet.Connectivity.Inspector do
@moduledoc """
This module looks at the network activity of all TCP socket connections known
to Erlang/OTP to deduce whether the internet is working.
To use it, call `check_internet/2`, save the returned cache, and then call it
again a minute later (or so). If any socket has transferred data in both
directions to an off-LAN host on the interface of interest, then it will
return that the internet is available.
This has a few advantages:
1. No data is sent to perform the check which is good for metered connections
2. Most long-lived TCP connections have a keepalive mechanism that generates
traffic, so this piggy-backs off that existing connectivity check.
3. Devices can be behind very restrictive firewalls and internet connectivity
can still be verified without knowing which IP/port/protocol combinations
are allowed.
It is not perfect:
1. It only works on long-lived TCP connections.
2. The TCP connection must be sending and receiving data. If the keepalive
   interval is longer than the time between `check_internet/2` calls, no
   traffic may be observed and a working connection can go unnoticed.
3. It doesn't help if nobody is using the network interface.
4. It may have scalability issues if there are a LOT of TCP sockets.
"""
@typedoc """
Cache for use between `check_internet/2` calls. Initialize to an empty map.
"""
@type cache() :: %{port() => {non_neg_integer(), non_neg_integer()}}
@typedoc """
Internet connectivity status
* `:internet` - the internet is available
* `:unknown` - not sure
* `:no_internet` - the internet is definitely not available
"""
@type status() :: :internet | :unknown | :no_internet
@typedoc """
The return tuple for `check_internet/2`
* `:internet` - at least one TCP connection sent and received data to a
non-LAN IP address
* `:unknown` - no conclusion could be made
* `:no_internet` - the interface didn't have an IP address, so Internet is
definitely not available
Save the cache away and pass it to the next call to `check_internet/2`.
"""
@type result() :: {status(), cache()}
@typep ip_address_and_mask() :: {:inet.ip_address(), :inet.ip_address()}
@doc """
Check whether the internet is accessible on the specified interface
Pass an empty map for the cache parameter for the first call. Then pass it
back the returned cache for each subsequent call. If any TCP socket that's
connected to a computer on another subnet and that's using the passed in
network interface has send AND received data since the previous call, then
`:internet` is returned. If not, then usually `:unknown` is returned to
signify that internet may be available, but we just don't know. If the
interface doesn't have an IP address, then `:no_internet` is returned, since
that's a prerequisite to communicating with anyone on the internet.
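
## Example

A sketch of the intended polling loop (the ifname "wlan0" is illustrative, and
this module is aliased as `Inspector`):

    cache = %{}
    {_status, cache} = Inspector.check_internet("wlan0", cache)
    # ... about a minute later ...
    {status, _cache} = Inspector.check_internet("wlan0", cache)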
"""
@spec check_internet(VintageNet.ifname(), cache()) :: result()
def check_internet(ifname, cache) do
case get_addresses(ifname) do
[] ->
# If we don't even have an IP address, then there's no Internet for sure.
{:no_internet, %{}}
our_addresses ->
{:unknown, %{}}
|> check_ports(Port.list(), our_addresses, cache)
|> check_sockets(:socket.which_sockets(:tcp), our_addresses, cache)
end
end
@doc false
@spec check_ports(result(), [port()], [ip_address_and_mask()], cache()) :: result()
def check_ports(result, [], _our_addresses, _cache), do: result
def check_ports(result, [socket | rest], our_addresses, cache) do
new_result =
case Map.fetch(cache, socket) do
{:ok, previous_stats} ->
new_stats = get_port_stats(socket)
update_result(result, socket, previous_stats, new_stats)
_ ->
check_new_port(socket, our_addresses, result)
end
check_ports(new_result, rest, our_addresses, cache)
end
defp get_port_stats(socket) when is_port(socket) do
case :inet.getstat(socket, [:send_oct, :recv_oct]) do
{:ok, [send_oct: tx, recv_oct: rx]} ->
{tx, rx}
{:ok, [recv_oct: rx, send_oct: tx]} ->
{tx, rx}
{:error, _} ->
# Race condition. Socket was in the list, but by the time it was
# checked, it was closed. No big deal. It will be removed from the
# cache next time. Return bogus values that definitely won't update the
# result to indicate Internet availability.
{0, 0}
end
end
@doc false
@spec check_sockets(result(), [:socket.socket()], [ip_address_and_mask()], cache()) :: result()
def check_sockets(result, [], _our_addresses, _cache), do: result
def check_sockets(result, [socket | rest], our_addresses, cache) do
new_result =
case Map.fetch(cache, socket) do
{:ok, previous_stats} ->
new_stats = get_socket_stats(socket)
update_result(result, socket, previous_stats, new_stats)
_ ->
check_new_socket(socket, our_addresses, result)
end
check_sockets(new_result, rest, our_addresses, cache)
end
defp get_socket_stats(socket) do
# Socket API
%{counters: %{write_byte: tx, read_byte: rx}} = :socket.info(socket)
{tx, rx}
end
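# Internet is reported only when a socket's counters advanced in BOTH
# directions since the previous check: tx2 > tx1 and rx2 > rx1.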
defp update_result({:unknown, cache}, socket, {tx1, rx1}, {tx2, rx2} = new_stats)
when tx2 > tx1 and rx2 > rx1 do
{:internet, Map.put(cache, socket, new_stats)}
end
defp update_result({status, cache}, socket, _previous_stats, new_stats) do
{status, Map.put(cache, socket, new_stats)}
end
defp check_new_port(socket, our_addresses, {status, cache}) do
with {:name, 'tcp_inet'} <- Port.info(socket, :name),
true <- connected?(socket),
{:ok, {src_ip, _src_port}} <- :inet.sockname(socket),
true <- on_interface?(src_ip, our_addresses),
{:ok, {dest_ip, _dest_port}} <- :inet.peername(socket),
false <- on_interface?(dest_ip, our_addresses) do
{status, Map.put(cache, socket, get_port_stats(socket))}
else
_ -> {status, cache}
end
end
defp connected?(socket) do
case :prim_inet.getstatus(socket) do
{:ok, status} -> :connected in status
_ -> false
end
end
defp check_new_socket(socket, our_addresses, {status, cache}) do
# Socket API
with %{protocol: :tcp, counters: %{write_byte: tx, read_byte: rx}} <- :socket.info(socket),
{:ok, %{addr: src_ip}} <- :socket.sockname(socket),
true <- on_interface?(src_ip, our_addresses),
{:ok, %{addr: dest_ip}} <- :socket.peername(socket),
false <- on_interface?(dest_ip, our_addresses) do
{status, Map.put(cache, socket, {tx, rx})}
else
_ -> {status, cache}
end
end
@doc """
Return true if an IP address is on one of the subnets in a list
"""
@spec on_interface?(:inet.ip_address(), [ip_address_and_mask()]) :: boolean
def on_interface?(_ip, []), do: false
def on_interface?(ip, [one_address | rest]) do
on_subnet?(ip, one_address) || on_interface?(ip, rest)
end
@doc """
Return true if an IP address is in the subnet
## Examples
iex> Inspector.on_subnet?({192,168,0,50}, {{192,168,0,1}, {255,255,255,0}})
true
iex> Inspector.on_subnet?({192,168,5,1}, {{192,168,0,1}, {255,255,255,0}})
false
"""
@spec on_subnet?(:inet.ip_address(), ip_address_and_mask()) :: boolean
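# `bxor(ip_segment, subnet_segment)` is zero wherever the two addresses agree;
# masking with the corresponding netmask segment zeroes out the host bits, so a
# zero result for every segment means the address lies on the subnet.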
def on_subnet?({a, b, c, d}, {{sa, sb, sc, sd}, {ma, mb, mc, md}}) do
:erlang.band(:erlang.bxor(a, sa), ma) == 0 and
:erlang.band(:erlang.bxor(b, sb), mb) == 0 and
:erlang.band(:erlang.bxor(c, sc), mc) == 0 and
:erlang.band(:erlang.bxor(d, sd), md) == 0
end
def on_subnet?(
{a, b, c, d, e, f, g, h},
{{sa, sb, sc, sd, se, sf, sg, sh}, {ma, mb, mc, md, me, mf, mg, mh}}
) do
:erlang.band(:erlang.bxor(a, sa), ma) == 0 and
:erlang.band(:erlang.bxor(b, sb), mb) == 0 and
:erlang.band(:erlang.bxor(c, sc), mc) == 0 and
:erlang.band(:erlang.bxor(d, sd), md) == 0 and
:erlang.band(:erlang.bxor(e, se), me) == 0 and
:erlang.band(:erlang.bxor(f, sf), mf) == 0 and
:erlang.band(:erlang.bxor(g, sg), mg) == 0 and
:erlang.band(:erlang.bxor(h, sh), mh) == 0
end
def on_subnet?(_ip, {_subnet_ip, _subnet_mask}) do
false
end
@doc false
@spec get_addresses(VintageNet.ifname()) :: [ip_address_and_mask()]
def get_addresses(ifname) do
with {:ok, interfaces} <- :inet.getifaddrs(),
{_, info} <- List.keyfind(interfaces, to_charlist(ifname), 0, []) do
extract_addr_mask(info, [])
else
_ ->
[]
end
end
defp extract_addr_mask([], acc), do: acc
defp extract_addr_mask([{:addr, a}, {:netmask, m} | rest], acc),
do: extract_addr_mask(rest, [{a, m} | acc])
defp extract_addr_mask([_ | rest], acc), do: extract_addr_mask(rest, acc)
end | lib/vintage_net/connectivity/inspector.ex | 0.792825 | 0.455199 | inspector.ex | starcoder |
defmodule HPAX.Types do
@moduledoc false
import Bitwise, only: [<<<: 2]
alias HPAX.Huffman
# This is used as a macro and not an inlined function because we want to be able to use it in
# guards.
defmacrop power_of_two(n) do
quote do: 1 <<< unquote(n)
end
## Encoding
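# HPACK prefix integers (RFC 7541, section 5.1): a value that fits in the N-bit
# prefix is encoded directly; otherwise the prefix is filled with ones and the
# remainder continues in 7-bit groups, least significant first. For example,
# with a 5-bit prefix:
#
#   encode_integer(10, 5)   #=> <<10::size(5)>>
#   encode_integer(1337, 5) #=> <<31::size(5), 154, 10>>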
@spec encode_integer(non_neg_integer(), 1..8) :: bitstring()
def encode_integer(integer, prefix)
def encode_integer(integer, prefix) when integer < power_of_two(prefix) - 1 do
<<integer::size(prefix)>>
end
def encode_integer(integer, prefix) do
initial = power_of_two(prefix) - 1
remaining = integer - initial
<<initial::size(prefix), encode_remaining_integer(remaining)::binary>>
end
defp encode_remaining_integer(remaining) when remaining >= 128 do
first = rem(remaining, 128) + 128
<<first::8, encode_remaining_integer(div(remaining, 128))::binary>>
end
defp encode_remaining_integer(remaining) do
<<remaining::8>>
end
@spec encode_binary(binary(), boolean()) :: iodata()
def encode_binary(binary, huffman?) do
binary = if huffman?, do: Huffman.encode(binary), else: binary
huffman_bit = if huffman?, do: 1, else: 0
binary_size = encode_integer(byte_size(binary), 7)
[<<huffman_bit::1, binary_size::bitstring>>, binary]
end
## Decoding
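# String literals (RFC 7541, section 5.2): a 1-bit Huffman flag followed by a
# 7-bit-prefix length, then that many bytes of (possibly Huffman-coded) data.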
@spec decode_integer(bitstring, 1..8) :: {:ok, non_neg_integer(), binary()} | :error
def decode_integer(bitstring, prefix) when is_bitstring(bitstring) and prefix in 1..8 do
with <<value::size(prefix), rest::binary>> <- bitstring do
if value < power_of_two(prefix) - 1 do
{:ok, value, rest}
else
decode_remaining_integer(rest, value, 0)
end
else
_ -> :error
end
end
defp decode_remaining_integer(<<0::1, value::7, rest::binary>>, int, m) do
{:ok, int + (value <<< m), rest}
end
defp decode_remaining_integer(<<1::1, value::7, rest::binary>>, int, m) do
decode_remaining_integer(rest, int + (value <<< m), m + 7)
end
defp decode_remaining_integer(_, _, _) do
:error
end
@spec decode_binary(binary) :: {:ok, binary(), binary()} | :error
def decode_binary(binary) when is_binary(binary) do
with <<huffman_bit::1, rest::bitstring>> <- binary,
{:ok, length, rest} <- decode_integer(rest, 7),
<<contents::binary-size(length), rest::binary>> <- rest do
contents =
case huffman_bit do
0 -> contents
1 -> Huffman.decode(contents)
end
{:ok, contents, rest}
else
_ -> :error
end
end
end | lib/hpax/types.ex | 0.633297 | 0.421165 | types.ex | starcoder |
defmodule Gettext.Fuzzy do
@moduledoc false
alias Gettext.PO
alias Gettext.PO.Translation
alias Gettext.PO.PluralTranslation
@type translation_key :: binary | {binary, binary}
@doc """
Returns a matcher function that takes two translation keys and checks if they
match.
`String.jaro_distance/2` (which calculates the Jaro distance) is used to
measure the distance between the two translations. `threshold` is the minimum
distance that means a match. `{:match, distance}` is returned in case of a
match, `:nomatch` otherwise.
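
## Example

    matcher = Gettext.Fuzzy.matcher(0.8)
    matcher.("hello world", "hello world")
    #=> {:match, 1.0}
    matcher.("hello", "goodbye")
    #=> :nomatch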
"""
@spec matcher(float) :: (translation_key, translation_key -> {:match, float} | :nomatch)
def matcher(threshold) do
fn(old_key, new_key) ->
distance = jaro_distance(old_key, new_key)
if distance >= threshold, do: {:match, distance}, else: :nomatch
end
end
@doc """
Finds the Jaro distance between the msgids of two translations.
To mimic the behaviour of the `msgmerge` tool, this function only calculates
the Jaro distance of the msgids of the two translations, even if one (or both)
of them is a plural translation.
"""
@spec jaro_distance(translation_key, translation_key) :: float
def jaro_distance(key1, key2)
# Apparently, msgmerge only looks at the msgid when performing fuzzy
# matching. This means that if we have two plural translations with similar
# msgids but very different msgid_plurals, they'll still fuzzy match.
def jaro_distance(k1, k2) when is_binary(k1) and is_binary(k2), do: String.jaro_distance(k1, k2)
def jaro_distance({k1, _}, k2) when is_binary(k2), do: String.jaro_distance(k1, k2)
def jaro_distance(k1, {k2, _}) when is_binary(k1), do: String.jaro_distance(k1, k2)
def jaro_distance({k1, _}, {k2, _}), do: String.jaro_distance(k1, k2)
@doc """
Merges a translation with the corresponding fuzzy match.
`new` is the newest translation and `existing` is the existing translation
that we use to populate the msgstr of the newest translation.
Note that if `new` is a regular translation, then the result will be a regular
translation; if `new` is a plural translation, then the result will be a
plural translation.
"""
@spec merge(PO.translation, PO.translation) :: PO.translation
def merge(new, existing) do
new |> do_merge_fuzzy(existing) |> PO.Translations.mark_as_fuzzy
end
defp do_merge_fuzzy(%Translation{} = new, %Translation{} = existing),
do: %{new | msgstr: existing.msgstr}
defp do_merge_fuzzy(%Translation{} = new, %PluralTranslation{} = existing),
do: %{new | msgstr: existing.msgstr[0]}
defp do_merge_fuzzy(%PluralTranslation{} = new, %Translation{} = existing),
do: %{new | msgstr: (for {i, _} <- new.msgstr, into: %{}, do: {i, existing.msgstr})}
defp do_merge_fuzzy(%PluralTranslation{} = new, %PluralTranslation{} = existing),
do: %{new | msgstr: existing.msgstr}
end | deps/gettext/lib/gettext/fuzzy.ex | 0.853501 | 0.525978 | fuzzy.ex | starcoder |
defmodule Nanoid do
@moduledoc """
Elixir port of NanoID ([https://github.com/ai/nanoid](https://github.com/ai/nanoid)), a tiny, secure URL-friendly unique string ID generator.
**Safe.** It uses cryptographically strong random APIs and guarantees a proper distribution of symbols.
**Small.** Only 179 bytes (minified and gzipped). No dependencies. It uses Size Limit to control size.
**Compact.** It uses a larger alphabet than UUID (A-Za-z0-9_~) and has a similar number of unique IDs in just 21 symbols instead of 36.
"""
@doc """
Generates a secure NanoID using the default alphabet.
## Example
Generate a NanoID with the default size of 21 characters.
iex> Nanoid.generate()
"mJUHrGXZBZpNX50x2xkzf"
"""
defdelegate generate, to: Nanoid.Secure
@doc """
Generates a secure NanoID using the default alphabet.
## Example
Generate a secure NanoID with a custom size of 64 characters.
iex> Nanoid.generate(64)
"wk9fsUrhK9k-MxY0hLazRKpcSlic8XYDFusks7Jb8FwCVnoQaKFSPsmmLHzP7qCX"
"""
defdelegate generate(size), to: Nanoid.Secure
@doc """
Generates a secure NanoID using a custom size and an individual alphabet.
## Example
Generate a secure NanoID with the default size of 21 characters and an individual alphabet.
iex> Nanoid.generate(21, "abcdef123")
"d1dcd2dee333cae1bfdea"
Generate a secure NanoID with custom size of 64 characters and an individual alphabet.
iex> Nanoid.generate(64, "abcdef123")
"aabbaca3c11accca213babed2bcd1213efb3e3fa1ad23ecbf11c2ffc123f3bbe"
"""
defdelegate generate(size, alphabet), to: Nanoid.Secure
@doc """
Generates a non-secure NanoID using the default alphabet.
## Example
Generate a non-secure NanoID with the default size of 21 characters.
iex> Nanoid.generate_non_secure()
"mJUHrGXZBZpNX50x2xkzf"
"""
defdelegate generate_non_secure, to: Nanoid.NonSecure, as: :generate
@doc """
Generates a non-secure NanoID using the default alphabet.
## Example
Generate a non-secure NanoID with a custom size of 64 characters.
iex> Nanoid.generate_non_secure(64)
"wk9fsUrhK9k-MxY0hLazRKpcSlic8XYDFusks7Jb8FwCVnoQaKFSPsmmLHzP7qCX"
"""
defdelegate generate_non_secure(size), to: Nanoid.NonSecure, as: :generate
@doc """
Generate a non-secure NanoID using a custom size and an individual alphabet.
## Example
Generate a non-secure NanoID with the default size of 21 characters and an individual alphabet.
iex> Nanoid.generate_non_secure(21, "abcdef123")
"d1dcd2dee333cae1bfdea"
Generate a non-secure NanoID with custom size of 64 characters and an individual alphabet.
iex> Nanoid.generate_non_secure(64, "abcdef123")
"aabbaca3c11accca213babed2bcd1213efb3e3fa1ad23ecbf11c2ffc123f3bbe"
"""
defdelegate generate_non_secure(size, alphabet), to: Nanoid.NonSecure, as: :generate
end | lib/nanoid.ex | 0.865622 | 0.776835 | nanoid.ex | starcoder |
defmodule Cldr.Rfc5646.Parser do
@moduledoc """
Implements parsing for [RFC5646](https://datatracker.ietf.org/doc/html/rfc5646) language
tags with [BCP47](https://tools.ietf.org/search/bcp47) extensions.
The primary interface to this module is the function
`Cldr.LanguageTag.parse/1`.
"""
alias Cldr.LanguageTag
import Cldr.Rfc5646.Helpers
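# Entry point used by `Cldr.LanguageTag.parse/1`: runs the requested parsec
# rule (`:language_tag` by default) on the input and unwraps the raw
# NimbleParsec tuple into `{:ok, tokens}` or an
# `{:error, {LanguageTag.ParseError, message}}` tuple.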
def parse(rule \\ :language_tag, input) when is_atom(rule) and is_binary(input) do
apply(__MODULE__, rule, [input])
|> unwrap
end
defp unwrap({:ok, acc, "", _, _, _}) when is_list(acc),
do: {:ok, acc}
defp unwrap({:error, <<first::binary-size(1), reason::binary>>, rest, _, _, offset}),
do:
{:error,
{LanguageTag.ParseError,
"#{String.capitalize(first)}#{reason}. Could not parse the remaining #{inspect(rest)} " <>
"starting at position #{offset + 1}"}}
@doc """
Parses the given `binary` as language_tag.
Returns `{:ok, [token], rest, context, position, byte_offset}` or
`{:error, reason, rest, context, line, byte_offset}` where `position`
describes the location of the language_tag (start position) as `{line, column_on_line}`.
## Options
* `:byte_offset` - the byte offset for the whole binary, defaults to 0
* `:line` - the line and the byte offset into that line, defaults to `{1, byte_offset}`
* `:context` - the initial context value. It will be converted to a map
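
## Example

    # Illustrative; the exact shape of `tokens` depends on the generated
    # grammar:
    {:ok, tokens, "", _context, _line, _offset} =
      Cldr.Rfc5646.Parser.language_tag("en-US")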
"""
@spec language_tag(binary, keyword) ::
{:ok, [term], rest, context, line, byte_offset}
| {:error, reason, rest, context, line, byte_offset}
when line: {pos_integer, byte_offset},
byte_offset: pos_integer,
rest: binary,
reason: String.t(),
context: map()
def language_tag(binary, opts \\ []) when is_binary(binary) do
context = Map.new(Keyword.get(opts, :context, []))
byte_offset = Keyword.get(opts, :byte_offset, 0)
line =
case(Keyword.get(opts, :line, 1)) do
{_, _} = line ->
line
line ->
{line, byte_offset}
end
case(language_tag__0(binary, [], [], context, line, byte_offset)) do
{:ok, acc, rest, context, line, offset} ->
{:ok, :lists.reverse(acc), rest, context, line, offset}
{:error, _, _, _, _, _} = error ->
error
end
end
defp language_tag__0(rest, acc, stack, context, line, offset) do
language_tag__39(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__2(rest, acc, stack, context, line, offset) do
language_tag__3(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__3(rest, acc, stack, context, line, offset) do
language_tag__10(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__5(rest, acc, stack, context, line, offset) do
language_tag__6(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__6(
<<"art-lojban", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__7(rest, ["art-lojban"] ++ acc, stack, context, comb__line, comb__offset + 10)
end
defp language_tag__6(
<<"cel-gaulish", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__7(rest, ["cel-gaulish"] ++ acc, stack, context, comb__line, comb__offset + 11)
end
defp language_tag__6(<<"no-bok", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__7(rest, ["no-bok"] ++ acc, stack, context, comb__line, comb__offset + 6)
end
defp language_tag__6(<<"no-nyn", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__7(rest, ["no-nyn"] ++ acc, stack, context, comb__line, comb__offset + 6)
end
defp language_tag__6(
<<"zh-guoyu", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__7(rest, ["zh-guoyu"] ++ acc, stack, context, comb__line, comb__offset + 8)
end
defp language_tag__6(
<<"zh-hakka", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__7(rest, ["zh-hakka"] ++ acc, stack, context, comb__line, comb__offset + 8)
end
defp language_tag__6(<<"zh-min", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__7(rest, ["zh-min"] ++ acc, stack, context, comb__line, comb__offset + 6)
end
defp language_tag__6(
<<"zh-min-nan", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__7(rest, ["zh-min-nan"] ++ acc, stack, context, comb__line, comb__offset + 10)
end
defp language_tag__6(
<<"zh-xiang", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__7(rest, ["zh-xiang"] ++ acc, stack, context, comb__line, comb__offset + 8)
end
defp language_tag__6(rest, _acc, _stack, context, line, offset) do
{:error,
"expected one of the regular language tags in BCP-47 while processing a grandfathered language tag inside a BCP47 language tag",
rest, context, line, offset}
end
defp language_tag__7(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__8(
rest,
[
regular:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__8(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__4(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__9(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__5(rest, [], stack, context, line, offset)
end
defp language_tag__10(rest, acc, stack, context, line, offset) do
language_tag__11(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__11(
<<"en-GB-oed", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__12(rest, ["en-GB-oed"] ++ acc, stack, context, comb__line, comb__offset + 9)
end
defp language_tag__11(<<"i-ami", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__12(rest, ["i-ami"] ++ acc, stack, context, comb__line, comb__offset + 5)
end
defp language_tag__11(<<"i-bnn", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__12(rest, ["i-bnn"] ++ acc, stack, context, comb__line, comb__offset + 5)
end
defp language_tag__11(
<<"i-default", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__12(rest, ["i-default"] ++ acc, stack, context, comb__line, comb__offset + 9)
end
defp language_tag__11(
<<"i-enochian", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__12(rest, ["i-enochian"] ++ acc, stack, context, comb__line, comb__offset + 10)
end
defp language_tag__11(<<"i-hak", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__12(rest, ["i-hak"] ++ acc, stack, context, comb__line, comb__offset + 5)
end
defp language_tag__11(
<<"i-klingon", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__12(rest, ["i-klingon"] ++ acc, stack, context, comb__line, comb__offset + 9)
end
defp language_tag__11(<<"i-lux", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__12(rest, ["i-lux"] ++ acc, stack, context, comb__line, comb__offset + 5)
end
defp language_tag__11(
<<"i-mingo", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__12(rest, ["i-mingo"] ++ acc, stack, context, comb__line, comb__offset + 7)
end
defp language_tag__11(
<<"i-navajo", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__12(rest, ["i-navajo"] ++ acc, stack, context, comb__line, comb__offset + 8)
end
defp language_tag__11(<<"i-pwn", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__12(rest, ["i-pwn"] ++ acc, stack, context, comb__line, comb__offset + 5)
end
defp language_tag__11(<<"i-tao", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__12(rest, ["i-tao"] ++ acc, stack, context, comb__line, comb__offset + 5)
end
defp language_tag__11(<<"i-tay", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__12(rest, ["i-tay"] ++ acc, stack, context, comb__line, comb__offset + 5)
end
defp language_tag__11(<<"i-tsu", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__12(rest, ["i-tsu"] ++ acc, stack, context, comb__line, comb__offset + 5)
end
defp language_tag__11(
<<"sgn-BE-FR", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__12(rest, ["sgn-BE-FR"] ++ acc, stack, context, comb__line, comb__offset + 9)
end
defp language_tag__11(
<<"sgn-BE-NL", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__12(rest, ["sgn-BE-NL"] ++ acc, stack, context, comb__line, comb__offset + 9)
end
defp language_tag__11(
<<"sgn-CH-DE", rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
) do
language_tag__12(rest, ["sgn-CH-DE"] ++ acc, stack, context, comb__line, comb__offset + 9)
end
defp language_tag__11(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__9(rest, acc, stack, context, line, offset)
end
defp language_tag__12(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__13(
rest,
[
irregular:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__13(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__4(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__4(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__14(
rest,
[grandfathered: :lists.reverse(user_acc)] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__14(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__1(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__15(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__2(rest, [], stack, context, line, offset)
end
defp language_tag__16(rest, acc, stack, context, line, offset) do
language_tag__17(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__17(
<<x0::integer, x1::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 === 120 or x0 === 88) and x1 === 45 do
language_tag__18(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp language_tag__17(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__15(rest, acc, stack, context, line, offset)
end
defp language_tag__18(rest, acc, stack, context, line, offset) do
language_tag__19(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__19(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__20(rest, [<<x0::integer>>] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__19(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__15(rest, acc, stack, context, line, offset)
end
defp language_tag__20(rest, acc, stack, context, line, offset) do
language_tag__22(rest, acc, [7 | stack], context, line, offset)
end
defp language_tag__22(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__23(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__22(rest, acc, stack, context, line, offset) do
language_tag__21(rest, acc, stack, context, line, offset)
end
defp language_tag__21(rest, acc, [_ | stack], context, line, offset) do
language_tag__24(rest, acc, stack, context, line, offset)
end
defp language_tag__23(rest, acc, [1 | stack], context, line, offset) do
language_tag__24(rest, acc, stack, context, line, offset)
end
defp language_tag__23(rest, acc, [count | stack], context, line, offset) do
language_tag__22(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__24(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__25(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__25(rest, acc, stack, context, line, offset) do
language_tag__27(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__27(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__28(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__27(rest, acc, stack, context, line, offset) do
language_tag__26(rest, acc, stack, context, line, offset)
end
defp language_tag__28(rest, acc, stack, context, line, offset) do
language_tag__29(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__29(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__30(rest, [<<x0::integer>>] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__29(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__26(rest, acc, stack, context, line, offset)
end
defp language_tag__30(rest, acc, stack, context, line, offset) do
language_tag__32(rest, acc, [7 | stack], context, line, offset)
end
defp language_tag__32(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__33(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__32(rest, acc, stack, context, line, offset) do
language_tag__31(rest, acc, stack, context, line, offset)
end
defp language_tag__31(rest, acc, [_ | stack], context, line, offset) do
language_tag__34(rest, acc, stack, context, line, offset)
end
defp language_tag__33(rest, acc, [1 | stack], context, line, offset) do
language_tag__34(rest, acc, stack, context, line, offset)
end
defp language_tag__33(rest, acc, [count | stack], context, line, offset) do
language_tag__32(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__34(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__35(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__26(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__36(rest, acc, stack, context, line, offset)
end
defp language_tag__35(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__27(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__36(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__37(
rest,
[private_use: :lists.reverse(user_acc)] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__37(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__1(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__38(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__16(rest, [], stack, context, line, offset)
end
defp language_tag__39(rest, acc, stack, context, line, offset) do
language_tag__40(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__40(rest, acc, stack, context, line, offset) do
language_tag__41(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__41(rest, acc, stack, context, line, offset) do
language_tag__42(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__42(rest, acc, stack, context, line, offset) do
language_tag__180(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__44(rest, acc, stack, context, line, offset) do
language_tag__45(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__45(rest, acc, stack, context, line, offset) do
language_tag__46(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__46(
<<x0::integer, x1::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) do
language_tag__47(
rest,
[<<x0::integer, x1::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 2
)
end
defp language_tag__46(rest, _acc, stack, context, line, offset) do
[_, _, _, _, _, _, acc | stack] = stack
language_tag__38(rest, acc, stack, context, line, offset)
end
defp language_tag__47(rest, acc, stack, context, line, offset) do
language_tag__49(rest, acc, [1 | stack], context, line, offset)
end
defp language_tag__49(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) do
language_tag__50(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__49(rest, acc, stack, context, line, offset) do
language_tag__48(rest, acc, stack, context, line, offset)
end
defp language_tag__48(rest, acc, [_ | stack], context, line, offset) do
language_tag__51(rest, acc, stack, context, line, offset)
end
defp language_tag__50(rest, acc, [1 | stack], context, line, offset) do
language_tag__51(rest, acc, stack, context, line, offset)
end
defp language_tag__50(rest, acc, [count | stack], context, line, offset) do
language_tag__49(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__51(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__52(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__52(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__53(
rest,
[
language:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__53(rest, acc, stack, context, line, offset) do
language_tag__57(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__55(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__54(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__56(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__55(rest, [], stack, context, line, offset)
end
defp language_tag__57(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__58(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__57(rest, acc, stack, context, line, offset) do
language_tag__56(rest, acc, stack, context, line, offset)
end
defp language_tag__58(rest, acc, stack, context, line, offset) do
language_tag__59(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__59(rest, acc, stack, context, line, offset) do
language_tag__61(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__61(rest, acc, stack, context, line, offset) do
language_tag__66(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__63(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__64(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__63(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__60(rest, acc, stack, context, line, offset)
end
defp language_tag__64(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__62(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__65(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__63(rest, [], stack, context, line, offset)
end
defp language_tag__66(rest, acc, stack, context, line, offset) do
language_tag__67(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__67(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__68(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__67(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__65(rest, acc, stack, context, line, offset)
end
defp language_tag__68(rest, acc, stack, context, line, offset) do
language_tag__70(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__70(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__71(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__70(rest, acc, stack, context, line, offset) do
language_tag__69(rest, acc, stack, context, line, offset)
end
defp language_tag__69(rest, acc, [_ | stack], context, line, offset) do
language_tag__72(rest, acc, stack, context, line, offset)
end
defp language_tag__71(rest, acc, [1 | stack], context, line, offset) do
language_tag__72(rest, acc, stack, context, line, offset)
end
defp language_tag__71(rest, acc, [count | stack], context, line, offset) do
language_tag__70(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__72(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__73(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__73(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__62(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__62(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__74(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__74(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__56(rest, acc, stack, context, line, offset)
end
defp language_tag__60(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__75(rest, acc, stack, context, line, offset)
end
defp language_tag__75(rest, acc, stack, context, line, offset) do
language_tag__155(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
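# Annotation: the clauses below are the extlang alternatives: one, two, or
# three 3-letter subtags (`__77`, `__80`, `__83`), plus forms with a trailing
# hyphen that continue into variant/script parsing (`__86`, `__109`, `__132`).
# Failed alternatives chain downward through the `{rest, context, line, offset}`
# checkpoints until one matches.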
defp language_tag__77(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) do
language_tag__78(
rest,
[language_subtags: [<<x0::integer, x1::integer, x2::integer>>]] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__77(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__56(rest, acc, stack, context, line, offset)
end
defp language_tag__78(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__76(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__79(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__77(rest, [], stack, context, line, offset)
end
defp language_tag__80(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, x5::integer,
x6::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) and
((x5 >= 97 and x5 <= 122) or (x5 >= 65 and x5 <= 90)) and
((x6 >= 97 and x6 <= 122) or (x6 >= 65 and x6 <= 90)) do
language_tag__81(
rest,
[<<x4::integer, x5::integer, x6::integer>>, <<x0::integer, x1::integer, x2::integer>>] ++
acc,
stack,
context,
comb__line,
comb__offset + 7
)
end
defp language_tag__80(rest, acc, stack, context, line, offset) do
language_tag__79(rest, acc, stack, context, line, offset)
end
defp language_tag__81(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__76(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__82(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__80(rest, [], stack, context, line, offset)
end
defp language_tag__83(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, x5::integer,
x6::integer, x7::integer, x8::integer, x9::integer, x10::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) and
((x5 >= 97 and x5 <= 122) or (x5 >= 65 and x5 <= 90)) and
((x6 >= 97 and x6 <= 122) or (x6 >= 65 and x6 <= 90)) and x7 === 45 and
((x8 >= 97 and x8 <= 122) or (x8 >= 65 and x8 <= 90)) and
((x9 >= 97 and x9 <= 122) or (x9 >= 65 and x9 <= 90)) and
((x10 >= 97 and x10 <= 122) or (x10 >= 65 and x10 <= 90)) do
language_tag__84(
rest,
[
<<x8::integer, x9::integer, x10::integer>>,
<<x4::integer, x5::integer, x6::integer>>,
<<x0::integer, x1::integer, x2::integer>>
] ++ acc,
stack,
context,
comb__line,
comb__offset + 11
)
end
defp language_tag__83(rest, acc, stack, context, line, offset) do
language_tag__82(rest, acc, stack, context, line, offset)
end
defp language_tag__84(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__76(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__85(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__83(rest, [], stack, context, line, offset)
end
defp language_tag__86(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 do
language_tag__87(
rest,
[language_subtags: [<<x0::integer, x1::integer, x2::integer>>]] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__86(rest, acc, stack, context, line, offset) do
language_tag__85(rest, acc, stack, context, line, offset)
end
defp language_tag__87(rest, acc, stack, context, line, offset) do
language_tag__88(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__88(rest, acc, stack, context, line, offset) do
language_tag__89(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__89(rest, acc, stack, context, line, offset) do
language_tag__91(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__91(rest, acc, stack, context, line, offset) do
language_tag__96(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__93(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__94(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__93(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__90(rest, acc, stack, context, line, offset)
end
defp language_tag__94(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__92(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__95(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__93(rest, [], stack, context, line, offset)
end
defp language_tag__96(rest, acc, stack, context, line, offset) do
language_tag__97(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__97(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__98(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__97(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__95(rest, acc, stack, context, line, offset)
end
defp language_tag__98(rest, acc, stack, context, line, offset) do
language_tag__100(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__100(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__101(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__100(rest, acc, stack, context, line, offset) do
language_tag__99(rest, acc, stack, context, line, offset)
end
defp language_tag__99(rest, acc, [_ | stack], context, line, offset) do
language_tag__102(rest, acc, stack, context, line, offset)
end
defp language_tag__101(rest, acc, [1 | stack], context, line, offset) do
language_tag__102(rest, acc, stack, context, line, offset)
end
defp language_tag__101(rest, acc, [count | stack], context, line, offset) do
language_tag__100(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__102(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__103(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__103(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__92(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__92(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__104(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__104(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__85(rest, acc, stack, context, line, offset)
end
defp language_tag__90(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__105(rest, acc, stack, context, line, offset)
end
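# Annotation: `language_tag__105` matches exactly four ASCII letters (a script
# subtag such as "Latn"); `__106` tags the match as `script:`.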
defp language_tag__105(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__106(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__105(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__85(rest, acc, stack, context, line, offset)
end
defp language_tag__106(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__107(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__107(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__76(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__108(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__86(rest, [], stack, context, line, offset)
end
defp language_tag__109(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, x5::integer,
x6::integer, x7::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) and
((x5 >= 97 and x5 <= 122) or (x5 >= 65 and x5 <= 90)) and
((x6 >= 97 and x6 <= 122) or (x6 >= 65 and x6 <= 90)) and x7 === 45 do
language_tag__110(
rest,
[
language_subtags: [
<<x0::integer, x1::integer, x2::integer>>,
<<x4::integer, x5::integer, x6::integer>>
]
] ++ acc,
stack,
context,
comb__line,
comb__offset + 8
)
end
defp language_tag__109(rest, acc, stack, context, line, offset) do
language_tag__108(rest, acc, stack, context, line, offset)
end
defp language_tag__110(rest, acc, stack, context, line, offset) do
language_tag__111(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__111(rest, acc, stack, context, line, offset) do
language_tag__112(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__112(rest, acc, stack, context, line, offset) do
language_tag__114(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__114(rest, acc, stack, context, line, offset) do
language_tag__119(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__116(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__117(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__116(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__113(rest, acc, stack, context, line, offset)
end
defp language_tag__117(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__115(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__118(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__116(rest, [], stack, context, line, offset)
end
defp language_tag__119(rest, acc, stack, context, line, offset) do
language_tag__120(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__120(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__121(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__120(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__118(rest, acc, stack, context, line, offset)
end
defp language_tag__121(rest, acc, stack, context, line, offset) do
language_tag__123(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__123(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__124(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__123(rest, acc, stack, context, line, offset) do
language_tag__122(rest, acc, stack, context, line, offset)
end
defp language_tag__122(rest, acc, [_ | stack], context, line, offset) do
language_tag__125(rest, acc, stack, context, line, offset)
end
defp language_tag__124(rest, acc, [1 | stack], context, line, offset) do
language_tag__125(rest, acc, stack, context, line, offset)
end
defp language_tag__124(rest, acc, [count | stack], context, line, offset) do
language_tag__123(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__125(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__126(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__126(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__115(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__115(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__127(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__127(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__108(rest, acc, stack, context, line, offset)
end
defp language_tag__113(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__128(rest, acc, stack, context, line, offset)
end
defp language_tag__128(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__129(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__128(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__108(rest, acc, stack, context, line, offset)
end
defp language_tag__129(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__130(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__130(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__76(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__131(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__109(rest, [], stack, context, line, offset)
end
defp language_tag__132(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, x5::integer,
x6::integer, x7::integer, x8::integer, x9::integer, x10::integer, x11::integer,
rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) and
((x5 >= 97 and x5 <= 122) or (x5 >= 65 and x5 <= 90)) and
((x6 >= 97 and x6 <= 122) or (x6 >= 65 and x6 <= 90)) and x7 === 45 and
((x8 >= 97 and x8 <= 122) or (x8 >= 65 and x8 <= 90)) and
((x9 >= 97 and x9 <= 122) or (x9 >= 65 and x9 <= 90)) and
((x10 >= 97 and x10 <= 122) or (x10 >= 65 and x10 <= 90)) and x11 === 45 do
language_tag__133(
rest,
[
language_subtags: [
<<x0::integer, x1::integer, x2::integer>>,
<<x4::integer, x5::integer, x6::integer>>,
<<x8::integer, x9::integer, x10::integer>>
]
] ++ acc,
stack,
context,
comb__line,
comb__offset + 12
)
end
defp language_tag__132(rest, acc, stack, context, line, offset) do
language_tag__131(rest, acc, stack, context, line, offset)
end
defp language_tag__133(rest, acc, stack, context, line, offset) do
language_tag__134(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__134(rest, acc, stack, context, line, offset) do
language_tag__135(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__135(rest, acc, stack, context, line, offset) do
language_tag__137(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__137(rest, acc, stack, context, line, offset) do
language_tag__142(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__139(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__140(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__139(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__136(rest, acc, stack, context, line, offset)
end
defp language_tag__140(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__138(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__141(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__139(rest, [], stack, context, line, offset)
end
defp language_tag__142(rest, acc, stack, context, line, offset) do
language_tag__143(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__143(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__144(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__143(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__141(rest, acc, stack, context, line, offset)
end
defp language_tag__144(rest, acc, stack, context, line, offset) do
language_tag__146(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__146(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__147(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__146(rest, acc, stack, context, line, offset) do
language_tag__145(rest, acc, stack, context, line, offset)
end
defp language_tag__145(rest, acc, [_ | stack], context, line, offset) do
language_tag__148(rest, acc, stack, context, line, offset)
end
defp language_tag__147(rest, acc, [1 | stack], context, line, offset) do
language_tag__148(rest, acc, stack, context, line, offset)
end
defp language_tag__147(rest, acc, [count | stack], context, line, offset) do
language_tag__146(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__148(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__149(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__149(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__138(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__138(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__150(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__150(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__131(rest, acc, stack, context, line, offset)
end
defp language_tag__136(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__151(rest, acc, stack, context, line, offset)
end
defp language_tag__151(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__152(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__151(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__131(rest, acc, stack, context, line, offset)
end
defp language_tag__152(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__153(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__153(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__76(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__154(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__132(rest, [], stack, context, line, offset)
end
defp language_tag__155(rest, acc, stack, context, line, offset) do
language_tag__156(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__156(rest, acc, stack, context, line, offset) do
language_tag__157(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__157(rest, acc, stack, context, line, offset) do
language_tag__159(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__159(rest, acc, stack, context, line, offset) do
language_tag__164(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__161(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__162(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__161(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__158(rest, acc, stack, context, line, offset)
end
defp language_tag__162(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__160(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__163(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__161(rest, [], stack, context, line, offset)
end
defp language_tag__164(rest, acc, stack, context, line, offset) do
language_tag__165(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__165(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__166(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__165(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__163(rest, acc, stack, context, line, offset)
end
defp language_tag__166(rest, acc, stack, context, line, offset) do
language_tag__168(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__168(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__169(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__168(rest, acc, stack, context, line, offset) do
language_tag__167(rest, acc, stack, context, line, offset)
end
defp language_tag__167(rest, acc, [_ | stack], context, line, offset) do
language_tag__170(rest, acc, stack, context, line, offset)
end
defp language_tag__169(rest, acc, [1 | stack], context, line, offset) do
language_tag__170(rest, acc, stack, context, line, offset)
end
defp language_tag__169(rest, acc, [count | stack], context, line, offset) do
language_tag__168(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__170(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__171(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__171(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__160(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__160(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__172(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__172(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__154(rest, acc, stack, context, line, offset)
end
defp language_tag__158(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__173(rest, acc, stack, context, line, offset)
end
defp language_tag__173(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__174(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__173(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__154(rest, acc, stack, context, line, offset)
end
defp language_tag__174(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__175(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__175(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__76(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__76(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__54(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__54(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__43(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__176(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__44(rest, [], stack, context, line, offset)
end
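# Annotation: `language_tag__177` accepts a 4-letter primary language subtag
# and tags it `language:` directly; in RFC 5646 this 4-letter form is
# reserved for future use.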
defp language_tag__177(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__178(
rest,
[language: <<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__177(rest, acc, stack, context, line, offset) do
language_tag__176(rest, acc, stack, context, line, offset)
end
defp language_tag__178(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__43(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__179(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__177(rest, [], stack, context, line, offset)
end
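# Annotation: `language_tag__180`..`__188` accept a 5..8-letter primary
# language subtag: five ASCII letters up front, then the `[3 | stack]`
# counter admits up to three more before the result is tagged `language:`.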
defp language_tag__180(rest, acc, stack, context, line, offset) do
language_tag__181(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__181(rest, acc, stack, context, line, offset) do
language_tag__182(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__182(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) do
language_tag__183(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__182(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__179(rest, acc, stack, context, line, offset)
end
defp language_tag__183(rest, acc, stack, context, line, offset) do
language_tag__185(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__185(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) do
language_tag__186(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__185(rest, acc, stack, context, line, offset) do
language_tag__184(rest, acc, stack, context, line, offset)
end
defp language_tag__184(rest, acc, [_ | stack], context, line, offset) do
language_tag__187(rest, acc, stack, context, line, offset)
end
defp language_tag__186(rest, acc, [1 | stack], context, line, offset) do
language_tag__187(rest, acc, stack, context, line, offset)
end
defp language_tag__186(rest, acc, [count | stack], context, line, offset) do
language_tag__185(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__187(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__188(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__188(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__189(
rest,
[
language:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__189(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__43(rest, acc ++ previous_acc, stack, context, line, offset)
end
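# Annotation: `language_tag__43` opens an optional "-"-prefixed group after
# the language production; `__196`..`__212` try a variant subtag first and
# fall back to a 4-letter script. If the leading hyphen in `__193` is absent,
# `__192` rewinds to the checkpoint and parsing resumes at `__191`.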
defp language_tag__43(rest, acc, stack, context, line, offset) do
language_tag__193(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__191(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__190(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__192(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__191(rest, [], stack, context, line, offset)
end
defp language_tag__193(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__194(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__193(rest, acc, stack, context, line, offset) do
language_tag__192(rest, acc, stack, context, line, offset)
end
defp language_tag__194(rest, acc, stack, context, line, offset) do
language_tag__195(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__195(rest, acc, stack, context, line, offset) do
language_tag__196(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__196(rest, acc, stack, context, line, offset) do
language_tag__198(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__198(rest, acc, stack, context, line, offset) do
language_tag__203(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__200(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__201(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__200(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__197(rest, acc, stack, context, line, offset)
end
defp language_tag__201(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__199(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__202(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__200(rest, [], stack, context, line, offset)
end
defp language_tag__203(rest, acc, stack, context, line, offset) do
language_tag__204(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__204(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__205(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__204(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__202(rest, acc, stack, context, line, offset)
end
defp language_tag__205(rest, acc, stack, context, line, offset) do
language_tag__207(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__207(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__208(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__207(rest, acc, stack, context, line, offset) do
language_tag__206(rest, acc, stack, context, line, offset)
end
defp language_tag__206(rest, acc, [_ | stack], context, line, offset) do
language_tag__209(rest, acc, stack, context, line, offset)
end
defp language_tag__208(rest, acc, [1 | stack], context, line, offset) do
language_tag__209(rest, acc, stack, context, line, offset)
end
defp language_tag__208(rest, acc, [count | stack], context, line, offset) do
language_tag__207(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__209(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__210(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__210(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__199(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__199(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__211(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__211(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__192(rest, acc, stack, context, line, offset)
end
defp language_tag__197(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__212(rest, acc, stack, context, line, offset)
end
defp language_tag__212(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__213(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__212(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__192(rest, acc, stack, context, line, offset)
end
defp language_tag__213(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__214(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__214(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__190(rest, acc ++ previous_acc, stack, context, line, offset)
end
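# Annotation: `language_tag__190` opens the next optional "-"-prefixed group;
# after the hyphen, a variant subtag is tried first (`__223`..`__225`) before
# falling back to a territory match in `__237`.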
defp language_tag__190(rest, acc, stack, context, line, offset) do
language_tag__218(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__216(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__215(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__217(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__216(rest, [], stack, context, line, offset)
end
defp language_tag__218(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__219(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__218(rest, acc, stack, context, line, offset) do
language_tag__217(rest, acc, stack, context, line, offset)
end
defp language_tag__219(rest, acc, stack, context, line, offset) do
language_tag__220(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__220(rest, acc, stack, context, line, offset) do
language_tag__221(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__221(rest, acc, stack, context, line, offset) do
language_tag__223(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__223(rest, acc, stack, context, line, offset) do
language_tag__228(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__225(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__226(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__225(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__222(rest, acc, stack, context, line, offset)
end
defp language_tag__226(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__224(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__227(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__225(rest, [], stack, context, line, offset)
end
defp language_tag__228(rest, acc, stack, context, line, offset) do
language_tag__229(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__229(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__230(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__229(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__227(rest, acc, stack, context, line, offset)
end
defp language_tag__230(rest, acc, stack, context, line, offset) do
language_tag__232(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__232(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__233(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__232(rest, acc, stack, context, line, offset) do
language_tag__231(rest, acc, stack, context, line, offset)
end
defp language_tag__231(rest, acc, [_ | stack], context, line, offset) do
language_tag__234(rest, acc, stack, context, line, offset)
end
defp language_tag__233(rest, acc, [1 | stack], context, line, offset) do
language_tag__234(rest, acc, stack, context, line, offset)
end
defp language_tag__233(rest, acc, [count | stack], context, line, offset) do
language_tag__232(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__234(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__235(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__235(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__224(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__224(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__236(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__236(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__217(rest, acc, stack, context, line, offset)
end
defp language_tag__222(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__237(rest, acc, stack, context, line, offset)
end
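# Annotation: a territory is either two ASCII letters (e.g. "DE") or three
# digits; the digit branch folds the characters into an integer, so "419"
# becomes 419. `__238` then tags the value as `territory:`.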
defp language_tag__237(
<<x0::integer, x1::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) do
language_tag__238(
rest,
[<<x0::integer, x1::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 2
)
end
defp language_tag__237(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and (x1 >= 48 and x1 <= 57) and (x2 >= 48 and x2 <= 57) do
language_tag__238(
rest,
[x2 - 48 + (x1 - 48) * 10 + (x0 - 48) * 100] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__237(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__217(rest, acc, stack, context, line, offset)
end
defp language_tag__238(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__239(
rest,
[
territory:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__239(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__215(rest, acc ++ previous_acc, stack, context, line, offset)
end
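# Annotation: `language_tag__215` starts a zero-or-more loop of "-" <variant>;
# each successful pass ends in `__256`, which pushes a fresh snapshot and
# jumps back to `__241` for the next iteration.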
defp language_tag__215(rest, acc, stack, context, line, offset) do
language_tag__241(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__241(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__242(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__241(rest, acc, stack, context, line, offset) do
language_tag__240(rest, acc, stack, context, line, offset)
end
defp language_tag__242(rest, acc, stack, context, line, offset) do
language_tag__243(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__243(rest, acc, stack, context, line, offset) do
language_tag__248(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__245(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__246(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__245(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__240(rest, acc, stack, context, line, offset)
end
defp language_tag__246(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__244(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__247(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__245(rest, [], stack, context, line, offset)
end
defp language_tag__248(rest, acc, stack, context, line, offset) do
language_tag__249(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__249(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__250(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__249(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__247(rest, acc, stack, context, line, offset)
end
defp language_tag__250(rest, acc, stack, context, line, offset) do
language_tag__252(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__252(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__253(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__252(rest, acc, stack, context, line, offset) do
language_tag__251(rest, acc, stack, context, line, offset)
end
defp language_tag__251(rest, acc, [_ | stack], context, line, offset) do
language_tag__254(rest, acc, stack, context, line, offset)
end
defp language_tag__253(rest, acc, [1 | stack], context, line, offset) do
language_tag__254(rest, acc, stack, context, line, offset)
end
defp language_tag__253(rest, acc, [count | stack], context, line, offset) do
language_tag__252(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__254(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__255(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__255(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__244(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__244(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__256(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__240(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__257(rest, acc, stack, context, line, offset)
end
defp language_tag__256(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__241(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
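# Annotation: `collapse_variants/1` is not among the generated clauses; it is
# presumably a private reduce helper defined elsewhere in this module that
# merges the accumulated `language_variant:` entries into a single list.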
defp language_tag__257(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__258(
rest,
[collapse_variants(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__258(rest, acc, stack, context, line, offset) do
language_tag__260(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__260(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__261(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__260(rest, acc, stack, context, line, offset) do
language_tag__259(rest, acc, stack, context, line, offset)
end
defp language_tag__261(rest, acc, stack, context, line, offset) do
language_tag__555(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
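# Annotation: `language_tag__555` dispatches over the extension alternatives.
# The generic form starting at `__263` reads a singleton tagged `type:`; the
# guard in `__265` admits digits and letters except t/T and u/U (presumably
# handled by dedicated transform and Unicode-locale branches of the choice)
# and x/X, which RFC 5646 reserves for private use.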
defp language_tag__263(rest, acc, stack, context, line, offset) do
language_tag__264(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__264(rest, acc, stack, context, line, offset) do
language_tag__265(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__265(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 48 and x0 <= 57) or (x0 >= 97 and x0 <= 115) or (x0 >= 65 and x0 <= 83) or
(x0 >= 118 and x0 <= 119) or (x0 >= 86 and x0 <= 87) or (x0 >= 121 and x0 <= 122) or
(x0 >= 89 and x0 <= 90) do
language_tag__266(
rest,
[type: <<x0::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 1
)
end
defp language_tag__265(rest, _acc, stack, context, line, offset) do
[_, _, _, acc | stack] = stack
language_tag__259(rest, acc, stack, context, line, offset)
end
defp language_tag__266(rest, acc, stack, context, line, offset) do
language_tag__267(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__267(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__268(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__267(rest, _acc, stack, context, line, offset) do
[_, _, _, _, acc | stack] = stack
language_tag__259(rest, acc, stack, context, line, offset)
end
defp language_tag__268(rest, acc, stack, context, line, offset) do
language_tag__269(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__269(
<<x0::integer, x1::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) do
language_tag__270(
rest,
[<<x0::integer, x1::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 2
)
end
defp language_tag__269(rest, _acc, stack, context, line, offset) do
[_, _, _, _, _, acc | stack] = stack
language_tag__259(rest, acc, stack, context, line, offset)
end
defp language_tag__270(rest, acc, stack, context, line, offset) do
language_tag__272(rest, acc, [6 | stack], context, line, offset)
end
defp language_tag__272(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__273(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__272(rest, acc, stack, context, line, offset) do
language_tag__271(rest, acc, stack, context, line, offset)
end
defp language_tag__271(rest, acc, [_ | stack], context, line, offset) do
language_tag__274(rest, acc, stack, context, line, offset)
end
defp language_tag__273(rest, acc, [1 | stack], context, line, offset) do
language_tag__274(rest, acc, stack, context, line, offset)
end
defp language_tag__273(rest, acc, [count | stack], context, line, offset) do
language_tag__272(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__274(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__275(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__275(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__276(
rest,
[
attribute:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__276(rest, acc, stack, context, line, offset) do
language_tag__278(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__278(rest, acc, stack, context, line, offset) do
language_tag__279(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__279(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__280(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__279(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__277(rest, acc, stack, context, line, offset)
end
defp language_tag__280(rest, acc, stack, context, line, offset) do
language_tag__281(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__281(
<<x0::integer, x1::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) do
language_tag__282(
rest,
[<<x0::integer, x1::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 2
)
end
defp language_tag__281(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__277(rest, acc, stack, context, line, offset)
end
defp language_tag__282(rest, acc, stack, context, line, offset) do
language_tag__284(rest, acc, [6 | stack], context, line, offset)
end
defp language_tag__284(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__285(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__284(rest, acc, stack, context, line, offset) do
language_tag__283(rest, acc, stack, context, line, offset)
end
defp language_tag__283(rest, acc, [_ | stack], context, line, offset) do
language_tag__286(rest, acc, stack, context, line, offset)
end
defp language_tag__285(rest, acc, [1 | stack], context, line, offset) do
language_tag__286(rest, acc, stack, context, line, offset)
end
defp language_tag__285(rest, acc, [count | stack], context, line, offset) do
language_tag__284(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__286(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__287(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__287(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__288(
rest,
[
attribute:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__277(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__289(rest, acc, stack, context, line, offset)
end
defp language_tag__288(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__278(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__289(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__290(
rest,
[collapse_extension(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__290(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__291(
rest,
[
extension:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__291(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__262(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__292(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__263(rest, [], stack, context, line, offset)
end
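# Transform-extension branch: __295 matches the `t`/`T` singleton
# (codepoints 116/84) and backtracks to __292 when it is absent. The
# hyphen-led remainder after the singleton is optional: when no `-`
# follows, __300 falls through via __299/__298 instead of failing.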
defp language_tag__293(rest, acc, stack, context, line, offset) do
language_tag__294(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__294(rest, acc, stack, context, line, offset) do
language_tag__295(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__295(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 116 or x0 === 84 do
language_tag__296(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__295(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__292(rest, acc, stack, context, line, offset)
end
defp language_tag__296(rest, acc, stack, context, line, offset) do
language_tag__300(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__298(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__297(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__299(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__298(rest, [], stack, context, line, offset)
end
defp language_tag__300(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__301(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__300(rest, acc, stack, context, line, offset) do
language_tag__299(rest, acc, stack, context, line, offset)
end
defp language_tag__301(rest, acc, stack, context, line, offset) do
language_tag__302(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__302(rest, acc, stack, context, line, offset) do
language_tag__440(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
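# __304 and friends parse the `tlang` primary language inside the
# transform extension: two ASCII letters (__306) plus at most one more
# (__309, count 1), stringified in __311 and tagged `:language` in __312.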
defp language_tag__304(rest, acc, stack, context, line, offset) do
language_tag__305(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__305(rest, acc, stack, context, line, offset) do
language_tag__306(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__306(
<<x0::integer, x1::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) do
language_tag__307(
rest,
[<<x0::integer, x1::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 2
)
end
defp language_tag__306(rest, _acc, stack, context, line, offset) do
[_, _, _, _, acc | stack] = stack
language_tag__299(rest, acc, stack, context, line, offset)
end
defp language_tag__307(rest, acc, stack, context, line, offset) do
language_tag__309(rest, acc, [1 | stack], context, line, offset)
end
defp language_tag__309(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) do
language_tag__310(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__309(rest, acc, stack, context, line, offset) do
language_tag__308(rest, acc, stack, context, line, offset)
end
defp language_tag__308(rest, acc, [_ | stack], context, line, offset) do
language_tag__311(rest, acc, stack, context, line, offset)
end
defp language_tag__310(rest, acc, [1 | stack], context, line, offset) do
language_tag__311(rest, acc, stack, context, line, offset)
end
defp language_tag__310(rest, acc, [count | stack], context, line, offset) do
language_tag__309(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__311(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__312(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__312(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__313(
rest,
[
language:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__313(rest, acc, stack, context, line, offset) do
language_tag__317(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__315(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__314(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__316(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__315(rest, [], stack, context, line, offset)
end
defp language_tag__317(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__318(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__317(rest, acc, stack, context, line, offset) do
language_tag__316(rest, acc, stack, context, line, offset)
end
defp language_tag__318(rest, acc, stack, context, line, offset) do
language_tag__319(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__319(rest, acc, stack, context, line, offset) do
language_tag__321(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__321(rest, acc, stack, context, line, offset) do
language_tag__326(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
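# Variant subtags come in the two RFC 5646 shapes: 5-8 alphanumerics
# (__327 matches five, __330 up to three more) or DIGIT plus three
# alphanumerics (__323). The longer form is attempted first; __325
# backtracks to the digit form when it fails.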
defp language_tag__323(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__324(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__323(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__320(rest, acc, stack, context, line, offset)
end
defp language_tag__324(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__322(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__325(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__323(rest, [], stack, context, line, offset)
end
defp language_tag__326(rest, acc, stack, context, line, offset) do
language_tag__327(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__327(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__328(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__327(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__325(rest, acc, stack, context, line, offset)
end
defp language_tag__328(rest, acc, stack, context, line, offset) do
language_tag__330(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__330(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__331(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__330(rest, acc, stack, context, line, offset) do
language_tag__329(rest, acc, stack, context, line, offset)
end
defp language_tag__329(rest, acc, [_ | stack], context, line, offset) do
language_tag__332(rest, acc, stack, context, line, offset)
end
defp language_tag__331(rest, acc, [1 | stack], context, line, offset) do
language_tag__332(rest, acc, stack, context, line, offset)
end
defp language_tag__331(rest, acc, [count | stack], context, line, offset) do
language_tag__330(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__332(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__333(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__333(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__322(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__322(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__334(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__334(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__316(rest, acc, stack, context, line, offset)
end
defp language_tag__320(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__335(rest, acc, stack, context, line, offset)
end
defp language_tag__335(rest, acc, stack, context, line, offset) do
language_tag__415(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
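# Extended-language subtags: up to three 3-letter subtags may follow the
# tlang. __415 (no extlang) is attempted first; after that, the
# hyphen-terminated spellings that continue with script/variant subtags
# (__392, __369, __346) are tried before the bare ones (__343, __340,
# __337), falling through the __391/__368/__345/__342/__339 backtrack
# helpers in between.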
defp language_tag__337(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) do
language_tag__338(
rest,
[language_subtags: [<<x0::integer, x1::integer, x2::integer>>]] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__337(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__316(rest, acc, stack, context, line, offset)
end
defp language_tag__338(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__336(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__339(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__337(rest, [], stack, context, line, offset)
end
defp language_tag__340(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, x5::integer,
x6::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) and
((x5 >= 97 and x5 <= 122) or (x5 >= 65 and x5 <= 90)) and
((x6 >= 97 and x6 <= 122) or (x6 >= 65 and x6 <= 90)) do
language_tag__341(
rest,
[<<x4::integer, x5::integer, x6::integer>>, <<x0::integer, x1::integer, x2::integer>>] ++
acc,
stack,
context,
comb__line,
comb__offset + 7
)
end
defp language_tag__340(rest, acc, stack, context, line, offset) do
language_tag__339(rest, acc, stack, context, line, offset)
end
defp language_tag__341(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__336(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__342(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__340(rest, [], stack, context, line, offset)
end
defp language_tag__343(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, x5::integer,
x6::integer, x7::integer, x8::integer, x9::integer, x10::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) and
((x5 >= 97 and x5 <= 122) or (x5 >= 65 and x5 <= 90)) and
((x6 >= 97 and x6 <= 122) or (x6 >= 65 and x6 <= 90)) and x7 === 45 and
((x8 >= 97 and x8 <= 122) or (x8 >= 65 and x8 <= 90)) and
((x9 >= 97 and x9 <= 122) or (x9 >= 65 and x9 <= 90)) and
((x10 >= 97 and x10 <= 122) or (x10 >= 65 and x10 <= 90)) do
language_tag__344(
rest,
[
<<x8::integer, x9::integer, x10::integer>>,
<<x4::integer, x5::integer, x6::integer>>,
<<x0::integer, x1::integer, x2::integer>>
] ++ acc,
stack,
context,
comb__line,
comb__offset + 11
)
end
defp language_tag__343(rest, acc, stack, context, line, offset) do
language_tag__342(rest, acc, stack, context, line, offset)
end
defp language_tag__344(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__336(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__345(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__343(rest, [], stack, context, line, offset)
end
defp language_tag__346(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 do
language_tag__347(
rest,
[language_subtags: [<<x0::integer, x1::integer, x2::integer>>]] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__346(rest, acc, stack, context, line, offset) do
language_tag__345(rest, acc, stack, context, line, offset)
end
defp language_tag__347(rest, acc, stack, context, line, offset) do
language_tag__348(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__348(rest, acc, stack, context, line, offset) do
language_tag__349(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__349(rest, acc, stack, context, line, offset) do
language_tag__351(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__351(rest, acc, stack, context, line, offset) do
language_tag__356(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__353(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__354(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__353(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__350(rest, acc, stack, context, line, offset)
end
defp language_tag__354(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__352(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__355(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__353(rest, [], stack, context, line, offset)
end
defp language_tag__356(rest, acc, stack, context, line, offset) do
language_tag__357(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__357(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__358(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__357(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__355(rest, acc, stack, context, line, offset)
end
defp language_tag__358(rest, acc, stack, context, line, offset) do
language_tag__360(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__360(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__361(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__360(rest, acc, stack, context, line, offset) do
language_tag__359(rest, acc, stack, context, line, offset)
end
defp language_tag__359(rest, acc, [_ | stack], context, line, offset) do
language_tag__362(rest, acc, stack, context, line, offset)
end
defp language_tag__361(rest, acc, [1 | stack], context, line, offset) do
language_tag__362(rest, acc, stack, context, line, offset)
end
defp language_tag__361(rest, acc, [count | stack], context, line, offset) do
language_tag__360(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__362(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__363(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__363(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__352(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__352(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__364(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__364(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__345(rest, acc, stack, context, line, offset)
end
defp language_tag__350(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__365(rest, acc, stack, context, line, offset)
end
defp language_tag__365(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__366(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__365(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__345(rest, acc, stack, context, line, offset)
end
defp language_tag__366(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__367(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__367(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__336(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__368(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__346(rest, [], stack, context, line, offset)
end
defp language_tag__369(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, x5::integer,
x6::integer, x7::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) and
((x5 >= 97 and x5 <= 122) or (x5 >= 65 and x5 <= 90)) and
((x6 >= 97 and x6 <= 122) or (x6 >= 65 and x6 <= 90)) and x7 === 45 do
language_tag__370(
rest,
[
language_subtags: [
<<x0::integer, x1::integer, x2::integer>>,
<<x4::integer, x5::integer, x6::integer>>
]
] ++ acc,
stack,
context,
comb__line,
comb__offset + 8
)
end
defp language_tag__369(rest, acc, stack, context, line, offset) do
language_tag__368(rest, acc, stack, context, line, offset)
end
defp language_tag__370(rest, acc, stack, context, line, offset) do
language_tag__371(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__371(rest, acc, stack, context, line, offset) do
language_tag__372(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__372(rest, acc, stack, context, line, offset) do
language_tag__374(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__374(rest, acc, stack, context, line, offset) do
language_tag__379(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__376(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__377(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__376(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__373(rest, acc, stack, context, line, offset)
end
defp language_tag__377(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__375(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__378(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__376(rest, [], stack, context, line, offset)
end
defp language_tag__379(rest, acc, stack, context, line, offset) do
language_tag__380(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__380(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__381(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__380(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__378(rest, acc, stack, context, line, offset)
end
defp language_tag__381(rest, acc, stack, context, line, offset) do
language_tag__383(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__383(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__384(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__383(rest, acc, stack, context, line, offset) do
language_tag__382(rest, acc, stack, context, line, offset)
end
defp language_tag__382(rest, acc, [_ | stack], context, line, offset) do
language_tag__385(rest, acc, stack, context, line, offset)
end
defp language_tag__384(rest, acc, [1 | stack], context, line, offset) do
language_tag__385(rest, acc, stack, context, line, offset)
end
defp language_tag__384(rest, acc, [count | stack], context, line, offset) do
language_tag__383(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__385(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__386(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__386(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__375(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__375(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__387(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__387(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__368(rest, acc, stack, context, line, offset)
end
defp language_tag__373(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__388(rest, acc, stack, context, line, offset)
end
defp language_tag__388(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__389(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__388(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__368(rest, acc, stack, context, line, offset)
end
defp language_tag__389(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__390(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__390(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__336(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__391(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__369(rest, [], stack, context, line, offset)
end
defp language_tag__392(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, x5::integer,
x6::integer, x7::integer, x8::integer, x9::integer, x10::integer, x11::integer,
rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and x3 === 45 and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) and
((x5 >= 97 and x5 <= 122) or (x5 >= 65 and x5 <= 90)) and
((x6 >= 97 and x6 <= 122) or (x6 >= 65 and x6 <= 90)) and x7 === 45 and
((x8 >= 97 and x8 <= 122) or (x8 >= 65 and x8 <= 90)) and
((x9 >= 97 and x9 <= 122) or (x9 >= 65 and x9 <= 90)) and
((x10 >= 97 and x10 <= 122) or (x10 >= 65 and x10 <= 90)) and x11 === 45 do
language_tag__393(
rest,
[
language_subtags: [
<<x0::integer, x1::integer, x2::integer>>,
<<x4::integer, x5::integer, x6::integer>>,
<<x8::integer, x9::integer, x10::integer>>
]
] ++ acc,
stack,
context,
comb__line,
comb__offset + 12
)
end
defp language_tag__392(rest, acc, stack, context, line, offset) do
language_tag__391(rest, acc, stack, context, line, offset)
end
defp language_tag__393(rest, acc, stack, context, line, offset) do
language_tag__394(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__394(rest, acc, stack, context, line, offset) do
language_tag__395(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__395(rest, acc, stack, context, line, offset) do
language_tag__397(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__397(rest, acc, stack, context, line, offset) do
language_tag__402(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__399(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__400(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__399(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__396(rest, acc, stack, context, line, offset)
end
defp language_tag__400(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__398(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__401(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__399(rest, [], stack, context, line, offset)
end
defp language_tag__402(rest, acc, stack, context, line, offset) do
language_tag__403(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__403(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__404(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__403(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__401(rest, acc, stack, context, line, offset)
end
defp language_tag__404(rest, acc, stack, context, line, offset) do
language_tag__406(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__406(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__407(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__406(rest, acc, stack, context, line, offset) do
language_tag__405(rest, acc, stack, context, line, offset)
end
defp language_tag__405(rest, acc, [_ | stack], context, line, offset) do
language_tag__408(rest, acc, stack, context, line, offset)
end
defp language_tag__407(rest, acc, [1 | stack], context, line, offset) do
language_tag__408(rest, acc, stack, context, line, offset)
end
defp language_tag__407(rest, acc, [count | stack], context, line, offset) do
language_tag__406(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__408(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__409(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__409(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__398(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__398(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__410(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__410(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__391(rest, acc, stack, context, line, offset)
end
defp language_tag__396(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__411(rest, acc, stack, context, line, offset)
end
defp language_tag__411(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__412(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__411(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__391(rest, acc, stack, context, line, offset)
end
defp language_tag__412(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__413(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__413(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__336(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__414(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__392(rest, [], stack, context, line, offset)
end
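# __415 is the first alternative tried after the tlang: a variant subtag
# (5-8 alphanumerics via __425/__428, or DIGIT+3 via __421) or, failing
# that, a 4-letter script subtag (__433, tagged `:script`).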
defp language_tag__415(rest, acc, stack, context, line, offset) do
language_tag__416(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__416(rest, acc, stack, context, line, offset) do
language_tag__417(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__417(rest, acc, stack, context, line, offset) do
language_tag__419(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__419(rest, acc, stack, context, line, offset) do
language_tag__424(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__421(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__422(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__421(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__418(rest, acc, stack, context, line, offset)
end
defp language_tag__422(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__420(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__423(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__421(rest, [], stack, context, line, offset)
end
defp language_tag__424(rest, acc, stack, context, line, offset) do
language_tag__425(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__425(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__426(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__425(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__423(rest, acc, stack, context, line, offset)
end
defp language_tag__426(rest, acc, stack, context, line, offset) do
language_tag__428(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__428(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__429(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__428(rest, acc, stack, context, line, offset) do
language_tag__427(rest, acc, stack, context, line, offset)
end
defp language_tag__427(rest, acc, [_ | stack], context, line, offset) do
language_tag__430(rest, acc, stack, context, line, offset)
end
defp language_tag__429(rest, acc, [1 | stack], context, line, offset) do
language_tag__430(rest, acc, stack, context, line, offset)
end
defp language_tag__429(rest, acc, [count | stack], context, line, offset) do
language_tag__428(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__430(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__431(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__431(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__420(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__420(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__432(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__432(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__414(rest, acc, stack, context, line, offset)
end
defp language_tag__418(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__433(rest, acc, stack, context, line, offset)
end
defp language_tag__433(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__434(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__433(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__414(rest, acc, stack, context, line, offset)
end
defp language_tag__434(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__435(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__435(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__336(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__336(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__314(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__314(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__303(rest, acc ++ previous_acc, stack, context, line, offset)
end
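# Fallback ladder for the tlang spelling (entered from __302): __440
# consumes a 5-8 letter language subtag first, __437 a 4-letter one, and
# __436 finally retries the 2-3 letter form via __304.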
defp language_tag__436(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__304(rest, [], stack, context, line, offset)
end
defp language_tag__437(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__438(
rest,
[language: <<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__437(rest, acc, stack, context, line, offset) do
language_tag__436(rest, acc, stack, context, line, offset)
end
defp language_tag__438(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__303(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__439(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__437(rest, [], stack, context, line, offset)
end
defp language_tag__440(rest, acc, stack, context, line, offset) do
language_tag__441(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__441(rest, acc, stack, context, line, offset) do
language_tag__442(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__442(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90)) do
language_tag__443(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__442(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__439(rest, acc, stack, context, line, offset)
end
defp language_tag__443(rest, acc, stack, context, line, offset) do
language_tag__445(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__445(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) do
language_tag__446(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__445(rest, acc, stack, context, line, offset) do
language_tag__444(rest, acc, stack, context, line, offset)
end
defp language_tag__444(rest, acc, [_ | stack], context, line, offset) do
language_tag__447(rest, acc, stack, context, line, offset)
end
defp language_tag__446(rest, acc, [1 | stack], context, line, offset) do
language_tag__447(rest, acc, stack, context, line, offset)
end
defp language_tag__446(rest, acc, [count | stack], context, line, offset) do
language_tag__445(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__447(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__448(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__448(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__449(
rest,
[
language:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__449(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__303(rest, acc ++ previous_acc, stack, context, line, offset)
end
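# __303 resumes once the tlang is parsed: each optional `-`-led group may
# be a variant (tagged `language_variant` in __459) or a 4-letter script
# (__472, tagged `:script`), after which __450 appears to repeat the same
# pattern for the next group.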
defp language_tag__303(rest, acc, stack, context, line, offset) do
language_tag__453(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__451(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__450(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__452(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__451(rest, [], stack, context, line, offset)
end
defp language_tag__453(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__454(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__453(rest, acc, stack, context, line, offset) do
language_tag__452(rest, acc, stack, context, line, offset)
end
defp language_tag__454(rest, acc, stack, context, line, offset) do
language_tag__455(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__455(rest, acc, stack, context, line, offset) do
language_tag__456(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__456(rest, acc, stack, context, line, offset) do
language_tag__458(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__458(rest, acc, stack, context, line, offset) do
language_tag__463(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__460(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__461(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__460(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__457(rest, acc, stack, context, line, offset)
end
defp language_tag__461(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__459(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__462(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__460(rest, [], stack, context, line, offset)
end
defp language_tag__463(rest, acc, stack, context, line, offset) do
language_tag__464(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__464(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__465(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__464(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__462(rest, acc, stack, context, line, offset)
end
defp language_tag__465(rest, acc, stack, context, line, offset) do
language_tag__467(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__467(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__468(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__467(rest, acc, stack, context, line, offset) do
language_tag__466(rest, acc, stack, context, line, offset)
end
defp language_tag__466(rest, acc, [_ | stack], context, line, offset) do
language_tag__469(rest, acc, stack, context, line, offset)
end
defp language_tag__468(rest, acc, [1 | stack], context, line, offset) do
language_tag__469(rest, acc, stack, context, line, offset)
end
defp language_tag__468(rest, acc, [count | stack], context, line, offset) do
language_tag__467(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__469(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__470(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__470(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__459(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__459(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__471(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__471(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__452(rest, acc, stack, context, line, offset)
end
defp language_tag__457(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__472(rest, acc, stack, context, line, offset)
end
defp language_tag__472(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90)) do
language_tag__473(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__472(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__452(rest, acc, stack, context, line, offset)
end
defp language_tag__473(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__474(
rest,
[
script:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__474(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__450(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__450(rest, acc, stack, context, line, offset) do
language_tag__478(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__476(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__475(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__477(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__476(rest, [], stack, context, line, offset)
end
defp language_tag__478(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__479(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__478(rest, acc, stack, context, line, offset) do
language_tag__477(rest, acc, stack, context, line, offset)
end
defp language_tag__479(rest, acc, stack, context, line, offset) do
language_tag__480(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__480(rest, acc, stack, context, line, offset) do
language_tag__481(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__481(rest, acc, stack, context, line, offset) do
language_tag__483(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__483(rest, acc, stack, context, line, offset) do
language_tag__488(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__485(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__486(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__485(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__482(rest, acc, stack, context, line, offset)
end
defp language_tag__486(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__484(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__487(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__485(rest, [], stack, context, line, offset)
end
defp language_tag__488(rest, acc, stack, context, line, offset) do
language_tag__489(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__489(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__490(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__489(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__487(rest, acc, stack, context, line, offset)
end
defp language_tag__490(rest, acc, stack, context, line, offset) do
language_tag__492(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__492(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__493(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__492(rest, acc, stack, context, line, offset) do
language_tag__491(rest, acc, stack, context, line, offset)
end
defp language_tag__491(rest, acc, [_ | stack], context, line, offset) do
language_tag__494(rest, acc, stack, context, line, offset)
end
defp language_tag__493(rest, acc, [1 | stack], context, line, offset) do
language_tag__494(rest, acc, stack, context, line, offset)
end
defp language_tag__493(rest, acc, [count | stack], context, line, offset) do
language_tag__492(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__494(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__495(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__495(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__484(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__484(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__496(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__496(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
[acc | stack] = stack
language_tag__477(rest, acc, stack, context, line, offset)
end
defp language_tag__482(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__497(rest, acc, stack, context, line, offset)
end
defp language_tag__497(
<<x0::integer, x1::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90)) do
language_tag__498(
rest,
[<<x0::integer, x1::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 2
)
end
defp language_tag__497(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and (x1 >= 48 and x1 <= 57) and (x2 >= 48 and x2 <= 57) do
language_tag__498(
rest,
[x2 - 48 + (x1 - 48) * 10 + (x0 - 48) * 100] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__497(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__477(rest, acc, stack, context, line, offset)
end
defp language_tag__498(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__499(
rest,
[
territory:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__499(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__475(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__475(rest, acc, stack, context, line, offset) do
language_tag__501(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__501(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__502(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__501(rest, acc, stack, context, line, offset) do
language_tag__500(rest, acc, stack, context, line, offset)
end
defp language_tag__502(rest, acc, stack, context, line, offset) do
language_tag__503(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__503(rest, acc, stack, context, line, offset) do
language_tag__508(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__505(
<<x0::integer, x1::integer, x2::integer, x3::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 >= 48 and x0 <= 57 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) do
language_tag__506(
rest,
[Enum.join([<<x0::integer>>, <<x1::integer, x2::integer, x3::integer>>])] ++ acc,
stack,
context,
comb__line,
comb__offset + 4
)
end
defp language_tag__505(rest, _acc, stack, context, line, offset) do
[_, _, acc | stack] = stack
language_tag__500(rest, acc, stack, context, line, offset)
end
defp language_tag__506(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__504(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__507(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__505(rest, [], stack, context, line, offset)
end
defp language_tag__508(rest, acc, stack, context, line, offset) do
language_tag__509(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__509(
<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) and
((x3 >= 97 and x3 <= 122) or (x3 >= 65 and x3 <= 90) or (x3 >= 48 and x3 <= 57)) and
((x4 >= 97 and x4 <= 122) or (x4 >= 65 and x4 <= 90) or (x4 >= 48 and x4 <= 57)) do
language_tag__510(
rest,
[<<x0::integer, x1::integer, x2::integer, x3::integer, x4::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 5
)
end
defp language_tag__509(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__507(rest, acc, stack, context, line, offset)
end
defp language_tag__510(rest, acc, stack, context, line, offset) do
language_tag__512(rest, acc, [3 | stack], context, line, offset)
end
defp language_tag__512(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__513(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__512(rest, acc, stack, context, line, offset) do
language_tag__511(rest, acc, stack, context, line, offset)
end
defp language_tag__511(rest, acc, [_ | stack], context, line, offset) do
language_tag__514(rest, acc, stack, context, line, offset)
end
defp language_tag__513(rest, acc, [1 | stack], context, line, offset) do
language_tag__514(rest, acc, stack, context, line, offset)
end
defp language_tag__513(rest, acc, [count | stack], context, line, offset) do
language_tag__512(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__514(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__515(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__515(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__504(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__504(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__516(
rest,
[
language_variant:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__500(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__517(rest, acc, stack, context, line, offset)
end
defp language_tag__516(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__501(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__517(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__518(
rest,
[collapse_variants(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__518(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__297(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__297(rest, acc, stack, context, line, offset) do
language_tag__519(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__519(rest, acc, stack, context, line, offset) do
language_tag__521(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__521(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__522(
rest,
[key: <<x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__521(rest, acc, stack, context, line, offset) do
language_tag__520(rest, acc, stack, context, line, offset)
end
defp language_tag__522(rest, acc, stack, context, line, offset) do
language_tag__526(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__524(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__523(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__525(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__524(rest, [], stack, context, line, offset)
end
defp language_tag__526(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__527(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__526(rest, acc, stack, context, line, offset) do
language_tag__525(rest, acc, stack, context, line, offset)
end
defp language_tag__527(rest, acc, stack, context, line, offset) do
language_tag__528(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__528(rest, acc, stack, context, line, offset) do
language_tag__529(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__529(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__530(
rest,
[<<x0::integer, x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__529(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__525(rest, acc, stack, context, line, offset)
end
defp language_tag__530(rest, acc, stack, context, line, offset) do
language_tag__532(rest, acc, [5 | stack], context, line, offset)
end
defp language_tag__532(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__533(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__532(rest, acc, stack, context, line, offset) do
language_tag__531(rest, acc, stack, context, line, offset)
end
defp language_tag__531(rest, acc, [_ | stack], context, line, offset) do
language_tag__534(rest, acc, stack, context, line, offset)
end
defp language_tag__533(rest, acc, [1 | stack], context, line, offset) do
language_tag__534(rest, acc, stack, context, line, offset)
end
defp language_tag__533(rest, acc, [count | stack], context, line, offset) do
language_tag__532(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__534(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__535(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__535(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__536(
rest,
[
type:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__536(rest, acc, stack, context, line, offset) do
language_tag__538(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__538(rest, acc, stack, context, line, offset) do
language_tag__539(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__539(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__540(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__539(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__537(rest, acc, stack, context, line, offset)
end
defp language_tag__540(rest, acc, stack, context, line, offset) do
language_tag__541(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__541(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__542(
rest,
[<<x0::integer, x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__541(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__537(rest, acc, stack, context, line, offset)
end
defp language_tag__542(rest, acc, stack, context, line, offset) do
language_tag__544(rest, acc, [5 | stack], context, line, offset)
end
defp language_tag__544(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__545(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__544(rest, acc, stack, context, line, offset) do
language_tag__543(rest, acc, stack, context, line, offset)
end
defp language_tag__543(rest, acc, [_ | stack], context, line, offset) do
language_tag__546(rest, acc, stack, context, line, offset)
end
defp language_tag__545(rest, acc, [1 | stack], context, line, offset) do
language_tag__546(rest, acc, stack, context, line, offset)
end
defp language_tag__545(rest, acc, [count | stack], context, line, offset) do
language_tag__544(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__546(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__547(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__547(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__548(
rest,
[
type:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__537(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__549(rest, acc, stack, context, line, offset)
end
defp language_tag__548(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__538(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__549(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__523(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__520(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__550(rest, acc, stack, context, line, offset)
end
defp language_tag__523(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__521(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__550(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__551(
rest,
[collapse_keywords(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__551(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__552(
rest,
[merge_langtag_and_transform(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__552(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__553(
rest,
[
transform:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__553(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__262(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__554(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__293(rest, [], stack, context, line, offset)
end
defp language_tag__555(rest, acc, stack, context, line, offset) do
language_tag__556(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__556(rest, acc, stack, context, line, offset) do
language_tag__557(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__557(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 117 or x0 === 85 do
language_tag__558(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__557(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__554(rest, acc, stack, context, line, offset)
end
defp language_tag__558(rest, acc, stack, context, line, offset) do
language_tag__595(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__560(rest, acc, stack, context, line, offset) do
language_tag__561(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__561(rest, acc, stack, context, line, offset) do
language_tag__563(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__563(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__564(
rest,
[key: <<x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__563(rest, acc, stack, context, line, offset) do
language_tag__562(rest, acc, stack, context, line, offset)
end
defp language_tag__564(rest, acc, stack, context, line, offset) do
language_tag__568(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__566(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__565(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__567(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__566(rest, [], stack, context, line, offset)
end
defp language_tag__568(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__569(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__568(rest, acc, stack, context, line, offset) do
language_tag__567(rest, acc, stack, context, line, offset)
end
defp language_tag__569(rest, acc, stack, context, line, offset) do
language_tag__570(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__570(rest, acc, stack, context, line, offset) do
language_tag__571(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__571(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__572(
rest,
[<<x0::integer, x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__571(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__567(rest, acc, stack, context, line, offset)
end
defp language_tag__572(rest, acc, stack, context, line, offset) do
language_tag__574(rest, acc, [5 | stack], context, line, offset)
end
defp language_tag__574(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__575(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__574(rest, acc, stack, context, line, offset) do
language_tag__573(rest, acc, stack, context, line, offset)
end
defp language_tag__573(rest, acc, [_ | stack], context, line, offset) do
language_tag__576(rest, acc, stack, context, line, offset)
end
defp language_tag__575(rest, acc, [1 | stack], context, line, offset) do
language_tag__576(rest, acc, stack, context, line, offset)
end
defp language_tag__575(rest, acc, [count | stack], context, line, offset) do
language_tag__574(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__576(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__577(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__577(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__578(
rest,
[
type:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__578(rest, acc, stack, context, line, offset) do
language_tag__580(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__580(rest, acc, stack, context, line, offset) do
language_tag__581(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__581(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__582(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__581(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__579(rest, acc, stack, context, line, offset)
end
defp language_tag__582(rest, acc, stack, context, line, offset) do
language_tag__583(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__583(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__584(
rest,
[<<x0::integer, x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__583(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__579(rest, acc, stack, context, line, offset)
end
defp language_tag__584(rest, acc, stack, context, line, offset) do
language_tag__586(rest, acc, [5 | stack], context, line, offset)
end
defp language_tag__586(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__587(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__586(rest, acc, stack, context, line, offset) do
language_tag__585(rest, acc, stack, context, line, offset)
end
defp language_tag__585(rest, acc, [_ | stack], context, line, offset) do
language_tag__588(rest, acc, stack, context, line, offset)
end
defp language_tag__587(rest, acc, [1 | stack], context, line, offset) do
language_tag__588(rest, acc, stack, context, line, offset)
end
defp language_tag__587(rest, acc, [count | stack], context, line, offset) do
language_tag__586(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__588(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__589(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__589(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__590(
rest,
[
type:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__579(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__591(rest, acc, stack, context, line, offset)
end
defp language_tag__590(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__580(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__591(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__565(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__562(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__592(rest, acc, stack, context, line, offset)
end
defp language_tag__565(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__563(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__592(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__593(
rest,
[collapse_keywords(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__593(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__559(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__594(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__560(rest, [], stack, context, line, offset)
end
defp language_tag__595(rest, acc, stack, context, line, offset) do
language_tag__596(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__596(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__597(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__596(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__594(rest, acc, stack, context, line, offset)
end
defp language_tag__597(rest, acc, stack, context, line, offset) do
language_tag__598(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__598(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__599(
rest,
[<<x0::integer, x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__598(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__594(rest, acc, stack, context, line, offset)
end
defp language_tag__599(rest, acc, stack, context, line, offset) do
language_tag__601(rest, acc, [5 | stack], context, line, offset)
end
defp language_tag__601(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__602(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__601(rest, acc, stack, context, line, offset) do
language_tag__600(rest, acc, stack, context, line, offset)
end
defp language_tag__600(rest, acc, [_ | stack], context, line, offset) do
language_tag__603(rest, acc, stack, context, line, offset)
end
defp language_tag__602(rest, acc, [1 | stack], context, line, offset) do
language_tag__603(rest, acc, stack, context, line, offset)
end
defp language_tag__602(rest, acc, [count | stack], context, line, offset) do
language_tag__601(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__603(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__604(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__604(rest, acc, stack, context, line, offset) do
language_tag__606(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__606(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__607(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__606(rest, acc, stack, context, line, offset) do
language_tag__605(rest, acc, stack, context, line, offset)
end
defp language_tag__607(rest, acc, stack, context, line, offset) do
language_tag__608(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__608(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__609(
rest,
[<<x0::integer, x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__608(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__605(rest, acc, stack, context, line, offset)
end
defp language_tag__609(rest, acc, stack, context, line, offset) do
language_tag__611(rest, acc, [5 | stack], context, line, offset)
end
defp language_tag__611(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__612(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__611(rest, acc, stack, context, line, offset) do
language_tag__610(rest, acc, stack, context, line, offset)
end
defp language_tag__610(rest, acc, [_ | stack], context, line, offset) do
language_tag__613(rest, acc, stack, context, line, offset)
end
defp language_tag__612(rest, acc, [1 | stack], context, line, offset) do
language_tag__613(rest, acc, stack, context, line, offset)
end
defp language_tag__612(rest, acc, [count | stack], context, line, offset) do
language_tag__611(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__613(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__614(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__605(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__615(rest, acc, stack, context, line, offset)
end
defp language_tag__614(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__606(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__615(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__616(
rest,
[attributes: :lists.reverse(user_acc)] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__616(rest, acc, stack, context, line, offset) do
language_tag__617(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__617(rest, acc, stack, context, line, offset) do
language_tag__619(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__619(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__620(
rest,
[key: <<x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__619(rest, acc, stack, context, line, offset) do
language_tag__618(rest, acc, stack, context, line, offset)
end
defp language_tag__620(rest, acc, stack, context, line, offset) do
language_tag__624(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__622(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__621(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__623(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__622(rest, [], stack, context, line, offset)
end
defp language_tag__624(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__625(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__624(rest, acc, stack, context, line, offset) do
language_tag__623(rest, acc, stack, context, line, offset)
end
defp language_tag__625(rest, acc, stack, context, line, offset) do
language_tag__626(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__626(rest, acc, stack, context, line, offset) do
language_tag__627(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__627(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__628(
rest,
[<<x0::integer, x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__627(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__623(rest, acc, stack, context, line, offset)
end
defp language_tag__628(rest, acc, stack, context, line, offset) do
language_tag__630(rest, acc, [5 | stack], context, line, offset)
end
defp language_tag__630(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__631(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__630(rest, acc, stack, context, line, offset) do
language_tag__629(rest, acc, stack, context, line, offset)
end
defp language_tag__629(rest, acc, [_ | stack], context, line, offset) do
language_tag__632(rest, acc, stack, context, line, offset)
end
defp language_tag__631(rest, acc, [1 | stack], context, line, offset) do
language_tag__632(rest, acc, stack, context, line, offset)
end
defp language_tag__631(rest, acc, [count | stack], context, line, offset) do
language_tag__630(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__632(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__633(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__633(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__634(
rest,
[
type:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__634(rest, acc, stack, context, line, offset) do
language_tag__636(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__636(rest, acc, stack, context, line, offset) do
language_tag__637(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__637(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__638(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__637(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__635(rest, acc, stack, context, line, offset)
end
defp language_tag__638(rest, acc, stack, context, line, offset) do
language_tag__639(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__639(
<<x0::integer, x1::integer, x2::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when ((x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57)) and
((x1 >= 97 and x1 <= 122) or (x1 >= 65 and x1 <= 90) or (x1 >= 48 and x1 <= 57)) and
((x2 >= 97 and x2 <= 122) or (x2 >= 65 and x2 <= 90) or (x2 >= 48 and x2 <= 57)) do
language_tag__640(
rest,
[<<x0::integer, x1::integer, x2::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 3
)
end
defp language_tag__639(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__635(rest, acc, stack, context, line, offset)
end
defp language_tag__640(rest, acc, stack, context, line, offset) do
language_tag__642(rest, acc, [5 | stack], context, line, offset)
end
defp language_tag__642(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__643(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__642(rest, acc, stack, context, line, offset) do
language_tag__641(rest, acc, stack, context, line, offset)
end
defp language_tag__641(rest, acc, [_ | stack], context, line, offset) do
language_tag__644(rest, acc, stack, context, line, offset)
end
defp language_tag__643(rest, acc, [1 | stack], context, line, offset) do
language_tag__644(rest, acc, stack, context, line, offset)
end
defp language_tag__643(rest, acc, [count | stack], context, line, offset) do
language_tag__642(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__644(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__645(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__645(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__646(
rest,
[
type:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__635(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__647(rest, acc, stack, context, line, offset)
end
defp language_tag__646(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__636(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__647(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__621(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__618(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__648(rest, acc, stack, context, line, offset)
end
defp language_tag__621(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__619(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__648(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__649(
rest,
[collapse_keywords(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__649(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__559(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__559(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__650(
rest,
[combine_attributes_and_keywords(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__650(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__651(
rest,
[
locale:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__651(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__262(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__259(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__652(rest, acc, stack, context, line, offset)
end
defp language_tag__262(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__260(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__652(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__653(
rest,
[collapse_extensions(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__653(rest, acc, stack, context, line, offset) do
language_tag__657(
rest,
[],
[{rest, context, line, offset}, acc | stack],
context,
line,
offset
)
end
defp language_tag__655(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__654(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__656(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
language_tag__655(rest, [], stack, context, line, offset)
end
defp language_tag__657(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__658(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__657(rest, acc, stack, context, line, offset) do
language_tag__656(rest, acc, stack, context, line, offset)
end
defp language_tag__658(rest, acc, stack, context, line, offset) do
language_tag__659(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__659(
<<x0::integer, x1::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 === 120 or x0 === 88) and x1 === 45 do
language_tag__660(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp language_tag__659(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__656(rest, acc, stack, context, line, offset)
end
defp language_tag__660(rest, acc, stack, context, line, offset) do
language_tag__661(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__661(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__662(
rest,
[<<x0::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 1
)
end
defp language_tag__661(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
language_tag__656(rest, acc, stack, context, line, offset)
end
defp language_tag__662(rest, acc, stack, context, line, offset) do
language_tag__664(rest, acc, [7 | stack], context, line, offset)
end
defp language_tag__664(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__665(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__664(rest, acc, stack, context, line, offset) do
language_tag__663(rest, acc, stack, context, line, offset)
end
defp language_tag__663(rest, acc, [_ | stack], context, line, offset) do
language_tag__666(rest, acc, stack, context, line, offset)
end
defp language_tag__665(rest, acc, [1 | stack], context, line, offset) do
language_tag__666(rest, acc, stack, context, line, offset)
end
defp language_tag__665(rest, acc, [count | stack], context, line, offset) do
language_tag__664(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__666(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__667(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__667(rest, acc, stack, context, line, offset) do
language_tag__669(
rest,
[],
[{rest, acc, context, line, offset} | stack],
context,
line,
offset
)
end
defp language_tag__669(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when x0 === 45 do
language_tag__670(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__669(rest, acc, stack, context, line, offset) do
language_tag__668(rest, acc, stack, context, line, offset)
end
defp language_tag__670(rest, acc, stack, context, line, offset) do
language_tag__671(rest, [], [acc | stack], context, line, offset)
end
defp language_tag__671(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__672(
rest,
[<<x0::integer>>] ++ acc,
stack,
context,
comb__line,
comb__offset + 1
)
end
defp language_tag__671(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
language_tag__668(rest, acc, stack, context, line, offset)
end
defp language_tag__672(rest, acc, stack, context, line, offset) do
language_tag__674(rest, acc, [7 | stack], context, line, offset)
end
defp language_tag__674(
<<x0::integer, rest::binary>>,
acc,
stack,
context,
comb__line,
comb__offset
)
when (x0 >= 97 and x0 <= 122) or (x0 >= 65 and x0 <= 90) or (x0 >= 48 and x0 <= 57) do
language_tag__675(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp language_tag__674(rest, acc, stack, context, line, offset) do
language_tag__673(rest, acc, stack, context, line, offset)
end
defp language_tag__673(rest, acc, [_ | stack], context, line, offset) do
language_tag__676(rest, acc, stack, context, line, offset)
end
defp language_tag__675(rest, acc, [1 | stack], context, line, offset) do
language_tag__676(rest, acc, stack, context, line, offset)
end
defp language_tag__675(rest, acc, [count | stack], context, line, offset) do
language_tag__674(rest, acc, [count - 1 | stack], context, line, offset)
end
defp language_tag__676(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__677(
rest,
[List.to_string(:lists.reverse(user_acc))] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__668(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
language_tag__678(rest, acc, stack, context, line, offset)
end
defp language_tag__677(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
language_tag__669(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
defp language_tag__678(rest, user_acc, [acc | stack], context, line, offset) do
_ = user_acc
language_tag__679(
rest,
[private_use: :lists.reverse(user_acc)] ++ acc,
stack,
context,
line,
offset
)
end
defp language_tag__679(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__654(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp language_tag__654(rest, user_acc, [acc | stack], context, line, offset) do
case(flatten(rest, user_acc, context, line, offset)) do
{user_acc, context} when is_list(user_acc) ->
language_tag__680(rest, user_acc ++ acc, stack, context, line, offset)
{:error, reason} ->
{:error, reason, rest, context, line, offset}
end
end
defp language_tag__680(rest, acc, [_, previous_acc | stack], context, line, offset) do
language_tag__1(rest, acc ++ previous_acc, stack, context, line, offset)
end
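  # Terminal clauses: `language_tag__1/6` succeeds only when the entire input
  # has been consumed; any leftover bytes fall through to the error clause below.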
defp language_tag__1(<<""::binary>>, acc, stack, context, comb__line, comb__offset) do
language_tag__681("", [] ++ acc, stack, context, comb__line, comb__offset)
end
defp language_tag__1(rest, _acc, _stack, context, line, offset) do
{:error, "expected a BCP47 language tag", rest, context, line, offset}
end
defp language_tag__681(rest, acc, _stack, context, line, offset) do
{:ok, acc, rest, context, line, offset}
end
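  # `error_on_remaining/4` is public, presumably so the `defparsec` definition
  # earlier in this file can reference it from a `post_traverse` step and reject
  # trailing input. A minimal usage sketch of the generated parser - the
  # `language_tag/1` entry point is an assumption inferred from the
  # `language_tag__*` clause names, not shown in this excerpt:
  #
  #     {:ok, acc, "", _context, _line, _offset} = language_tag("en-US")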
def error_on_remaining("", context, _line, _offset) do
{[], context}
end
def error_on_remaining(_rest, _context, _line, _offset) do
{:error, "invalid language tag"}
end
end | lib/cldr/language_tag/rfc5646_parser.ex | 0.873404 | 0.542863 | rfc5646_parser.ex | starcoder |
defmodule NewRelicAddons.Decorators do
@moduledoc """
Provides easy-to-use stackable decorators for the official New Relic library.
## Features
  - decorators are stackable with others, e.g. from other libraries
  - allows hiding args in the event tracer via the `hide_args` option
  - includes a transaction tracer with process-based scoping and a customizable category name
## Usage
You must first include decorators in your module:
use NewRelicAddons.Decorators
Then you can start decorating specific functions:
@decorate new_relic_event()
defp some_long_operation do
# ...
end
@decorate new_relic_event()
defp other_long_operation do
# ...
end
...or the entire module:
@decorate_all new_relic_event()
# ...
  ...or if the function receives sensitive arguments that should be hidden:
@decorate new_relic_event(hide_args: true)
defp change_password(user, new_password) do
# ...
end
If these functions are called within Phoenix web request processes and you've already configured
`NewRelicPhoenix`, then you're good to go - decorated calls will now appear within your web
transactions.
  If, however, you want to trace functions outside of the Phoenix flow (e.g. background jobs
  or any GenServers), you'll also have to wrap the processing function in a transaction:
@decorate new_relic_transaction()
defp process do
some_long_operation()
other_long_operation()
end
@decorate new_relic_event()
defp some_long_operation do
# ...
end
@decorate new_relic_event()
defp other_long_operation do
# ...
end
You may also specify a custom category:
@decorate new_relic_transaction("RPC")
defp process_rpc_call(request) do
# ...
end
  Keep in mind that a function wrapped in the `transaction` decorator is called in a separate
  process in order to control the lifecycle of the transaction. The process is spawned via
  `Task.async` and awaited upon with an `:infinity` timeout - thus preserving the original
  call semantics. Even though the timeout is `:infinity`, the transaction function should not
  be long-running or you'll run into trouble (see `NewRelic.start_transaction/2`).
"""
use Decorator.Define,
new_relic_transaction: 0,
new_relic_transaction: 1,
new_relic_event: 0,
new_relic_event: 1
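  # Builds the quoted transaction wrapper: `category_name` becomes the New Relic
  # transaction category, and the transaction itself is named after the decorated
  # `Module.function/arity`.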
def new_relic_transaction(opts \\ [], body, %{module: mod, name: func, arity: arity}) do
category_name = Keyword.get(opts, :category_name, "Background process")
name = "#{inspect(mod)}.#{func}/#{arity}"
    unless is_binary(category_name), do: raise(ArgumentError, "expected :category_name to be a string")
start_new_relic_transaction(Mix.env(), category_name, name, body)
end
# For the test env we do not spawn a separate task, as Ecto in shared mode is unable to close such tasks.
defp start_new_relic_transaction(:test, category_name, name, body) do
quote do
NewRelic.start_transaction(unquote(category_name), unquote(name))
unquote(body)
end
end
defp start_new_relic_transaction(_, category_name, name, body) do
quote do
fn ->
NewRelic.start_transaction(unquote(category_name), unquote(name))
unquote(body)
end
|> Task.async()
|> Task.await(:infinity)
end
end
def new_relic_event(opts \\ [], body, %{module: mod, name: func, args: args}) do
name = Keyword.get(opts, :name, func)
args = if opts[:hide_args], do: List.duplicate(:hidden, length(args)), else: args
unless is_atom(name), do: raise(ArgumentError, "expected name atom")
NewRelic.Tracer.Macro.traced_function_body(body, mod, func, args, name)
end
end | lib/new_relic_addons/decorators.ex | 0.813016 | 0.430566 | decorators.ex | starcoder |
defmodule Ecto.LoggerJSON do
@moduledoc """
Keep in sync with https://github.com/elixir-ecto/ecto/blob/master/lib/ecto/log_entry.ex
Struct used for logging entries.
It is composed of the following fields:
* query - the query as string or a function that when invoked resolves to string;
* source - the query data source;
* params - the query parameters;
* result - the query result as an `:ok` or `:error` tuple;
* query_time - the time spent executing the query in native units;
* decode_time - the time spent decoding the result in native units (it may be nil);
* queue_time - the time spent to check the connection out in native units (it may be nil);
* connection_pid - the connection process that executed the query;
* ansi_color - the color that should be used when logging the entry.
Notice all times are stored in native units. You must convert them to
the proper unit by using `System.convert_time_unit/3` before logging.
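To wire this logger in, point your repo's loggers at it. A minimal sketch,
assuming an Ecto 2.x-style `:loggers` repo option and a hypothetical
`MyApp.Repo`:

    config :my_app, MyApp.Repo,
      loggers: [{Ecto.LoggerJSON, :log, []}]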
"""
require Logger
@doc """
Overwritten to use JSON
Logs the given entry in debug mode.
The logger call will be removed at compile time if
`compile_time_purge_level` is set to higher than debug.
"""
@spec log(%{}) :: %{}
def log(entry) do
_ = Logger.debug(fn ->
%{query_time: query_time, decode_time: decode_time, queue_time: queue_time, query: query} = entry
[query_time, decode_time, queue_time] =
[query_time, decode_time, queue_time]
|> Enum.map(&format_time/1)
%{
"decode_time" => decode_time,
"duration" => Float.round(query_time + decode_time + queue_time, 3),
"log_type" => "persistence",
"request_id" => Logger.metadata[:request_id],
"query" => query,
"query_time" => query_time,
"queue_time" => queue_time
}
|> Poison.encode!
end)
entry
end
@doc """
Overwritten to use JSON
Logs the given entry in the given level.
The logger call won't be removed at compile time as
custom level is given.
"""
@spec log(%{}, atom) :: %{}
def log(entry, level) do
_ = Logger.log(level, fn ->
%{query_time: query_time, decode_time: decode_time, queue_time: queue_time, query: query} = entry
[query_time, decode_time, queue_time] =
[query_time, decode_time, queue_time]
|> Enum.map(&format_time/1)
%{
"decode_time" => decode_time,
"duration" => Float.round(query_time + decode_time + queue_time, 3),
"log_type" => "persistence",
"request_id" => Logger.metadata[:request_id],
"query" => query,
"query_time" => query_time,
"queue_time" => queue_time
}
|> Poison.encode!
end)
entry
end
## Helpers
defp format_time(nil), do: 0.0
defp format_time(time) do
ms = System.convert_time_unit(time, :native, :micro_seconds) / 1000
Float.round(ms, 3)
end
end | lib/ecto/logger_json.ex | 0.903502 | 0.504333 | logger_json.ex | starcoder |
defmodule PolicrMini.StatisticBusiness do
@moduledoc """
Implementation of the statistics business logic.
"""
use PolicrMini, business: PolicrMini.Schema.Statistic
import Ecto.Query, only: [from: 2]
@type status :: :passed | :timeout | :wronged | :other
@spec create(map) :: written_returns
def create(params) do
%Statistic{} |> Statistic.changeset(params) |> Repo.insert()
end
@spec update(Statistic.t(), map) :: written_returns
def update(statistic, params) do
statistic |> Statistic.changeset(params) |> Repo.update()
end
@day_seconds 3600 * 24
@zero_oclock ~T[00:00:00]
@spec find_today(integer, status) :: Statistic.t() | nil
def find_today(chat_id, status), do: find(chat_id, status, range: :today)
@spec find_yesterday(integer, status) :: Statistic.t() | nil
def find_yesterday(chat_id, status), do: find(chat_id, status, range: :yesterday)
@type dt_conts ::
[{:range, :today | :yesterday}] | [{:begin_at, DateTime.t()}, {:end_at, DateTime.t()}]
@spec find(integer, status, dt_conts) :: Statistic.t() | nil
defp find(chat_id, status, dt_conts) do
{begin_at, end_at} =
case Keyword.get(dt_conts, :range) do
:today -> today_datetimes()
:yesterday -> yesterday_datetimes()
nil -> {Keyword.get(dt_conts, :begin_at), Keyword.get(dt_conts, :end_at)}
end
from(
s in Statistic,
where:
s.chat_id == ^chat_id and
s.verification_status == ^status and
s.begin_at == ^begin_at and
s.end_at == ^end_at
)
|> Repo.one()
end
def fetch_today(chat_id, status, params) do
Repo.transaction(fn ->
case find_today(chat_id, status) || create(params) do
{:ok, statistic} ->
# Created a new one
statistic
{:error, e} ->
# An error occurred during creation
Repo.rollback(e)
statistic ->
# Already exists
statistic
end
end)
end
@doc """
Increments a statistic for the current day.
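A hedged usage sketch (the chat id and language code are illustrative):

    PolicrMini.StatisticBusiness.increment_one(-1001, "zh-hans", :passed)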
"""
@spec increment_one(integer, String.t(), status) :: {:ok, Statistic.t()} | {:error, any}
def increment_one(chat_id, language_code, status) do
language_code = language_code || "unknown"
{begin_at, end_at} = today_datetimes()
params = %{
chat_id: chat_id,
verifications_count: 0,
languages_top: %{language_code => 0},
begin_at: begin_at,
end_at: end_at,
verification_status: status
}
fetch_one = fn -> fetch_today(chat_id, status, params) end
trans_fun = fn ->
trans_r = increment_trans(fetch_one, language_code)
case trans_r do
{:ok, r} -> r
e -> e
end
end
# TODO: The transaction here must guarantee rollback capability and be able to return error results.
Repo.transaction(trans_fun)
end
defp increment_trans(fetch_stat, language_code) do
case fetch_stat.() do
{:ok, stat} ->
verifications_count = stat.verifications_count + 1
languages_top =
if count = stat.languages_top[language_code] do
Map.put(stat.languages_top, language_code, count + 1)
else
Map.put(stat.languages_top, language_code, 1)
end
update(stat, %{verifications_count: verifications_count, languages_top: languages_top})
e ->
e
end
end
defp today_datetimes do
begin_at = DateTime.new!(Date.utc_today(), @zero_oclock, "Etc/UTC")
end_at = DateTime.add(begin_at, @day_seconds - 1, :second)
{begin_at, end_at}
end
defp yesterday_datetimes do
today_date = Date.utc_today()
yesterday_date = Date.add(today_date, -1)
begin_at = DateTime.new!(yesterday_date, @zero_oclock, "Etc/UTC")
end_at = DateTime.add(begin_at, @day_seconds - 1, :second)
{begin_at, end_at}
end
end | lib/policr_mini/businesses/statistic_business.ex | 0.508788 | 0.425247 | statistic_business.ex | starcoder |
defmodule BlockBox.LayoutBlocks do
@moduledoc """
Defines generator functions for all [layout blocks](https://api.slack.com/reference/block-kit/blocks).
"""
alias BlockBox.CompositionObjects, as: CO
alias BlockBox.Utils, as: Utils
@doc """
Creates a [section block](https://api.slack.com/reference/block-kit/blocks#section).
## Options
Options are not included by default.
* `:block_id` - string
* `:fields` - list of `t:BlockBox.CompositionObjects.text_object/0`s
* `:accessory` - any element from `BlockBox.BlockElements`
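## Example
A hedged sketch - assuming `CO.text_object/2` builds a `%{type: ..., text: ...}`
map, a plain-string call returns roughly:
    BlockBox.LayoutBlocks.section("hello *world*")
    #=> %{type: "section", text: %{type: "mrkdwn", text: "hello *world*"}}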
"""
@spec section(String.t() | CO.text_object(), keyword()) :: map()
def section(text, opts \\ [])
def section(text, opts) when is_binary(text) do
CO.text_object(text, :mrkdwn)
|> section(opts)
end
def section(text_object, opts) do
%{
type: "section",
text: text_object
}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates a [divider block](https://api.slack.com/reference/block-kit/blocks#divider).
## Options
Options are not included by default.
* `:block_id` - String
"""
@spec divider(keyword()) :: map()
def divider(opts \\ []) do
%{type: "divider"}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates an [image block](https://api.slack.com/reference/block-kit/blocks#image).
## Options
Options are not included by default.
* `:title` - `t:BlockBox.CompositionObjects.plain_text_object/0` or String
* `:block_id` - String
"""
@spec image_block(String.t(), String.t(), keyword()) :: map()
def image_block(image_url, alt_text, opts \\ []) do
opts = Utils.convert_text_opts(opts, [:title])
%{
type: "image",
image_url: image_url,
alt_text: alt_text
}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates an [actions block](https://api.slack.com/reference/block-kit/blocks#actions).
## Options
Options are not included by default.
* `:block_id` - String
"""
@spec actions_block(list(), keyword()) :: map()
def actions_block(elements, opts \\ []) when is_list(elements) do
%{
type: "actions",
elements: elements
}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates a [context block](https://api.slack.com/reference/block-kit/blocks#context).
## Options
Options are not included by default.
* `:block_id` - String
"""
@spec context_block(list(), keyword()) :: map()
def context_block(elements, opts \\ []) when is_list(elements) do
%{
type: "context",
elements: elements
}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates an [input block](https://api.slack.com/reference/block-kit/blocks#input).
## Options
Options are not included by default.
* `:block_id` - String
* `:hint` - `t:BlockBox.CompositionObjects.plain_text_object/0` or String
* `:optional` - boolean
"""
@spec input(String.t() | CO.plain_text_object(), map(), keyword()) :: map()
def input(label, element, opts \\ [])
def input(label, element, opts) when is_binary(label) do
CO.text_object(label)
|> input(element, opts)
end
def input(label, element, opts) do
opts = Utils.convert_text_opts(opts, [:hint])
%{
type: "input",
element: element,
label: label
}
|> Map.merge(Enum.into(opts, %{}))
end
@doc """
Creates a [file block](https://api.slack.com/reference/block-kit/blocks#file).
## Options
Options are not included by default.
* `:block_id` - String
"""
def file_block(external_id, source \\ "remote", opts \\ []) do
%{
type: "file",
external_id: external_id,
source: source
}
|> Map.merge(Enum.into(opts, %{}))
end
end | lib/layout_blocks.ex | 0.864996 | 0.506164 | layout_blocks.ex | starcoder |
defmodule QuantumStorageMnesia.Impl do
@moduledoc false
alias QuantumStorageMnesia.{Mnesia, State}
require Mnesia
@spec init(module) :: State.t()
def init(name) do
nodes = [node()]
Mnesia.create_module(name)
Mnesia.Table.create!(name, nodes)
State.new(name)
end
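# jobs/1 reports :not_applicable until the first add_job/2 call marks the
# table as initialized (see Mnesia.Status), as Quantum expects from a fresh storage.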
@spec jobs(State.t()) :: :not_applicable | [Quantum.Job.t()]
def jobs(%{table: table}) do
fn ->
if Mnesia.Status.initialized?(table) do
Mnesia.Job.all(table)
else
:not_applicable
end
end
|> Mnesia.Helper.transaction([])
end
@spec last_execution_date(State.t()) :: :unknown | NaiveDateTime.t()
def last_execution_date(%{table: table}) do
case Mnesia.Helper.transaction(fn -> Mnesia.LastDate.get(table) end) do
nil -> :unknown
last_date -> last_date
end
end
@spec add_job(Quantum.Job.t(), State.t()) :: State.t()
def add_job(job, %{table: table} = state) do
fn ->
Mnesia.Job.add_or_update(table, job)
Mnesia.Status.initialize(table)
end
|> Mnesia.Helper.transaction!()
state
end
@spec delete_job(Quantum.Job.name(), State.t()) :: State.t()
def delete_job(job_name, %{table: table} = state) do
Mnesia.Helper.transaction!(fn -> Mnesia.Job.delete(table, job_name) end)
state
end
@spec update_job_state(Quantum.Job.name(), Quantum.Job.state(), State.t()) :: State.t()
def update_job_state(job_name, job_state, %{table: table} = state) do
fn ->
with job when not is_nil(job) <- Mnesia.Job.get(table, job_name) do
Mnesia.Job.add_or_update(table, %{job | state: job_state})
end
end
|> Mnesia.Helper.transaction!()
state
end
@spec update_last_execution_date(NaiveDateTime.t(), State.t()) :: State.t()
def update_last_execution_date(last_execution_date, %{table: table} = state) do
fn -> Mnesia.LastDate.add_or_update(table, last_execution_date) end
|> Mnesia.Helper.transaction!()
state
end
@spec purge(State.t()) :: State.t()
def purge(%{table: table} = state) do
Mnesia.Table.clear!(table)
state
end
end | lib/quantum_storage_mnesia/impl.ex | 0.762247 | 0.483892 | impl.ex | starcoder |
defmodule TicTacToeBoard do
@moduledoc """
Board logic for TicTacToe game
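A board is a map with keys `:p1`..`:p9`, each holding `nil`, `:x`, or `:o`.
For example:
    board = %{p1: :x, p2: nil, p3: nil, p4: nil, p5: :o,
              p6: nil, p7: nil, p8: nil, p9: nil}
    TicTacToeBoard.valid_position?(board, 2)
    #=> {:ok, true}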
"""
defguard allowed_position_number?(value) when is_integer(value) and value >= 1 and value <= 9
@max_positions 9
@typedoc "Board position value type"
@type position_value :: nil | :x | :o
@typedoc "Board mapping type"
@type board :: %{
p1: position_value(),
p2: position_value(),
p3: position_value(),
p4: position_value(),
p5: position_value(),
p6: position_value(),
p7: position_value(),
p8: position_value(),
p9: position_value()
}
@doc """
Given a current board map of the game and position number,
returns `{:ok, true}` if the number is between `1` and `9` and is not a filled position in
the board, otherwise returns `{:error, reason}`, e.g. `{:error, :filled_position}`.
"""
@spec valid_position?(board(), integer()) :: {:ok, true} | {:error, atom()}
def valid_position?(map, position) when allowed_position_number?(position) do
case position_available?(map, position) do
true -> {:ok, true}
false -> {:error, :filled_position}
end
end
def valid_position?(_, _), do: {:error, :position_value_not_allowed}
@doc """
Returns `true` if the position is not already filled,
otherwise return `false`.
"""
@spec position_available?(board(), integer()) :: boolean()
def position_available?(map, position) when is_integer(position) do
map |> Access.get("p#{position}" |> String.to_atom()) |> is_nil
end
def position_available?(_, _), do: false
@doc """
Returns `true` if one the criteria is true:
* All 9 positions are filled;
* One of the players won;
"""
@spec match_finished?(board()) :: boolean()
def match_finished?(map) do
positions_filled(map) >= @max_positions || fetch_winner(map) |> is_nil == false
end
@doc """
Returns `{true, player}` if one of the players won;
Returns `{true, nil}` if all 9 positions are filled and there is no winner;
Returns `{false, nil}` if the match has not finished;
"""
@spec match_finished?(board(), any(), any()) :: {boolean(), any()}
def match_finished?(map, p1, p2) do
case fetch_winner(map, p1, p2) do
nil ->
if positions_filled(map) >= @max_positions do
{true, nil}
else
{false, nil}
end
winner ->
{true, winner}
end
end
@doc """
Returns the number of filled positions on the board
"""
@spec positions_filled(board()) :: integer()
def positions_filled(map) do
map
|> Enum.reduce(0, fn {_, v}, acc ->
if is_nil(v), do: acc, else: acc + 1
end)
end
@doc """
Returns next turn number.
Eg. If 3 positions are filled, next turn is 4.
If all positions are filled, then returns `nil`.
"""
@spec next_turn(board()) :: integer() | nil
def next_turn(map) do
case map |> positions_filled do
9 -> nil
pos_filled -> pos_filled + 1
end
end
@doc """
Returns the next player, or `nil` if the match is finished and no moves remain.
"""
@spec fetch_next_player(board(), any(), any()) :: any() | nil
def fetch_next_player(map, p1, p2) do
case map |> match_finished? do
true ->
nil
false ->
if rem(positions_filled(map), 2) == 0 do
p1
else
p2
end
end
end
@doc """
Returns the position value for the winner or `nil` if no winner is found
"""
@spec fetch_winner(board()) :: atom() | nil
def fetch_winner(map), do: fetch_winner_value(map)
@doc """
This function receives the board, anything related to the first player and anything related
to the second player.
Returns back the first player if :x is the winner, or the second player if :o is the winner,
or `nil` if no winner is found.
First player and Second player could be anything: an atom, a struct, a number...
Let's suppose:
iex> first_player = %Player{id: 1}
iex> second_player = %Player{id: 2}
Example where the board has first player as winner:
iex> TicTacToeBoard.fetch_winner(board, first_player, second_player) == first_player
Example where the board has second player as winner:
iex> TicTacToeBoard.fetch_winner(board, first_player, second_player) == second_player
Example where the board has no winner:
iex> TicTacToeBoard.fetch_winner(board, first_player, second_player) == nil
"""
@spec fetch_winner(board(), any(), any()) :: any() | nil
def fetch_winner(map, p1, p2) do
case fetch_winner_value(map) do
:x -> p1
:o -> p2
_ -> nil
end
end
defp fetch_winner_value(%{p1: :x, p2: :x, p3: :x}), do: :x
defp fetch_winner_value(%{p4: :x, p5: :x, p6: :x}), do: :x
defp fetch_winner_value(%{p7: :x, p8: :x, p9: :x}), do: :x
defp fetch_winner_value(%{p1: :x, p4: :x, p7: :x}), do: :x
defp fetch_winner_value(%{p2: :x, p5: :x, p8: :x}), do: :x
defp fetch_winner_value(%{p3: :x, p6: :x, p9: :x}), do: :x
defp fetch_winner_value(%{p1: :x, p5: :x, p9: :x}), do: :x
defp fetch_winner_value(%{p3: :x, p5: :x, p7: :x}), do: :x
defp fetch_winner_value(%{p1: :o, p2: :o, p3: :o}), do: :o
defp fetch_winner_value(%{p4: :o, p5: :o, p6: :o}), do: :o
defp fetch_winner_value(%{p7: :o, p8: :o, p9: :o}), do: :o
defp fetch_winner_value(%{p1: :o, p4: :o, p7: :o}), do: :o
defp fetch_winner_value(%{p2: :o, p5: :o, p8: :o}), do: :o
defp fetch_winner_value(%{p3: :o, p6: :o, p9: :o}), do: :o
defp fetch_winner_value(%{p1: :o, p5: :o, p9: :o}), do: :o
defp fetch_winner_value(%{p3: :o, p5: :o, p7: :o}), do: :o
defp fetch_winner_value(_), do: nil
end | lib/tic_tac_toe_board.ex | 0.903294 | 0.698495 | tic_tac_toe_board.ex | starcoder |
defmodule Day18 do
@moduledoc """
AoC 2019, Day 18 - Many-Worlds Interpretation
"""
defmodule Maze do
defstruct map: %{}, start: []
end
@doc """
Shortest path that collects all the keys
"""
def part1 do
Util.priv_file(:day18, "day18_input.txt")
|> File.read!()
|> shortest_path()
end
@doc """
Shortest path the multi-chamber maze that collects all the keys
"""
def part2 do
Util.priv_file(:day18, "day18_part2_input.txt")
|> File.read!()
|> multi_shortest_path()
end
@doc """
Return the shortest path through the multi-chamber maze
"""
def multi_shortest_path(str) do
maze = parse(str)
key_cnt = Map.values(maze.map) |> Enum.filter(fn {t, _v, _n} -> t == :key end) |> Enum.count()
visited = Enum.map(maze.start, &({&1, MapSet.new()}))
|> MapSet.new()
q = Enum.map(maze.start, &({&1, maze.start -- [&1], 0, MapSet.new()}))
|> :queue.from_list()
multi_reachable(maze.map, key_cnt, visited, q)
end
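# Breadth-first search over states {active robot position, other robot
# positions, steps, collected keys}; returns the step count of the first
# state that holds every key.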
defp multi_reachable(map, num_keys, visited, queue) do
# IO.puts "\n\nQueue: #{inspect :queue.to_list(queue)}"
{{:value, {curr, rest, steps, keys}}, queue} = :queue.out(queue)
if MapSet.size(keys) == num_keys do
steps
else
n = neighbors(map, curr)
# IO.puts "Neighbors: #{inspect n}"
{new_v, new_q} = Enum.reduce(n, {visited, queue},
fn (loc, acc = {v, q}) ->
# IO.puts "N: #{inspect loc}"
cond do
MapSet.member?(visited, {loc, keys}) ->
# IO.puts "\tAlready visited"
acc
!unlocked(map, keys, loc) ->
# IO.puts "\tDoor locked"
acc
true ->
# IO.puts "\tValid loc"
new_keys = add_keys(map, keys, loc)
# IO.puts "\tKeys: #{inspect new_keys}"
add_combos(new_keys, v, q, loc, rest, steps)
end
end)
multi_reachable(map, num_keys, new_v, new_q)
end
end
defp add_combos(keys, visited, queue, curr, [a, b, c], steps) do
add_combo({visited, queue}, keys, steps, curr, [a, b, c])
|> add_combo(keys, steps, a, [curr, b, c])
|> add_combo(keys, steps, b, [a, curr, c])
|> add_combo(keys, steps, c, [a, b, curr])
end
defp add_combo({v, q}, keys, steps, curr, rest) do
if !MapSet.member?(v, {curr, keys}) do
{MapSet.put(v, {curr, keys}), :queue.in({curr, rest, steps+1, keys}, q)}
else
{v, q}
end
end
defp unlocked(map, keys, loc) do
{type, val, _neigh} = Map.get(map, loc)
type != :door || MapSet.member?(keys, val)
end
defp add_keys(map, keys, loc) do
{type, val, _neigh} = Map.get(map, loc)
if type == :key do
MapSet.put(keys, val)
else
keys
end
end
@doc """
Return the shortest path through the maze
"""
def shortest_path(str) do
maze = parse(str)
start = hd(maze.start)
key_cnt = Map.values(maze.map) |> Enum.filter(fn {t, _v, _n} -> t == :key end) |> Enum.count()
visited = MapSet.new([{start, MapSet.new()}])
q = :queue.from_list([{start, 0, MapSet.new()}])
reachable(maze.map, key_cnt, visited, q)
end
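# Breadth-first search over states {location, depth, collected keys};
# a location may be revisited when reached with a different key set.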
defp reachable(map, num_keys, visited, queue) do
{{:value, {loc, depth, keys}}, queue} = :queue.out(queue)
if MapSet.size(keys) == num_keys do
depth
else
{_type, _val, neigh} = Map.get(map, loc)
{new_v, new_q} = Enum.reduce(neigh, {visited, queue},
fn (nloc, {v, q}) ->
val = Map.get(map, nloc)
update(nloc, val, keys, depth, v, q)
end)
reachable(map, num_keys, new_v, new_q)
end
end
defp update(loc, {:space, _val, _neigh}, keys, depth, visited, queue) do
if !MapSet.member?(visited, {loc, keys}) do
{MapSet.put(visited, {loc, keys}), :queue.in({loc, depth+1, keys}, queue)}
else
{visited, queue}
end
end
defp update(loc, {:door, val, neigh}, keys, depth, visited, queue) do
if MapSet.member?(keys, val) do
update(loc, {:space, val, neigh}, keys, depth, visited, queue)
else
{visited, queue}
end
end
defp update(loc, {:key, val, neigh}, keys, depth, visited, queue) do
keys = MapSet.put(keys, val)
if !MapSet.member?(visited, {loc, keys}) do
update(loc, {:space, val, neigh}, keys, depth, visited, queue)
else
{visited, queue}
end
end
defp parse(str) do
maze = String.split(str, "\n", trim: true)
|> Enum.with_index()
|> Enum.reduce(%Maze{}, &parse_row/2)
new_map = Enum.reduce(Map.keys(maze.map), %{}, &(add_neighbors(maze.map, &1, &2)))
%Maze{maze | map: new_map}
end
defp add_neighbors(map, loc, acc) do
{type, val, _} = Map.get(map, loc)
neigh = neighbors(map, loc)
Map.put(acc, loc, {type, val, neigh})
end
defp neighbors(map, {x, y}) do
[{x+1, y}, {x-1, y}, {x, y+1}, {x, y-1}]
|> Enum.filter(&(Map.has_key?(map, &1)))
end
defp parse_row({vals, row_num}, acc), do: parse_row({0, row_num}, String.to_charlist(vals), acc)
defp parse_row(_loc, [], acc), do: acc
defp parse_row(loc = {col, row}, [head | rest], acc) do
new_acc = parse_entry(loc, head, acc)
parse_row({col+1, row}, rest, new_acc)
end
defp parse_entry(_loc, ?#, acc), do: acc
defp parse_entry(loc, ?., acc), do: add_valid(loc, {:space, nil}, acc)
defp parse_entry(loc, ?@, acc) do
new = add_valid(loc, {:space, nil}, acc)
%Maze{new | start: [loc | new.start]}
end
defp parse_entry(loc, val, acc) do
type = if <<val>> == String.upcase(<<val>>), do: :door, else: :key
v = String.downcase(<<val>>) |> String.to_charlist() |> hd()
add_valid(loc, {type, v}, acc)
end
defp add_valid(loc, {type, v}, acc), do: %Maze{acc | map: Map.put(acc.map, loc, {type, v, []})}
def print(maze) do
{{min_x, max_x}, {min_y, max_y}} = bounds(maze.map)
for y <- min_y..max_y do
for x <- min_x..max_x do
if {x, y} in maze.start do
IO.write('@')
else
IO.write(char(Map.get(maze.map, {x, y}, {:wall, nil, []})))
end
end
IO.write("\n")
end
IO.write("\n\n\n")
end
defp char({:space, _, _}), do: '.'
defp char({:wall, _, _}), do: '#'
defp char({:key, val, _}), do: <<val>>
defp char({:door, val, _}), do: String.upcase(<<val>>)
defp bounds(map) do
pts = Map.keys(map)
x = Enum.map(pts, fn {x, _y} -> x end) |> Enum.min_max()
y = Enum.map(pts, fn {_x, y} -> y end) |> Enum.min_max()
{x, y}
end
end | apps/day18/lib/day18.ex | 0.721253 | 0.486514 | day18.ex | starcoder |
defmodule Axon.Training do
@moduledoc """
Abstractions for training machine learning models.
"""
require Axon
require Axon.Updates
@doc """
Represents a single training step.
It expects a pair of 2-element tuples:
* The first pair contains the model initialization function
and the objective function. For a Neural Network, the objective
function is the loss function of the Neural Network prediction
* The second pairs contains the updater initialization function
and the update function itself
"""
def step({init_model_fn, objective_fn}, {init_update_fn, update_fn})
when is_function(init_model_fn, 0) and is_function(objective_fn, 3) and
is_function(init_update_fn, 1) and is_function(update_fn, 3) do
init_fn = fn ->
params = init_model_fn.()
optim_params = init_update_fn.(params)
{params, optim_params}
end
step_fn = fn model_state, input, target ->
{params, update_state} = model_state
{batch_loss, gradients} =
Nx.Defn.Kernel.value_and_grad(params, &objective_fn.(&1, input, target))
{updates, new_update_state} = update_fn.(gradients, update_state, params)
{{Axon.Updates.apply_updates(params, updates), new_update_state}, batch_loss}
end
{init_fn, step_fn}
end
@doc """
Represents a single training step using an Axon `model`,
`loss` function, and `optimizer`.
The `loss` function is either an atom or a two-arity
anonymous function.
"""
def step(%Axon{} = model, loss, optimizer) when is_function(loss, 2) do
{init_fn, predict_fn} = Axon.compile(model)
objective_fn = fn params, input, target ->
preds = predict_fn.(params, input)
loss.(target, preds)
end
step({init_fn, objective_fn}, optimizer)
end
def step(%Axon{} = model, loss, optimizer) when is_atom(loss) do
loss_fn = &apply(Axon.Losses, loss, [&1, &2, [reduction: :mean]])
step(model, loss_fn, optimizer)
end
@doc """
Implements a common training loop.
Its arguments are:
* A tuple with the initialization function and the step function.
Often retrieved from `step/3` but it could also be manually provided.
* The inputs tensors
* The targets tensors
* A list of options
## Options
* `:epochs` - number of epochs to train for. Defaults to `5`.
* `:compiler` - `defn` compiler to use to run training loop.
Defaults to `Nx.Defn.Evaluator`.
All other options are given to the underlying compiler.
## A note on Nx and anonymous functions
When training, both `init_fn` and `step_fn` are executed within
the given Nx `:compiler`. Therefore, it is required that `init_fn`
and `step_fn` work on tensor expressions instead of tensor values.
For example, let's suppose you want to initialize the values with:
Nx.random_uniform({40, 28}, 0, 1)
The following won't work:
params = Nx.random_uniform({40, 28}, 0, 1)
init_fn = fn -> params end
Instead, we want to build the values inside the given compiler.
The correct way to build those values is by computing them inside
a defn:
defn init_values, do: Nx.random_uniform({40, 28}, 0, 1)
And then:
init_fn = &init_values/0
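## Example
A hedged sketch (the model, loss, and optimizer choices are illustrative and
assume `Axon.Optimizers.adam/1` returns an `{init_fn, update_fn}` pair):
    model = Axon.input({nil, 784}) |> Axon.dense(10, activation: :softmax)
    step = Axon.Training.step(model, :categorical_cross_entropy, Axon.Optimizers.adam(0.005))
    Axon.Training.train(step, inputs, targets, epochs: 10)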
"""
def train({init_fn, step_fn}, inputs, targets, opts \\ []) do
epochs = opts[:epochs] || 5
compiler = opts[:compiler] || Nx.Defn.Evaluator
jit_opts = [compiler: compiler] ++ opts
model_state = Nx.Defn.jit(init_fn, [], jit_opts)
for epoch <- 1..epochs, reduce: model_state do
model_state ->
{time, {model_state, avg_loss}} =
:timer.tc(
&train_epoch/6,
[step_fn, model_state, inputs, targets, epoch, jit_opts]
)
epoch_avg_loss =
avg_loss
|> Nx.backend_transfer()
|> Nx.to_scalar()
IO.puts("\n")
IO.puts("Epoch #{epoch} Time: #{time / 1_000_000}s")
IO.puts("Epoch #{epoch} Loss: #{epoch_avg_loss}")
IO.puts("\n")
model_state
end
end
## Helpers
defp train_epoch(step_fn, model_state, inputs, targets, epoch, jit_opts) do
total_batches = Enum.count(inputs)
dataset =
inputs
|> Enum.zip(targets)
|> Enum.with_index()
for {{inp, tar}, i} <- dataset, reduce: {model_state, Nx.tensor(0.0)} do
{model_state, state} ->
{model_state, batch_loss} = Nx.Defn.jit(step_fn, [model_state, inp, tar], jit_opts)
avg_loss =
state
|> Nx.multiply(i)
|> Nx.add(Nx.backend_transfer(batch_loss))
|> Nx.divide(i + 1)
IO.write(
"\rEpoch #{epoch}, batch #{i + 1} of #{total_batches} - " <>
"Average Loss: #{Nx.to_scalar(avg_loss)}"
)
{model_state, avg_loss}
end
end
end | lib/axon/training.ex | 0.935317 | 0.835685 | training.ex | starcoder |
defmodule StepFlow.Jobs.Status do
use Ecto.Schema
import Ecto.Changeset
import EctoEnum
alias StepFlow.Jobs.Job
alias StepFlow.Jobs.Status
alias StepFlow.Repo
@moduledoc false
defenum(StateEnum, [
"queued",
"skipped",
"processing",
"retrying",
"error",
"completed",
"ready_to_init",
"ready_to_start",
"update",
"stopped",
"initializing",
"starting",
"updating"
])
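# Maps an integer index (as persisted by EctoEnum) to its state atom,
# passes a valid state atom through unchanged, and returns nil otherwise.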
defp state_map_lookup(value) do
state_map = %{
0 => :queued,
1 => :skipped,
2 => :processing,
3 => :retrying,
4 => :error,
5 => :completed,
6 => :ready_to_init,
7 => :ready_to_start,
8 => :update,
9 => :stopped,
10 => :initializing,
11 => :starting,
12 => :updating
}
if is_number(value) do
state_map[value]
else
case Map.values(state_map) |> Enum.member?(value) do
true -> value
_ -> nil
end
end
end
def state_enum_label(value) do
to_atom(value)
|> Atom.to_string()
end
defp to_atom(value) do
case state_map_lookup(value) do
nil -> :unknown
value -> value
end
end
schema "step_flow_status" do
field(:state, StepFlow.Jobs.Status.StateEnum)
field(:description, :map, default: %{})
belongs_to(:job, Job, foreign_key: :job_id)
timestamps()
end
@doc false
def changeset(%Status{} = job, attrs) do
job
|> cast(attrs, [:state, :job_id, :description])
|> foreign_key_constraint(:job_id)
|> validate_required([:state, :job_id])
end
def set_job_status(job_id, status, description \\ %{}) do
%Status{}
|> Status.changeset(%{job_id: job_id, state: status, description: description})
|> Repo.insert()
end
@doc """
Returns the most recently updated status from a list of statuses.
"""
def get_last_status(status) when is_list(status) do
status
|> Enum.sort(fn state_1, state_2 ->
state_1.updated_at < state_2.updated_at
end)
|> List.last()
end
def get_last_status(%Status{} = status), do: status
def get_last_status(_status), do: nil
@doc """
Returns action linked to status
"""
def get_action(status) do
case status.state do
:queued -> "create"
:ready_to_init -> "init_process"
:ready_to_start -> "start_process"
:update -> "update_process"
:stopped -> "delete"
_ -> "none"
end
end
@doc """
Returns action linked to status as parameter
"""
def get_action_parameter(status) do
action = get_action(status)
[%{"id" => "action", "type" => "string", "value" => action}]
end
end | lib/step_flow/jobs/status.ex | 0.536313 | 0.452778 | status.ex | starcoder |
defmodule Xema.Behaviour do
@moduledoc """
A behaviour module for implementing a schema validator. This behaviour is
just for `Xema` and `JsonXema`.
"""
alias Xema.{
JsonSchema,
Loader,
Ref,
Schema,
SchemaError,
Utils,
ValidationError,
Validator
}
@typedoc """
The schema container.
"""
@type t :: struct
@inline_default true
@doc """
This callback initialize the schema. The function gets the data given to
`Xema.new/1` and returns a `Xema.Schema`.
"""
@callback init(any, keyword) :: Schema.t()
defmacro __using__(_opts) do
quote do
@behaviour Xema.Behaviour
alias Xema.Behaviour
@opt_fail [:immediately, :early, :finally]
@typedoc """
The return type of a validation run.
"""
@type result :: Xema.Validator.result()
@type schema :: Xema.Schema.t()
@enforce_keys [:schema]
@typedoc """
This struct contains the schema and references of the schema.
"""
@type t :: %__MODULE__{
schema: Schema.t(),
refs: map
}
defstruct schema: %Schema{},
refs: %{}
def new(data, opts \\ [])
def new(%Schema{} = schema, opts) do
schema = Behaviour.map_refs(schema)
remotes = Behaviour.get_remote_refs(schema, __MODULE__, opts)
xema =
struct!(
__MODULE__,
schema: schema,
refs: remotes
)
case opts[:remotes] do
nil -> Behaviour.update_refs(xema, opts)
_remotes -> xema
end
end
def new(data, opts) when is_map(data) or is_tuple(data) or is_list(data) or is_atom(data) do
data |> init(opts) |> new(opts)
end
@doc """
Returns `true` if the `value` is a valid value against the given `schema`;
otherwise returns `false`.
"""
@spec valid?(__MODULE__.t() | schema(), any) :: boolean
def valid?(schema, value), do: validate(schema, value, fail: :immediately) == :ok
@doc """
Returns `:ok` if the `value` is a valid value against the given `schema`;
otherwise returns an error tuple.
With the option `:fail`, you can define when the validation is aborted. This
also influences how many error reasons are returned.
- `:immediately` aborts the validation when the first validation fails.
- `:early` (default) aborts on failed validations, but runs validations
for all properties and items.
- `:finally` aborts after all possible validations.
"""
@spec validate(__MODULE__.t() | schema(), any, keyword) :: result()
def validate(%{} = schema, value, opts \\ []) do
if Keyword.get(opts, :fail, :early) not in @opt_fail do
raise ArgumentError,
message: "the optional option :fail must be one of #{inspect(@opt_fail)} when set"
end
with {:error, error} <- Validator.validate(schema, value, opts),
do: {:error, on_error(error)}
end
@doc """
Returns `:ok` if the `value` is a valid value against the given `schema`;
otherwise raises a `#{__MODULE__}.ValidationError`. See `validate/3` for
available options.
"""
@spec validate!(__MODULE__.t() | schema(), any, keyword) :: :ok
def validate!(xema, value, opts \\ []) do
with {:error, reason} <- validate(xema, value, opts),
do: raise(reason)
end
# This function can be overwritten to transform the reason map of an error tuple.
defp on_error(error), do: ValidationError.exception(reason: error)
defoverridable on_error: 1
end
end
@doc false
@spec map_refs(Schema.t()) :: Schema.t()
def map_refs(%Schema{} = schema) do
map(schema, fn
%Schema{ref: ref} = schema, id when not is_nil(ref) ->
%{schema | ref: Ref.new(ref, id)}
value, _id ->
value
end)
end
@doc false
def update_refs(xema, opts) do
refs_map =
xema.refs
|> Map.keys()
|> Enum.reduce(%{master: []}, fn key, acc -> Map.put(acc, key, []) end)
|> get_refs_map(:master, xema)
refs_map =
Enum.reduce(xema.refs, refs_map, fn {key, xema}, acc ->
get_refs_map(acc, key, xema)
end)
xema
|> update_master_refs(Map.fetch!(refs_map, :master))
|> update_remote_refs(refs_map)
|> update_master_ids()
|> update_remote_ids()
|> inline(opts)
end
defp inline(xema, opts) do
case Keyword.get(opts, :inline, @inline_default) do
true -> inline(xema)
false -> xema
end
end
defp inline(xema) do
xema
|> circulars()
|> inline_refs(xema)
end
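# Inlines every non-circular ref by replacing it with the schema it points
# to; only refs that participate in a cycle remain in :refs afterwards.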
defp inline_refs(circulars, xema) do
schema = inline_refs(circulars, xema, nil, xema.schema)
refs =
xema
|> refs()
|> Enum.map(fn {ref, schema} = item ->
case {ref in circulars, schema} do
{false, _} ->
item
{true, :root} ->
item
{true, %Schema{} = schema} ->
{ref, inline_refs(circulars, xema, xema, schema)}
{true, %{schema: %Schema{} = schema} = master} ->
{ref, Map.put(master, :schema, inline_refs(circulars, master, xema, schema))}
end
end)
|> Enum.filter(fn {ref, _} -> Enum.member?(circulars, ref) end)
|> Enum.into(%{})
%{xema | schema: schema, refs: refs}
end
defp inline_refs(circulars, master, root, %Schema{} = schema) do
map(schema, fn
%Schema{ref: ref} = schema, _id when not is_nil(ref) ->
case Enum.member?(circulars, Ref.key(ref)) do
true ->
schema
false ->
case Ref.fetch!(ref, master, root) do
{%Schema{} = ref_schema, root} ->
inline_refs(circulars, master, root, ref_schema)
{xema, xema} ->
schema
{xema, root} ->
inline_refs(circulars, xema, root, xema.schema)
end
end
value, _id ->
value
end)
end
defp refs(xema), do: Enum.into(do_refs(xema), %{})
defp do_refs(%{refs: refs}) do
Enum.flat_map(refs, fn {key, schema} ->
[{key, schema} | do_refs(schema)]
end)
end
defp do_refs(_schema), do: []
defp update_master_ids(%{schema: schema} = xema) when not is_nil(schema) do
Map.update!(xema, :refs, fn value ->
Map.merge(value, get_ids(schema))
end)
end
defp update_master_ids(value), do: value
defp get_ids(%Schema{} = schema) do
reduce(schema, %{}, fn
%Schema{id: id}, acc, path when not is_nil(id) ->
case path == "#" do
false ->
Map.put(acc, id, Schema.fetch!(schema, path))
true ->
Map.put(acc, id, :root)
end
_xema, acc, _path ->
acc
end)
end
defp update_remote_ids(%{refs: refs} = xema) do
refs = Enum.into(refs, %{}, fn {key, ref} -> {key, update_master_ids(ref)} end)
Map.update!(xema, :refs, fn value ->
Map.merge(value, refs)
end)
end
defp update_master_refs(%{schema: schema} = xema, refs) do
Map.update!(xema, :refs, fn value ->
Map.merge(value, get_schema_refs(schema, refs))
end)
end
defp update_remote_refs(%{refs: refs} = xema, refs_map) do
refs =
Enum.into(refs, %{}, fn {key, ref} ->
case Map.has_key?(refs_map, key) do
true ->
{key, update_master_refs(ref, Map.get(refs_map, key))}
false ->
{key, ref}
end
end)
Map.update!(xema, :refs, fn value ->
Map.merge(value, refs)
end)
end
defp get_schema_refs(schema, refs) do
Enum.into(refs, %{}, fn key ->
{key, Schema.fetch!(schema, key)}
end)
end
defp get_refs_map(refs, key, %{schema: schema}) do
reduce(schema, refs, fn
%Ref{pointer: pointer, uri: nil}, acc, _path ->
case pointer do
"#/" <> _ -> Map.update!(acc, key, fn list -> [pointer | list] end)
_ -> acc
end
%Ref{uri: uri} = ref, acc, _path ->
case ref.uri.fragment do
nil ->
acc
"" ->
acc
fragment ->
key = Ref.key(uri)
Map.update!(acc, key, fn list -> ["##{fragment}" | list] end)
end
_value, acc, _path ->
acc
end)
end
@doc false
@spec get_remote_refs(Schema.t(), atom, keyword) ::
%{required(String.t()) => struct}
def get_remote_refs(%Schema{} = schema, module, opts) do
reduce(schema, %{}, fn
%Ref{} = ref, acc, _path ->
put_remote_ref(acc, ref, module, opts)
_, acc, _path ->
acc
end)
end
defp put_remote_ref(map, %Ref{uri: uri} = ref, module, opts) do
case remote?(ref) do
false ->
map
true ->
key = Ref.key(uri)
remote_set = opts[:remotes] || MapSet.new()
case MapSet.member?(remote_set, key) do
true ->
map
false ->
remote_set = MapSet.put(remote_set, key)
xema =
get_remote_schema(
ref,
module,
Keyword.put(opts, :remotes, remote_set)
)
remotes = xema.refs
xema = Map.put(xema, :refs, %{})
map
|> Map.put(key, xema)
|> Map.merge(remotes)
end
end
end
defp get_remote_schema(ref, module, opts) do
case resolve(ref.uri, opts[:loader]) do
{:ok, nil} ->
nil
{:ok, data} ->
case Keyword.get(opts, :draft, :xema) do
:xema -> module.new(data, opts)
_ -> data |> JsonSchema.to_xema(opts) |> module.new(opts)
end
{:error, reason} ->
raise SchemaError, reason
end
end
defp resolve(uri, nil), do: Loader.fetch(uri)
defp resolve(uri, loader), do: loader.fetch(uri)
defp remote?(%Ref{uri: nil}), do: false
defp remote?(%Ref{uri: %URI{path: nil}}), do: false
defp remote?(%Ref{uri: %URI{path: path}, pointer: pointer}),
do:
Regex.match?(~r/(\.[a-zA-Z]+)|\#$/, path) or
String.ends_with?(pointer, "#")
# Invokes `fun` for each element in the schema tree with the accumulator.
@spec reduce(Schema.t(), any, function) :: any
defp reduce(schema, acc, fun), do: reduce(schema, acc, "#", fun)
defp reduce(%Schema{} = schema, acc, path, fun),
do:
schema
|> Map.from_struct()
|> Enum.reduce(fun.(schema, acc, path), fn {key, value}, x ->
reduce(value, x, Path.join(path, to_string(key)), fun)
end)
defp reduce(%_{} = struct, acc, path, fun),
do: fun.(struct, acc, path)
defp reduce(map, acc, path, fun) when is_map(map),
do:
Enum.reduce(map, fun.(map, acc, path), fn
{%key{}, value}, acc ->
reduce(value, acc, Path.join(path, to_string(key)), fun)
{key, value}, acc ->
reduce(value, acc, Path.join(path, to_string(key)), fun)
end)
defp reduce(list, acc, path, fun) when is_list(list),
do:
Enum.reduce(list, acc, fn value, acc ->
reduce(value, acc, path, fun)
end)
defp reduce(nil, acc, _path, _fun), do: acc
defp reduce(value, acc, path, fun), do: fun.(value, acc, path)
# Returns a schema tree where each schema is the result of invoking `fun` on
# each schema. The function gets also the current `ìd` for the schema. The
# `id` could be `nil` or a `%URI{}` struct.
@spec map(Schema.t(), function) :: Schema.t() | Ref.t()
defp map(schema, fun), do: map(schema, fun, nil)
defp map(%Schema{} = schema, fun, id) do
id = Utils.update_uri(id, schema.id)
Schema
|> struct(
schema
|> Map.from_struct()
|> Enum.map(fn {k, v} -> {k, map(v, fun, id)} end)
)
|> fun.(id)
end
defp map(%_{} = struct, _fun, _id), do: struct
defp map(map, fun, id) when is_map(map),
do: Enum.into(map, %{}, fn {k, v} -> {k, map(v, fun, id)} end)
defp map(list, fun, id) when is_list(list),
do: Enum.map(list, fn v -> map(v, fun, id) end)
defp map(value, _fun, _id), do: value
defp circulars(%{refs: refs} = xema) do
Enum.reduce(refs, [], fn {ref, schema}, acc ->
Enum.concat(
circulars(schema),
case circular?(xema, ref) do
true -> [ref | acc]
false -> acc
end
)
end)
end
defp circulars(_schema), do: []
# Returns true if the `reference` builds up a circular reference.
@spec circular?(struct(), String.t()) :: boolean
defp circular?(xema, reference),
do: circular?(xema.refs[reference], reference, xema, [])
defp circular?(%Ref{} = ref, reference, root, acc) do
key = Ref.key(ref)
with false <- key == reference,
false <- key == "#" do
case Enum.member?(acc, key) do
true -> false
false -> circular?(root.refs[key], reference, root, [key | acc])
end
end
end
defp circular?(%_{} = struct, reference, root, acc),
do: struct |> Map.from_struct() |> circular?(reference, root, acc)
defp circular?(values, reference, root, acc)
when is_map(values),
do:
Enum.any?(values, fn {_, value} ->
circular?(value, reference, root, acc)
end)
defp circular?(values, reference, root, acc)
when is_list(values),
do:
Enum.any?(values, fn value ->
circular?(value, reference, root, acc)
end)
defp circular?(:root, _reference, _root, _acc), do: true
defp circular?(_ref, _reference, _root, _acc), do: false
end | lib/xema/behaviour.ex | 0.905823 | 0.482856 | behaviour.ex | starcoder |
defmodule AWS.Connect do
@moduledoc """
Amazon Connect is a cloud-based contact center solution that makes it easy
to set up and manage a customer contact center and provide reliable
customer engagement at any scale.
Amazon Connect provides rich metrics and real-time reporting that allow you
to optimize contact routing. You can also resolve customer issues more
efficiently by putting customers in touch with the right agents.
There are limits to the number of Amazon Connect resources that you can
create and limits to the number of requests that you can make per second.
For more information, see [Amazon Connect Service
Quotas](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html)
in the *Amazon Connect Administrator Guide*.
To connect programmatically to an AWS service, you use an endpoint. For a
list of Amazon Connect endpoints, see [Amazon Connect
Endpoints](https://docs.aws.amazon.com/general/latest/gr/connect_region.html).
Note: Working with contact flows? Check out the [Amazon Connect Flow
language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
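## Example
A hedged sketch (client construction varies across aws-elixir versions; the
instance id is illustrative):
    client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
    {:ok, result, _http_response} = AWS.Connect.list_queues(client, "my-instance-id")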
"""
@doc """
Associates a set of queues with a routing profile.
"""
def associate_routing_profile_queues(client, instance_id, routing_profile_id, input, options \\ []) do
path_ = "/routing-profiles/#{URI.encode(instance_id)}/#{URI.encode(routing_profile_id)}/associate-queues"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a contact flow for the specified Amazon Connect instance.
You can also create and update contact flows using the [Amazon Connect Flow
language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
"""
def create_contact_flow(client, instance_id, input, options \\ []) do
path_ = "/contact-flows/#{URI.encode(instance_id)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new routing profile.
"""
def create_routing_profile(client, instance_id, input, options \\ []) do
path_ = "/routing-profiles/#{URI.encode(instance_id)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Creates a user account for the specified Amazon Connect instance.
For information about how to create user accounts using the Amazon Connect
console, see [Add
Users](https://docs.aws.amazon.com/connect/latest/adminguide/user-management.html)
in the *Amazon Connect Administrator Guide*.
"""
def create_user(client, instance_id, input, options \\ []) do
path_ = "/users/#{URI.encode(instance_id)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a user account from the specified Amazon Connect instance.
For information about what happens to a user's data when their account is
deleted, see [Delete Users from Your Amazon Connect
Instance](https://docs.aws.amazon.com/connect/latest/adminguide/delete-users.html)
in the *Amazon Connect Administrator Guide*.
"""
def delete_user(client, instance_id, user_id, input, options \\ []) do
path_ = "/users/#{URI.encode(instance_id)}/#{URI.encode(user_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Describes the specified contact flow.
You can also create and update contact flows using the [Amazon Connect Flow
language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
"""
def describe_contact_flow(client, contact_flow_id, instance_id, options \\ []) do
path_ = "/contact-flows/#{URI.encode(instance_id)}/#{URI.encode(contact_flow_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes the specified routing profile.
"""
def describe_routing_profile(client, instance_id, routing_profile_id, options \\ []) do
path_ = "/routing-profiles/#{URI.encode(instance_id)}/#{URI.encode(routing_profile_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes the specified user account. You can find the instance ID in the
console (it’s the final part of the ARN). The console does not display the
user IDs. Instead, list the users and note the IDs provided in the output.
"""
def describe_user(client, instance_id, user_id, options \\ []) do
path_ = "/users/#{URI.encode(instance_id)}/#{URI.encode(user_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes the specified hierarchy group.
"""
def describe_user_hierarchy_group(client, hierarchy_group_id, instance_id, options \\ []) do
path_ = "/user-hierarchy-groups/#{URI.encode(instance_id)}/#{URI.encode(hierarchy_group_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes the hierarchy structure of the specified Amazon Connect instance.
"""
def describe_user_hierarchy_structure(client, instance_id, options \\ []) do
path_ = "/user-hierarchy-structure/#{URI.encode(instance_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Disassociates a set of queues from a routing profile.
"""
def disassociate_routing_profile_queues(client, instance_id, routing_profile_id, input, options \\ []) do
path_ = "/routing-profiles/#{URI.encode(instance_id)}/#{URI.encode(routing_profile_id)}/disassociate-queues"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Retrieves the contact attributes for the specified contact.
"""
def get_contact_attributes(client, initial_contact_id, instance_id, options \\ []) do
path_ = "/contact/attributes/#{URI.encode(instance_id)}/#{URI.encode(initial_contact_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets the real-time metric data from the specified Amazon Connect instance.
For a description of each metric, see [Real-time Metrics
Definitions](https://docs.aws.amazon.com/connect/latest/adminguide/real-time-metrics-definitions.html)
in the *Amazon Connect Administrator Guide*.
"""
def get_current_metric_data(client, instance_id, input, options \\ []) do
path_ = "/metrics/current/#{URI.encode(instance_id)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Retrieves a token for federation.
"""
def get_federation_token(client, instance_id, options \\ []) do
path_ = "/user/federate/#{URI.encode(instance_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets historical metric data from the specified Amazon Connect instance.
For a description of each historical metric, see [Historical Metrics
Definitions](https://docs.aws.amazon.com/connect/latest/adminguide/historical-metrics-definitions.html)
in the *Amazon Connect Administrator Guide*.
"""
def get_metric_data(client, instance_id, input, options \\ []) do
path_ = "/metrics/historical/#{URI.encode(instance_id)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Provides information about the contact flows for the specified Amazon
Connect instance.
You can also create and update contact flows using the [Amazon Connect Flow
language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
For more information about contact flows, see [Contact
Flows](https://docs.aws.amazon.com/connect/latest/adminguide/concepts-contact-flows.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_contact_flows(client, instance_id, contact_flow_types \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/contact-flows-summary/#{URI.encode(instance_id)}"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(contact_flow_types) do
[{"contactFlowTypes", contact_flow_types} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides information about the hours of operation for the specified Amazon
Connect instance.
For more information about hours of operation, see [Set the Hours of
Operation for a
Queue](https://docs.aws.amazon.com/connect/latest/adminguide/set-hours-operation.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_hours_of_operations(client, instance_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/hours-of-operations-summary/#{URI.encode(instance_id)}"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides information about the phone numbers for the specified Amazon
Connect instance.
For more information about phone numbers, see [Set Up Phone Numbers for
Your Contact
Center](https://docs.aws.amazon.com/connect/latest/adminguide/contact-center-phone-number.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_phone_numbers(client, instance_id, max_results \\ nil, next_token \\ nil, phone_number_country_codes \\ nil, phone_number_types \\ nil, options \\ []) do
path_ = "/phone-numbers-summary/#{URI.encode(instance_id)}"
headers = []
query_ = []
query_ = if !is_nil(phone_number_types) do
[{"phoneNumberTypes", phone_number_types} | query_]
else
query_
end
query_ = if !is_nil(phone_number_country_codes) do
[{"phoneNumberCountryCodes", phone_number_country_codes} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides information about the prompts for the specified Amazon Connect
instance.
"""
def list_prompts(client, instance_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/prompts-summary/#{URI.encode(instance_id)}"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides information about the queues for the specified Amazon Connect
instance.
For more information about queues, see [Queues: Standard and
Agent](https://docs.aws.amazon.com/connect/latest/adminguide/concepts-queues-standard-and-agent.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_queues(client, instance_id, max_results \\ nil, next_token \\ nil, queue_types \\ nil, options \\ []) do
path_ = "/queues-summary/#{URI.encode(instance_id)}"
headers = []
query_ = []
query_ = if !is_nil(queue_types) do
[{"queueTypes", queue_types} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
List the queues associated with a routing profile.
"""
def list_routing_profile_queues(client, instance_id, routing_profile_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/routing-profiles/#{URI.encode(instance_id)}/#{URI.encode(routing_profile_id)}/queues"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides summary information about the routing profiles for the specified
Amazon Connect instance.
For more information about routing profiles, see [Routing
Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/concepts-routing.html)
and [Create a Routing
Profile](https://docs.aws.amazon.com/connect/latest/adminguide/routing-profiles.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_routing_profiles(client, instance_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/routing-profiles-summary/#{URI.encode(instance_id)}"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides summary information about the security profiles for the specified
Amazon Connect instance.
For more information about security profiles, see [Security
Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/connect-security-profiles.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_security_profiles(client, instance_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/security-profiles-summary/#{URI.encode(instance_id)}"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists the tags for the specified resource.
For sample policies that use tags, see [Amazon Connect Identity-Based
Policy
Examples](https://docs.aws.amazon.com/connect/latest/adminguide/security_iam_id-based-policy-examples.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides summary information about the hierarchy groups for the specified
Amazon Connect instance.
For more information about agent hierarchies, see [Set Up Agent
Hierarchies](https://docs.aws.amazon.com/connect/latest/adminguide/agent-hierarchy.html)
in the *Amazon Connect Administrator Guide*.
"""
def list_user_hierarchy_groups(client, instance_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/user-hierarchy-groups-summary/#{URI.encode(instance_id)}"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides summary information about the users for the specified Amazon
Connect instance.
"""
def list_users(client, instance_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/users-summary/#{URI.encode(instance_id)}"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
When a contact is being recorded, and the recording has been suspended
using SuspendContactRecording, this API resumes recording the call.
Only voice recordings are supported at this time.
"""
def resume_contact_recording(client, input, options \\ []) do
path_ = "/contact/resume-recording"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Initiates a contact flow to start a new chat for the customer. Response of
this API provides a token required to obtain credentials from the
[CreateParticipantConnection](https://docs.aws.amazon.com/connect-participant/latest/APIReference/API_CreateParticipantConnection.html)
API in the Amazon Connect Participant Service.
When a new chat contact is successfully created, clients need to subscribe
to the participant’s connection for the created chat within 5 minutes. This
is achieved by invoking
[CreateParticipantConnection](https://docs.aws.amazon.com/connect-participant/latest/APIReference/API_CreateParticipantConnection.html)
with WEBSOCKET and CONNECTION_CREDENTIALS.
A 429 error occurs in two situations:
<ul> <li> API rate limit is exceeded. API TPS throttling returns a
`TooManyRequests` exception from the API Gateway.
</li> <li> The [quota for concurrent active
chats](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html)
is exceeded. Active chat throttling returns a `LimitExceededException`.
</li> </ul> For more information about how chat works, see
[Chat](https://docs.aws.amazon.com/connect/latest/adminguide/chat.html) in
the *Amazon Connect Administrator Guide*.
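## Example
An illustrative call; the field names follow the AWS API and all values are
placeholders:
input = %{
"InstanceId" => "instance-id",
"ContactFlowId" => "contact-flow-id",
"ParticipantDetails" => %{"DisplayName" => "Jane"}
}
{:ok, body, _response} = start_chat_contact(client, input)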
"""
def start_chat_contact(client, input, options \\ []) do
path_ = "/contact/chat"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
This API starts recording the contact when the agent joins the call.
StartContactRecording is a one-time action. For example, if you use
StopContactRecording to stop recording an ongoing call, you can't use
StartContactRecording to restart it. For scenarios where the recording has
started and you want to suspend and resume it, such as when collecting
sensitive information (for example, a credit card number), use
SuspendContactRecording and ResumeContactRecording.
You can use this API to override the recording behavior configured in the
[Set recording
behavior](https://docs.aws.amazon.com/connect/latest/adminguide/set-recording-behavior.html)
block.
Only voice recordings are supported at this time.
"""
def start_contact_recording(client, input, options \\ []) do
path_ = "/contact/start-recording"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
This API places an outbound call to a contact, and then initiates the
contact flow. It performs the actions in the contact flow specified by
`ContactFlowId`.
Agents are not involved in initiating the outbound API (that is, dialing
the contact). If the contact flow places an outbound call to a contact, and
then puts the contact in queue, that's when the call is routed to the
agent, like any other inbound case.
There is a 60 second dialing timeout for this operation. If the call is not
connected after 60 seconds, it fails.
<note> UK numbers with a 447 prefix are not allowed by default. Before you
can dial these UK mobile numbers, you must submit a service quota increase
request. For more information, see [Amazon Connect Service
Quotas](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html)
in the *Amazon Connect Administrator Guide*.
</note>
"""
def start_outbound_voice_contact(client, input, options \\ []) do
path_ = "/contact/outbound-voice"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Ends the specified contact.
"""
def stop_contact(client, input, options \\ []) do
path_ = "/contact/stop"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
When a contact is being recorded, this API stops recording the call.
StopContactRecording is a one-time action. If you use StopContactRecording
to stop recording an ongoing call, you can't use StartContactRecording to
restart it. For scenarios where the recording has started and you want to
suspend it for sensitive information (for example, to collect a credit card
number), and then restart it, use SuspendContactRecording and
ResumeContactRecording.
Only voice recordings are supported at this time.
"""
def stop_contact_recording(client, input, options \\ []) do
path_ = "/contact/stop-recording"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
When a contact is being recorded, this API suspends recording the call. For
example, you might suspend the call recording while collecting sensitive
information, such as a credit card number. Then use ResumeContactRecording
to restart recording.
The period of time that the recording is suspended is filled with silence
in the final recording.
Only voice recordings are supported at this time.
"""
def suspend_contact_recording(client, input, options \\ []) do
path_ = "/contact/suspend-recording"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Adds the specified tags to the specified resource.
The supported resource types are users, routing profiles, and contact
flows.
For sample policies that use tags, see [Amazon Connect Identity-Based
Policy
Examples](https://docs.aws.amazon.com/connect/latest/adminguide/security_iam_id-based-policy-examples.html)
in the *Amazon Connect Administrator Guide*.
"""
def tag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Removes the specified tags from the specified resource.
"""
def untag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_, input} =
[
{"tagKeys", "tagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Creates or updates the contact attributes associated with the specified
contact.
You can add or update attributes for both ongoing and completed contacts.
For example, you can update the customer's name or the reason the customer
called while the call is active, or add notes about steps that the agent
took during the call that are displayed to the next agent that takes the
call. You can also update attributes for a contact using data from your CRM
application and save the data with the contact in Amazon Connect. You could
also flag calls for additional analysis, such as legal review or
identifying abusive callers.
Contact attributes are available in Amazon Connect for 24 months, and are
then deleted.
This operation is also available in the Amazon Connect Flow language. See
[UpdateContactAttributes](https://docs.aws.amazon.com/connect/latest/adminguide/contact-actions-updatecontactattributes.html).
**Important:** You cannot use the operation to update attributes for
contacts that occurred prior to the release of the API, September 12, 2018.
You can update attributes only for contacts that started after the release
of the API. If you attempt to update attributes for a contact that occurred
prior to the release of the API, a 400 error is returned. This applies also
to queued callbacks that were initiated prior to the release of the API but
are still active in your instance.
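## Example
An illustrative call; all values are placeholders:
input = %{
"InitialContactId" => "contact-id",
"InstanceId" => "instance-id",
"Attributes" => %{"greetingPlayed" => "true"}
}
{:ok, _body, _response} = update_contact_attributes(client, input)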
"""
def update_contact_attributes(client, input, options \\ []) do
path_ = "/contact/attributes"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the specified contact flow.
You can also create and update contact flows using the [Amazon Connect Flow
language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html).
"""
def update_contact_flow_content(client, contact_flow_id, instance_id, input, options \\ []) do
path_ = "/contact-flows/#{URI.encode(instance_id)}/#{URI.encode(contact_flow_id)}/content"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the name of the specified contact flow.
"""
def update_contact_flow_name(client, contact_flow_id, instance_id, input, options \\ []) do
path_ = "/contact-flows/#{URI.encode(instance_id)}/#{URI.encode(contact_flow_id)}/name"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the channels that agents can handle in the Contact Control Panel
(CCP) for a routing profile.
"""
def update_routing_profile_concurrency(client, instance_id, routing_profile_id, input, options \\ []) do
path_ = "/routing-profiles/#{URI.encode(instance_id)}/#{URI.encode(routing_profile_id)}/concurrency"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the default outbound queue of a routing profile.
"""
def update_routing_profile_default_outbound_queue(client, instance_id, routing_profile_id, input, options \\ []) do
path_ = "/routing-profiles/#{URI.encode(instance_id)}/#{URI.encode(routing_profile_id)}/default-outbound-queue"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the name and description of a routing profile. The request accepts
the following data in JSON format. At least `Name` or `Description` must be
provided.
"""
def update_routing_profile_name(client, instance_id, routing_profile_id, input, options \\ []) do
path_ = "/routing-profiles/#{URI.encode(instance_id)}/#{URI.encode(routing_profile_id)}/name"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the properties associated with a set of queues for a routing
profile.
"""
def update_routing_profile_queues(client, instance_id, routing_profile_id, input, options \\ []) do
path_ = "/routing-profiles/#{URI.encode(instance_id)}/#{URI.encode(routing_profile_id)}/queues"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Assigns the specified hierarchy group to the specified user.
"""
def update_user_hierarchy(client, instance_id, user_id, input, options \\ []) do
path_ = "/users/#{URI.encode(instance_id)}/#{URI.encode(user_id)}/hierarchy"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the identity information for the specified user.
<important> Someone with the ability to invoke `UpdateUserIdentityInfo`
can change the login credentials of other users by changing their email
address. This poses a security risk to your organization. They can change
the email address of a user to the attacker's email address, and then reset
the password through email. We strongly recommend limiting who has the
ability to invoke `UpdateUserIdentityInfo`. For more information, see
[Best Practices for Security
Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/security-profile-best-practices.html)
in the *Amazon Connect Administrator Guide*.
</important>
"""
def update_user_identity_info(client, instance_id, user_id, input, options \\ []) do
path_ = "/users/#{URI.encode(instance_id)}/#{URI.encode(user_id)}/identity-info"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the phone configuration settings for the specified user.
"""
def update_user_phone_config(client, instance_id, user_id, input, options \\ []) do
path_ = "/users/#{URI.encode(instance_id)}/#{URI.encode(user_id)}/phone-config"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Assigns the specified routing profile to the specified user.
"""
def update_user_routing_profile(client, instance_id, user_id, input, options \\ []) do
path_ = "/users/#{URI.encode(instance_id)}/#{URI.encode(user_id)}/routing-profile"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Assigns the specified security profiles to the specified user.
"""
def update_user_security_profiles(client, instance_id, user_id, input, options \\ []) do
path_ = "/users/#{URI.encode(instance_id)}/#{URI.encode(user_id)}/security-profiles"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "connect"}
host = build_host("connect", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
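# The two `when` clauses below combine as an `or`: a response is considered
# successful when no explicit success status code was given and the status is
# a common 2xx, or when the status equals `success_status_code`.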
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end | lib/aws/generated/connect.ex | 0.770551 | 0.512144 | connect.ex | starcoder |
defmodule ParallelStream.Filter do
alias ParallelStream.FilterExecutor
alias ParallelStream.Producer
@moduledoc ~S"""
The filter iterator implementation
"""
defmodule Consumer do
@moduledoc ~S"""
The filter consumer - filters according to direction passed
"""
def build!(stream, direction) do
stream |> Stream.transform(0, fn items, acc ->
filtered = items |> Enum.reduce([], fn { outqueue, index }, list ->
outqueue |> send({ :next, index })
receive do
{ ^outqueue, { ^index, accepted, item } } ->
case !!accepted do
^direction -> list ++ [item]
_ -> list
end
end
end)
{ filtered, acc + 1 }
end)
end
end
@doc """
Creates a stream that will apply the given function on enumeration in
parallel and only pass the values for which the function returns truthy
downstream.
## Options
These are the options:
* `:num_workers` – The number of parallel operations to run when running the stream.
* `:worker_work_ratio` – The available work per worker, defaults to 5. Higher rates will mean more work sharing, but might also lead to work fragmentation slowing down the queues.
## Examples
Filter the even numbers:
iex> parallel_stream = 1..5 |> ParallelStream.filter(fn i -> i |> rem(2) == 0 end)
iex> parallel_stream |> Enum.to_list
[2, 4]
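Worker options can be tuned as well (illustrative values):
1..100 |> ParallelStream.filter(fn i -> rem(i, 3) == 0 end, num_workers: 10)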
"""
def filter(stream, mapper, options \\ []) do
stream
|> Producer.build!(mapper, FilterExecutor, options)
|> Consumer.build!(true)
end
@doc """
Creates a stream that will apply the given function on enumeration in
parallel and only pass the values for which the function returns falsy
downstream.
## Options
These are the options:
* `:num_workers` – The number of parallel operations to run when running the stream.
* `:worker_work_ratio` – The available work per worker, defaults to 5. Higher rates will mean more work sharing, but might also lead to work fragmentation slowing down the queues.
## Examples
Reject the even numbers:
iex> parallel_stream = 1..5 |> ParallelStream.reject(fn i -> i |> rem(2) == 0 end)
iex> parallel_stream |> Enum.to_list
[1, 3, 5]
"""
def reject(stream, mapper, options \\ []) do
stream
|> Producer.build!(mapper, FilterExecutor, options)
|> Consumer.build!(false)
end
end | data/web/deps/parallel_stream/lib/parallel_stream/filter.ex | 0.892627 | 0.501099 | filter.ex | starcoder |
defmodule ShouldI do
@moduledoc """
ShouldI is a testing DSL around ExUnit.
ShouldI supports `having` blocks for nested contexts and
convenience APIs for behavioral naming.
## Examples
defmodule MyFatTest do
having "necessary_key" do
setup context do
assign context,
necessary_key: :necessary_value
end
should( "have necessary key", context ) do
assert context.necessary_key == :necessary_value
end
having "sometimes_necessary_key" do
setup context do
assign context,
:sometimes_necessary_key, :sometimes_necessary_value
end
# matchers to handle common testing idioms
should_match_key sometimes_necessary_key: :sometimes_necessary_value
end
end
end
ShouldI provides support for common idioms through matchers.
For example, these matchers are for plug:
should_respond_having :success
should_match_body_to "<div id="test">
"""
defmacro __using__(args) do
definition =
quote do
@shouldi_having_path []
@shouldi_matchers []
use ExUnit.Case, unquote(args)
import ShouldI
import ExUnit.Callbacks, except: [setup: 1, setup: 2]
end
helpers =
quote unquote: false do
# Store common code in a function definition to
# avoid injecting many variables into a context.
var!(define_matchers, ShouldI) = fn ->
matchers = @shouldi_matchers
|> Enum.reverse
|> ShouldI.Having.prepare_matchers
if matchers != [] do
@tag shouldi_having_path: Enum.reverse(@shouldi_having_path)
ExUnit.Case.test ShouldI.Having.test_name(__MODULE__, "should have passing matchers"), var!(context) do
_ = var!(context)
matcher_errors = unquote(matchers)
matcher_errors = Enum.reject(matcher_errors, &is_nil/1)
if matcher_errors != [] do
raise ExUnit.MultiError, errors: matcher_errors
end
end
end
end
end
[definition, helpers]
end
defmacro setup(var \\ quote(do: _), [do: block]) do
quote do
ExUnit.Callbacks.setup unquote(var) do
case unquote(block) do
:ok -> :ok
{:ok, list} -> {:ok, list}
map -> {:ok, map}
end
end
end
end
@doc """
Sometimes, when running a test concurrently, it's helpful to generate a unique identifier
so resources don't collide. This macro creates ids, optionally appended
## Examples
assert {:ok, _} = join_chatroom(uid("discussion"), Mock.user)
"""
defmacro uid(id \\ nil) do
{function, _} = __CALLER__.function
if String.starts_with?(Atom.to_string(function), "test ") do
"#{__CALLER__.module}.#{function} #{id}"
else
quote do
"#{var!(context).case}.#{var!(context).test} #{unquote(id)}"
end
end
end
@doc """
Create a test case with an optional context. This macro thinly wraps ExUnit's
`test` macro and prefixes the test name with "should".
## Examples
should "add two numbers", do: (assert 2 + 2 = 4)
should "check context for :key", do: (assert context.key == :value)
"""
defmacro should(name, var \\ quote(do: _), options) do
quote do
test("should #{unquote name}", unquote(var), unquote(options))
end
end
@doc """
A function for wrapping together common setup code.
having is useful for nesting setup requirements:
## Examples
having "a logged in user" do
setup do
... setup a logged in user
end
having "a get to :index" do
setup do
assign context,
response: get(:index)
end
should_respond_having :success
should_match_body_to "some_string_to_match"
end
end
"""
defmacro having(context, opts) do
ShouldI.Having.having(__CALLER__, context, opts)
end
@doc """
Shorthand function for assigning context key/value pairs.
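## Example
# `build_user/0` is a hypothetical setup helper
setup context do
assign context, user: build_user()
end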
"""
def assign(context, options) do
# `Dict` is deprecated; `Enum.into/2` merges the keyword options into the context map.
Enum.into(options, context)
end
end | lib/shouldi.ex | 0.834238 | 0.548492 | shouldi.ex | starcoder |
defmodule Dashwallet.Parser do
require Logger
@doc """
Maps a trailwallet data row into a `Map`.
Returns a parsed `Map`.
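## Example
An illustrative row (all values are placeholders):
Dashwallet.Parser.map_csv(["Japan", "2019-01-01", "JPY", "1200",
"EUR", "9,50", "Food", "", "Lunch, Street", ""])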
"""
def map_csv([
trip, date, local_currency, local_amount, home_currency,
home_amount, category, notes, tags, image
]) do
%{
trip: trip,
date: date,
local_currency: local_currency,
local_amount: convert_amount_to_float(local_amount),
home_currency: home_currency,
home_amount: convert_amount_to_float(home_amount),
category: category,
notes: notes,
tags: split_and_trim(tags),
image: image
}
end
def entries_for_trip(data) do
data
|> Enum.map(fn %{:trip => trip} -> trip end)
|> count_occurrences
end
def has_multiple_tags(row) do
Enum.count(row.tags) > 1
end
def group_by_tags(data) do
data
|> normalize
|> Enum.group_by(fn %{tags: [head]} -> head end)
end
@doc """
Calculates expenses for tags and groups them by tag.
Returns a `Map` in the following format:
`%{"Restaurant" => 130.23}`
"""
def expenses_by_tag(data) do
data
|> normalize
|> Enum.group_by(fn
%{tags: [""]} -> "Untagged"
%{tags: [head]} -> head
end,
fn x -> x.home_amount end
)
|> Enum.reduce(%{}, fn {k, v}, acc -> Map.put(acc, k, Float.round(Enum.sum(v), 2)) end)
end
# private
# selectors
defp tags(normalized_data) do
normalized_data
|> Stream.map(fn %{tags: [tag]} -> tag end)
|> Stream.uniq
|> Enum.to_list
end
# helper
defp normalize(data) do
single_tags = Stream.filter(data, &(!has_multiple_tags(&1)))
data
|> Stream.filter(&(has_multiple_tags(&1)))
|> Stream.map(fn x -> Enum.map(x.tags, &Map.merge(x, %{tags: [&1]})) end)
|> Stream.flat_map(fn x -> x end)
|> Stream.concat(single_tags)
|> Enum.to_list
end
defp split_and_trim(str) when is_binary(str) do
str
|> String.split(",")
|> Enum.map(&(String.trim(&1)))
end
defp count_occurrences(list) do
Enum.reduce(list, %{}, fn x, acc -> Map.update(acc, x, 1, &(&1 + 1)) end)
end
defp convert_amount_to_float(str) do
str
|> String.replace(",", ".")
|> fix_leading_zeros
|> Float.parse
|> case do
{float, _} -> float
:error -> raise ArgumentError, message: "Unable to convert given string to float."
end
end
# Adds a leading zero if the trailwallet data has an amount lower than 1.
# This is a hotfix for a bug in the csv export of trailwallet.
# Amounts < 1 get exported without a zero, like this: ",43"
defp fix_leading_zeros(amount) when is_binary(amount), do: String.replace(amount, ~r/^\./, "0.")
end | lib/dashwallet/parser/parser.ex | 0.83622 | 0.519887 | parser.ex | starcoder |
defmodule Instruments.Probe.Definitions do
@moduledoc false
use GenServer
alias Instruments.Probe
alias Instruments.Probe.Errors
@type definition_errors :: {:error, {:probe_names_taken, [String.t()]}}
@type definition_response :: {:ok, [String.t()]} | definition_errors
@probe_prefix Application.get_env(:instruments, :probe_prefix)
@table_name :probe_definitions
def start_link(), do: GenServer.start_link(__MODULE__, [], name: __MODULE__)
def init([]) do
table_name = @table_name
^table_name = :ets.new(table_name, [:named_table, :set, :protected, read_concurrency: true])
{:ok, nil}
end
@doc """
Defines a probe. If the definition fails, an exception is thrown.
@see define/3
"""
@spec define!(String.t(), Probe.probe_type(), Probe.probe_options()) :: [String.t()]
def define!(name, type, options) do
case define(name, type, options) do
{:ok, probe_names} ->
probe_names
{:error, {:probe_names_taken, taken_names}} ->
raise Errors.ProbeNameTakenError.exception(taken_names: taken_names)
end
end
@doc """
Defines a probe.
The probe type can be:
* `gauge`: A single emitted value
* `counter`: A value that's incremented or decremented over time.
If the value is negative, a decrement command is issued,
otherwise an increment command is executed.
* `histogram`: A value collected into a series and reported as percentiles.
* `timing`: A millisecond timing value.
Returns `{:ok, [probe_name]}` or `{:error, reason}`.
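## Example
A sketch of an MFA-based gauge (the probe name is illustrative):
Definitions.define("vm.memory.total", :gauge, mfa: {:erlang, :memory, [:total]})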
"""
@spec define(String.t(), Probe.probe_type(), Probe.probe_options()) :: definition_response
def define(base_name, type, options) do
name = to_probe_name(@probe_prefix, base_name)
defn_fn = fn ->
cond do
Keyword.has_key?(options, :function) ->
Probe.Supervisor.start_probe(name, type, options, Probe.Function)
Keyword.has_key?(options, :mfa) ->
{{module, fun, args}, options} = Keyword.pop(options, :mfa)
probe_fn = fn -> :erlang.apply(module, fun, args) end
options = Keyword.put(options, :function, probe_fn)
Probe.Supervisor.start_probe(name, type, options, Probe.Function)
Keyword.has_key?(options, :module) ->
probe_module = Keyword.get(options, :module)
Probe.Supervisor.start_probe(name, type, options, probe_module)
end
end
definitions =
case Keyword.get(options, :keys) do
keys when is_list(keys) ->
Enum.map(keys, fn key -> "#{name}.#{key}" end)
nil ->
[name]
end
GenServer.call(__MODULE__, {:define, definitions, defn_fn})
end
def handle_call({:define, probe_names, transaction}, _from, _) do
response =
case used_probe_names(probe_names) do
[] ->
added_probes =
Enum.map(probe_names, fn probe_name ->
true = :ets.insert_new(@table_name, {probe_name, probe_name})
probe_name
end)
transaction.()
{:ok, added_probes}
used_probe_names ->
{:error, {:probe_names_taken, used_probe_names}}
end
{:reply, response, nil}
end
defp used_probe_names(probe_names) do
probe_names
|> Enum.map(&:ets.match(@table_name, {&1, :"$1"}))
|> List.flatten()
end
def to_probe_name(nil, base_name), do: base_name
def to_probe_name(probe_prefix, base_name), do: "#{probe_prefix}.#{base_name}"
end | lib/probe/definitions.ex | 0.803752 | 0.425456 | definitions.ex | starcoder |
defmodule Ecto.Validator.Predicates do
@moduledoc """
A handful of predicates to be used in validations.
The examples in this module use the syntax made
available via `Ecto.Model.Validations` in your
model.
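Predicates can also be called directly; they return an empty list on success
or a keyword list of errors:
Ecto.Validator.Predicates.present(:name, nil)
#=> [name: "can't be blank"]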
"""
@type maybe_error :: [] | Keyword.t
@blank [nil, "", []]
@doc """
Validates the attribute is present (i.e. not nil,
nor an empty list nor an empty string).
## Options
* `:message` - defaults to "can't be blank"
## Examples
validate user,
name: present()
"""
@spec present(atom, term, Keyword.t) :: maybe_error
def present(attr, value, opts \\ [])
def present(attr, value, opts) when value in @blank and is_list(opts) do
[{ attr, opts[:message] || "can't be blank" }]
end
def present(_attr, _value, opts) when is_list(opts) do
[]
end
@doc """
Validates the attribute is absent (i.e. nil,
an empty list or an empty string).
## Options
* `:message` - defaults to "must be blank"
## Examples
validate user,
honeypot: absent()
"""
@spec absent(atom, term, Keyword.t) :: maybe_error
def absent(attr, value, opts \\ [])
def absent(_attr, value, opts) when value in @blank and is_list(opts) do
[]
end
def absent(attr, _value, opts) when is_list(opts) do
[{ attr, opts[:message] || "must be blank" }]
end
@doc """
Validates the attribute has a given format.
Nil values are not matched against (skipped).
## Options
* `:message` - defaults to "is invalid"
## Examples
validate user,
email: has_format(~r/@/)
"""
@spec has_format(atom, term, Regex.t | binary, Keyword.t) :: maybe_error
def has_format(attr, value, match_on, opts \\ []) when is_list(opts) do
if value == nil or value =~ match_on do
[]
else
[{ attr, opts[:message] || "is invalid" }]
end
end
@doc """
Validates the attribute has a given length according to Unicode
(i.e. it uses `String.length` under the scenes). That said, this
function should not be used to validate binary fields.
The length can be given as a range (indicating min and max),
as an integer (indicating exact match) or as keyword options,
indicating, min and max values.
Raises if the given argument is not a binary.
## Options
* `:too_long` - message when the length is too long
(defaults to "is too long (maximum is X characters)")
* `:too_short` - message when the length is too short
(defaults to "is too short (minimum is X characters)")
* `:no_match` - message when the length does not match
(defaults to "must be X characters")
## Examples
validate user,
password: has_length(6..100)
validate user,
password: has_length(min: 6, too_short: "requires a minimum length")
validate user,
code: has_length(3, no_match: "needs to be 3 characters")
"""
@spec has_length(atom, term, Range.t | non_neg_integer | Keyword.t, Keyword.t) :: maybe_error
def has_length(attr, value, match_on, opts \\ [])
def has_length(_attr, nil, _match_on, opts) when is_list(opts) do
[]
end
def has_length(attr, value, min..max, opts) when is_binary(value) and is_list(opts) do
length = String.length(value)
too_short(attr, length, min, opts) || too_long(attr, length, max, opts) || []
end
def has_length(attr, value, exact, opts) when is_integer(exact) do
if String.length(value) != exact do
[{ attr, opts[:no_match] || "must be #{characters(exact)}" }]
else
[]
end
end
def has_length(attr, value, opts, other) when is_list(opts) and is_list(other) do
opts = Keyword.merge(opts, other)
length = String.length(value)
((min = opts[:min]) && too_short(attr, length, min, opts)) ||
((max = opts[:max]) && too_long(attr, length, max, opts)) ||
[]
end
defp too_short(attr, length, min, opts) when is_integer(min) do
if length < min do
[{ attr, opts[:too_short] || "is too short (minimum is #{characters(min)})" }]
end
end
defp too_long(attr, length, max, opts) when is_integer(max) do
if length > max do
[{ attr, opts[:too_long] || "is too long (maximum is #{characters(max)})" }]
end
end
defp characters(1), do: "1 character"
defp characters(x), do: "#{x} characters"
@doc """
Validates the given number is greater than the given value.
Expects numbers as value, raises otherwise.
## Options
* `:message` - defaults to "must be greater than X"
## Examples
validates user,
age: greater_than(18)
"""
def greater_than(attr, value, check, opts \\ [])
def greater_than(_attr, value, check, _opts) when
is_number(check) and (is_nil(value) or value > check), do: []
def greater_than(attr, _value, check, opts) when is_number(check), do:
[{ attr, opts[:message] || "must be greater than #{check}" }]
@doc """
Validates the given number is greater than or equal to the given value.
Expects numbers as value, raises otherwise.
## Options
* `:message` - defaults to "must be greater than or equal to X"
## Examples
validates user,
age: greater_than_or_equal_to(18)
"""
def greater_than_or_equal_to(attr, value, check, opts \\ [])
def greater_than_or_equal_to(_attr, value, check, _opts) when
is_number(check) and (is_nil(value) or value >= check), do: []
def greater_than_or_equal_to(attr, _value, check, opts) when is_number(check), do:
[{ attr, opts[:message] || "must be greater than or equal to #{check}" }]
@doc """
Validates the given number is less than the given value.
Expects numbers as value, raises otherwise.
## Options
* `:message` - defaults to "must be less than X"
## Examples
validates user,
age: less_than(18)
"""
def less_than(attr, value, check, opts \\ [])
def less_than(_attr, value, check, _opts) when
is_number(check) and (is_nil(value) or value < check), do: []
def less_than(attr, _value, check, opts) when is_number(check), do:
[{ attr, opts[:message] || "must be less than #{check}" }]
@doc """
Validates the given number is less than or equal to the given value.
Expects numbers as value, raises otherwise.
## Options
* `:message` - defaults to "must be less than or equal to X"
## Examples
validates user,
age: less_than_or_equal_to(18)
"""
def less_than_or_equal_to(attr, value, check, opts \\ [])
def less_than_or_equal_to(_attr, value, check, _opts) when
is_number(check) and (is_nil(value) or value <= check), do: []
def less_than_or_equal_to(attr, _value, check, opts) when is_number(check), do:
[{ attr, opts[:message] || "must be less than or equal to #{check}" }]
@doc """
Validates the given number is between the value.
Expects a range as value, raises otherwise.
## Options
* `:message` - defaults to "must be between X and Y"
## Examples
validates user,
age: between(18..21)
"""
def between(attr, value, min..max, opts \\ [])
def between(_attr, value, min..max, _opts) when
is_number(min) and is_number(max) and (is_nil(value) or value in min..max), do: []
def between(attr, _value, min..max, opts) when is_number(min) and is_number(max), do:
[{ attr, opts[:message] || "must be between #{min} and #{max}" }]
@doc """
Validates the attribute is member of the given enumerable.
This validator has the same semantics as calling `Enum.member?/2`
with the given enumerable and value.
Nil values are not matched against (skipped).
## Options
* `:message` - defaults to "is not included in the list"
## Examples
validate user,
gender: member_of(~w(male female other))
validate user,
age: member_of(0..99)
"""
@spec member_of(atom, term, Enumerable.t, Keyword.t) :: maybe_error
def member_of(attr, value, enum, opts \\ []) when is_list(opts) do
if value == nil or value in enum do
[]
else
[{ attr, opts[:message] || "is not included in the list" }]
end
end
@doc """
Validates the attribute is not a member of the given enumerable.
This validator has the same semantics as calling
`not Enum.member?/2` with the given enumerable and value.
Nil values are not matched against (skipped).
## Options
* `:message` - defaults to "is reserved"
## Examples
validate user,
username: not_member_of(~w(admin superuser))
validate user,
password: not_member_of([user.username, user.name],
message: "cannot be the same as username or first name")
"""
@spec not_member_of(atom, term, Enumerable.t, Keyword.t) :: maybe_error
def not_member_of(attr, value, enum, opts \\ []) when is_list(opts) do
if value == nil or not(value in enum) do
[]
else
[{ attr, opts[:message] || "is reserved" }]
end
end
end | lib/ecto/validator/predicates.ex | 0.893391 | 0.53868 | predicates.ex | starcoder |
defmodule Nebulex.Adapters.Partitioned do
@moduledoc ~S"""
Built-in adapter for partitioned cache topology.
A partitioned cache is a clustered, fault-tolerant cache that has linear
scalability. Data is partitioned among all the machines of the cluster.
For fault-tolerance, partitioned caches can be configured to keep each piece
of data on one or more unique machines within a cluster. This adapter,
however, has no fault-tolerance built in: each piece of data is kept
on a single node/machine (sharding), so if a node fails, the data
kept by that node won't be available to the rest of the cluster.
PG2 is used under the hood by the adapter to manage the cluster nodes.
When the partitioned cache is started in a node, it creates a PG2 group
and joins it (the cache supervisor PID is joined to the group). Then,
when a function is invoked, the adapter picks a node from the node list
(using the PG2 group members), and then the function is executed on that
node. In the same way, when the supervisor process of the partitioned cache
dies, the PID of that process is automatically removed from the PG2 group;
this is why it's recommended to use a consistent hashing algorithm for the
node selector.
> **NOTE:** `pg2` will be replaced by `pg` in future, since the `pg2` module
is deprecated as of OTP 23 and scheduled for removal in OTP 24.
This adapter depends on a local cache adapter (primary storage), it adds
a thin layer on top of it in order to distribute requests across a group
of nodes, where is supposed the local cache is running already. However,
you don't need to define or declare an additional cache module for the
local store, instead, the adapter initializes it automatically (adds the
local cache store as part of the supervision tree) based on the given
options within the `primary:` argument.
## Features
* Support for partitioned topology (Sharding Distribution Model).
* Support for transactions via Erlang global name registration facility.
* Configurable primary storage adapter (local cache adapter).
* Configurable keyslot module to compute the node.
When used, the Cache expects the `:otp_app` and `:adapter` as options.
The `:otp_app` should point to an OTP application that has the cache
configuration. For example:
defmodule MyApp.PartitionedCache do
use Nebulex.Cache,
otp_app: :my_app,
adapter: Nebulex.Adapters.Partitioned
end
Optionally, you can configure the desired primary storage adapter with the
option `:primary_storage_adapter`; defaults to `Nebulex.Adapters.Local`.
defmodule MyApp.PartitionedCache do
use Nebulex.Cache,
otp_app: :my_app,
adapter: Nebulex.Adapters.Partitioned,
primary_storage_adapter: Nebulex.Adapters.Local
end
Also, you can provide a custom keyslot function:
defmodule MyApp.PartitionedCache do
use Nebulex.Cache,
otp_app: :my_app,
adapter: Nebulex.Adapters.Partitioned,
primary_storage_adapter: Nebulex.Adapters.Local
@behaviour Nebulex.Adapter.Keyslot
@impl true
def hash_slot(key, range) do
key
|> :erlang.phash2()
|> :jchash.compute(range)
end
end
Where the configuration for the cache must be in your application environment,
usually defined in your `config/config.exs`:
config :my_app, MyApp.PartitionedCache,
keyslot: MyApp.PartitionedCache,
primary: [
gc_interval: 3_600_000,
backend: :shards
]
For more information about the usage, see `Nebulex.Cache` documentation.
## Options
This adapter supports the following options and all of them can be given via
the cache configuration:
* `:primary` - The options that will be passed to the adapter associated
with the local primary storage. These options will depend on the local
adapter to use.
* `:keyslot` - Defines the module implementing `Nebulex.Adapter.Keyslot`
behaviour.
* `task_supervisor_opts` - Start-time options passed to
`Task.Supervisor.start_link/1` when the adapter is initialized.
## Shared options
Almost all of the cache functions outlined in `Nebulex.Cache` module
accept the following options:
* `:timeout` - The time-out value in milliseconds for the command that
will be executed. If the timeout is exceeded, then the current process
will exit. For executing a command on remote nodes, this adapter uses
`Task.await/2` internally for receiving the result, so this option tells
how much time the adapter should wait for it. If the timeout is exceeded,
the task is shut down but the current process doesn't exit, only the
result associated with that task is skipped in the reduce phase.
## Extended API
This adapter provides some additional convenience functions to the
`Nebulex.Cache` API.
Retrieving the primary storage or local cache module:
MyCache.__primary__()
Retrieving the cluster nodes associated with the given cache `name`:
MyCache.nodes()
MyCache.nodes(:cache_name)
Get a cluster node for the cache `name` based on the given `key`:
MyCache.get_node("mykey")
MyCache.get_node(:cache_name, "mykey")
> If no cache name is passed to the previous functions, the name of the
calling cache module is used by default
## Caveats of partitioned adapter
`c:Nebulex.Cache.get_and_update/3` and `c:Nebulex.Cache.update/4` both take an
anonymous function as a parameter. Anonymous functions are compiled into the
module where they are created, which means they don't necessarily exist on
remote nodes. To ensure these callbacks work as expected, you must provide
functions from modules that exist on all nodes of the group.
"""
# Provide Cache Implementation
@behaviour Nebulex.Adapter
@behaviour Nebulex.Adapter.Queryable
# Inherit default transaction implementation
use Nebulex.Adapter.Transaction
# Inherit default persistence implementation
use Nebulex.Adapter.Persistence
# Inherit default keyslot implementation
use Nebulex.Adapter.Keyslot
import Nebulex.Helpers
alias Nebulex.Adapter
alias Nebulex.Cache.{Cluster, Stats}
alias Nebulex.RPC
## Adapter
@impl true
defmacro __before_compile__(env) do
otp_app = Module.get_attribute(env.module, :otp_app)
opts = Module.get_attribute(env.module, :opts)
primary = Keyword.get(opts, :primary_storage_adapter, Nebulex.Adapters.Local)
quote do
defmodule Primary do
@moduledoc """
This is the cache for the primary storage.
"""
use Nebulex.Cache,
otp_app: unquote(otp_app),
adapter: unquote(primary)
end
@doc """
A convenience function for getting the primary storage cache.
"""
def __primary__, do: Primary
@doc """
A convenience function for getting the cluster nodes.
"""
def nodes(name \\ __MODULE__), do: Cluster.get_nodes(name)
@doc """
A convenience function to get the node of the given `key`.
"""
def get_node(name \\ __MODULE__, key) do
Adapter.with_meta(name, fn _adapter, %{keyslot: keyslot} ->
Cluster.get_node(name, key, keyslot)
end)
end
end
end
@impl true
def init(opts) do
# required cache name
cache = Keyword.fetch!(opts, :cache)
name = opts[:name] || cache
# maybe use stats
stat_counter = opts[:stat_counter] || Stats.init(opts)
# primary cache options
primary_opts =
opts
|> Keyword.get(:primary, [])
|> Keyword.put(:stat_counter, stat_counter)
# maybe put a name to primary storage
primary_opts =
if opts[:name],
do: [name: normalize_module_name([name, Primary])] ++ primary_opts,
else: primary_opts
# task supervisor to execute parallel and/or remote commands
task_sup_name = normalize_module_name([name, TaskSupervisor])
task_sup_opts = Keyword.get(opts, :task_supervisor_opts, [])
# keyslot module for selecting nodes
keyslot =
opts
|> Keyword.get(:keyslot, __MODULE__)
|> assert_behaviour(Nebulex.Adapter.Keyslot, "keyslot")
child_spec =
Nebulex.Adapters.Supervisor.child_spec(
name: normalize_module_name([name, Supervisor]),
strategy: :rest_for_one,
children: [
{cache.__primary__, primary_opts},
{Task.Supervisor, [name: task_sup_name] ++ task_sup_opts}
]
)
meta = %{
name: name,
primary_name: primary_opts[:name],
task_sup: task_sup_name,
keyslot: keyslot,
stat_counter: stat_counter
}
# join the cache to the cluster
:ok = Cluster.join(name)
{:ok, child_spec, meta}
rescue
e in ArgumentError ->
reraise RuntimeError, e.message, __STACKTRACE__
end
@impl true
def get(adapter_meta, key, opts) do
call(adapter_meta, key, :get, [key, opts], opts)
end
@impl true
def get_all(adapter_meta, keys, opts) do
map_reduce(
keys,
adapter_meta,
:get_all,
[opts],
Keyword.get(opts, :timeout),
{
%{},
fn
{:ok, res}, _, acc when is_map(res) ->
Map.merge(acc, res)
_, _, acc ->
acc
end
}
)
end
@impl true
def put(adapter_meta, key, value, _ttl, on_write, opts) do
do_put(on_write, adapter_meta, key, value, opts)
end
defp do_put(:put, adapter_meta, key, value, opts) do
:ok = call(adapter_meta, key, :put, [key, value, opts], opts)
true
end
defp do_put(:put_new, adapter_meta, key, value, opts) do
call(adapter_meta, key, :put_new, [key, value, opts], opts)
end
defp do_put(:replace, adapter_meta, key, value, opts) do
call(adapter_meta, key, :replace, [key, value, opts], opts)
end
@impl true
def put_all(adapter_meta, entries, _ttl, :put, opts) do
do_put_all(:put_all, adapter_meta, entries, opts)
end
def put_all(adapter_meta, entries, _ttl, :put_new, opts) do
do_put_all(:put_new_all, adapter_meta, entries, opts)
end
def do_put_all(action, adapter_meta, entries, opts) do
reducer = {
{true, []},
fn
{:ok, :ok}, {_, {_, _, [_, _, [kv, _]]}}, {bool, acc} ->
{bool, Enum.reduce(kv, acc, &[elem(&1, 0) | &2])}
{:ok, true}, {_, {_, _, [_, _, [kv, _]]}}, {bool, acc} ->
{bool, Enum.reduce(kv, acc, &[elem(&1, 0) | &2])}
{:ok, false}, _, {_, acc} ->
{false, acc}
{:error, _}, _, {_, acc} ->
{false, acc}
end
}
entries
|> map_reduce(
adapter_meta,
action,
[opts],
Keyword.get(opts, :timeout),
reducer
)
|> case do
{true, _} ->
true
{false, keys} ->
:ok = Enum.each(keys, &delete(adapter_meta, &1, []))
action == :put_all
end
end
@impl true
def delete(adapter_meta, key, opts) do
call(adapter_meta, key, :delete, [key, opts], opts)
end
@impl true
def take(adapter_meta, key, opts) do
call(adapter_meta, key, :take, [key, opts], opts)
end
@impl true
def has_key?(adapter_meta, key) do
call(adapter_meta, key, :has_key?, [key])
end
@impl true
def incr(adapter_meta, key, incr, _ttl, opts) do
call(adapter_meta, key, :incr, [key, incr, opts], opts)
end
@impl true
def ttl(adapter_meta, key) do
call(adapter_meta, key, :ttl, [key])
end
@impl true
def expire(adapter_meta, key, ttl) do
call(adapter_meta, key, :expire, [key, ttl])
end
@impl true
def touch(adapter_meta, key) do
call(adapter_meta, key, :touch, [key])
end
@impl true
def size(%{name: name, task_sup: task_sup} = meta) do
task_sup
|> RPC.multi_call(
Cluster.get_nodes(name),
__MODULE__,
:with_dynamic_cache,
[meta, :size, []]
)
|> handle_rpc_multi_call(:size, &Enum.sum/1)
end
@impl true
def flush(%{name: name, task_sup: task_sup} = meta) do
task_sup
|> RPC.multi_call(
Cluster.get_nodes(name),
__MODULE__,
:with_dynamic_cache,
[meta, :flush, []]
)
|> elem(0)
|> Enum.sum()
end
## Queryable
@impl true
def all(%{name: name, task_sup: task_sup} = meta, query, opts) do
task_sup
|> RPC.multi_call(
Cluster.get_nodes(name),
__MODULE__,
:with_dynamic_cache,
[meta, :all, [query, opts]],
opts
)
|> handle_rpc_multi_call(:all, &List.flatten/1)
end
@impl true
def stream(%{name: name, task_sup: task_sup} = meta, query, opts) do
Stream.resource(
fn ->
Cluster.get_nodes(name)
end,
fn
[] ->
{:halt, []}
[node | nodes] ->
elements =
rpc_call(
task_sup,
node,
__MODULE__,
:eval_stream,
[meta, query, opts],
opts
)
{elements, nodes}
end,
& &1
)
end
## Transaction
@impl true
def transaction(%{name: name} = adapter_meta, opts, fun) do
super(adapter_meta, Keyword.put(opts, :nodes, Cluster.get_nodes(name)), fun)
end
## Helpers
@doc """
Helper function to use dynamic cache for internal primary cache storage
when needed.
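For example (illustrative, internal use):
with_dynamic_cache(adapter_meta, :put, [key, value, opts])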
"""
def with_dynamic_cache(%{cache: cache, primary_name: nil}, action, args) do
apply(cache.__primary__, action, args)
end
def with_dynamic_cache(%{cache: cache, primary_name: primary_name}, action, args) do
cache.__primary__.with_dynamic_cache(primary_name, fn ->
apply(cache.__primary__, action, args)
end)
end
@doc """
Helper to perform `stream/3` locally.
"""
def eval_stream(meta, query, opts) do
meta
|> with_dynamic_cache(:stream, [query, opts])
|> Enum.to_list()
end
## Private Functions
defp get_node(%{name: name, keyslot: keyslot}, key) do
Cluster.get_node(name, key, keyslot)
end
defp call(adapter_meta, key, fun, args, opts \\ []) do
adapter_meta
|> get_node(key)
|> rpc_call(adapter_meta, fun, args, opts)
end
defp rpc_call(node, %{task_sup: task_sup} = meta, fun, args, opts) do
rpc_call(task_sup, node, __MODULE__, :with_dynamic_cache, [meta, fun, args], opts)
end
defp rpc_call(supervisor, node, mod, fun, args, opts) do
opts
|> Keyword.get(:timeout)
|> case do
nil -> RPC.call(supervisor, node, mod, fun, args)
val -> RPC.call(supervisor, node, mod, fun, args, val)
end
|> case do
{:badrpc, remote_ex} ->
raise remote_ex
response ->
response
end
end
defp group_keys_by_node(enum, adapter_meta) do
Enum.reduce(enum, %{}, fn
{key, _} = entry, acc ->
node = get_node(adapter_meta, key)
Map.put(acc, node, [entry | Map.get(acc, node, [])])
key, acc ->
node = get_node(adapter_meta, key)
Map.put(acc, node, [key | Map.get(acc, node, [])])
end)
end
defp map_reduce(
enum,
%{task_sup: task_sup} = meta,
action,
args,
timeout,
reducer
) do
groups =
enum
|> group_keys_by_node(meta)
|> Enum.map(fn {node, group} ->
{node, {__MODULE__, :with_dynamic_cache, [meta, action, [group | args]]}}
end)
RPC.multi_call(task_sup, groups, timeout: timeout, reducer: reducer)
end
defp handle_rpc_multi_call({res, []}, _action, fun) do
fun.(res)
end
defp handle_rpc_multi_call({_, errors}, action, _) do
raise Nebulex.RPCMultiCallError, action: action, errors: errors
end
end | lib/nebulex/adapters/partitioned.ex | 0.890223 | 0.729207 | partitioned.ex | starcoder |
defmodule Braintree.TransactionLineItem do
@moduledoc """
For fetching line items for a given transaction.
https://developers.braintreepayments.com/reference/response/transaction-line-item/ruby
"""
use Braintree.Construction
alias Braintree.HTTP
alias Braintree.ErrorResponse, as: Error
@type t :: %__MODULE__{
commodity_code: String.t(),
description: String.t(),
discount_amount: String.t(),
kind: String.t(),
name: String.t(),
product_code: String.t(),
quantity: String.t(),
tax_amount: String.t(),
total_amount: String.t(),
unit_amount: String.t(),
unit_of_measure: String.t(),
unit_tax_amount: String.t(),
url: String.t()
}
defstruct commodity_code: nil,
description: nil,
discount_amount: nil,
kind: nil,
name: nil,
product_code: nil,
quantity: nil,
tax_amount: nil,
total_amount: nil,
unit_amount: nil,
unit_of_measure: nil,
unit_tax_amount: nil,
url: nil
@doc """
Find transaction line items for the given transaction id.
## Example
{:ok, transaction_line_items} = TransactionLineItem.find_all("123")
"""
@spec find_all(String.t(), Keyword.t()) :: {:ok, [t]} | {:error, Error.t()}
def find_all(transaction_id, opts \\ []) do
path = "transactions/#{transaction_id}/line_items"
with {:ok, payload} <- HTTP.get(path, opts) do
{:ok, new(payload)}
end
end
@doc """
Converts a list of transaction line item maps into a list of transaction line items.
## Example
transaction_line_items =
Braintree.TransactionLineItem.new(%{
"line_items" => [
%{"name" => "item name", "total_amount" => "100.00"}
]
})
"""
@spec new(%{required(line_items :: String.t()) => [map]}) :: [t]
def new(%{"line_items" => line_item_maps}) do
Enum.map(line_item_maps, &super/1)
end
end | lib/transaction_line_item.ex | 0.915157 | 0.441191 | transaction_line_item.ex | starcoder |
defmodule Sushi.Schemas.Boat do
import Ecto.Changeset
use Ecto.Schema
@type t :: %__MODULE__{
x: Integer.t(),
y: Integer.t(),
length: Integer.t(),
rot: String.t(),
sunk: boolean()
}
@primary_key false
embedded_schema do
field(:x, :integer)
field(:y, :integer)
field(:length, :integer)
field(:rot, :string)
field(:sunk, :boolean, [default: false])
end
def changeset(initializer \\ %__MODULE__{}, data) do
initializer
|> cast(data, [:x, :y, :length, :rot])
|> validate_required([:x, :y, :length, :rot])
|> validate_xy_constraint
end
defp if_state(statement, iftrue, iffalse) do
if statement do
iftrue
else
iffalse
end
end
defp validate_xy_constraint(changeset) do
{x, y, length, rot} = {get_field(changeset, :x), get_field(changeset, :y), get_field(changeset, :length), get_field(changeset, :rot)}
changeset = if_state(x < 0 or x > 9, add_error(changeset, :x, "x is out of bounds 0..9", val: x), changeset)
changeset = if_state(y < 0 or y > 9, add_error(changeset, :y, "y is out of bounds 0..9", val: y), changeset)
changeset = if_state(length < 2 or length > 5, add_error(changeset, :length, "length is out of bounds 2..5", val: length), changeset)
changeset = if_state(not (rot == "x" or rot == "y"), add_error(changeset, :rot, "rot is neither 'x' nor 'y'", val: rot), changeset)
changeset = if_state(rot == "x" and x + length > 10, add_error(changeset, :length, "boat extends to far", val: length + x), changeset)
changeset = if_state(rot == "y" and y + length > 10, add_error(changeset, :length, "boat extends to far", val: length + y), changeset)
changeset
end
def validate_boats(changeset, field) do
boats = get_field(changeset, field)
result = Enum.reduce(boats, %{}, fn (v, acc) -> Map.put(acc, v.length, Map.get(acc, v.length, 0) + 1) end)
invalid_boat_counts = case result do
_ when length(boats) != 5 ->
true
%{2 => 1, 3 => 2, 4 => 1, 5 => 1} ->
false
_ ->
true
end
big_boxes = Enum.reduce(boats, [], fn (v, acc) ->
{x, y} = {v.x - 1, v.y - 1}
{w, h} = case v.rot do
"x" ->
{2 + v.length, 3}
"y" ->
{3, 2 + v.length}
_ ->
{0, 0}
end
box = %{
x: x,
y: y,
w: w,
h: h
}
[box | acc]
end)
small_boxes = Enum.reduce(boats, [], fn (v, acc) ->
{x, y} = {v.x, v.y}
{w, h} = case v.rot do
"x" ->
{v.length, 1}
"y" ->
{1, v.length}
_ ->
{0, 0}
end
box = %{
x: x,
y: y,
w: w,
h: h
}
[box | acc]
end)
intersect = Enum.reduce(0..(length(boats) - 2), false, fn (i, acc) ->
cond do
acc -> acc
true ->
Enum.reduce((i + 1)..(length(boats) - 1), false, fn (j, acc) ->
cond do
acc -> acc
true ->
a = Enum.at(small_boxes, i)
b = Enum.at(big_boxes, j)
ahw = a.x + a.w / 2
bhw = b.x + b.w / 2
ahh = a.y + a.h / 2
bhh = b.y + b.h / 2
w = a.w + b.w
h = a.h + b.h
wa = abs(ahw - bhw) * 2
ha = abs(ahh - bhh) * 2
wa < w and ha < h
end
end)
end
end)
changeset = if_state(invalid_boat_counts, add_error(changeset, field, "too many or too few boats or invalid amounts of them", val: boats), changeset)
changeset = if_state(intersect, add_error(changeset, field, "boats overlap", val: boats), changeset)
changeset
end
defimpl Jason.Encoder do
@fields ~w(x y sunk length rot)a
def encode(boat, opts) do
boat
|> Map.take(@fields)
|> Jason.Encoder.encode(opts)
end
end
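# Example (illustrative): a boat at {2, 3} of length 3 lying along the x axis
# covers the cells {2, 3}, {3, 3} and {4, 3}:
#
# boat = %Sushi.Schemas.Boat{x: 2, y: 3, length: 3, rot: "x"}
# Sushi.Schemas.Boat.pointInsideBoat?(boat, 4, 3) #=> true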
def pointInsideBoat?(boat, x, y) do
valid = case boat.rot do
"x" -> boat.y == y
"y" -> boat.x == x
end
valid = valid and case boat.rot do
"x" -> x >= boat.x
"y" -> y >= boat.y
end
valid = valid and case boat.rot do
"x" -> x < boat.x + boat.length
"y" -> y < boat.y + boat.length
end
valid
end
end | pancake/lib/sushi/schemas/boat.ex | 0.675015 | 0.474266 | boat.ex | starcoder |
defmodule WechatPay.Utils.Signature do
@moduledoc """
Module to sign data
"""
alias WechatPay.Error
alias WechatPay.JSON
require JSON
@doc """
Generate the signature of data with API key
## Example
```elixir
iex> WechatPay.Utils.Signature.sign(%{...}, "wx9999")
...> "02696FC7E3E19F852A0335F2F007DD3E"
```
"""
@spec sign(map, String.t(), :md5 | :sha256) :: String.t()
def sign(data, api_key, :md5) when is_map(data) do
sign_string = generate_sign_string(data, api_key)
:md5
|> :crypto.hash(sign_string)
|> Base.encode16()
end
def sign(data, api_key, :sha256) when is_map(data) do
sign_string = generate_sign_string(data, api_key)
# :crypto.sign(:rsa, :sha256, sign_string, api_key)
:sha256
|> crypto_hmac(api_key, sign_string)
|> Base.encode16()
end
def sign(data, api_key, _other) when is_map(data) do
sign(data, api_key, :md5)
end
# https://erlang.org/doc/apps/crypto/new_api.html#the-new-api
if Code.ensure_loaded?(:crypto) && function_exported?(:crypto, :mac, 4) do
defp crypto_hmac(type, key, data) do
:crypto.mac(:hmac, type, key, data)
end
else
defp crypto_hmac(type, key, data) do
:crypto.hmac(type, key, data)
end
end
@doc """
Verify the signature of Wechat's response
## Example
```elixir
iex > WechatPay.Utils.Signature.verify(%{sign: "foobar"}, "a45a313dfbf0494288c3e56bcacf30daa")
... > :ok
```
"""
@spec verify(map, String.t(), :md5 | :sha256) :: :ok | {:error, Error.t()}
def verify(data, api_key, sign_type) when is_map(data) do
calculated =
data
|> Map.delete(:sign)
|> sign(api_key, sign_type)
if data.sign == calculated do
:ok
else
{:error, %Error{reason: "Invalid signature of wechat's response", type: :invalid_signature}}
end
end
defp process_param({_k, ""}) do
nil
end
defp process_param({_k, nil}) do
nil
end
defp process_param({k, v}) when is_map(v) do
"#{k}=#{JSON.encode!(v)}"
end
defp process_param({k, v}) do
"#{k}=#{v}"
end
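# Builds the string to sign per WeChat Pay's rules: sort params by key, drop
# blank/nil values, join "k=v" pairs with "&", then append "key=<api_key>",
# e.g. "appid=wx9999&body=test&key=SECRET" (illustrative values).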
defp generate_sign_string(data, api_key) do
data
|> Map.delete(:__struct__)
|> Enum.sort()
|> Enum.map(&process_param/1)
|> Enum.reject(&is_nil/1)
|> List.insert_at(-1, "key=#{api_key}")
|> Enum.join("&")
end
end | lib/wechat_pay/utils/signature.ex | 0.843863 | 0.779993 | signature.ex | starcoder |
defmodule Searchex.Command.Search.Bm25 do
@moduledoc false
@num_docs 100
@avg_doc_token_len 100
@doc_token_len 100
@term_freq_tuning_factor 1.2
@doc_len_tuning_param 0.72
@doc """
Document Scores
terms looks like:
["term1", "term2"]
doc_matches looks like:
[{"term1", %{"docid1" => [pos1, pos2], "docid2" => [pos3, pos4]}}, {"term2", %{}}]
matches_per_term_and_doc looks like:
%{{"term1", "docid1"} => 23, {"term1", "docid2"} => 4, ...}
"""
def doc_scores(terms, doc_matches, matches_per_term_and_doc) do
docids = uniq_doc_ids(doc_matches)
tuples = Enum.map docids, fn(docid) ->
{docid, doc_score(terms, docid, doc_matches, matches_per_term_and_doc, {@avg_doc_token_len, @num_docs})}
end
Enum.sort tuples, &(elem(&1,1) > elem(&2,1))
end
@doc """
Document Score
"""
def doc_score(terms, docid, doc_matches, matches_per_term_and_doc, {avg_doc_len, num_docs}) do
term_scores = Enum.map terms, fn(term) ->
match_count = case matches_per_term_and_doc[{term, docid}] do
nil -> 0
count -> count
end
term_doc_score(match_count,
num_docs_with_term(term, doc_matches),
avg_doc_len(@doc_token_len, avg_doc_len),
num_docs)
end
Enum.sum(term_scores)
end
@doc """
Term Document Score
## Examples
iex> Searchex.Command.Search.Bm25.term_doc_score(2, 10, 150, 200)
0.07362
"""
def term_doc_score(num_times_in_doc, num_docs_with_term, avg_doc_len, num_docs) do
term_freq = term_freq(num_times_in_doc, avg_doc_len)
idf = inverse_doc_freq_of_term(num_docs, num_docs_with_term)
result = (term_freq * idf)
if is_float(result), do: Float.round(result, 5), else: result
end
@doc """
Unique Document IDs
## Example
iex> Searchex.Command.Search.Bm25.uniq_doc_ids([{"term1", %{"docid1" => [2,4,5]}}])
["docid1"]
"""
def uniq_doc_ids(doc_matches) do
doc_matches
|> Enum.reduce([], fn({_term, map}, acc) -> acc ++ Map.keys(map) end)
|> List.flatten
|> Enum.uniq
end
@doc """
Number of Documents with Term
## Examples
iex> Searchex.Command.Search.Bm25.num_docs_with_term("term1", [{"term1", %{"docid1" => [2,4,5]}}])
1
"""
def num_docs_with_term(term, doc_matches) do
case :lists.keyfind(term, 1, doc_matches) do
{_, doc_ids} -> Enum.count(doc_ids)
false -> 0
end
end
@doc """
Inverse Document Frequency of Term
## Examples
iex> Searchex.Command.Search.Bm25.inverse_doc_freq_of_term(:NA, 0)
0
iex> Searchex.Command.Search.Bm25.inverse_doc_freq_of_term(100, 5)
3.04452
"""
def inverse_doc_freq_of_term(_, 0) do
0
end
def inverse_doc_freq_of_term(total_num_docs, num_docs_with_term) do
base = :math.log(1.0 + total_num_docs / num_docs_with_term)
Float.round(base, 5)
end
@doc """
Term Frequency
## Examples
iex> Searchex.Command.Search.Bm25.term_freq(0, :NA)
0
iex> Searchex.Command.Search.Bm25.term_freq(5, 20)
0.37931
"""
def term_freq(0, _AverageDocumentLength) do
0
end
def term_freq(num_appearances_in_doc, avg_doc_len) do
base = (num_appearances_in_doc * (@term_freq_tuning_factor + 1)) /
(num_appearances_in_doc + avg_doc_len * @term_freq_tuning_factor)
Float.round(base, 5)
end
@doc """
Average Document Length
## Example
iex> Searchex.Command.Search.Bm25.avg_doc_len(20, 40)
0.64
"""
def avg_doc_len(doc_len_in_words, avg_doc_len_in_words) do
(1 - @doc_len_tuning_param) + @doc_len_tuning_param * doc_len_in_words / avg_doc_len_in_words
end
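# Illustrative helper (added as a sketch; not part of the original module).
# Composes the BM25 pieces for a single term/document pair, reusing the
# hypothetical counts from the doctests above.
def example_score do
  # 2 occurrences in the doc, 10 docs contain the term,
  # normalized doc length 150, corpus of 200 docs
  term_doc_score(2, 10, 150, 200)
  # => 0.07362
end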
end | lib/searchex/command/search/bm25.ex | 0.654122 | 0.529568 | bm25.ex | starcoder |
defmodule Univrse.Alg.AES_GCM do
@moduledoc """
AES_GCM algorithm module.
  Encrypt and decrypt messages using AES-GCM symmetric encryption.
"""
alias Univrse.Key
@doc """
  Decrypts the ciphertext with the key using the specified algorithm.
Accepted options:
  * `aad` - additional authenticated data
  * `iv` - initialization vector (nonce)
  * `tag` - authentication tag
"""
@spec decrypt(String.t, binary, Key.t, keyword) :: {:ok, binary} | {:error, any}
def decrypt(alg, encrypted, key, opts \\ [])
def decrypt(alg, encrypted, %Key{type: "oct", params: %{k: k}}, opts)
when (alg == "A128GCM" and byte_size(k) == 16)
or (alg == "A256GCM" and byte_size(k) == 32)
do
aad = Keyword.get(opts, :aad, "")
iv = Keyword.get(opts, :iv, "")
tag = Keyword.get(opts, :tag, "")
case :crypto.crypto_one_time_aead(cipher(alg), k, iv, encrypted, aad, tag, false) do
result when is_binary(result) ->
{:ok, result}
{:error, _, error} ->
{:error, error}
:error ->
{:error, "Decrypt error"}
end
end
def decrypt(_alg, _encrypted, _key, _opts),
do: {:error, :invalid_key}
@doc """
Encrypts the message with the key using the specified algorithm. Returns a
  three-part tuple containing the encrypted ciphertext and any headers to add to
the Recipient.
Accepted options:
  * `aad` - additional authenticated data
  * `iv` - initialization vector (nonce); randomly generated when omitted
"""
@spec encrypt(String.t, binary, Key.t, keyword) :: {:ok, binary, map} | {:error, any}
def encrypt(alg, message, key, opts \\ [])
def encrypt(alg, message, %Key{type: "oct", params: %{k: k}}, opts)
when (alg == "A128GCM" and byte_size(k) == 16)
or (alg == "A256GCM" and byte_size(k) == 32)
do
aad = Keyword.get(opts, :aad, "")
iv = Keyword.get(opts, :iv, :crypto.strong_rand_bytes(12))
case :crypto.crypto_one_time_aead(cipher(alg), k, iv, message, aad, true) do
{encrypted, tag} ->
{:ok, encrypted, %{"iv" => iv, "tag" => tag}}
{:error, _, error} ->
{:error, error}
end
end
def encrypt(_alg, _message, _key, _opts),
do: {:error, :invalid_key}
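# Round-trip sketch (added for illustration; not part of the original module).
# A 32-byte random key selects the A256GCM branch; the message is arbitrary.
def example_roundtrip(message \\ "hello") do
  key = %Key{type: "oct", params: %{k: :crypto.strong_rand_bytes(32)}}
  # encrypt/4 generates a random iv and returns it with the auth tag
  {:ok, encrypted, %{"iv" => iv, "tag" => tag}} = encrypt("A256GCM", message, key)
  decrypt("A256GCM", encrypted, key, iv: iv, tag: tag)
  # => {:ok, "hello"}
end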
# Returns the cipher for the given algorithm
defp cipher("A128GCM"), do: :aes_128_gcm
defp cipher("A256GCM"), do: :aes_256_gcm
end | lib/univrse/alg/aes_gcm.ex | 0.755366 | 0.451992 | aes_gcm.ex | starcoder |
defmodule AWS.CodeBuild do
@moduledoc """
AWS CodeBuild
AWS CodeBuild is a fully managed build service in the cloud. AWS CodeBuild
compiles your source code, runs unit tests, and produces artifacts that are
ready to deploy. AWS CodeBuild eliminates the need to provision, manage,
and scale your own build servers. It provides prepackaged build
environments for the most popular programming languages and build tools,
such as Apach Maven, Gradle, and more. You can also fully customize build
environments in AWS CodeBuild to use your own build tools. AWS CodeBuild
scales automatically to meet peak build requests, and you pay only for the
build time you consume. For more information about AWS CodeBuild, see the
*AWS CodeBuild User Guide*.
AWS CodeBuild supports these operations:
<ul> <li> `BatchGetProjects`: Gets information about one or more build
projects. A *build project* defines how AWS CodeBuild will run a build.
This includes information such as where to get the source code to build,
the build environment to use, the build commands to run, and where to store
the build output. A *build environment* represents a combination of
operating system, programming language runtime, and tools that AWS
CodeBuild will use to run a build. Also, you can add tags to build projects
to help manage your resources and costs.
</li> <li> `CreateProject`: Creates a build project.
</li> <li> `DeleteProject`: Deletes a build project.
</li> <li> `ListProjects`: Gets a list of build project names, with each
build project name representing a single build project.
</li> <li> `UpdateProject`: Changes the settings of an existing build
project.
</li> <li> `BatchGetBuilds`: Gets information about one or more builds.
</li> <li> `ListBuilds`: Gets a list of build IDs, with each build ID
representing a single build.
</li> <li> `ListBuildsForProject`: Gets a list of build IDs for the
specified build project, with each build ID representing a single build.
</li> <li> `StartBuild`: Starts running a build.
</li> <li> `StopBuild`: Attempts to stop running a build.
</li> <li> `ListCuratedEnvironmentImages`: Gets information about Docker
images that are managed by AWS CodeBuild.
</li> </ul>
"""
@doc """
Gets information about builds.
"""
def batch_get_builds(client, input, options \\ []) do
request(client, "BatchGetBuilds", input, options)
end
@doc """
Gets information about build projects.
"""
def batch_get_projects(client, input, options \\ []) do
request(client, "BatchGetProjects", input, options)
end
@doc """
Creates a build project.
"""
def create_project(client, input, options \\ []) do
request(client, "CreateProject", input, options)
end
@doc """
Deletes a build project.
"""
def delete_project(client, input, options \\ []) do
request(client, "DeleteProject", input, options)
end
@doc """
Gets a list of build IDs, with each build ID representing a single build.
"""
def list_builds(client, input, options \\ []) do
request(client, "ListBuilds", input, options)
end
@doc """
Gets a list of build IDs for the specified build project, with each build
ID representing a single build.
"""
def list_builds_for_project(client, input, options \\ []) do
request(client, "ListBuildsForProject", input, options)
end
@doc """
Gets information about Docker images that are managed by AWS CodeBuild.
"""
def list_curated_environment_images(client, input, options \\ []) do
request(client, "ListCuratedEnvironmentImages", input, options)
end
@doc """
Gets a list of build project names, with each build project name
representing a single build project.
"""
def list_projects(client, input, options \\ []) do
request(client, "ListProjects", input, options)
end
@doc """
Starts running a build.
"""
def start_build(client, input, options \\ []) do
request(client, "StartBuild", input, options)
end
@doc """
Attempts to stop running a build.
"""
def stop_build(client, input, options \\ []) do
request(client, "StopBuild", input, options)
end
@doc """
Changes the settings of a build project.
"""
def update_project(client, input, options \\ []) do
request(client, "UpdateProject", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "codebuild"}
host = get_host("codebuild", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "CodeBuild_20161006.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
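# Usage sketch (added for illustration; not part of the module). The client
# shape below is inferred from request/4, get_host/2 and get_url/1 above;
# the region, endpoint and credential values are hypothetical, and any
# further keys expected by AWS.Request.sign_v4/5 must also be present.
def example_list_projects do
  client = %{
    region: "us-east-1",
    endpoint: "amazonaws.com",
    proto: "https",
    port: 443,
    service: nil,
    access_key_id: "AKIA...",
    secret_access_key: "..."
  }
  list_projects(client, %{})
end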
end | lib/aws/code_build.ex | 0.765199 | 0.579847 | code_build.ex | starcoder |
defmodule Semver do
@moduledoc """
Utilities for working with [semver.org](http://semver.org)-compliant version strings.
"""
  @vsn File.read!("VERSION") |> String.trim
@pattern ~r"""
^v?
(?<major>0|[1-9]\d*)\.
(?<minor>0|[1-9]\d*)\.
(?<patch>0|[1-9]\d*)
(-(?<prerelease>[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?
(\+(?<build>[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?
$
"""x
@type t :: %Semver{major: integer, minor: integer, patch: integer, prerelease: [String.t], build: [String.t]}
defstruct major: 0, minor: 0, patch: 0, prerelease: [], build: []
@doc """
Gets the version string of the module.
"""
@spec version() :: String.t
def version, do: @vsn
@doc """
Increment the named `part` of `version`.
Options for `part` are:
* `:patch` — Increments the patch version
* `:minor` — Increments the minor version and resets patch to zero
* `:major` — Increments the major version and resets the minor and patch versions to zero
No matter which part is selected, prerelease and build parts will be emptied.
## Examples
iex> Semver.increment("1.2.3", :minor)
"1.3.0"
iex> Semver.increment("1.2.3-alpha", :major)
"2.0.0"
"""
@spec increment(String.t, atom) :: String.t
@spec increment(t, atom) :: t
def increment(version, part) when is_binary(version) do
version
|> Semver.parse!
|> Semver.increment(part)
|> Semver.to_string
end
def increment(version, :major) do
%Semver{version | major: version.major + 1, minor: 0, patch: 0, prerelease: [], build: []}
end
def increment(version, :minor) do
%Semver{version | minor: version.minor + 1, patch: 0, prerelease: [], build: []}
end
def increment(version, :patch) do
%Semver{version | patch: version.patch + 1, prerelease: [], build: []}
end
@doc """
Validates that `version` is a valid Semver string.
"""
@spec is_valid(String.t) :: boolean
def is_valid(version), do: version =~ @pattern
@doc """
Parses `version` into a `Semver` struct.
"""
@spec parse(String.t) :: {:ok, t} | {:error, :invalid}
def parse(version) do
cond do
is_valid(version) -> parse_valid(version)
true -> {:error, :invalid}
end
end
@doc """
Parses a version string into a `Semver` struct. If `version` is not a valid version string, it
raises `Semver.Error`.
"""
@spec parse!(String.t) :: t | no_return
def parse!(version) do
case parse(version) do
{:ok, retval} -> retval
{:error, :invalid} -> raise Semver.Error, message: "Invalid version text: #{version}"
end
end
@doc """
Converts the `version` struct into a version string.
"""
@spec to_string(t) :: String.t
def to_string(version) do
"#{version.major}.#{version.minor}.#{version.patch}"
|> append_prerelease(version)
|> append_build(version)
end
defp append_build(text, %{build: []}), do: text
defp append_build(text, %{build: list}), do: "#{text}+#{Enum.join(list, ".")}"
defp append_prerelease(text, %{prerelease: []}), do: text
defp append_prerelease(text, %{prerelease: list}), do: "#{text}-#{Enum.join(list, ".")}"
defp correct_list([""]), do: []
defp correct_list(list), do: list
defp extract_integer(text) do
{number, _} = Integer.parse(text)
number
end
defp parse_valid(version) do
parts = Regex.named_captures(@pattern, version)
major = extract_integer(parts["major"])
minor = extract_integer(parts["minor"])
patch = extract_integer(parts["patch"])
prerelease = correct_list(String.split(parts["prerelease"], ~r/\./))
build = correct_list(String.split(parts["build"], ~r/\./))
{:ok, %Semver{major: major, minor: minor, patch: patch, prerelease: prerelease, build: build}}
end
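# Illustrative sketch (added; not part of the original module): a
# parse/bump/format round trip through the public API above.
def example_bump do
  "1.2.3-alpha"
  |> parse!()
  |> increment(:minor)
  |> Semver.to_string()
  # => "1.3.0"
end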
end | lib/semver.ex | 0.817356 | 0.528533 | semver.ex | starcoder |
defmodule Forma.Typespecs do
def compile(module) do
module
|> Code.Typespec.fetch_types()
|> case do
{:ok, types} ->
types
# coveralls-ignore-start
:error ->
raise "Code.Typespec.fetch_types(#{module}) error"
# coveralls-ignore-end
end
|> rewrite(module)
|> Map.new(fn {t, d} -> {{module, t}, d} end)
end
# [{:type, _}, {:type, _}]
defp rewrite([x | xs], module) do
[rewrite(x, module) | rewrite(xs, module)]
end
# {:opaque, {:t, {}, [{:var, 37, :value}]}}
defp rewrite({:opaque, {name, _, _}}, _module) do
{name, :opaque}
end
defp rewrite({:type, {name, tree, _}}, module) do
{name, rewrite(tree, module)}
end
# {:type, {:t, {:user_type, 38, :t, [{:type, 38, :term, []}]}, []}}, []
defp rewrite({:type, {name, type}}, module) do
{name, rewrite(type, module)}
end
# {:type, 149, :map, tree}
defp rewrite({:type, _, :map, [{:type, _, :map_field_assoc, [key, value]} | _]}, module) do
{:assoc_map, {rewrite(key, module), rewrite(value, module)}}
end
defp rewrite({:type, _, :map, [{:type, _, :map_field_exact, type} | _] = tree}, module) do
case type do
[{:atom, _, :__struct__}, {:atom, _, struct_name}] ->
{:struct, struct_name, parse_struct(tree, module)}
[{:atom, _, _}, _] ->
{:exact_map, map(tree, module)}
[key, value] ->
{:exact_map, {rewrite(key, module), rewrite(value, module)}}
end
end
defp rewrite({:user_type, _, name, args}, module) do
{{module, name}, rewrite(args, module)}
end
defp rewrite({:remote_type, _, [{:atom, _, remote_module}, {:atom, _, type}, args]}, module) do
{{remote_module, type}, rewrite(args, module)}
end
defp rewrite({:type, _, :union, tree}, module) do
{:union, Enum.map(tree, &rewrite(&1, module))}
end
defp rewrite({:type, _, :tuple, tree}, module) do
{:tuple, Enum.map(tree, &rewrite(&1, module))}
end
defp rewrite({:type, _, :list, [tree | []]}, module) do
{:list, rewrite(tree, module)}
end
defp rewrite({:atom, _, val}, _module) do
{:atom, val}
end
# scalar types default to their type.
defp rewrite({:type, _, typ, tree}, _module) do
{typ, tree}
end
defp rewrite(x, _module) do
x
end
defp parse_struct([{:type, _, :map_field_exact, [{:atom, _, :__struct__}, {:atom, _, _}]} | rest], module),
do: map(rest, module)
defp map(tree, module) do
map(tree, module, %{})
end
# [{:type, 278, :map_field_exact, [{:atom, 0, :atom}, {:type, 285, :atom, []}]}
defp map([{:type, _, :map_field_exact, [{field_type, _, name}, typ]} | rest], module, acc) do
field =
case field_type do
:atom -> name
_ -> to_string(name)
end
acc = Map.put(acc, to_string(name), {field, rewrite(typ, module)})
map(rest, module, acc)
end
defp map([], _module, acc) do
acc
end
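# Usage sketch (added for illustration; not part of the original module).
# `MyApp.Thing` is a hypothetical module with public @type definitions; the
# result shape follows compile/1 above, keyed by {module, type_name}.
def example do
  compile(MyApp.Thing)
  # => %{{MyApp.Thing, :t} => {:exact_map, ...}, ...}
end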
end | lib/forma/typespecs.ex | 0.611266 | 0.432363 | typespecs.ex | starcoder |
defmodule Aecore.Channel.ChannelOffChainUpdate do
@moduledoc """
Behaviour that states all the necessary functions that every update of the offchain state should implement.
This module implements helpers for applying updates to an offchain chainstate
"""
alias Aecore.Chain.Chainstate
alias Aecore.Channel.Updates.ChannelTransferUpdate
alias Aecore.Channel.Updates.ChannelDepositUpdate
alias Aecore.Channel.Updates.ChannelWidthdrawUpdate
@typedoc """
Possible types of an update
"""
@type update_types ::
ChannelTransferUpdate.t()
| ChannelDepositUpdate.t()
| ChannelWidthdrawUpdate.t()
@typedoc """
The type of errors returned by the functions in this module
"""
@type error :: {:error, String.t()}
@doc """
Callback for applying the update to the offchain chainstate.
  While applying a list of updates, each callback receives the chainstate as partially updated by the preceding updates.
"""
@callback update_offchain_chainstate!(Chainstate.t() | nil, update_types()) ::
Chainstate.t() | no_return()
@doc """
Encodes the update to list of binaries. This callback is compatible with the Serializable behaviour.
Epoch 0.16 does not treat updates as standard serializable objects but this changed in the later versions.
"""
@callback encode_to_list(update_types()) :: list(binary()) | error()
@doc """
Decodes the update from a list of binaries. This callback is compatible with the Serializable behaviour.
Epoch 0.16 does not treat updates as standard serializable objects but this changed in the later versions.
"""
@callback decode_from_list(list(binary())) :: update_types() | error()
@doc """
Preprocess checks for an incoming half signed update.
  This callback should check for signs of the update being malicious (for instance, transfer updates should validate whether the transfer goes in the correct direction).
The provided map contains values to check against.
"""
@callback half_signed_preprocess_check(update_types(), map()) :: :ok | error()
@doc """
  Validates an update against the current state before applying it to the provided chainstate.
"""
@callback fully_signed_preprocess_check(Chainstate.t() | nil, update_types(), non_neg_integer()) ::
:ok | error()
@doc """
Epoch 0.16 does not treat updates as standard serializable objects but this changed in the later versions.
  To make upgrading easy, updates need to specify their ID, which acts as their tag. To upgrade
  to a recent version of Epoch, offchain updates just need to be added as serializable objects to the serializer
  and this temporary tag needs to be removed.
"""
@spec tag_to_module(non_neg_integer()) :: module()
def tag_to_module(0), do: {:ok, ChannelTransferUpdate}
def tag_to_module(1), do: {:ok, ChannelDepositUpdate}
def tag_to_module(2), do: {:ok, ChannelWidthdrawUpdate}
def tag_to_module(_), do: {:error, "#{__MODULE__} Error: Invalid update tag"}
@doc """
Converts the specified module to the associated tag.
"""
@spec module_to_tag(module()) :: non_neg_integer()
def module_to_tag(ChannelTransferUpdate), do: {:ok, 0}
def module_to_tag(ChannelDepositUpdate), do: {:ok, 1}
def module_to_tag(ChannelWidthdrawUpdate), do: {:ok, 2}
def module_to_tag(module),
do: {:error, "#{__MODULE__} Error: Unserializable module: #{inspect(module)}"}
@doc """
Encodes the given update to a list of binaries.
"""
@spec encode_to_list(update_types()) :: list(binary())
def encode_to_list(object) do
module = object.__struct__
{:ok, tag} = module_to_tag(module)
[:binary.encode_unsigned(tag)] ++ module.encode_to_list(object)
end
@doc """
Decodes the given update from a list of binaries.
"""
@spec decode_from_list(list(binary())) :: update_types() | error()
def decode_from_list([tag | rest]) do
decoded_tag = :binary.decode_unsigned(tag)
case tag_to_module(decoded_tag) do
{:ok, module} ->
module.decode_from_list(rest)
{:error, _} = err ->
err
end
end
@doc """
Applies each update in a list of updates to the offchain chainstate. Breaks on the first encountered error.
"""
@spec apply_updates(Chainstate.t() | nil, list(update_types()), non_neg_integer()) ::
{:ok, Chainstate.t()} | error()
def apply_updates(chainstate, updates, channel_reserve) do
Enum.reduce_while(
updates,
{:ok, chainstate},
&apply_single_update_to_chainstate(&1, &2, channel_reserve)
)
end
  # Function passed to Enum.reduce_while/3. Applies the given update to the chainstate.
@spec apply_single_update_to_chainstate(
update_types(),
{:ok, Chainstate.t() | nil},
non_neg_integer()
) :: {:ok, Chainstate.t()} | {:halt, error()}
defp apply_single_update_to_chainstate(update, {:ok, chainstate}, channel_reserve) do
with :ok <- fully_signed_preprocess_check(chainstate, update, channel_reserve),
{:ok, _} = updated_chainstate <- update_offchain_chainstate(chainstate, update) do
{:cont, updated_chainstate}
else
{:error, _} = err ->
{:halt, err}
end
end
@doc """
  Updates the offchain chainstate according to the specified update.
"""
@spec update_offchain_chainstate(Chainstate.t() | nil, update_types()) ::
{:ok, Chainstate.t()} | error()
def update_offchain_chainstate(chainstate, object) do
module = object.__struct__
{:ok, module.update_offchain_chainstate!(chainstate, object)}
end
@doc """
Validates an update considering state before applying it to the provided chainstate.
"""
@spec fully_signed_preprocess_check(Chainstate.t() | nil, update_types(), non_neg_integer()) ::
:ok | error()
def fully_signed_preprocess_check(chainstate, object, channel_reserve) do
module = object.__struct__
module.fully_signed_preprocess_check(chainstate, object, channel_reserve)
end
@doc """
Runs preprocess checks for an update which was signed by the foreign peer in the channel.
"""
@spec half_signed_preprocess_check(update_types(), map()) :: :ok | error()
def half_signed_preprocess_check(update, opts) do
module = update.__struct__
module.half_signed_preprocess_check(update, opts)
end
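# Illustrative sketch (added; not part of the original module): the
# tag <-> module mapping used for serialization round-trips.
def example_tag_roundtrip do
  {:ok, module} = tag_to_module(0)
  {:ok, 0} = module_to_tag(module)
  module
  # => Aecore.Channel.Updates.ChannelTransferUpdate
end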
end | apps/aecore/lib/aecore/channel/channel_off_chain_update.ex | 0.885012 | 0.496765 | channel_off_chain_update.ex | starcoder |
defmodule Type.Function.Var do
@moduledoc """
a special container type indicating that the function has a type dependency.
### Example:
The following typespec:
```elixir
@spec identity(x) :: x when x: var
```
generates the following typespec:
```elixir
%Type.Function{
params: [%Type.Function.Var{name: :x}],
return: %Type.Function.Var{name: :x}
}
```
if you further put a restriction on this typespec:
```elixir
@spec identity(x) :: x when x: integer
```
the `Type.Function.Var` will further exhibit the issued constraint:
```elixir
%Type.Function{
params: [%Type.Function.Var{name: :x, constraint: %Type{name: :integer}}],
return: %Type.Function.Var{name: :x, constraint: %Type{name: :integer}}
}
```
"""
import Type, only: :macros
@enforce_keys [:name]
defstruct @enforce_keys ++ [constraint: builtin(:any)]
@type t :: %__MODULE__{
name: atom,
constraint: Type.t
}
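# Illustrative sketch (added; not part of the original module): the var
# produced for `@spec identity(x) :: x when x: integer`, mirroring the
# moduledoc above.
def example do
  %__MODULE__{name: :x, constraint: %Type{name: :integer}}
end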
end
defimpl Inspect, for: Type.Function.Var do
def inspect(var, _opts) do
"#{var.name}"
end
end
defimpl Type.Properties, for: Type.Function.Var do
alias Type.Function.Var
def typegroup(%{constraint: constraint}) do
Type.typegroup(constraint)
end
def compare(lhs, rhs = %Var{}) do
case Type.compare(lhs.constraint, rhs.constraint) do
comp when comp != :eq -> comp
:eq -> Type.compare(lhs.name, rhs.name)
end
end
def compare(%{constraint: constraint}, rhs) do
case Type.compare(constraint, rhs) do
:eq -> :lt
comp -> comp
end
end
import Type, only: :macros
import Type.Helpers
intersection do
def intersection(_, %Var{}) do
raise "can't intersect two var types"
end
def intersection(left = %Var{}, right) do
case Type.intersection(left.constraint, right) do
builtin(:none) -> builtin(:none)
type -> %{left | constraint: type}
end
end
end
subtype do
def subtype?(left, right = %Var{}) do
Type.subtype?(left.constraint, right.constraint)
end
def subtype?(left = %{}, right) do
Type.subtype?(left.constraint, right)
end
end
usable_as do
def usable_as(%Var{}, _right, _meta) do
raise "unreachable"
end
end
end | lib/type/function.var.ex | 0.928466 | 0.924756 | function.var.ex | starcoder |
defmodule Cassandra.Ecto.Migration do
@moduledoc """
Implements `Ecto.Adapter.Migration` behaviour.
## Defining Cassandra migrations
Your migration module should use `Cassandra.Ecto.Migration` instead of
`Ecto.Migration` to be able to use additional features.
Any table must have option `primary_key: false` because Cassandra doesn't
have `serial` type.
defmodule TestMigration do
use Cassandra.Ecto.Migration
def up do
create table(:test, primary_key: false) do
add :id, :uuid, primary_key: true
add :value, :integer
end
end
end
## Primary keys
There are two different methods to define primary keys.
1. With `:primary_key` column option
create table(:test, primary_key: false) do
add :id, :uuid, primary_key: true
add :id2, :uuid, primary_key: true
add :id3, :uuid, primary_key: true
end
In this case `id` column will be partition key and rest, `id2` and `id3`,
will be clustering columns.
2. With `:partition_key` and `:clustering_column` options
create table(:test, primary_key: false) do
add :id, :uuid, partition_key: true
          add :id2, :uuid, partition_key: true
add :id3, :uuid, clustering_column: true
end
In this case we have defined composite partition key and one clustering column.
More info about composite keys in [Using a composite partition key](https://docs.datastax.com/en/cql/3.3/cql/cql_reference/refCompositePk.html).
> NOTE: It is not possible to use both methods together. The rule of thumb is:
> if you don't use compound partition key just stay with `:primary_key`.
## Static columns
To define `static` column use column option `static: true`
create table(:test, primary_key: false) do
add :id, :uuid, primary_key: true
add :clustering_id, :uuid, primary_key: true
add :value, :integer, static: true
end
More info about static columns in [Sharing a static column](https://docs.datastax.com/en/cql/3.3/cql/cql_reference/refStaticCol.html).
## Setting table options
Use `:options` option to define additional settings for table:
create table(:test, primary_key: false, options: [
clustering_order_by: [value: :desc, value2: :asc],
id: "5a1c395e-b41f-11e5-9f22-ba0be0483c18", compact_storage: true,
comment: "Test", read_repair_chance: 1.0,
compression: [sstable_compression: "DeflateCompressor", chunk_length_kb: 64]]) do
add :id, :uuid, partition_key: true
add :id2, :uuid, partition_key: true
add :value, :integer, clustering_column: true
add :value2, :integer, clustering_column: true
end
For full list of properties please see
[Table properties](https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tabProp.html).
## Data type mapping
Migration type Cassandra type
-------------- --------------
:id int
:integer int
:datetime timestamp
:naive_datetime timestamp
:utc_datetime timestamp
:binary_id uuid
:uuid uuid
:binary blob
:string text
:counter counter
:map map<varchar, blob>
{:map, :integer} map<varchar, int>
      {:map, {:integer, :string}}    map<int, text>
{:array, :integer} list<int>
{:list, :integer} list<int>
{:set, :integer} set<int>
{:tuple, {:integer, :integer}} tuple<int, int>
{:frozen, :integer} frozen<int>
:udt_type udt_type
It is possible to nest types like so:
{:map, {:integer, {:frozen, {:map, {:integer, :integer}}}}}
## User defined types (UDT's)
It is possible to define Cassandra UDT's and use as column type in table
definitions.
defmodule PostsMigration do
use Cassandra.Ecto.Migration
def change do
create type(:comment) do
add :id, :uuid
add :text, :text
add :posted_at, :utc_datetime
end
create table(:posts, primary_key: false) do
add :id, :uuid, primary_key: true
add :title, :string
add :text, :text
add :comments, {:array, {:frozen, :comment}}
end
end
end
Later you can use it in your schema definition:
defmodule Comment do
use Schema
embedded_schema do
field :text, :string
field :posted_at, :utc_datetime
end
end
defmodule Post do
use Schema
alias Cassandra.Ecto.Spec.Support.Schemas.Comment
schema "posts" do
@primary_key {:id, :binary_id, autogenerate: true}
field :title, :string
field :text, :string
embeds_many :comments, Comment
end
end
More info about UDT's in [User-defined type](https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html).
## Custom indexes
You can define any custom index like so:
create index(:test, [:value], using: "org.apache.cassandra.index.sasi.SASIIndex",
options: [mode: :contains, case_sensitive: false,
analyzer_class: "org.apache.cassandra.index.sasi.analyzer.NonTokenizingAnalyzer"])
More info about custom indexes in [CREATE CUSTOM INDEX (SASI)](https://docs.datastax.com/en/cql/3.3/cql/cql_reference/refCreateSASIIndex.html).
"""
import __MODULE__.CQL, only: [to_cql: 1]
alias Ecto.Migration.Table
alias Cassandra.Ecto.Connection
defmacro __using__(_) do
quote do
use Ecto.Migration
import Cassandra.Ecto.Migration, only: [type: 1, materialized_view: 1, materialized_view: 2, function: 1, function: 3]
end
end
@doc """
Defines Cassandra UDT
### Example
create type(:comment) do
add :id, :uuid
add :text, :text
add :posted_at, :utc_datetime
end
"""
def type(name) do
struct(%Table{name: name, primary_key: false, options: [type: :type]})
end
@doc """
Defines Cassandra materialized view
### Example
create materialized_view(:cyclist_by_age,
as: (from c in "cyclist_mv", select: {c.age, c.birthday, c.name, c.country})),
primary_key: {:age, :cid}
"""
def materialized_view(name, options \\ []) do
options = Keyword.put(options, :type, :materialized_view)
prefix = Keyword.get(options, :prefix)
struct(%Table{name: name, primary_key: false, options: options, prefix: prefix})
end
@doc """
Defines Cassandra user defined function (UDF)
### Example
create function(:left, [column: :text, num: :int],
returns: :text, language: :javascript,
on_null_input: :returns_null,
as: "column.substring(0,num)")
"""
def function(name, vars \\ [], options \\ []) do
options = options
|> Keyword.put(:type, :function)
|> Keyword.put(:vars, vars)
|> Keyword.put_new(:language, :java)
|> Keyword.put_new(:on_null_input, :returns_null)
prefix = Keyword.get(options, :prefix)
struct(%Table{name: name, prefix: prefix, options: options})
end
@doc """
Defines Cassandra user defined aggregate (UDA)
### Example
create aggregate(:average, :int,
sfunc: function(:avgState),
stype: {:tuple, {:int, :bigint}},
finalfunc: function(:avgFinal),
initcond: {0, 0})
"""
def aggregate(name, var, options \\ []) do
options = options
|> Keyword.put(:type, :aggregate)
|> Keyword.put(:var, var)
prefix = Keyword.get(options, :prefix)
struct(%Table{name: name, prefix: prefix, options: options})
end
@doc """
See `Ecto.Adapter.Migration.execute_ddl/3`
"""
def execute_ddl(repo, command, opts) do
cql = to_cql(command)
{:ok, _} = Connection.query(repo, cql, [], opts)
end
end | lib/cassandra_ecto/migration.ex | 0.845672 | 0.528959 | migration.ex | starcoder |
defmodule EctoIPRange.IP6R do
@moduledoc """
Struct for PostgreSQL `:ip6r`.
## Usage
When used during a changeset cast the following values are accepted:
- `:inet.ip6_address()`: an IP6 tuple, e.g. `{8193, 3512, 34211, 0, 0, 35374, 880, 29492}` (single address only)
- `binary`
- `"2001:0db8:85a3:0000:0000:8a2e:0370:7334"`: single address
- `"2001:0db8:85a3:0000:0000:8a2e:0370:0000/112"`: CIDR notation for a range from
`2001:0db8:85a3:0000:0000:8a2e:0370:0000` to `2001:0db8:85a3:0000:0000:8a2e:0370:ffff`
- `"2001:0db8:85a3:0000:0000:8a2e:0370:7334-2001:0db8:85a3:0000:0000:8a2e:0370:7335"`: arbitrary range
- `EctoIPRange.IP4.t()`: a pre-casted struct
IP4 addresses (binary and tuple) will be converted to IP6 format when casted.
## Fields
* `range`
* `first_ip`
* `last_ip`
"""
use Ecto.Type
alias EctoIPRange.Util.CIDR
alias EctoIPRange.Util.Inet
@type t :: %__MODULE__{
range: binary,
first_ip: :inet.ip6_address(),
last_ip: :inet.ip6_address()
}
defstruct [:range, :first_ip, :last_ip]
@impl Ecto.Type
def type, do: :ip6r
@impl Ecto.Type
def cast({_, _, _, _} = ip4_address) do
case Inet.ipv4_to_ipv6(ip4_address) do
{:ok, ip6_address} -> cast(ip6_address)
_ -> :error
end
end
def cast({_, _, _, _, _, _, _, _} = ip6_address) do
case Inet.ntoa(ip6_address) do
address when is_binary(address) ->
{:ok,
%__MODULE__{
range: address <> "/128",
first_ip: ip6_address,
last_ip: ip6_address
}}
_ ->
:error
end
end
def cast(address) when is_binary(address) do
cond do
String.contains?(address, "-") -> cast_range(address)
String.contains?(address, "/") -> cast_cidr(address)
true -> cast_binary(address)
end
end
def cast(%__MODULE__{} = address), do: {:ok, address}
def cast(_), do: :error
@impl Ecto.Type
def load(%__MODULE__{} = address), do: {:ok, address}
def load(_), do: :error
@impl Ecto.Type
def dump(%__MODULE__{} = address), do: {:ok, address}
def dump(_), do: :error
defp cast_binary(address) do
case Inet.parse_ipv6_binary(address) do
{:ok, ip6_address} ->
{:ok,
%__MODULE__{
range: address <> "/128",
first_ip: ip6_address,
last_ip: ip6_address
}}
_ ->
:error
end
end
defp cast_cidr(cidr) do
with [address, maskstring] <- String.split(cidr, "/", parts: 2),
{maskbits, ""} when maskbits in 0..128 <- Integer.parse(maskstring),
{first_ip6_address, last_ip6_address} <- CIDR.parse_ipv6(address, maskbits) do
{:ok,
%__MODULE__{
range: cidr,
first_ip: first_ip6_address,
last_ip: last_ip6_address
}}
else
_ -> :error
end
end
defp cast_range(range) do
with [first_ip, last_ip] <- String.split(range, "-", parts: 2),
{:ok, first_ip6_address} <- Inet.parse_ipv6_binary(first_ip),
{:ok, last_ip6_address} <- Inet.parse_ipv6_binary(last_ip) do
{:ok,
%__MODULE__{
range: range,
first_ip: first_ip6_address,
last_ip: last_ip6_address
}}
else
_ -> :error
end
end
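# Illustrative sketch (added; not part of the original module): the three
# accepted binary notations from the moduledoc.
def example_casts do
  {:ok, single} = cast("2001:db8::1")
  {:ok, cidr} = cast("2001:db8::/112")
  {:ok, range} = cast("2001:db8::1-2001:db8::ff")
  {single.range, cidr.first_ip, range.last_ip}
end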
end | lib/ecto_ip_range/ip6r.ex | 0.879199 | 0.539347 | ip6r.ex | starcoder |
defmodule Zigler.Parser.Nif do
@moduledoc """
This datastructure represents structured information about a single nif
inside of a `Zigler.sigil_Z/2` block. This is used to generate the
`exported_nifs` variable which is an array of `ErlNifFunc` structs. The
following keys are implemented:
- name: (`t:atom/0`) the function name to be bound into the module
- arity: (`t:arity/0`) the arity of the erlang function (the zig
function may have a different arity).
- doc: (`t:iodata/0`) zig docstrings which should be turned into elixir docs
- args: (`t:String.t/0`) a list of zig types which are the arguments for
the function
- retval: (`t:String.t/0`) the type of the return value
- opts: (`t:keyword`) list of nif options.
- long: true -- if the nif should run in a separate OS thread.
- dirty: :cpu -- if the nif should run in a dirty cpu scheduler.
- dirty: :io -- if the nif should run in a dirty io scheduler.
"""
alias Zigler.Code.LongRunning
alias Zigler.Parser.Resource
@float_types ~w(f16 f32 f64)
@int_types ~w(u16 i32 u32 i64 u64 c_int c_uint c_long c_ulong isize usize)
@bool ["bool"]
@char ["u8"]
@beam_args ~w(beam.term beam.atom beam.pid)
@enif_args ~w(e.ErlNifTerm e.ErlNifPid)
@scalar_types @float_types ++ @int_types ++ @bool ++ @char ++ @beam_args ++ @enif_args
@void ["void"]
@env ~w(?*e.ErlNifEnv beam.env)
@array_types Enum.flat_map(@scalar_types, &["[]#{&1}", "[*c]#{&1}", "[_]#{&1}"])
@valid_args @scalar_types ++ @array_types ++ @env
@valid_retvals @scalar_types ++ @array_types ++ @void
@enforce_keys [:name, :arity]
defstruct @enforce_keys ++ [
doc: nil,
args: [],
retval: nil,
opts: [],
test: nil # only to be used for tests. This is the string name
# of the test which is going to be bound in.
]
@type option ::
{:long, boolean} |
{:dirty, :cpu | :io}
@type t :: %__MODULE__{
name: atom,
arity: arity,
doc: iodata | nil,
args: [String.t],
retval: String.t,
opts: [option],
test: atom
}
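# Illustrative sketch (added; not part of the original module): the struct
# as it might look for a two-argument integer nif; the field values are
# hypothetical.
def example do
  %__MODULE__{name: :add, arity: 2, args: ["i64", "i64"], retval: "i64"}
end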
@beam_envs ["beam.env", "?*e.ErlNifEnv"]
# validate_arity/3: checks to make sure the arity of nif declaration matches the function
@spec validate_arity([String.t], Parser.t, non_neg_integer)
:: :ok | no_return
def validate_arity([env | rest], context, line) when env in @beam_envs do
validate_arity(rest, context, line)
end
def validate_arity(rest, context = %{local: %{arity: arity}}, line) when length(rest) != arity do
raise SyntaxError,
file: context.file,
line: line + context.zig_block_line - 1,
description: "nif declaration arity (#{arity}) doesn't match the expected function arity #{length(rest)}"
end
def validate_arity(_, _, _), do: :ok
# validate_args/3 : raises if an invalid argument type is sent to to the function
@spec validate_args([String.t], Parser.t, non_neg_integer)
:: :ok | no_return
def validate_args([], _context, _line), do: :ok
def validate_args([args | rest], context, line) when args in @valid_args do
validate_args(rest, context, line)
end
def validate_args([invalid_type | _], context, line) do
raise SyntaxError,
file: context.file,
line: line + context.zig_block_line,
description: "nif function #{context.local.name} demands an invalid argument type #{invalid_type}"
end
def validate_args(_, _, _), do: :ok
@spec validate_retval([String.t], Parser.t, non_neg_integer)
:: :ok | no_return
def validate_retval([retval | _], _context, _line) when retval in @valid_retvals, do: :ok
def validate_retval([retval | _], context, line) do
raise SyntaxError,
file: context.file,
line: line + context.zig_block_line,
description: "nif function #{context.local.name} returns an invalid type #{retval}"
end
def register_function_header([retval | args], context) do
final_nif = %{context.local | retval: retval, args: Enum.reverse(args)}
# long nifs require a resource
resource = if context.local.opts[:long] do
[%Resource{
name: LongRunning.cache_ptr(context.local.name),
cleanup: LongRunning.cache_cleanup(context.local.name)}]
else
[]
end
%{context | global: resource ++ [final_nif | context.global]}
end
end | lib/zigler/parser/nif.ex | 0.716516 | 0.539408 | nif.ex | starcoder |
defmodule SimpleBayes.Tokenizer do
@doc """
Converts a string into a list of words.
## Examples
iex> SimpleBayes.Tokenizer.tokenize("foobar")
["foobar"]
iex> SimpleBayes.Tokenizer.tokenize("foo bar")
["foo", "bar"]
iex> SimpleBayes.Tokenizer.tokenize(",foo bar .")
["foo", "bar"]
iex> SimpleBayes.Tokenizer.tokenize("Foo bAr")
["foo", "bar"]
iex> SimpleBayes.Tokenizer.tokenize("foo, bar")
["foo", "bar"]
iex> SimpleBayes.Tokenizer.tokenize("foo bar.")
["foo", "bar"]
iex> SimpleBayes.Tokenizer.tokenize(~s(fo-o's ba_r"ed.))
~w(fo-o's ba_r"ed)
"""
def tokenize(string) do
string
|> String.downcase()
|> String.replace(~r/[^0-9a-zA-Z _\-'"]+/, "")
|> String.split()
end
@doc """
Filters out a list based on another list.
## Examples
iex> SimpleBayes.Tokenizer.filter_out(["foo", "bar", "baz"], ["baz"])
["foo", "bar"]
iex> SimpleBayes.Tokenizer.filter_out(["foo", "bar", "baz"], ["baz", "bazz"])
["foo", "bar"]
iex> SimpleBayes.Tokenizer.filter_out(["foo", "bar", "baz", "baz"], ["baz"])
["foo", "bar"]
"""
def filter_out(list, filter_list) do
Enum.reject list, &(&1 in filter_list)
end
@doc """
Converts a list with a value into a map, and merges the maps with accumulated values.
## Examples
iex> SimpleBayes.Tokenizer.accumulate(%{}, [:cat, :dog], 1)
%{cat: 1, dog: 1}
iex> SimpleBayes.Tokenizer.accumulate(%{cat: 1, fish: 1}, [:cat, :dog], 2)
%{cat: 3, fish: 1, dog: 2}
iex> SimpleBayes.Tokenizer.accumulate(%{cat: 1, fish: 1}, [:cat, :cat, :dog], 1)
%{cat: 3, fish: 1, dog: 1}
"""
def accumulate(map, list, acc_size) do
list
|> map_values(acc_size)
|> Map.merge(map, fn (_k, v1, v2) -> v1 + v2 end)
end
@doc """
Converts a list with a value into a map.
## Examples
iex> SimpleBayes.Tokenizer.map_values([:cat, :dog], 1)
%{cat: 1, dog: 1}
iex> SimpleBayes.Tokenizer.map_values([:cat, :cat, :dog], 1)
%{cat: 2, dog: 1}
"""
def map_values(list, value) do
Enum.reduce(list, %{}, fn (k, acc) ->
v = if acc[k], do: value + acc[k], else: value
Map.put(acc, k, v)
end)
end
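# Illustrative sketch (added; not part of the original module): the three
# helpers composed into a tiny accumulation step over a sample sentence.
def example do
  tokens =
    "Cats chase the dog, the dog sleeps."
    |> tokenize()
    |> filter_out(["the"])
  accumulate(%{}, tokens, 1)
  # => %{"cats" => 1, "chase" => 1, "dog" => 2, "sleeps" => 1}
end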
end | lib/simple_bayes/tokenizer.ex | 0.83752 | 0.459804 | tokenizer.ex | starcoder |
defmodule Maru.Params.Types.DateTime do
@moduledoc """
  Builtin Type: DateTime
## Parser Arguments
* `:format` - how to parse a datetime value
* `:iso8601` - parse by `DateTime.from_iso8601/2` or `NaiveDateTime.from_iso8601/2`
* `{:unix, unit}` - parse by `DateTime.from_unix/2`
      * `:unix` - parse by `DateTime.from_unix/1`
* `:unit` - unit for unix datetime
* `:second` (default)
* `:millisecond`
* `:microsecond`
* `:nanosecond`
* `:naive` - return `DateTime` or `NaiveDateTime` struct
* `false` (default) - return `DateTime` struct
* `true` - return `NaiveDateTime` struct
* `:truncate` - unit to truncate the output
* `:microsecond`
* `:millisecond`
* `:second`
## Examples:
requires :created, DateTime, format: :iso8601, naive: true
optional :updated, DateTime, format: {:unix, :second}, truncate: :second
"""
use Maru.Params.Type
def parser_arguments, do: [:format, :naive, :truncate, :time_zone]
def parse(input, args) do
format = Map.get(args, :format)
naive = Map.get(args, :naive, false)
unit = Map.get(args, :truncate)
format
|> case do
:iso8601 when naive -> NaiveDateTime.from_iso8601(input)
:iso8601 -> DateTime.from_iso8601(input)
:unix -> input |> DateTime.from_unix()
{:unix, unix_unit} -> DateTime.from_unix(input, unix_unit)
_ when is_struct(input, DateTime) -> {:ok, input}
_ when is_struct(input, NaiveDateTime) -> {:ok, input}
_ -> {:error, "unsupported format"}
end
|> case do
{:ok, %DateTime{} = datetime, _} when naive -> {:ok, DateTime.to_naive(datetime)}
{:ok, %DateTime{} = datetime, _} -> {:ok, datetime}
{:ok, %DateTime{} = datetime} when naive -> {:ok, DateTime.to_naive(datetime)}
{:ok, %DateTime{} = datetime} -> {:ok, datetime}
{:ok, %NaiveDateTime{}} when not naive -> {:error, "unknown naive timezone"}
{:ok, %NaiveDateTime{} = datetime} -> {:ok, datetime}
{:error, reason} -> {:error, reason}
end
|> case do
{:ok, datetime} when is_nil(unit) -> {:ok, datetime}
{:ok, %DateTime{} = t} -> {:ok, DateTime.truncate(t, unit)}
{:ok, %NaiveDateTime{} = t} -> {:ok, NaiveDateTime.truncate(t, unit)}
{:error, reason} -> {:error, :parse, "#{inspect(reason)}: #{inspect(format)}"}
end
end
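# Illustrative sketch (added; not part of the original module): ISO-8601
# and unix inputs through parse/2.
def example do
  {:ok, dt} = parse("2015-01-23T23:50:07Z", %{format: :iso8601})
  {:ok, ndt} = parse("2015-01-23T23:50:07", %{format: :iso8601, naive: true})
  {:ok, unix} = parse(1_464_096_368, %{format: :unix})
  {dt, ndt, unix}
end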
end | lib/maru/params/types/datetime.ex | 0.902313 | 0.67848 | datetime.ex | starcoder |
defmodule Expublish do
@moduledoc """
Main module putting everything together:
```
def major do
Tests.run!()
:major
|> Project.get_version!()
|> Semver.increase!()
|> Project.update_version!()
|> Changelog.write_entry!()
|> Git.commit_and_tag()
|> Git.push()
|> Hex.publish()
end
```
"""
alias Expublish.Changelog
alias Expublish.Git
alias Expublish.Hex
alias Expublish.Options
alias Expublish.Project
alias Expublish.Semver
alias Expublish.Tests
require Logger
@doc """
Publish major version of current project.
"""
@spec major(Options.t()) :: :ok
def major(options \\ %Options{}), do: run(:major, options)
@doc """
Publish minor version of current project.
"""
@spec minor(Options.t()) :: :ok
def minor(options \\ %Options{}), do: run(:minor, options)
@doc """
Publish patch version of current project.
"""
@spec patch(Options.t()) :: :ok
def patch(options \\ %Options{}), do: run(:patch, options)
@doc """
Publish alpha version of current project.
"""
@spec alpha(Options.t()) :: :ok
def alpha(options \\ %Options{}), do: run(:alpha, options)
@doc """
Publish beta version of current project.
"""
@spec beta(Options.t()) :: :ok
def beta(options \\ %Options{}), do: run(:beta, options)
@doc """
Publish release-candidate version of current project.
"""
@spec rc(Options.t()) :: :ok
def rc(options \\ %Options{}), do: run(:rc, options)
@doc """
  Removes the pre-release suffix and publishes a stable version of the current project.
"""
@spec stable(Options.t()) :: :ok
def stable(options \\ %Options{}), do: run(:stable, options)
@type level() :: :major | :minor | :patch | :rc | :beta | :alpha | :stable
@spec run(level(), Options.t()) :: :ok
defp run(level, options) do
with :ok <- Git.validate(options),
:ok <- Options.validate(options, level),
:ok <- Changelog.validate(options),
:ok <- Tests.validate(options, level) do
Project.get_version!(options)
|> Semver.increase!(level, options)
|> Project.update_version!(options)
|> Changelog.write_entry!(options)
|> Git.commit_and_tag(options)
|> Git.push(options)
|> Changelog.remove_release_file!(options)
|> Hex.publish(options)
|> finish(options)
:ok
else
error ->
Logger.error(error)
exit({:shutdown, 1})
end
end
defp finish(version, %Options{dry_run: true}) do
Logger.info("Finished dry run for new package version: #{version}.")
end
defp finish(version, _options) do
Logger.info("Finished release for new package version: #{version}.")
end
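# Illustrative sketch (added; not part of the original module): a dry run
# exercises the whole release pipeline without pushing or publishing.
def example_dry_run do
  patch(%Options{dry_run: true})
end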
end | lib/expublish.ex | 0.697609 | 0.52275 | expublish.ex | starcoder |
defmodule Benchmark do
@moduledoc """
Benchmarks the CPU and Memory consumption for struct operations
with type checking comparing to native ones.
"""
alias Benchmark.{Inputs, Samples, Tweet}
def run do
count = 10_000
puts_title("Generate #{count} inputs, may take a while.")
list = Enum.take(Stream.zip(Samples.tweet_map(), Samples.user_map()), count)
{:ok, maps_pid} = Inputs.start_link(list)
tweet_user_list =
Enum.map(list, fn {tweet_map, user_map} ->
{struct(Tweet, Map.put(tweet_map, :user, nil)), struct(Tweet.User, user_map)}
end)
{:ok, tweet_user_pid} = Inputs.start_link(tweet_user_list)
for {title, fun} <- [
{"Construction of a struct", fn -> bench_construction(maps_pid) end},
{"A struct's field modification", fn -> bench_put(tweet_user_pid) end}
] do
puts_title(title)
fun.()
end
end
def puts_title(title) do
IO.puts("")
IO.puts(title)
IO.puts("=========================================")
end
defp bench_construction(pid) do
Benchee.run(%{
"__MODULE__.new!(arg)" => fn ->
{tweet_map, user_map} = Inputs.next_input(pid)
Tweet.new!(Map.merge(tweet_map, %{user: Tweet.User.new!(user_map)}))
end,
"struct!(__MODULE__, arg)" => fn ->
{tweet_map, user_map} = Inputs.next_input(pid)
struct!(Tweet, Map.merge(tweet_map, %{user: struct!(Tweet.User, user_map)}))
end
})
end
defp bench_put(pid) do
Benchee.run(%{
"%{tweet | user: arg} |> __MODULE__.ensure_type!()" => fn ->
{tweet, user} = Inputs.next_input(pid)
%{tweet | user: user} |> Tweet.ensure_type!()
end,
"struct!(tweet, user: arg)" => fn ->
{tweet, user} = Inputs.next_input(pid)
struct!(tweet, user: user)
end
})
end
end
defmodule Benchmark.Inputs do
use GenServer
def start_link(inputs) do
GenServer.start_link(__MODULE__, inputs)
end
def next_input(pid) do
GenServer.call(pid, :next_input)
end
@impl true
def init(list) when is_list(list) do
{:ok, {0, Enum.count(list), list}}
end
@impl true
def handle_call(:next_input, _caller, {idx, count, inputs}) do
new_idx = idx + 1
state = {
if(new_idx == count, do: 0, else: new_idx),
count,
inputs
}
{:reply, Enum.at(inputs, idx), state}
end
end | benchmark/lib/benchmark.ex | 0.784732 | 0.464416 | benchmark.ex | starcoder |
defmodule Slack.Channel do
@moduledoc """
A publicly listed communication channel in a team
"""
@base "channels"
use Slack.Request
@doc """
Archive a channel.
https://api.slack.com/methods/channels.archive
## Examples
Slack.client(token)
|> Slack.Channel.archive(channel: "C1234567890")
"""
@spec archive(Slack.Client.t, Keyword.t) :: Slack.response
defpost :archive
@doc """
Create a channel.
https://api.slack.com/methods/channels.create
## Examples
Slack.client(token)
|> Slack.Channel.create(name: "mychannel")
"""
@spec create(Slack.Client.t, Keyword.t) :: Slack.response
defpost :create
@doc """
Retrieve channel history.
https://api.slack.com/methods/channels.history
## Examples
Slack.client(token)
|> Slack.Channel.history(channel: "C1234567890")
"""
@spec history(Slack.Client.t, Keyword.t) :: Slack.response
defget :history
@doc """
Get channel info.
https://api.slack.com/methods/channels.info
## Examples
Slack.client(token)
|> Slack.Channel.info(channel: "C1234567890")
"""
@spec info(Slack.Client.t, Keyword.t) :: Slack.response
defget :info
@doc """
Invite a user to a channel.
https://api.slack.com/methods/channels.invite
## Examples
Slack.client(token)
|> Slack.Channel.invite(channel: "C1234567890", user: "U1234567890")
"""
@spec invite(Slack.Client.t, Keyword.t) :: Slack.response
defpost :invite
@doc """
Join a channel.
https://api.slack.com/methods/channels.join
## Examples
Slack.client(token)
|> Slack.Channel.join(channel: "C1234567890")
"""
@spec join(Slack.Client.t, Keyword.t) :: Slack.response
defpost :join
@doc """
Kick a user from a channel.
https://api.slack.com/methods/channels.kick
## Examples
Slack.client(token)
|> Slack.Channel.kick(channel: "C1234567890", user: "U1234567890")
"""
@spec kick(Slack.Client.t, Keyword.t) :: Slack.response
defpost :kick
@doc """
Leave a channel.
https://api.slack.com/methods/channels.leave
## Examples
Slack.client(token)
|> Slack.Channel.leave(channel: "C1234567890")
"""
@spec leave(Slack.Client.t, Keyword.t) :: Slack.response
defpost :leave
@doc """
List all of the channels in a team.
https://api.slack.com/methods/channels.list
## Examples
Slack.client(token)
|> Slack.Channel.list
"""
@spec list(Slack.Client.t, Keyword.t) :: Slack.response
defget :list
@doc """
Move the read cursor in a channel.
https://api.slack.com/methods/channels.mark
## Examples
Slack.client(token)
|> Slack.Channel.mark(channel: "C1234567890", ts: "1234567890.123456")
"""
@spec mark(Slack.Client.t, Keyword.t) :: Slack.response
defpost :mark
@doc """
Rename a channel.
https://api.slack.com/methods/channels.rename
## Examples
Slack.client(token)
|> Slack.Channel.rename(channel: "C1234567890", name: "newname")
"""
@spec rename(Slack.Client.t, Keyword.t) :: Slack.response
defpost :rename
@doc """
Set the purpose of a channel.
https://api.slack.com/methods/channels.setPurpose
## Examples
Slack.client(token)
|> Slack.Channel.setPurpose(channel: "C1234567890", purpose: "Purpose")
"""
@spec setPurpose(Slack.Client.t, Keyword.t) :: Slack.response
defpost :setPurpose
@doc """
Set the topic of a channel.
https://api.slack.com/methods/channels.setTopic
## Examples
Slack.client(token)
|> Slack.Channel.setTopic(channel: "C1234567890", topic: "Topic")
"""
@spec setTopic(Slack.Client.t, Keyword.t) :: Slack.response
defpost :setTopic
@doc """
Unarchive a channel.
https://api.slack.com/methods/channels.unarchive
## Examples
Slack.client(token)
|> Slack.Channel.unarchive(channel: "C1234567890")
"""
@spec unarchive(Slack.Client.t, Keyword.t) :: Slack.response
defpost :unarchive
end | lib/slack/channel.ex | 0.802323 | 0.493775 | channel.ex | starcoder |
defmodule ExWikipedia do
@moduledoc """
`ExWikipedia` is an Elixir client for the [Wikipedia API](https://en.wikipedia.org/w/api.php).
"""
alias ExWikipedia.Page
@callback fetch(input :: integer(), opts :: keyword()) :: {:ok, map()} | {:error, any()}
@doc """
Accepts an integer (or a binary representation) and returns a struct with key information extracted.
## Examples
iex> ExWikipedia.page(54173, [])
{:ok,
%ExWikipedia.Page{
categories: ["Webarchive template wayback links",
"All articles with dead external links",
"Films whose writer won the Best Original Screenplay BAFTA Award",
"Independent Spirit Award for Best Film winners", ...],
content: "Pulp Fiction is a 1994 American black comedy crime film" <> ...,
external_links: ["https://www.bbfc.co.uk/releases/pulp-fiction-film-0",
"https://boxofficemojo.com/movies/?id=pulpfiction.htm",
...],
images: ["https://upload.wikimedia.org/wikipedia/en/3/3b/Pulp_Fiction_%281994%29_poster.jpg",
"https://upload.wikimedia.org/wikipedia/en/thumb/2/2e/Willis_in_Pulp_Fiction.jpg/220px-Willis_in_Pulp_Fiction.jpg",
...],
is_redirect?: false,
page_id: 54173,
revision_id: 1069204423,
summary: "Pulp Fiction is a 1994 American black comedy crime film written" <> ...,
title: "Pulp Fiction",
url: "https://en.wikipedia.org/wiki/Pulp_Fiction"
}}
iex> ExWikipedia.page(1)
{:error, "There is no page with ID 1."}
iex> ExWikipedia.page(%{})
{:error, "The Wikipedia ID supplied is not valid."}
Redirects are allowed by default. Compare the following two results.
iex> ExWikipedia.page(10971271)
{:ok,
%ExWikipedia.Page{
# ...
is_redirect?: true,
page_id: 10971271,
title: "<NAME>",
url: "https://en.wikipedia.org/wiki/Irene_Angelico"
}}
iex> ExWikipedia.page(10971271, follow_redirect: false)
{:error,
"Content is from a redirected page, but `follow_redirect` is set to false"}
## Options
See `ExWikipedia.Page` for full implementation details.
"""
defdelegate page(wikipedia_id, opts \\ []), to: Page, as: :fetch
end | lib/ex_wikipedia.ex | 0.841696 | 0.453201 | ex_wikipedia.ex | starcoder |
defmodule Cog.Commands.Tee do
use Cog.Command.GenCommand.Base,
bundle: Cog.Util.Misc.embedded_bundle
require Logger
alias Cog.Command.Service.MemoryClient
alias Cog.Command.Service.DataStore
@data_namespace [ "commands", "tee" ]
@description "Save and pass through pipeline data"
@long_description """
The tee command passes the output of a Cog pipeline through to the next command in the pipeline while also saving it using the provided name. The saved output can be retreived later using the cat command.
If the name of a previously saved object is reused, tee will overwrite the existing data. There is not currently a way to delete saved content from Cog, but you can simulate this behavior by sending a replacement object to tee again with the name of the object you wish to delete.
Think carefully about the type of data that you store using tee since it will be retrievable by default by any Cog user. Careful use of rules and naming conventions could be used to limit access, though keep in mind that a simple typo in naming could cause unexpected data to be accessible. For example, the rules below would require you to have the "site:prod-data" permission in order to save or retrieve objects whose names begin with "prod-".
operable:rule create "when command is operable:tee with arg[0] == /^prod-.*/ must have site:prod-data"
operable:rule create "when command is operable:cat with arg[0] == /^prod-.*/ must have site:prod-data"
"""
@arguments "<name>"
@examples """
seed '{ "thing": "stuff" }' | tee foo
> '{ "thing": "stuff" }'
cat foo
> '{ "thing": "stuff" }'
"""
rule "when command is #{Cog.Util.Misc.embedded_bundle}:tee allow"
def handle_message(%{args: [key]} = req, state) do
root = req.services_root
token = req.service_token
step = req.invocation_step
value = req.cog_env
memory_key = req.invocation_id
MemoryClient.accum(root, token, memory_key, value)
case step do
step when step in ["first", nil] ->
{:reply, req.reply_to, nil, state}
"last"->
data =
MemoryClient.fetch(root, token, memory_key)
|> Enum.reject(fn(value) -> value == %{} end)
|> maybe_unwrap
MemoryClient.delete(root, token, memory_key)
case DataStore.replace(@data_namespace, key, data) do
{:error, reason} ->
          {:error, req.reply_to, "Unable to store pipeline content: #{inspect reason}", state}
{:ok, _} ->
{:reply, req.reply_to, data, state}
end
end
end
def handle_message(%{args: []} = req, state) do
{:error, req.reply_to, "#{Cog.Util.Misc.embedded_bundle}:tee requires a name to be specified for the pipeline content", state}
end
defp maybe_unwrap([data]), do: data
defp maybe_unwrap(data), do: data
end | lib/cog/commands/tee.ex | 0.748812 | 0.445107 | tee.ex | starcoder |
defmodule AOC.Day4 do
@moduledoc """
https://adventofcode.com/2018/day/4
"""
@doc """
iex> AOC.Day4.part_1([
...> %{
...> action: ["Guard", "#10", "begins", "shift"],
...> time: ~N[1518-11-01 00:00:00.000]
...> },
...> %{action: ["falls", "asleep"], time: ~N[1518-11-01 00:05:00.000]},
...> %{action: ["wakes", "up"], time: ~N[1518-11-01 00:25:00.000]},
...> %{action: ["falls", "asleep"], time: ~N[1518-11-01 00:30:00.000]},
...> %{action: ["wakes", "up"], time: ~N[1518-11-01 00:55:00.000]},
...> %{
...> action: ["Guard", "#99", "begins", "shift"],
...> time: ~N[1518-11-01 23:58:00.000]
...> },
...> %{action: ["falls", "asleep"], time: ~N[1518-11-02 00:40:00.000]},
...> %{action: ["wakes", "up"], time: ~N[1518-11-02 00:50:00.000]},
...> %{
...> action: ["Guard", "#10", "begins", "shift"],
...> time: ~N[1518-11-03 00:05:00.000]
...> },
...> %{action: ["falls", "asleep"], time: ~N[1518-11-03 00:24:00.000]},
...> %{action: ["wakes", "up"], time: ~N[1518-11-03 00:29:00.000]},
...> %{
...> action: ["Guard", "#99", "begins", "shift"],
...> time: ~N[1518-11-04 00:02:00.000]
...> },
...> %{action: ["falls", "asleep"], time: ~N[1518-11-04 00:36:00.000]},
...> %{action: ["wakes", "up"], time: ~N[1518-11-04 00:46:00.000]},
...> %{
...> action: ["Guard", "#99", "begins", "shift"],
...> time: ~N[1518-11-05 00:03:00.000]
...> },
...> %{action: ["falls", "asleep"], time: ~N[1518-11-05 00:45:00.000]},
...> %{action: ["wakes", "up"], time: ~N[1518-11-05 00:55:00.000]}
...>])
240
"""
def part_1(input \\ input()) do
input
|> Enum.reduce(%{}, fn curr, acc ->
%{action: action, time: time} = curr
{{_, _, _}, {_, minute, _}} = NaiveDateTime.to_erl(time)
case action do
["Guard", id | _] ->
Map.put(acc, :current, id)
["falls" | _] ->
Map.update(acc, acc.current, [%Range{first: minute}], fn all ->
[%Range{first: minute} | all]
end)
["wakes" | _] ->
Map.update!(acc, acc.current, fn [r | rest] -> [%{r | last: minute - 1} | rest] end)
end
end)
|> Map.drop([:current])
|> Enum.reduce(%{most_naps: []}, fn {id, naps}, acc ->
naps =
naps
|> Enum.map(&Enum.to_list/1)
|> List.flatten()
if length(naps) > length(acc.most_naps) do
"#" <> n = id
%{id: String.to_integer(n), most_naps: naps}
else
acc
end
end)
|> (fn %{id: id, most_naps: most_naps} ->
most_naps
|> Enum.group_by(& &1)
|> Map.values()
|> Enum.max_by(&Kernel.length/1)
|> List.first()
|> Kernel.*(id)
end).()
end
defp input do
"txts/day_4.txt"
|> File.read!()
|> String.split("\n")
|> Task.async_stream(fn string ->
[date, time | action] = String.split(string, ~r{[\[\]\s]}, trim: true)
%{
action: action,
time: NaiveDateTime.from_iso8601!(date <> "T" <> time <> ":00.000Z")
}
end)
|> Enum.map(&Kernel.elem(&1, 1))
|> Enum.sort(fn e1, e2 ->
e1.time
|> NaiveDateTime.compare(e2.time)
|> case do
:lt -> true
_ -> false
end
end)
end
end | lib/day_4.ex | 0.551815 | 0.52074 | day_4.ex | starcoder |
defmodule Exi.ConnectServer do
@moduledoc """
  Prepares the node to accept `Node.connect()` when Nerves boots.
  ## Usage
  Add the following entry to application.ex:
  ```
  {Exi.ConnectServer, [node_name, cookie]}
  ```
  - node_name: the name of this node
  - cookie: the cookie (shared secret)
  ## Example
```
{Exi.ConnectServer, ["my_node_name", "comecomeeverybody"]}
```
"""
use GenServer
require Logger
@interval_init_ms 1_000
def start_link(node_option \\ []) do
# to init/1
GenServer.start_link(__MODULE__, node_option, name: __MODULE__)
end
def init(node_option) do
set_interval(:init, @interval_init_ms)
{:ok, node_option}
end
def set_interval(msg, ms) do
# to handle_info/2
Process.send_after(self(), msg, ms)
end
def handle_info(:init, node_option) do
init_node(eth0_ready?(), node_option)
{:noreply, node_option}
end
defp init_node(true, [node_name, cookie]) do
node_host = get_ipaddr_eth0_static()
System.cmd("epmd", ["-daemon"])
Node.start(:"#{node_name}@#{node_host}")
Node.set_cookie(:"#{cookie}")
Logger.info("=== Node.start -> #{node_name}@#{node_host} ===")
Logger.info("=== Node.set_cookie -> #{cookie} ===")
case [node_start?(), node_set_cookie?()] do
[true, true] ->
Logger.info("=== init_node -> success! Node.start & Node.set ===")
[_, _] ->
Logger.info(
"=== init_node -> false, node_start(#{inspect(node_start?())}), node_set_cookie(#{inspect(node_set_cookie?())}) ==="
)
set_interval(:init, @interval_init_ms)
end
  end

  # Retry until eth0 has an address; without this clause a not-yet-configured
  # interface would raise a FunctionClauseError.
  defp init_node(false, _node_option) do
    set_interval(:init, @interval_init_ms)
  end
def node_start?() do
case Node.self() do
:nonode@nohost -> false
_ -> true
end
end
def node_set_cookie?() do
case Node.get_cookie() do
:nocookie -> false
_ -> true
end
end
def eth0_ready?() do
case get_ipaddr_eth0_static() do
nil -> false
_ -> true
end
end
def get_ipaddr_eth0_static() do
case VintageNet.get_by_prefix(["interface", "eth0", "config"]) do
[] ->
nil
[tuple_int_eth0_config] ->
tuple_int_eth0_config
|> (fn {_, list_settings} -> list_settings end).()
|> Map.get(:ipv4)
|> Map.get(:address)
end
end
end | dio/exibee/lib/exi/exi_connect_server.ex | 0.706494 | 0.687964 | exi_connect_server.ex | starcoder |
defmodule Xalsa do
@moduledoc """
Elixir ALSA connector. The Xalsa module implements the API.
The client may send any number of frames as a binary array of 32 bit floats and may
optionally receive back a notification in form of a :ready4more message 5-10 ms
before all frames are consumed by the ALSA driver.
The :xalsa application needs to be started before calling the API. Also check the
config/config.exs file on how to configure PCM devices for xalsa.
"""
@typedoc """
The sample rate is set in the application environment
(config file for the application).
"""
  @type rates() :: 44_100 | 48_000 | 96_000 | 192_000
@doc """
Send frames in a binary array of frame:32/float-native.
If the `notify` flag is true a :ready4more message will be sent to the
process in the `from` argument when the process frames are due to be consumed
  within 5-10 ms. This is so that the process may synthesize/produce more frames.
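
  ## Example

  A minimal sketch (assumes the `:xalsa` application is configured and
  channel 1 exists; the sample values are illustrative):

      frames = Xalsa.float_list_to_binary([0.0, 0.25, 0.5])
      :ok = Xalsa.send_frames(frames, 1, true)
      Xalsa.wait_ready4more()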
"""
@spec send_frames(
frames :: binary(),
channel :: pos_integer(),
notify :: boolean(),
from :: pid()
) :: :ok
def send_frames(frames, channel, notify \\ false, from \\ self()) do
notify = boolean_to_integer(notify)
{procid, localchannel} = :xalsa_manager.get_id(channel)
send(procid, {:frames, {from, localchannel, frames, notify}})
:ok
end
@doc """
The client will wait until :ready4more notification is sent from `xalsa` server.
"""
@spec wait_ready4more() :: :ok
def wait_ready4more() do
receive do
:ready4more -> :ok
end
end
@doc """
Flush any :ready4more messages from the `xalsa` server in input queue.
Returns true if there were any.
"""
@spec flush_ready4more() :: boolean()
def flush_ready4more(), do: flush_ready4more(false)
defp flush_ready4more(received) do
receive do
:ready4more -> flush_ready4more(true)
after
0 -> received
end
end
@doc """
Return the number of frames that the ALSA driver consumes per callback cycle.
"""
@spec period_size() :: pos_integer()
def period_size(), do: :xalsa_manager.period_size()
@doc "The sample rate, number of frames per second"
@spec rate() :: rates()
def rate(), do: :xalsa_manager.rate()
@doc """
Lists the PCM names and their number of channels respectively.
## Example
iex> Xalsa.pcms()
["plughw:1,0": 2]
"""
@spec pcms() :: list()
def pcms(), do: :xalsa_manager.pcms()
@doc "Total number of channels for all PCM devices together."
@spec no_of_channels() :: pos_integer()
def no_of_channels, do: :xalsa_manager.no_of_channels()
@doc """
The maximum buffer prepare time in µs, for all started pcm cards,
since application start or last reset.
## Examples
iex> Xalsa.max_mix_time()
[254]
"""
@spec max_mix_time() :: [pos_integer()]
def max_mix_time(), do: :xalsa_manager.max_mix_time()
@doc "Reset the max prepare time gauges to 0"
@spec clear_max_mix_time() :: :ok
def clear_max_mix_time(), do: :xalsa_manager.clear_max_mix_time()
@doc """
The maximum map size for output mixer, for all started pcm cards,
since application start or last reset. Indicates how many processes
that concurrently are queueing frames to be output.
"""
@spec max_map_size() :: [pos_integer()]
def max_map_size(), do: :xalsa_manager.max_map_size()
@doc "Reset the max prepare time gauges to 0"
@spec clear_max_map_size() :: :ok
def clear_max_map_size(), do: :xalsa_manager.clear_max_map_size()
@doc "Convert a list of (Erlang) floats to a binary of 32 bit (C) floats"
@spec float_list_to_binary([float()]) :: binary()
def float_list_to_binary(fl) do
:xalsa_pcm.float_list_to_binary(fl)
end
@spec boolean_to_integer(boolean()) :: 0 | 1
defp boolean_to_integer(true), do: 1
defp boolean_to_integer(false), do: 0
end | lib/xalsa.ex | 0.90704 | 0.451024 | xalsa.ex | starcoder |
defmodule TimeDiscountRate.Host do
alias TimeDiscountRate.Main
defp ensure_integer(integer) when is_integer(integer), do: integer
defp ensure_integer(str), do: Integer.parse(str) |> elem(0)
defp ensure_float(float) when is_float(float), do: float
defp ensure_float(str), do: Float.parse(str) |> elem(0)
  def update_config(data, config) do
    data
    |> Map.put(:uplim, ensure_float(config["uplim"]))
    |> Map.put(:lowlim, ensure_float(config["lowlim"]))
    |> Map.put(:rest_time, ensure_float(config["rest_time"]))
    |> Map.put(:distance, ensure_integer(config["distance"]))
    |> Map.put(:money, ensure_integer(config["money"]))
    |> Map.put(:q_num, ensure_integer(config["q_num"]))
    |> Map.put(:rest_interval, ensure_integer(config["rest_interval"]))
    |> Map.put(:unit, config["unit"])
    |> Map.put(:basetime, Enum.map(config["basetime"], fn time -> ensure_integer(time) end))
  end
  def update_unit(data, unit) do
    Map.put(data, :unit, unit)
  end
  def update_question(data, text) do
    Map.put(data, :question_text, text)
  end
  def change_page(data, page) do
    # Rebinding inside `if` does not leak out of its scope, so capture the
    # result of the conditional explicitly.
    data =
      if data.page == "waiting" && page == "experiment" do
        data
        |> Map.put(:anses, 0)
        |> Map.put(:results, [])
        |> reset()
      else
        data
      end

    Map.put(data, :page, page)
  end
  def reset(data) do
    %{participants: participants} = data

    updater = fn participant ->
      %{
        participant
        | ansed: false,
          rate: [],
          question: [],
          # 0 -> waiting, 1 -> answering, 2 -> answered
          state: 0,
          slideIndex: 0
      }
    end

    participants =
      Map.new(participants, fn {key, participant} ->
        {key, updater.(participant)}
      end)

    %{data | participants: participants}
  end
def visit(data) do
Map.put(data, :is_first_visit, false)
end
  def get_filter(_data) do
    %{
      _default: true,
      is_first_visit: "isFirstVisit",
      participants_number: "participantsNumber"
    }
  end
def filter_data(data) do
Transmap.transform(data, get_filter(data), diff: false)
end
end | lib/host.ex | 0.553505 | 0.428174 | host.ex | starcoder |
defmodule I18n2Elm.Types do
@moduledoc ~S"""
Specifies the main Elixir types used for describing the
intermediate representations of i18n resources.
"""
defmodule Translation do
@moduledoc ~S"""
Represents a parsed translation file.
JSON:
# da_DK.json
{
"Yes": "Ja",
"No": "Nej",
"Next": "Næste",
"Previous": "Forrige",
"Hello": "Hej, {0}. Leder du efter {1}?"
}
Elixir representation:
%Translation{language_tag: "da_DK",
translations: [
{"TidHello", [{"Hej, ", 0},
{". Leder du efter ", 1},
{"?"}]},
{"TidNext", [{"Næste"}]},
{"TidNo", [{"Nej"}]},
{"TidPrevious", [{"Forrige"}]}
{"TidYes", [{"Ja"}]},
]}
Elm code generated:
module Translations.DaDk exposing (daDkTranslations)
import Translations.Ids exposing (TranslationId(..))
daDkTranslations : TranslationId -> String
daDkTranslations tid =
case tid of
TidHello hole0 hole1 ->
"Hej, " ++ hole0 ++ ". Leder du efter " ++ hole1 ++ "?"
TidYes ->
"Ja"
TidNo ->
"Nej"
TidNext ->
"Næste"
TidPrevious ->
"Forrige"
"""
@type t :: %__MODULE__{language_tag: String.t(), translations: [{String.t(), String.t()}]}
defstruct [:language_tag, :translations]
@spec new(String.t(), [{String.t(), String.t()}]) :: t
def new(language_tag, translations) do
%__MODULE__{language_tag: language_tag, translations: translations}
end
end
defmodule LanguageResource do
@moduledoc ~S"""
Represents a translation file for a single language.
"""
@type t :: %__MODULE__{
module_name: String.t(),
file_name: String.t(),
translation_name: String.t(),
translations: [{String.t(), String.t()}]
}
defstruct [:module_name, :file_name, :translation_name, :translations]
@spec new(String.t(), String.t(), String.t(), [{String.t(), String.t()}]) :: t
def new(module_name, file_name, translation_name, translations) do
%__MODULE__{
module_name: module_name,
file_name: file_name,
translation_name: translation_name,
translations: translations
}
end
end
defmodule IdsResource do
@moduledoc ~S"""
Represents a file containing all translation IDs.
"""
@type t :: %__MODULE__{module_name: String.t(), ids: [String.t()]}
defstruct [:module_name, :ids]
@spec new(String.t(), [String.t()]) :: t
def new(module_name, ids) do
%__MODULE__{module_name: module_name, ids: ids}
end
end
defmodule UtilResource do
@moduledoc ~S"""
Represents a file util functions for parsing language tags and translation
IDs into translated values.
"""
@type t :: %__MODULE__{
module_name: String.t(),
imports: [%{file_name: String.t(), translation_name: String.t()}],
languages: [
%{string_value: String.t(), type_value: String.t(), translation_fun: String.t()}
]
}
defstruct [:module_name, :imports, :languages]
@spec new(String.t(), [map], [map]) :: t
def new(module_name, imports, languages) do
%__MODULE__{module_name: module_name, imports: imports, languages: languages}
end
end
end | lib/types.ex | 0.750095 | 0.424859 | types.ex | starcoder |
defmodule ISO8583.Bitmap do
alias ISO8583.Utils
@moduledoc """
  This module is for building bitmaps. It supports Primary, Secondary and Tertiary bitmaps for fields `0-127`. You can also
  use the same module to build bitmaps for extended fields like `127.0-39` and `127.25.0-33`.
"""
@doc """
  Function to create a bitmap for fields 0-127. Takes a message `map` and creates a bitmap representing all fields
  in the message. Field 0 is turned on by default because every message must have a valid `MTI`.
## Examples
iex> message = %{
iex>"0": "1200",
iex>"2": "4761739001010119",
iex>"3": "000000",
iex>"4": "000000005000",
iex>"6": "000000005000",
iex>"22": "051",
iex>"23": "001",
iex>"25": "00",
iex>"26": "12",
iex>"32": "423935",
iex>"33": "111111111",
iex>"35": "4761739001010119D22122011758928889",
iex>"41": "12345678"
iex>}
%{
"0": "1200",
"2": "4761739001010119",
"22": "051",
"23": "001",
"25": "00",
"26": "12",
"3": "000000",
"32": "423935",
"33": "111111111",
"35": "4761739001010119D22122011758928889",
"4": "000000005000",
"41": "12345678",
"6": "000000005000"
}
iex>Bitmap.fields_0_127(message)
"F40006C1A08000000000000000000000"
"""
def fields_0_127(message) do
message
|> Utils.atomify_map()
|> create_bitmap(128)
|> List.replace_at(0, 1)
|> ensure_127(message)
|> Enum.join()
|> Utils.pad_string(0, 128)
|> Utils.binary_to_hex()
|> Utils.pad_string("0", 32)
end
@doc """
  Function to create a bitmap for fields 127.0-39. Takes a message `map` and creates a bitmap representing all
  the 127 extension fields in the message.
## Examples
iex>message = %{
iex>"127.25": "7E1E5F7C0000000000000000200000000000000014A00000000310107C0000C2FF004934683D9B5D1447800280000000000000000410342031F024103021406010A03A42002008CE0D0C84042100000488004041709018000003276039079EDA",
iex>}
%{
"127.25": "7E1E5F7C0000000000000000200000000000000014A00000000310107C0000C2FF004934683D9B5D1447800280000000000000000410342031F024103021406010A03A42002008CE0D0C84042100000488004041709018000003276039079EDA",
}
iex>Bitmap.fields_0_127_0_39(message)
"0000008000000000"
"""
def fields_0_127_0_39(message) do
message
|> Utils.atomify_map()
|> create_bitmap(64, "127.")
|> List.replace_at(0, 0)
|> Enum.join()
|> Utils.binary_to_hex()
|> Utils.pad_string("0", 16)
end
@doc """
  Function to create a bitmap for fields 127.25.0-39. Takes a message `map` and creates a bitmap representing all
  the 127.25 extension fields in the message.
## Examples
iex>message = %{
iex>"127.25.1": "7E1E5F7C00000000",
iex>"127.25.12": "4934683D9B5D1447",
iex>"127.25.13": "80",
iex>"127.25.14": "0000000000000000410342031F02",
iex>"127.25.15": "410302",
iex>"127.25.18": "06010A03A42002",
iex>"127.25.2": "000000002000",
iex>"127.25.20": "008C",
iex>"127.25.21": "E0D0C8",
iex>"127.25.22": "404",
iex>"127.25.23": "21",
iex>"127.25.24": "0000048800",
iex>"127.25.26": "404",
iex>"127.25.27": "170901",
iex>"127.25.28": "00000327",
iex>"127.25.29": "60",
iex>"127.25.3": "000000000000",
iex>"127.25.30": "39079EDA",
iex>"127.25.4": "A0000000031010",
iex>"127.25.5": "7C00",
iex>"127.25.6": "00C2",
iex>"127.25.7": "FF00"
iex> }
%{
"127.25.1": "7E1E5F7C00000000",
"127.25.12": "4934683D9B5D1447",
"127.25.13": "80",
"127.25.14": "0000000000000000410342031F02",
"127.25.15": "410302",
"127.25.18": "06010A03A42002",
"127.25.2": "000000002000",
"127.25.20": "008C",
"127.25.21": "E0D0C8",
"127.25.22": "404",
"127.25.23": "21",
"127.25.24": "0000048800",
"127.25.26": "404",
"127.25.27": "170901",
"127.25.28": "00000327",
"127.25.29": "60",
"127.25.3": "000000000000",
"127.25.30": "39079EDA",
"127.25.4": "A0000000031010",
"127.25.5": "7C00",
"127.25.6": "00C2",
"127.25.7": "FF00"
}
iex>Bitmap.fields_0_127_25_0_33(message)
"7E1E5F7C00000000"
"""
def fields_0_127_25_0_33(message) do
message
|> Utils.atomify_map()
|> create_bitmap(64, "127.25.")
|> List.replace_at(0, 0)
|> Enum.join()
|> Utils.binary_to_hex()
|> Utils.pad_string("0", 16)
end
defp create_bitmap(message, length) do
List.duplicate(0, length)
|> comprehend(message, "", length)
end
defp create_bitmap(message, length, field_extension) do
List.duplicate(0, length)
|> comprehend(message, field_extension, length)
end
defp comprehend(list, message, field_extension, length, iteration \\ 0)
defp comprehend(list, _, _, length, iteration) when iteration == length do
list
end
defp comprehend(list, message, field_extension, length, iteration) do
field =
field_extension
|> Kernel.<>(Integer.to_string(iteration + 1))
|> String.to_atom()
case Map.get(message, field) do
nil ->
list
|> comprehend(message, field_extension, length, iteration + 1)
_ ->
list
|> List.replace_at(iteration, 1)
|> comprehend(message, field_extension, length, iteration + 1)
end
end
defp ensure_127(bitmap, %{"127.1": _}) do
bitmap |> List.replace_at(126, 1)
end
defp ensure_127(bitmap, _), do: bitmap
end | lib/iso_8583/bitmap/bitmap.ex | 0.865807 | 0.42662 | bitmap.ex | starcoder |
defmodule ExUnitAssertMatch do
@moduledoc """
  Provides functionality to assert that a given data structure is as expected.
  Usage instructions are in the [README](./readme.html#usage).
"""
alias ExUnitAssertMatch.{Type, Types, Option, InternalState}
@doc """
Assert that given `data` match `type` specification.
alias ExUnitAssertMatch, as: Match
Match.assert Match.binary(), "Hello"
#=> pass
Match.assert Match.binary(), 123
#=> fail
This calls ExUnit `assert` macro internally.
"""
def assert(type, data, opts \\ [])
def assert(type, data, opts = %Option{}) do
Type.assert(type, data, opts, %InternalState{})
end
def assert(type, data, opts) do
assert(type, data, struct(Option, opts), %InternalState{})
end
def assert(type, data, opts, state) do
Type.assert(type, data, opts, state)
end
@doc """
Matches any map.
"""
def map() do
%Types.Map{example: nil}
end
@doc """
Matches a map that satisfies `example` specification.
alias ExUnitAssertMatch, as: Match
Match.assert Match.map(%{name: Match.binary()}), %{name: "John"}
#=> pass
Match.assert Match.map(%{name: Match.binary()}), %{name: nil}
#=> fail
You can assert that the map and `example` have exact same keys.
Match.assert Match.map(%{name: Match.binary()}, exact_same_keys: true), %{name: "Bob", age: 28}
#=> fail
"""
def map(example, opts \\ []) do
struct(Types.Map, Keyword.put(opts, :example, example))
end
@doc """
Matches any list.
"""
def list() do
%Types.List{example: nil}
end
@doc """
Matches a list that satisfies `example` specification.
alias ExUnitAssertMatch, as: Match
Match.assert Match.list_of(Match.binary()), ["1", "2"]
#=> pass
Match.assert Match.list_of(Match.binary()), ["1", 2]
#=> fail
"""
def list_of(example) do
%Types.List{example: example}
end
@doc """
Matches any atom.
"""
def atom() do
%Types.Atom{}
end
@doc """
Matches any binary. You can pass regex.
alias ExUnitAssertMatch, as: Match
Match.assert Match.binary(), "foo"
#=> pass
Match.assert Match.binary(regex: ~r/bar/), "foo"
#=> fail
"""
def binary(opts \\ []) do
struct(Types.Binary, opts)
end
@doc """
Matches any integer.
"""
def integer() do
%Types.Integer{}
end
@doc """
Matches any float.
"""
def float() do
%Types.Float{}
end
@doc """
Matches if they are equal.
This uses `==` to compare them.
"""
def literal(example) do
%Types.Literal{example: example}
end
@doc """
Same as `literal/1`.
"""
def equal(example) do
literal(example)
end
@doc """
Matches if it satisfies `example` or equals to `nil`.
"""
def nullable(example) do
%Types.Nullable{example: example}
end
@doc """
Matches everything.
"""
def any() do
%Types.Any{}
end
Enum.each(~w[map list atom binary integer float]a, fn type ->
name = String.to_atom("nullable_" <> to_string(type))
@doc """
Helper function to call `nullable(#{type}())`.
"""
def unquote(name)() do
nullable(unquote(type)())
end
end)
Enum.each(~w[map list_of literal]a, fn type ->
name = String.to_atom("nullable_" <> to_string(type))
@doc """
Helper function to call `nullable(#{type}(example))`.
"""
def unquote(name)(example) do
nullable(unquote(type)(example))
end
end)
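
  # The two loops above generate helpers such as `nullable_binary/0` and
  # `nullable_map/1`. A sketch of their use (values are illustrative):
  #
  #     alias ExUnitAssertMatch, as: Match
  #     Match.assert Match.nullable_binary(), nil
  #     Match.assert Match.nullable_map(%{}), %{}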
end | lib/ex_unit_assert_match.ex | 0.873276 | 0.8398 | ex_unit_assert_match.ex | starcoder |
defmodule Mailroom.IMAP.BodyStructure do
defmodule Part do
defstruct section: nil,
params: %{},
multipart: false,
type: nil,
id: nil,
description: nil,
encoding: nil,
encoded_size: nil,
disposition: nil,
file_name: nil,
parts: []
end
@doc ~S"""
  Generates a `BodyStructure` struct from a parsed IMAP BODYSTRUCTURE list
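
  ## Example

  A minimal sketch (the nested list mirrors a parsed single-part
  `BODYSTRUCTURE` response; the values are illustrative):

      ["TEXT", "PLAIN", ["CHARSET", "UTF-8"], nil, nil, "7BIT", "42"]
      |> BodyStructure.new()
      #=> %Part{type: "text/plain", params: %{"charset" => "UTF-8"}, encoding: "7bit", encoded_size: 42, ...}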
"""
def new(list) do
list
|> build_structure
|> number_sections
end
def has_attachment?(%Part{disposition: "attachment"}), do: true
def has_attachment?(%Part{parts: []}), do: false
def has_attachment?(%Part{parts: parts}), do: Enum.any?(parts, &has_attachment?/1)
def get_attachments(body_structure, acc \\ [])
def get_attachments(%Part{parts: []}, acc), do: Enum.reverse(acc)
def get_attachments(%Part{parts: parts}, acc), do: get_attachments(parts, acc)
def get_attachments([%Part{disposition: "attachment"} = part | tail], acc),
do: get_attachments(tail, [part | acc])
def get_attachments([], acc), do: Enum.reverse(acc)
def get_attachments([_part | tail], acc), do: get_attachments(tail, acc)
defp build_structure([[_ | _] | _rest] = list) do
parse_multipart(list)
end
defp build_structure(list) do
[type, sub_type, params, id, description, encoding, encoded_size | tail] = list
%Part{
type: String.downcase("#{type}/#{sub_type}"),
params: parse_params(params),
id: id,
description: description,
encoding: downcase(encoding),
encoded_size: to_integer(encoded_size),
disposition: parse_disposition(tail),
file_name: parse_file_name(tail)
}
end
defp parse_multipart(list, parts \\ [])
defp parse_multipart([[_ | _] = part | rest], parts) do
parse_multipart(rest, [part | parts])
end
defp parse_multipart([<<type::binary>> | _rest], parts) do
parts = parts |> Enum.reverse() |> Enum.map(&build_structure/1)
%Part{type: String.downcase(type), multipart: true, parts: parts}
end
defp parse_params(list, params \\ %{})
defp parse_params(nil, params), do: params
defp parse_params([], params), do: params
defp parse_params([name, value | tail], params) do
parse_params(tail, Map.put(params, String.downcase(name), value))
end
defp parse_disposition([]), do: nil
defp parse_disposition([[disposition, [_ | _]] | _tail]), do: String.downcase(disposition)
defp parse_disposition([_ | tail]), do: parse_disposition(tail)
defp parse_file_name([]), do: nil
defp parse_file_name([[_, [_ | _] = params] | _tail]), do: file_name_from_params(params)
defp parse_file_name([_ | tail]), do: parse_file_name(tail)
defp file_name_from_params([]), do: nil
defp file_name_from_params(["FILENAME", file_name | _tail]), do: file_name
defp file_name_from_params(["filename", file_name | _tail]), do: file_name
defp file_name_from_params([_, _ | tail]), do: file_name_from_params(tail)
defp number_sections(map, prefix \\ nil, section \\ nil)
defp number_sections(map, prefix, section) do
section = [prefix, section] |> Enum.filter(& &1) |> join(".")
parts =
map.parts
|> Enum.with_index(1)
|> Enum.map(fn {part, index} ->
number_sections(part, section, index)
end)
%{map | section: section, parts: parts}
end
defp join([], _joiner), do: nil
defp join(enum, joiner), do: Enum.join(enum, joiner)
defp to_integer(nil), do: nil
defp to_integer(string), do: String.to_integer(string)
defp downcase(nil), do: nil
defp downcase(string), do: String.downcase(string)
end | lib/mailroom/imap/body_structure.ex | 0.508788 | 0.40539 | body_structure.ex | starcoder |
defmodule StrawHat.Review.Review do
@moduledoc """
Represents a Review Ecto Schema.
"""
use StrawHat.Review.Schema
alias StrawHat.Review.{Comment, Media, ReviewAspect, ReviewReaction}
@typedoc """
  - `reviewee_id`: The object or user that receives the review.
  - `reviewer_id`: The user that makes the review.
  - `comment`: The user's comment or appreciation about the reviewee.
- `comments`: List of `t:StrawHat.Review.Comment.t/0` associated with the
current review.
- `medias`: List of `t:StrawHat.Review.Media.t/0` associated with the
current review.
- `aspects`: List of `t:StrawHat.Review.ReviewAspect.t/0` associated with
the current review.
- `reactions`: List of `t:StrawHat.Review.ReviewReaction.t/0`
associated with the current review.
"""
@type t :: %__MODULE__{
__meta__: Ecto.Schema.Metadata.t(),
id: String.t() | nil,
inserted_at: DateTime.t() | nil,
updated_at: DateTime.t() | nil,
reviewee_id: String.t() | nil,
reviewer_id: String.t() | nil,
comment: String.t() | nil,
comments: [Comment.t()] | Ecto.Association.NotLoaded.t(),
medias: [Media.t()] | Ecto.Association.NotLoaded.t(),
aspects: [ReviewAspect.t()] | Ecto.Association.NotLoaded.t(),
reactions: [ReviewReaction.t()] | Ecto.Association.NotLoaded.t()
}
@typedoc """
Check `t:t/0` type for more information about the keys.
"""
@type review_attrs :: %{
reviewee_id: String.t(),
reviewer_id: String.t(),
comment: String.t(),
aspects: [ReviewAspect.t()],
medias: [%Plug.Upload{}]
}
@required_fields ~w(reviewee_id reviewer_id comment)a
schema "reviews" do
field(:reviewee_id, :string)
field(:reviewer_id, :string)
field(:comment, :string)
timestamps()
has_many(
:comments,
Comment,
on_replace: :delete,
on_delete: :delete_all
)
has_many(
:medias,
Media,
on_replace: :delete,
on_delete: :delete_all
)
has_many(
:aspects,
ReviewAspect,
on_replace: :delete,
on_delete: :delete_all
)
has_many(
:reactions,
ReviewReaction,
on_replace: :delete,
on_delete: :delete_all
)
end
@doc """
  Validates the attributes and returns an Ecto.Changeset for the current Review.
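
  ## Example

  A sketch (values are illustrative; aspect attributes depend on
  `StrawHat.Review.ReviewAspect`):

      Review.changeset(%Review{}, %{
        reviewee_id: "listing_123",
        reviewer_id: "user_456",
        comment: "Great experience overall.",
        aspects: [%{name: "cleanliness", score: 5}]
      })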
"""
@spec changeset(t, review_attrs | %{}) :: Ecto.Changeset.t()
def changeset(review, review_attrs) do
review
|> cast(review_attrs, @required_fields)
|> validate_required(@required_fields)
|> cast_assoc(:aspects, required: true)
|> cast_assoc(:medias)
end
end | lib/straw_hat_review/reviews/review.ex | 0.796213 | 0.462291 | review.ex | starcoder |
defmodule SteamEx.IEconService do
@moduledoc """
Additional Steam Economy methods that provide access to Steam Trading.
**NOTE:** This is a Service interface, methods in this interface should be called with the `input_json` parameter.
For more info on how to use the Steamworks Web API please see the [Web API Overview](https://partner.steamgames.com/doc/webapi_overview).
"""
import SteamEx.API.Base
@interface "IEconService"
@doc """
Gets a history of trades
  | Name | Type | Required | Description |
  | ---- | ---- | -------- | ----------- |
| key | string | ✔ | Steamworks Web API user authentication key.|
| max_trades | uint32 | ✔ | The number of trades to return information for|
| start_after_time | uint32 | ✔ | The time of the last trade shown on the previous page of results, or the time of the first trade if navigating back|
| start_after_tradeid | uint64 | ✔ | The tradeid shown on the previous page of results, or the ID of the first trade if navigating back|
| navigating_back | bool | ✔ | The user wants the previous page of results, so return the previous max_trades trades before the start time and ID|
| get_descriptions | bool | ✔ | If set, the item display data for the items included in the returned trades will also be returned|
  | language | string | ✔ | The language to use when loading item display data|
| include_failed | bool | ✔ | |
| include_total | bool | ✔ | If set, the total number of trades the account has participated in will be included in the response|
See other: [https://partner.steamgames.com/doc/webapi/IEconService#GetTradeHistory](https://partner.steamgames.com/doc/webapi/IEconService#GetTradeHistory)
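
  ## Example

  A sketch (the key and parameters are illustrative):

      SteamEx.IEconService.get_trade_history(
        "YOUR_WEB_API_KEY",
        %{max_trades: 10, get_descriptions: true, language: "en"}
      )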
"""
def get_trade_history(access_key, params \\ %{}, headers \\ %{}) do
get(@interface <> "/GetTradeHistory/v1/", access_key, params, headers)
end
@doc """
Get a list of sent or received trade offers
  | Name | Type | Required | Description |
  | ---- | ---- | -------- | ----------- |
| key | string | ✔ | Steamworks Web API user authentication key.|
| get_sent_offers | bool | ✔ | Request the list of sent offers.|
| get_received_offers | bool | ✔ | Request the list of received offers.|
| get_descriptions | bool | ✔ | If set, the item display data for the items included in the returned trade offers will also be returned.|
| language | string | ✔ | The language to use when loading item display data.|
| active_only | bool | ✔ | Indicates we should only return offers which are still active, or offers that have changed in state since the time_historical_cutoff|
| historical_only | bool | ✔ | Indicates we should only return offers which are not active.|
  | time_historical_cutoff | uint32 | ✔ | When active_only is set, offers updated since this time will also be returned|
See other: [https://partner.steamgames.com/doc/webapi/IEconService#GetTradeOffers](https://partner.steamgames.com/doc/webapi/IEconService#GetTradeOffers)
"""
def get_trade_offers(access_key, params \\ %{}, headers \\ %{}) do
get(@interface <> "/GetTradeOffers/v1/", access_key, params, headers)
end
@doc """
Get counts of pending and new trade offers
  | Name | Type | Required | Description |
  | ---- | ---- | -------- | ----------- |
| key | string | ✔ | Steamworks Web API user authentication key.|
| time_last_visit | uint32 | ✔ | The time the user last visited. If not passed, will use the time the user last visited the trade offer page.|
See other: [https://partner.steamgames.com/doc/webapi/IEconService#GetTradeOffersSummary](https://partner.steamgames.com/doc/webapi/IEconService#GetTradeOffersSummary)
"""
def get_trade_offers_summary(access_key, params \\ %{}, headers \\ %{}) do
get(@interface <> "/GetTradeOffersSummary/v1/", access_key, params, headers)
end
@doc """
Decline a trade offer someone sent to us
  | Name | Type | Required | Description |
  | ---- | ---- | -------- | ----------- |
| key | string | ✔ | Steamworks Web API user authentication key.|
| tradeofferid | uint64 | ✔ | |
See other: [https://partner.steamgames.com/doc/webapi/IEconService#DeclineTradeOffer](https://partner.steamgames.com/doc/webapi/IEconService#DeclineTradeOffer)
"""
def decline_trade_offer(access_key, params \\ %{}, headers \\ %{}) do
post(@interface <> "/DeclineTradeOffer/v1/", access_key, params, headers)
end
@doc """
Cancel a trade offer we sent
  | Name | Type | Required | Description |
  | ---- | ---- | -------- | ----------- |
| key | string | ✔ | Steamworks Web API user authentication key.|
| tradeofferid | uint64 | ✔ | |
See other: [https://partner.steamgames.com/doc/webapi/IEconService#CancelTradeOffer](https://partner.steamgames.com/doc/webapi/IEconService#CancelTradeOffer)
"""
def cancel_trade_offer(access_key, params \\ %{}, headers \\ %{}) do
post(@interface <> "/CancelTradeOffer/v1/", access_key, params, headers)
end
end | lib/interfaces/i_econ_service.ex | 0.712932 | 0.537163 | i_econ_service.ex | starcoder |
defmodule AdventOfCode2019.FlawedFrequencyTransmission do
@moduledoc """
Day 16 — https://adventofcode.com/2019/day/16
"""
@base_pattern [0, 1, 0, -1]
@spec part1(Enumerable.t()) :: binary
def part1(in_stream) do
in_stream
|> read_input_signal()
|> repeat_phases(100)
|> Stream.take(8)
|> Enum.join()
end
@spec part2(Enumerable.t()) :: binary
def part2(in_stream) do
input_signal =
in_stream
|> read_input_signal()
offset =
Enum.slice(input_signal, 0, 7)
|> Enum.join()
|> String.to_integer()
repeat_phases(input_signal, 100, 10_000 * length(input_signal), offset)
|> Stream.take(8)
|> Enum.join()
end
@spec read_input_signal(Enumerable.t()) :: list
defp read_input_signal(in_stream) do
in_stream
|> Enum.take(1)
|> List.first()
|> String.trim()
|> String.graphemes()
|> Enum.map(&String.to_integer/1)
end
defp repeat_phases(input_signal, phases) do
Enum.reduce(1..phases, input_signal, &run_phases/2)
end
defp repeat_phases(input_signal, phases, size, offset) do
Stream.cycle(input_signal)
|> Stream.drop(offset)
|> Enum.take(size - offset)
|> Stream.iterate(&run_phases/1)
|> Enum.at(phases)
end
defp run_phases(_phase, input_signal) do
Stream.transform(1..length(input_signal), input_signal, &output_signal/2)
|> Enum.to_list()
end
defp run_phases(input_signal) do
{input_signal, _} = output_signal(input_signal)
input_signal
end
defp output_signal(i, input_signal) do
{[
Stream.zip(input_signal, repeat_pattern(i))
|> Stream.map(fn {a, b} -> a * b end)
|> Enum.sum()
|> rem(10)
|> abs()
], input_signal}
end
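
  # Part 2: the requested offset lies in the second half of the signal, where
  # the repeated pattern is all ones from position i onward. Each output digit
  # is then just the suffix sum of the input modulo 10, computed below in a
  # single right-to-left pass.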
defp output_signal([n]), do: {[n], n}
defp output_signal([n | tail]) do
{tail, sum} = output_signal(tail)
{[rem(n + sum, 10) | tail], n + sum}
end
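
  # Repeats each element of the base pattern `n` times, cycles it, and drops
  # the leading element. For n = 2 the first terms are
  # 0, 1, 1, 0, 0, -1, -1, 0, ... (illustrative).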
defp repeat_pattern(n) do
Stream.unfold({1, n, @base_pattern}, fn
{_, _, []} -> nil
{n, n, [head | tail]} -> {head, {1, n, tail}}
{i, n, [head | tail]} -> {head, {i + 1, n, [head | tail]}}
end)
|> Stream.cycle()
|> Stream.drop(1)
end
end | lib/advent_of_code_2019/day16.ex | 0.823115 | 0.470676 | day16.ex | starcoder |
defmodule Geo.Turf.Math do
@moduledoc """
All sorts of mathematical functions
"""
@type si_length_uk :: :meters | :kilometers | :centimeters | :millimeters
@type si_length_us :: :metres | :kilometres | :centimetres | :millimetres
@type imperial_length :: :miles | :nauticalmiles | :inches | :yards | :feet
@type length_unit :: si_length_uk | si_length_us | imperial_length
@earth_radius 6371008.8;
@factors %{
centimeters: @earth_radius * 100,
centimetres: @earth_radius * 100,
degrees: @earth_radius / 111325,
feet: @earth_radius * 3.28084,
inches: @earth_radius * 39.370,
kilometers: @earth_radius / 1000,
kilometres: @earth_radius / 1000,
meters: @earth_radius,
metres: @earth_radius,
miles: @earth_radius / 1609.344,
millimeters: @earth_radius * 1000,
millimetres: @earth_radius * 1000,
nauticalmiles: @earth_radius / 1852,
radians: 1,
yards: @earth_radius / 1.0936,
}
@units_factors %{
centimeters: 100,
centimetres: 100,
degrees: 1 / 111325,
feet: 3.28084,
inches: 39.370,
kilometers: 1 / 1000,
kilometres: 1 / 1000,
meters: 1,
metres: 1,
miles: 1 / 1609.344,
millimeters: 1000,
millimetres: 1000,
nauticalmiles: 1 / 1852,
radians: 1 / @earth_radius,
yards: 1 / 1.0936,
}
@area_factors %{
acres: 0.000247105,
centimeters: 10000,
centimetres: 10000,
feet: 10.763910417,
inches: 1550.003100006,
kilometers: 0.000001,
kilometres: 0.000001,
meters: 1,
metres: 1,
miles: 3.86e-7,
millimeters: 1000000,
millimetres: 1000000,
yards: 1.195990046,
}
@tau :math.pi * 2
@doc false
  @spec factor(atom) :: Number.t
def factor(factor), do: @factors[factor]
@doc false
  @spec units_factors(atom) :: Number.t
def units_factors(factor), do: @units_factors[factor]
@doc false
  @spec area_factors(atom) :: Number.t
def area_factors(factor), do: @area_factors[factor]
@doc false
@spec earth_radius() :: Number.t
def earth_radius(), do: @earth_radius
@spec radians_to_length(number(), length_unit) :: number()
def radians_to_length(radians, unit \\ :kilometers) when is_number(radians) do
radians * @factors[unit]
end
@spec length_to_radians(number(), length_unit) :: float()
def length_to_radians(length, unit \\ :kilometers) when is_number(length) do
length / @factors[unit]
end
@spec length_to_degrees(number(), length_unit) :: float()
def length_to_degrees(length, units \\ :kilometers) when is_number(length) do
radians_to_degrees(length_to_radians(length, units))
end
@spec radians_to_degrees(number()) :: float()
  def radians_to_degrees(radians) when is_number(radians) do
    # `mod/2` uses floored division, so take the absolute value first and
    # restore the sign afterwards (the JavaScript-style truncated remainder
    # that turf.js relies on).
    degrees = mod(abs(radians), @tau)
    result = degrees * 180 / :math.pi
    if radians >= 0, do: result, else: -result
  end
@spec degrees_to_radians(number()) :: float()
  def degrees_to_radians(degrees) when is_number(degrees) do
    # Same truncated-remainder handling as in radians_to_degrees/1.
    radians = mod(abs(degrees), 360)
    result = radians * :math.pi / 180
    if degrees >= 0, do: result, else: -result
  end
@spec bearing_to_azimuth(number()) :: number()
def bearing_to_azimuth(bearing) when is_number(bearing) do
angle = mod(bearing, 360)
if angle < 0, do: angle + 360, else: angle
end
@doc """
Round number to precision
## Example
iex> Geo.Turf.Math.rounded(120.4321)
120
iex> Geo.Turf.Math.rounded(120.4321, 3)
120.432
"""
def rounded(number, precision \\ 0) when is_number(number) and is_integer(precision) and precision >= 0 do
multiplier = :math.pow(10, precision)
case precision do
0 -> round(round(number * multiplier) / multiplier)
_ -> round(number * multiplier) / multiplier
end
end
@spec convert_length(number, length_unit, length_unit) :: number
def convert_length(length, from \\ :kilometers, to \\ :kilometers) when is_number(length) and length >= 0 do
radians_to_length(length_to_radians(length, from), to)
end
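
  # Example (illustrative): convert 5 kilometers to miles.
  #
  #     Geo.Turf.Math.convert_length(5, :kilometers, :miles)
  #     #=> roughly 3.1069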
@spec convert_area(number, length_unit, length_unit) :: number
def convert_area(area, from \\ :meters, to \\ :kilometers) when is_number(area) and area >= 0 do
(area / @area_factors[from]) * @area_factors[to]
end
@doc """
Calculates the modulo of a number (integer or float).
Note that this function uses `floored division` whereas the builtin `rem`
function uses `truncated division`. See `Decimal.rem/2` if you want a
`truncated division` function for Decimals that will return the same value as
the BIF `rem/2` but in Decimal form.
See [Wikipedia](https://en.wikipedia.org/wiki/Modulo_operation) for an
explanation of the difference.
Taken from [cldr_utils](https://hex.pm/packages/cldr_utils) with thanks and gratitude.
## Examples
iex> Geo.Turf.Math.mod(1234.0, 5)
4.0
"""
@spec mod(Number.t(), Number.t()) :: Number.t()
def mod(number, modulus) when is_float(number) and is_number(modulus) do
number - Float.floor(number / modulus) * modulus
end
def mod(number, modulus) when is_integer(number) and is_integer(modulus) do
modulo =
number
|> Integer.floor_div(modulus)
|> Kernel.*(modulus)
number - modulo
end
def mod(number, modulus) when is_integer(number) and is_number(modulus) do
modulo =
number
|> Kernel./(modulus)
|> Float.floor()
|> Kernel.*(modulus)
number - modulo
end
end | lib/geo/turf/math.ex | 0.867233 | 0.809803 | math.ex | starcoder |
defmodule Membrane.RTP.VAD do
@moduledoc """
Simple vad based on audio level sent in RTP header.
To make this module work appropriate RTP header extension has to be set in SDP offer/answer.
If avg of audio level in packets in `time_window` exceeds `vad_threshold` it emits
notification `t:speech_notification_t/0`.
When avg falls below `vad_threshold` and doesn't exceed it in the next `vad_silence_timer`
it emits notification `t:silence_notification_t/0`.
Buffers that are processed by this element may or may not have been processed by
a depayloader and passed through a jitter buffer. If they have not, then the only timestamp
available for time comparison is the RTP timestamp. The delta between RTP timestamps is
dependent on the clock rate used by the encoding. For `OPUS` the clock rate is `48kHz` and
packets are sent every `20ms`, so the RTP timestamp delta between sequential packets should
be `48000 / 1000 * 20`, or `960`.
When calculating the epoch of the timestamp, we need to account for 32bit integer wrapping.
* `:current` - the difference between timestamps is low: the timestamp has not wrapped around.
* `:next` - the timestamp has wrapped around to 0. To simplify queue processing we reset the state.
* `:prev` - the timestamp has recently wrapped around. We might receive an out-of-order packet
from before the rollover, which we ignore.
"""
use Membrane.Filter
alias Membrane.RTP.{Header, Utils}
def_input_pad :input, availability: :always, caps: :any, demand_mode: :auto
def_output_pad :output, availability: :always, caps: :any, demand_mode: :auto
def_options vad_id: [
spec: 1..14,
description: "ID of VAD header extension."
],
clock_rate: [
spec: Membrane.RTP.clock_rate_t(),
default: 48_000,
description: "Clock rate (in `Hz`) for the encoding."
],
time_window: [
spec: pos_integer(),
default: 2_000,
description: "Time window (in `ms`) in which avg audio level is measured."
],
min_packet_num: [
spec: pos_integer(),
default: 50,
description: """
Minimal number of packets to count avg audio level from.
Speech won't be detected until there are enough packets.
"""
],
vad_threshold: [
spec: -127..0,
default: -50,
description: """
Audio level in dBov representing vad threshold.
Values above are considered to represent voice activity.
Value -127 represents digital silence.
"""
],
vad_silence_time: [
spec: pos_integer(),
default: 300,
description: """
Time to wait before emitting notification `t:silence_notification_t/0` after audio track is
no longer considered to represent speech.
If at this time audio track is considered to represent speech again the notification will not be sent.
"""
]
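
  # A sketch of configuring this element (illustrative; the extension ID must
  # match the one negotiated in the SDP offer/answer):
  #
  #     %Membrane.RTP.VAD{vad_id: 1, clock_rate: 48_000}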
@typedoc """
Notification sent after detecting speech activity.
"""
@type speech_notification_t() :: {:vad, :speech}
@typedoc """
Notification sent after detecting silence activity.
"""
@type silence_notification_t() :: {:vad, :silence}
@impl true
def handle_init(opts) do
state = %{
vad_id: opts.vad_id,
audio_levels: Qex.new(),
clock_rate: opts.clock_rate,
vad: :silence,
vad_silence_timestamp: 0,
current_timestamp: nil,
rtp_timestamp_increment: opts.time_window * opts.clock_rate / 1000,
min_packet_num: opts.min_packet_num,
time_window: opts.time_window,
vad_threshold: opts.vad_threshold,
vad_silence_time: opts.vad_silence_time,
audio_levels_sum: 0,
audio_levels_count: 0
}
{:ok, state}
end
@impl true
def handle_process(:input, %Membrane.Buffer{} = buffer, _ctx, state) do
{extension, buffer} = Header.Extension.pop(buffer, state.vad_id)
handle_if_present(buffer, extension, state)
end
defp handle_if_present(buffer, nil, state), do: {{:ok, buffer: {:output, buffer}}, state}
@timestamp_limit Bitwise.bsl(1, 32)
defp handle_if_present(buffer, extension, state) do
<<_v::1, level::7>> = extension.data
new_extension = %Header.Extension{
identifier: :vad,
data: extension.data
}
buffer = Header.Extension.put(buffer, new_extension)
rtp_timestamp = buffer.metadata.rtp.timestamp
rollover = Utils.from_which_rollover(state.current_timestamp, rtp_timestamp, @timestamp_limit)
current_timestamp = state.current_timestamp || 0
cond do
rollover == :current && rtp_timestamp > current_timestamp ->
handle_vad(buffer, rtp_timestamp, level, state)
rollover == :next ->
{:ok, state} = handle_init(state)
{{:ok, buffer: {:output, buffer}}, state}
true ->
{{:ok, buffer: {:output, buffer}}, state}
end
end
defp handle_vad(buffer, rtp_timestamp, level, state) do
state = %{state | current_timestamp: rtp_timestamp}
state = filter_old_audio_levels(state)
state = add_new_audio_level(state, level)
audio_levels_vad = get_audio_levels_vad(state)
actions = [buffer: {:output, buffer}] ++ maybe_notify(audio_levels_vad, state)
state = update_vad_state(audio_levels_vad, state)
{{:ok, actions}, state}
end
defp filter_old_audio_levels(state) do
Enum.reduce_while(state.audio_levels, state, fn {level, timestamp}, state ->
if Ratio.sub(state.current_timestamp, timestamp)
|> Ratio.gt?(state.rtp_timestamp_increment) do
{_level, audio_levels} = Qex.pop(state.audio_levels)
state = %{
state
| audio_levels_sum: state.audio_levels_sum - level,
audio_levels_count: state.audio_levels_count - 1,
audio_levels: audio_levels
}
{:cont, state}
else
{:halt, state}
end
end)
end
defp add_new_audio_level(state, level) do
audio_levels = Qex.push(state.audio_levels, {-level, state.current_timestamp})
%{
state
| audio_levels: audio_levels,
audio_levels_sum: state.audio_levels_sum + -level,
audio_levels_count: state.audio_levels_count + 1
}
end
defp get_audio_levels_vad(state) do
if state.audio_levels_count >= state.min_packet_num and avg(state) >= state.vad_threshold,
do: :speech,
else: :silence
end
defp avg(state), do: state.audio_levels_sum / state.audio_levels_count
defp maybe_notify(audio_levels_vad, state) do
if vad_silence?(audio_levels_vad, state) or vad_speech?(audio_levels_vad, state) do
[notify: {:vad, audio_levels_vad}]
else
[]
end
end
defp update_vad_state(audio_levels_vad, state) do
cond do
vad_maybe_silence?(audio_levels_vad, state) ->
Map.merge(state, %{vad: :maybe_silence, vad_silence_timestamp: state.current_timestamp})
vad_silence?(audio_levels_vad, state) or vad_speech?(audio_levels_vad, state) ->
Map.merge(state, %{vad: audio_levels_vad})
true ->
state
end
end
defp vad_silence?(audio_levels_vad, state),
do: state.vad == :maybe_silence and audio_levels_vad == :silence and timer_expired?(state)
defp vad_speech?(audio_levels_vad, state) do
(state.vad == :maybe_silence and audio_levels_vad == :speech) or
(state.vad == :silence and audio_levels_vad == :speech)
end
defp vad_maybe_silence?(audio_levels_vad, state),
do: state.vad == :speech and audio_levels_vad == :silence
defp timer_expired?(state),
do: state.current_timestamp - state.vad_silence_timestamp > state.vad_silence_time
end | lib/membrane/rtp/vad.ex | 0.833968 | 0.505737 | vad.ex | starcoder |
defmodule Traverse.Fn do
use Traverse.Types
@moduledoc """
Implements convenience functions, and function wrappers to complete
partial functions.
The latter is done by catching `FunctionClauseError`.
iex> partial = fn x when is_atom(x) -> to_string(x) end
...> complete = Traverse.Fn.complete(partial, fn x -> x + 1 end)
...> Enum.map([1, :a], complete)
[2, "a"]
"""
@doc """
Allows to complete a partial function
iex> partial = fn x when is_number(x) -> x + 1 end
...> complete = Traverse.Fn.complete(partial, Traverse.Fn.identity())
...> Enum.map([1, :a, []], complete)
[2, :a, []]
There are common cases like this, and here are some convenience functions for them
* `complete_with_identity`
iex> partial = fn x when is_number(x) -> x + 1 end
...> complete = Traverse.Fn.complete_with_identity(partial)
...> Enum.map([1, :a, []], complete)
[2, :a, []]
* `complete_with_const`
iex> partial = fn x when is_number(x) -> x + 1 end
...> complete = Traverse.Fn.complete_with_const(partial, 32)
...> Enum.map([1, :a, []], complete)
[2, 32, 32]
Or with the default
iex> partial = fn x when is_number(x) -> x + 1 end
...> complete = Traverse.Fn.complete_with_const(partial)
...> Enum.map([1, :a, []], complete)
[2, nil, nil]
* `complete_with_ignore`
iex> partial = fn x when is_number(x) -> x + 1 end
...> complete = Traverse.Fn.complete_with_ignore(partial)
...> Enum.map([1, :a, []], complete)
[2, Traverse.Ignore, Traverse.Ignore]
"""
@spec complete((any -> any), (any -> any)) :: (any -> any)
def complete(partial_fn, with_fn) do
fn arg ->
try do
partial_fn.(arg)
rescue
FunctionClauseError -> with_fn.(arg)
ArgumentError -> with_fn.(arg)
end
end
end
@doc """
Convenience function as described in doc of `complete`.
"""
@spec complete_with_const((any -> any), any) :: (any -> any)
def complete_with_const(partial_fn, const \\ nil) do
complete(partial_fn, fn _ -> const end)
end
@doc """
Convenience function as described in doc of `complete`.
"""
@spec complete_with_identity((any -> any)) :: (any -> any)
def complete_with_identity(partial_fn), do: complete(partial_fn, identity())
@doc """
Convenience function as described in doc of `complete`.
"""
@spec complete_with_ignore((any -> any)) :: (any -> any)
def complete_with_ignore(partial_fn), do: complete_with_const(partial_fn, Traverse.Ignore)
@doc """
A convenience declaration of the identity function.
iex> Traverse.Fn.identity().(42)
42
"""
@spec identity :: (any -> any)
def identity do
fn any -> any end
end
end | lib/traverse/fn.ex | 0.723505 | 0.706558 | fn.ex | starcoder |
defmodule Toml.Test.JsonConverter do
@moduledoc false
def parse_toml_file!(path) do
case Toml.decode_file(path) do
{:ok, map} ->
Jason.encode!(to_typed_map(map), pretty: true)
{:error, _} = err ->
err
end
end
def parse_json_file!(path) do
Jason.decode!(File.read!(path))
end
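
  # Example (illustrative): convert a TOML fixture to the toml-test
  # "typed" JSON format.
  #
  #     Toml.Test.JsonConverter.parse_toml_file!("test/fixtures/example.toml")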
def to_typed_map(map) when is_map(map) do
for {k, v} <- map, v2 = to_typed_value(v), into: %{} do
{k, v2}
end
end
defp to_typed_value(:infinity),
do: %{"type" => "integer", "value" => "Infinity"}
defp to_typed_value(:negative_infinity),
do: %{"type" => "integer", "value" => "-Infinity"}
defp to_typed_value(:nan),
do: %{"type" => "integer", "value" => "NaN"}
defp to_typed_value(:negative_nan),
do: %{"type" => "integer", "value" => "-NaN"}
defp to_typed_value(n) when is_integer(n),
do: %{"type" => "integer", "value" => Integer.to_string(n)}
defp to_typed_value(n) when is_float(n),
do: %{"type" => "float", "value" => Float.to_string(n)}
defp to_typed_value(s) when is_binary(s),
do: %{"type" => "string", "value" => s}
defp to_typed_value(true),
do: %{"type" => "bool", "value" => "true"}
defp to_typed_value(false),
do: %{"type" => "bool", "value" => "false"}
# Empty lists are treated plainly
defp to_typed_value([]), do: []
# Array of structs (values)
defp to_typed_value([%_{} | _] = list) do
%{"type" => "array", "value" => Enum.map(list, &to_typed_value/1)}
end
# Table array
defp to_typed_value([h | _] = list) when is_map(h) do
for table <- list, do: to_typed_map(table)
end
# Array value
defp to_typed_value(list) when is_list(list),
do: %{"type" => "array", "value" => Enum.map(list, &to_typed_value/1)}
defp to_typed_value(%Date{} = d),
do: %{"type" => "datetime", "value" => Date.to_iso8601(d)}
defp to_typed_value(%Time{} = d),
do: %{"type" => "datetime", "value" => Time.to_iso8601(d)}
defp to_typed_value(%DateTime{} = d),
do: %{"type" => "datetime", "value" => DateTime.to_iso8601(d)}
defp to_typed_value(%NaiveDateTime{} = d),
do: %{"type" => "datetime", "value" => NaiveDateTime.to_iso8601(d)}
defp to_typed_value(map) when is_map(map) do
to_typed_map(map)
end
end | test/support/converter.ex | 0.577257 | 0.515986 | converter.ex | starcoder |
defmodule Geo.WKB.Encoder do
@moduledoc false
@point 0x00_00_00_01
@point_m 0x40_00_00_01
@point_z 0x80_00_00_01
@point_zm 0xC0_00_00_01
@line_string 0x00_00_00_02
@line_string_z 0x80_00_00_02
@polygon 0x00_00_00_03
@polygon_z 0x80_00_00_03
@multi_point 0x00_00_00_04
@multi_point_z 0x80_00_00_04
@multi_line_string 0x00_00_00_05
@multi_line_string_z 0x80_00_00_05
@multi_polygon 0x00_00_00_06
@multi_polygon_z 0x80_00_00_06
@geometry_collection 0x00_00_00_07
@wkbsridflag 0x20000000
use Bitwise
alias Geo.{
Point,
PointZ,
PointM,
PointZM,
LineString,
LineStringZ,
Polygon,
PolygonZ,
MultiPoint,
MultiPointZ,
MultiLineString,
MultiLineStringZ,
MultiPolygon,
MultiPolygonZ,
GeometryCollection
}
defp add_srid(type), do: type + @wkbsridflag
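
  # Example (illustrative): encode a point to WKB iodata, then hex-encode it.
  #
  #     %Geo.Point{coordinates: {1.0, 2.0}, srid: 4326}
  #     |> Geo.WKB.Encoder.encode!()
  #     |> IO.iodata_to_binary()
  #     |> Base.encode16()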
def encode!(geom, endian \\ :ndr)
for {endian, endian_atom, modifier} <- [{1, :ndr, quote(do: little)}, {0, :xdr, quote(do: big)}] do
def encode!(geom, unquote(endian_atom)) do
{type, rest} = do_encode(geom, unquote(endian_atom))
binary =
if geom.srid do
<<add_srid(type)::unquote(modifier)-32, geom.srid::unquote(modifier)-32>>
else
<<type::unquote(modifier)-32>>
end
[unquote(endian), binary, rest]
end
def do_encode(%Point{coordinates: {x, y}}, unquote(endian_atom)) do
{@point, [<<x::unquote(modifier)-float-64>>, <<y::unquote(modifier)-float-64>>]}
end
def do_encode(%PointZ{coordinates: {x, y, z}}, unquote(endian_atom)) do
{@point_z,
[
<<x::unquote(modifier)-float-64>>,
<<y::unquote(modifier)-float-64>>,
<<z::unquote(modifier)-float-64>>
]}
end
def do_encode(%PointM{coordinates: {x, y, m}}, unquote(endian_atom)) do
{@point_m,
[
<<x::unquote(modifier)-float-64>>,
<<y::unquote(modifier)-float-64>>,
<<m::unquote(modifier)-float-64>>
]}
end
def do_encode(%PointZM{coordinates: {x, y, z, m}}, unquote(endian_atom)) do
{@point_zm,
[
<<x::unquote(modifier)-float-64>>,
<<y::unquote(modifier)-float-64>>,
<<z::unquote(modifier)-float-64>>,
<<m::unquote(modifier)-float-64>>
]}
end
def do_encode(%LineString{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn {x, y}, acc ->
{[<<x::unquote(modifier)-float-64>>, <<y::unquote(modifier)-float-64>>], acc + 1}
end)
{@line_string, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%LineStringZ{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn {x, y, z}, acc ->
{[
<<x::unquote(modifier)-float-64>>,
<<y::unquote(modifier)-float-64>>,
<<z::unquote(modifier)-float-64>>
], acc + 1}
end)
{@line_string_z, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%Polygon{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn ring, acc ->
{_, data} = do_encode(%LineString{coordinates: ring}, unquote(endian_atom))
{data, acc + 1}
end)
{@polygon, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%PolygonZ{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn ring, acc ->
{_, data} = do_encode(%LineStringZ{coordinates: ring}, unquote(endian_atom))
{data, acc + 1}
end)
{@polygon_z, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%MultiPoint{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn coordinate, acc ->
point = encode!(%Point{coordinates: coordinate}, unquote(endian_atom))
{point, acc + 1}
end)
{@multi_point, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%MultiPointZ{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn coordinate, acc ->
point = encode!(%PointZ{coordinates: coordinate}, unquote(endian_atom))
{point, acc + 1}
end)
{@multi_point_z, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%MultiLineString{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn coordinate, acc ->
geom = encode!(%LineString{coordinates: coordinate}, unquote(endian_atom))
{geom, acc + 1}
end)
{@multi_line_string, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%MultiLineStringZ{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn coordinate, acc ->
geom = encode!(%LineStringZ{coordinates: coordinate}, unquote(endian_atom))
{geom, acc + 1}
end)
{@multi_line_string_z, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%MultiPolygon{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn coordinate, acc ->
geom = encode!(%Polygon{coordinates: coordinate}, unquote(endian_atom))
{geom, acc + 1}
end)
{@multi_polygon, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%MultiPolygonZ{coordinates: coordinates}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(coordinates, 0, fn coordinate, acc ->
geom = encode!(%PolygonZ{coordinates: coordinate}, unquote(endian_atom))
{geom, acc + 1}
end)
{@multi_polygon_z, [<<count::unquote(modifier)-32>> | coordinates]}
end
def do_encode(%GeometryCollection{geometries: geometries}, unquote(endian_atom)) do
{coordinates, count} =
Enum.map_reduce(geometries, 0, fn geom, acc ->
geom = encode!(%{geom | srid: nil}, unquote(endian_atom))
{geom, acc + 1}
end)
{@geometry_collection, [<<count::unquote(modifier)-32>> | coordinates]}
end
end
end | lib/geo/wkb/encoder.ex | 0.531453 | 0.700396 | encoder.ex | starcoder |
defmodule Stream do
@moduledoc """
Functions for creating and composing streams.
Streams are composable, lazy enumerables (for an introduction on
enumerables, see the `Enum` module). Any enumerable that generates
elements one by one during enumeration is called a stream. For example,
Elixir's `Range` is a stream:
iex> range = 1..5
1..5
iex> Enum.map(range, &(&1 * 2))
[2, 4, 6, 8, 10]
In the example above, as we mapped over the range, the elements being
enumerated were created one by one, during enumeration. The `Stream`
module allows us to map the range, without triggering its enumeration:
iex> range = 1..3
iex> stream = Stream.map(range, &(&1 * 2))
iex> Enum.map(stream, &(&1 + 1))
[3, 5, 7]
Notice we started with a range and then we created a stream that is
meant to multiply each element in the range by 2. At this point, no
computation was done. Only when `Enum.map/2` is called we actually
enumerate over each element in the range, multiplying it by 2 and adding 1.
We say the functions in `Stream` are *lazy* and the functions in `Enum`
are *eager*.
Due to their laziness, streams are useful when working with large
(or even infinite) collections. When chaining many operations with `Enum`,
intermediate lists are created, while `Stream` creates a recipe of
computations that are executed at a later moment. Let's see another
example:
1..3
|> Enum.map(&IO.inspect(&1))
|> Enum.map(&(&1 * 2))
|> Enum.map(&IO.inspect(&1))
1
2
3
2
4
6
#=> [2, 4, 6]
Notice that we first printed each element in the list, then multiplied each
element by 2 and finally printed each new value. In this example, the list
was enumerated three times. Let's see an example with streams:
stream = 1..3
|> Stream.map(&IO.inspect(&1))
|> Stream.map(&(&1 * 2))
|> Stream.map(&IO.inspect(&1))
Enum.to_list(stream)
1
2
2
4
3
6
#=> [2, 4, 6]
Although the end result is the same, the order in which the elements were
printed changed! With streams, we print the first element and then print
its double. In this example, the list was enumerated just once!
That's what we meant when we said earlier that streams are composable,
lazy enumerables. Notice we could call `Stream.map/2` multiple times,
effectively composing the streams and keeping them lazy. The computations
are only performed when you call a function from the `Enum` module.
Like with `Enum`, the functions in this module work in linear time. This
means that, the time it takes to perform an operation grows at the same
rate as the length of the list. This is expected on operations such as
`Stream.map/2`. After all, if we want to traverse every element on a
stream, the longer the stream, the more elements we need to traverse,
and the longer it will take.
## Creating Streams
There are many functions in Elixir's standard library that return
streams, some examples are:
* `IO.stream/2` - streams input lines, one by one
* `URI.query_decoder/1` - decodes a query string, pair by pair
This module also provides many convenience functions for creating streams,
like `Stream.cycle/1`, `Stream.unfold/2`, `Stream.resource/3` and more.
Note the functions in this module are guaranteed to return enumerables.
Since enumerables can have different shapes (structs, anonymous functions,
and so on), the functions in this module may return any of those shapes
and this may change at any time. For example, a function that today
returns an anonymous function may return a struct in future releases.
"""
@doc false
defstruct enum: nil, funs: [], accs: [], done: nil
@type acc :: any
@type element :: any
@typedoc "Zero-based index."
@type index :: non_neg_integer
@type default :: any
# Require Stream.Reducers and its callbacks
require Stream.Reducers, as: R
defmacrop skip(acc) do
{:cont, acc}
end
defmacrop next(fun, entry, acc) do
quote(do: unquote(fun).(unquote(entry), unquote(acc)))
end
defmacrop acc(head, state, tail) do
quote(do: [unquote(head), unquote(state) | unquote(tail)])
end
defmacrop next_with_acc(fun, entry, head, state, tail) do
quote do
{reason, [head | tail]} = unquote(fun).(unquote(entry), [unquote(head) | unquote(tail)])
{reason, [head, unquote(state) | tail]}
end
end
## Transformers
@doc false
@deprecated "Use Stream.chunk_every/2 instead"
def chunk(enum, n), do: chunk(enum, n, n, nil)
@doc false
@deprecated "Use Stream.chunk_every/3 instead"
def chunk(enum, n, step) do
chunk_every(enum, n, step, nil)
end
@doc false
@deprecated "Use Stream.chunk_every/4 instead"
def chunk(enum, n, step, leftover)
when is_integer(n) and n > 0 and is_integer(step) and step > 0 do
chunk_every(enum, n, step, leftover || :discard)
end
@doc """
Shortcut to `chunk_every(enum, count, count)`.
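
  ## Examples

      iex> Stream.chunk_every([1, 2, 3, 4, 5], 2) |> Enum.to_list()
      [[1, 2], [3, 4], [5]]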
"""
@doc since: "1.5.0"
@spec chunk_every(Enumerable.t(), pos_integer) :: Enumerable.t()
def chunk_every(enum, count), do: chunk_every(enum, count, count, [])
@doc """
Streams the enumerable in chunks, containing `count` elements each,
where each new chunk starts `step` elements into the enumerable.
`step` is optional and, if not passed, defaults to `count`, i.e.
chunks do not overlap.
If the last chunk does not have `count` elements to fill the chunk,
elements are taken from `leftover` to fill in the chunk. If `leftover`
does not have enough elements to fill the chunk, then a partial chunk
is returned with less than `count` elements.
If `:discard` is given in `leftover`, the last chunk is discarded
unless it has exactly `count` elements.
## Examples
iex> Stream.chunk_every([1, 2, 3, 4, 5, 6], 2) |> Enum.to_list()
[[1, 2], [3, 4], [5, 6]]
iex> Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 2, :discard) |> Enum.to_list()
[[1, 2, 3], [3, 4, 5]]
iex> Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 2, [7]) |> Enum.to_list()
[[1, 2, 3], [3, 4, 5], [5, 6, 7]]
iex> Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 3, []) |> Enum.to_list()
[[1, 2, 3], [4, 5, 6]]
"""
@doc since: "1.5.0"
@spec chunk_every(Enumerable.t(), pos_integer, pos_integer, Enumerable.t() | :discard) ::
Enumerable.t()
def chunk_every(enum, count, step, leftover \\ [])
when is_integer(count) and count > 0 and is_integer(step) and step > 0 do
R.chunk_every(&chunk_while/4, enum, count, step, leftover)
end
@doc """
Chunks the `enum` by buffering elements for which `fun` returns the same value.
Elements are only emitted when `fun` returns a new value or the `enum` finishes.
## Examples
iex> stream = Stream.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1))
iex> Enum.to_list(stream)
[[1], [2, 2], [3], [4, 4, 6], [7, 7]]
"""
@spec chunk_by(Enumerable.t(), (element -> any)) :: Enumerable.t()
def chunk_by(enum, fun) when is_function(fun, 1) do
R.chunk_by(&chunk_while/4, enum, fun)
end
@doc """
Chunks the `enum` with fine grained control when every chunk is emitted.
`chunk_fun` receives the current element and the accumulator and
must return `{:cont, element, acc}` to emit the given chunk and
continue with accumulator or `{:cont, acc}` to not emit any chunk
and continue with the return accumulator.
`after_fun` is invoked when iteration is done and must also return
`{:cont, element, acc}` or `{:cont, acc}`.
## Examples
iex> chunk_fun = fn element, acc ->
...> if rem(element, 2) == 0 do
...> {:cont, Enum.reverse([element | acc]), []}
...> else
...> {:cont, [element | acc]}
...> end
...> end
iex> after_fun = fn
...> [] -> {:cont, []}
...> acc -> {:cont, Enum.reverse(acc), []}
...> end
iex> stream = Stream.chunk_while(1..10, [], chunk_fun, after_fun)
iex> Enum.to_list(stream)
[[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
"""
@doc since: "1.5.0"
@spec chunk_while(
Enumerable.t(),
acc,
(element, acc -> {:cont, chunk, acc} | {:cont, acc} | {:halt, acc}),
(acc -> {:cont, chunk, acc} | {:cont, acc})
) :: Enumerable.t()
when chunk: any
def chunk_while(enum, acc, chunk_fun, after_fun)
when is_function(chunk_fun, 2) and is_function(after_fun, 1) do
lazy(
enum,
[acc | after_fun],
fn f1 -> chunk_while_fun(chunk_fun, f1) end,
&after_chunk_while/2
)
end
defp chunk_while_fun(callback, fun) do
fn entry, acc(head, [acc | after_fun], tail) ->
case callback.(entry, acc) do
{:cont, emit, acc} ->
# If we emit an element and then we have to halt,
# we need to disable the after_fun callback to
# avoid emitting even more elements.
case next(fun, emit, [head | tail]) do
{:halt, [head | tail]} -> {:halt, acc(head, [acc | &{:cont, &1}], tail)}
{command, [head | tail]} -> {command, acc(head, [acc | after_fun], tail)}
end
{:cont, acc} ->
skip(acc(head, [acc | after_fun], tail))
{:halt, acc} ->
{:halt, acc(head, [acc | after_fun], tail)}
end
end
end
defp after_chunk_while(acc(h, [acc | after_fun], t), f1) do
case after_fun.(acc) do
{:cont, emit, acc} -> next_with_acc(f1, emit, h, [acc | after_fun], t)
{:cont, acc} -> {:cont, acc(h, [acc | after_fun], t)}
end
end
@doc """
Creates a stream that only emits elements if they are different from the last emitted element.
This function only ever needs to store the last emitted element.
Elements are compared using `===/2`.
## Examples
iex> Stream.dedup([1, 2, 3, 3, 2, 1]) |> Enum.to_list()
[1, 2, 3, 2, 1]
"""
@spec dedup(Enumerable.t()) :: Enumerable.t()
def dedup(enum) do
dedup_by(enum, fn x -> x end)
end
@doc """
Creates a stream that only emits elements if the result of calling `fun` on the element is
different from the (stored) result of calling `fun` on the last emitted element.
## Examples
iex> Stream.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end) |> Enum.to_list()
[{1, :x}, {2, :y}, {1, :x}]
"""
@spec dedup_by(Enumerable.t(), (element -> term)) :: Enumerable.t()
def dedup_by(enum, fun) when is_function(fun, 1) do
lazy(enum, nil, fn f1 -> R.dedup(fun, f1) end)
end
@doc """
Lazily drops the next `n` elements from the enumerable.
If a negative `n` is given, it will drop the last `n` elements from
the collection. Note that the mechanism by which this is implemented
will delay the emission of any element until `n` additional elements have
been emitted by the enum.
## Examples
iex> stream = Stream.drop(1..10, 5)
iex> Enum.to_list(stream)
[6, 7, 8, 9, 10]
iex> stream = Stream.drop(1..10, -5)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
"""
@spec drop(Enumerable.t(), integer) :: Enumerable.t()
def drop(enum, n) when is_integer(n) and n >= 0 do
lazy(enum, n, fn f1 -> R.drop(f1) end)
end
def drop(enum, n) when is_integer(n) and n < 0 do
n = abs(n)
lazy(enum, {0, [], []}, fn f1 ->
fn
entry, [h, {count, buf1, []} | t] ->
do_drop(:cont, n, entry, h, count, buf1, [], t)
entry, [h, {count, buf1, [next | buf2]} | t] ->
{reason, [h | t]} = f1.(next, [h | t])
do_drop(reason, n, entry, h, count, buf1, buf2, t)
end
end)
end
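# Holds back a sliding buffer of `n` entries: once `n` entries are pending,
# each newly seen entry releases the oldest buffered one, so the final `n`
# entries are never emitted.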
defp do_drop(reason, n, entry, h, count, buf1, buf2, t) do
buf1 = [entry | buf1]
count = count + 1
if count == n do
{reason, [h, {0, [], :lists.reverse(buf1)} | t]}
else
{reason, [h, {count, buf1, buf2} | t]}
end
end
@doc """
Creates a stream that drops every `nth` element from the enumerable.
The first element is always dropped, unless `nth` is 0.
`nth` must be a non-negative integer.
## Examples
iex> stream = Stream.drop_every(1..10, 2)
iex> Enum.to_list(stream)
[2, 4, 6, 8, 10]
iex> stream = Stream.drop_every(1..1000, 1)
iex> Enum.to_list(stream)
[]
iex> stream = Stream.drop_every([1, 2, 3, 4, 5], 0)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
"""
@spec drop_every(Enumerable.t(), non_neg_integer) :: Enumerable.t()
def drop_every(enum, nth)
def drop_every(enum, 0), do: %Stream{enum: enum}
def drop_every([], _nth), do: %Stream{enum: []}
def drop_every(enum, nth) when is_integer(nth) and nth > 0 do
lazy(enum, nth, fn f1 -> R.drop_every(nth, f1) end)
end
@doc """
Lazily drops elements of the enumerable while the given
function returns a truthy value.
## Examples
iex> stream = Stream.drop_while(1..10, &(&1 <= 5))
iex> Enum.to_list(stream)
[6, 7, 8, 9, 10]
"""
@spec drop_while(Enumerable.t(), (element -> as_boolean(term))) :: Enumerable.t()
def drop_while(enum, fun) when is_function(fun, 1) do
lazy(enum, true, fn f1 -> R.drop_while(fun, f1) end)
end
@doc """
Executes the given function for each element.
Useful for adding side effects (like printing) to a stream.
## Examples
iex> stream = Stream.each([1, 2, 3], fn x -> send(self(), x) end)
iex> Enum.to_list(stream)
iex> receive do: (x when is_integer(x) -> x)
1
iex> receive do: (x when is_integer(x) -> x)
2
iex> receive do: (x when is_integer(x) -> x)
3
"""
@spec each(Enumerable.t(), (element -> term)) :: Enumerable.t()
def each(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 ->
fn x, acc ->
fun.(x)
f1.(x, acc)
end
end)
end
@doc """
Maps the given `fun` over `enumerable` and flattens the result.
This function returns a new stream built by appending the result of invoking `fun`
on each element of `enumerable` together.
## Examples
iex> stream = Stream.flat_map([1, 2, 3], fn x -> [x, x * 2] end)
iex> Enum.to_list(stream)
[1, 2, 2, 4, 3, 6]
iex> stream = Stream.flat_map([1, 2, 3], fn x -> [[x]] end)
iex> Enum.to_list(stream)
[[1], [2], [3]]
"""
@spec flat_map(Enumerable.t(), (element -> Enumerable.t())) :: Enumerable.t()
def flat_map(enum, mapper) when is_function(mapper, 1) do
transform(enum, nil, fn val, nil -> {mapper.(val), nil} end)
end
@doc """
Creates a stream that filters elements according to
the given function on enumeration.
## Examples
iex> stream = Stream.filter([1, 2, 3], fn x -> rem(x, 2) == 0 end)
iex> Enum.to_list(stream)
[2]
"""
@spec filter(Enumerable.t(), (element -> as_boolean(term))) :: Enumerable.t()
def filter(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 -> R.filter(fun, f1) end)
end
@doc false
@deprecated "Use Stream.filter/2 + Stream.map/2 instead"
def filter_map(enum, filter, mapper) do
lazy(enum, fn f1 -> R.filter_map(filter, mapper, f1) end)
end
@doc """
Creates a stream that emits a value after the given period `n`
in milliseconds.
The values emitted are an increasing counter starting at `0`.
This operation will block the caller by the given interval
every time a new element is streamed.
Do not use this function to generate a sequence of numbers.
If blocking the caller process is not necessary, use
`Stream.iterate(0, & &1 + 1)` instead.
## Examples
iex> Stream.interval(10) |> Enum.take(10)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
"""
@spec interval(non_neg_integer) :: Enumerable.t()
def interval(n) when is_integer(n) and n >= 0 do
unfold(0, fn count ->
Process.sleep(n)
{count, count + 1}
end)
end
@doc """
Injects the stream values into the given collectable as a side-effect.
This function is often used with `run/1` since any evaluation
is delayed until the stream is executed. See `run/1` for an example.
"""
@spec into(Enumerable.t(), Collectable.t(), (term -> term)) :: Enumerable.t()
def into(enum, collectable, transform \\ fn x -> x end) when is_function(transform, 1) do
&do_into(enum, collectable, transform, &1, &2)
end
defp do_into(enum, collectable, transform, acc, fun) do
{initial, into} = Collectable.into(collectable)
composed = fn x, [acc | collectable] ->
collectable = into.(collectable, {:cont, transform.(x)})
{reason, acc} = fun.(x, acc)
{reason, [acc | collectable]}
end
do_into(&Enumerable.reduce(enum, &1, composed), initial, into, acc)
end
defp do_into(reduce, collectable, into, {command, acc}) do
try do
reduce.({command, [acc | collectable]})
catch
kind, reason ->
into.(collectable, :halt)
:erlang.raise(kind, reason, __STACKTRACE__)
else
{:suspended, [acc | collectable], continuation} ->
{:suspended, acc, &do_into(continuation, collectable, into, &1)}
{reason, [acc | collectable]} ->
into.(collectable, :done)
{reason, acc}
end
end
@doc """
Creates a stream that will apply the given function on
enumeration.
## Examples
iex> stream = Stream.map([1, 2, 3], fn x -> x * 2 end)
iex> Enum.to_list(stream)
[2, 4, 6]
"""
@spec map(Enumerable.t(), (element -> any)) :: Enumerable.t()
def map(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 -> R.map(fun, f1) end)
end
@doc """
Creates a stream that will apply the given function on
every `nth` element from the enumerable.
The first element is always passed to the given function.
`nth` must be a non-negative integer.
## Examples
iex> stream = Stream.map_every(1..10, 2, fn x -> x * 2 end)
iex> Enum.to_list(stream)
[2, 2, 6, 4, 10, 6, 14, 8, 18, 10]
iex> stream = Stream.map_every([1, 2, 3, 4, 5], 1, fn x -> x * 2 end)
iex> Enum.to_list(stream)
[2, 4, 6, 8, 10]
iex> stream = Stream.map_every(1..5, 0, fn x -> x * 2 end)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
"""
@doc since: "1.4.0"
@spec map_every(Enumerable.t(), non_neg_integer, (element -> any)) :: Enumerable.t()
def map_every(enum, nth, fun) when is_integer(nth) and nth >= 0 and is_function(fun, 1) do
map_every_after_guards(enum, nth, fun)
end
defp map_every_after_guards(enum, 1, fun), do: map(enum, fun)
defp map_every_after_guards(enum, 0, _fun), do: %Stream{enum: enum}
defp map_every_after_guards([], _nth, _fun), do: %Stream{enum: []}
defp map_every_after_guards(enum, nth, fun) do
lazy(enum, nth, fn f1 -> R.map_every(nth, fun, f1) end)
end
@doc """
Creates a stream that will reject elements according to
the given function on enumeration.
## Examples
iex> stream = Stream.reject([1, 2, 3], fn x -> rem(x, 2) == 0 end)
iex> Enum.to_list(stream)
[1, 3]
"""
@spec reject(Enumerable.t(), (element -> as_boolean(term))) :: Enumerable.t()
def reject(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 -> R.reject(fun, f1) end)
end
@doc """
Runs the given stream.
This is useful when a stream needs to be run, for side effects,
and there is no interest in its return result.
## Examples
Open up a file, replace all `#` by `%` and stream to another file
without loading the whole file in memory:
File.stream!("/path/to/file")
|> Stream.map(&String.replace(&1, "#", "%"))
|> Stream.into(File.stream!("/path/to/other/file"))
|> Stream.run()
No computation will be done until we call one of the `Enum` functions
or `run/1`.
"""
@spec run(Enumerable.t()) :: :ok
def run(stream) do
_ = Enumerable.reduce(stream, {:cont, nil}, fn _, _ -> {:cont, nil} end)
:ok
end
@doc """
Creates a stream that applies the given function to each
element, emits the result and uses the same result as the accumulator
for the next computation. Uses the first element in the enumerable
as the starting value.
## Examples
iex> stream = Stream.scan(1..5, &(&1 + &2))
iex> Enum.to_list(stream)
[1, 3, 6, 10, 15]
"""
@spec scan(Enumerable.t(), (element, acc -> any)) :: Enumerable.t()
def scan(enum, fun) when is_function(fun, 2) do
lazy(enum, :first, fn f1 -> R.scan2(fun, f1) end)
end
@doc """
Creates a stream that applies the given function to each
element, emits the result and uses the same result as the accumulator
for the next computation. Uses the given `acc` as the starting value.
## Examples
iex> stream = Stream.scan(1..5, 0, &(&1 + &2))
iex> Enum.to_list(stream)
[1, 3, 6, 10, 15]
"""
@spec scan(Enumerable.t(), acc, (element, acc -> any)) :: Enumerable.t()
def scan(enum, acc, fun) when is_function(fun, 2) do
lazy(enum, acc, fn f1 -> R.scan3(fun, f1) end)
end
@doc """
Lazily takes the next `count` elements from the enumerable and stops
enumeration.
If a negative `count` is given, the last `count` values will be taken.
For such, the collection is fully enumerated keeping up to `2 * count`
elements in memory. Once the end of the collection is reached,
the last `count` elements will be emitted. Therefore, using
a negative `count` on an infinite collection will never return.
## Examples
iex> stream = Stream.take(1..100, 5)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
iex> stream = Stream.take(1..100, -5)
iex> Enum.to_list(stream)
[96, 97, 98, 99, 100]
iex> stream = Stream.cycle([1, 2, 3]) |> Stream.take(5)
iex> Enum.to_list(stream)
[1, 2, 3, 1, 2]
"""
@spec take(Enumerable.t(), integer) :: Enumerable.t()
def take(enum, count) when is_integer(count) do
take_after_guards(enum, count)
end
defp take_after_guards(_enum, 0), do: %Stream{enum: []}
defp take_after_guards([], _count), do: %Stream{enum: []}
defp take_after_guards(enum, count) when count > 0 do
lazy(enum, count, fn f1 -> R.take(f1) end)
end
defp take_after_guards(enum, count) when count < 0 do
&Enumerable.reduce(Enum.take(enum, count), &1, &2)
end
@doc """
Creates a stream that takes every `nth` element from the enumerable.
The first element is always included, unless `nth` is 0.
`nth` must be a non-negative integer.
## Examples
iex> stream = Stream.take_every(1..10, 2)
iex> Enum.to_list(stream)
[1, 3, 5, 7, 9]
iex> stream = Stream.take_every([1, 2, 3, 4, 5], 1)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
iex> stream = Stream.take_every(1..1000, 0)
iex> Enum.to_list(stream)
[]
"""
@spec take_every(Enumerable.t(), non_neg_integer) :: Enumerable.t()
def take_every(enum, nth) when is_integer(nth) and nth >= 0 do
take_every_after_guards(enum, nth)
end
defp take_every_after_guards(_enum, 0), do: %Stream{enum: []}
defp take_every_after_guards([], _nth), do: %Stream{enum: []}
defp take_every_after_guards(enum, nth) do
lazy(enum, nth, fn f1 -> R.take_every(nth, f1) end)
end
@doc """
Lazily takes elements of the enumerable while the given
function returns a truthy value.
## Examples
iex> stream = Stream.take_while(1..100, &(&1 <= 5))
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5]
"""
@spec take_while(Enumerable.t(), (element -> as_boolean(term))) :: Enumerable.t()
def take_while(enum, fun) when is_function(fun, 1) do
lazy(enum, fn f1 -> R.take_while(fun, f1) end)
end
@doc """
Creates a stream that emits a single value after `n` milliseconds.
The value emitted is `0`. This operation will block the caller by
the given time until the element is streamed.
## Examples
iex> Stream.timer(10) |> Enum.to_list()
[0]
"""
@spec timer(non_neg_integer) :: Enumerable.t()
def timer(n) when is_integer(n) and n >= 0 do
take(interval(n), 1)
end
@doc """
Transforms an existing stream.
It expects an accumulator and a function that receives each stream element
and an accumulator. It must return a tuple, where the first element is a new
stream (often a list) or the atom `:halt`, and the second element is the
accumulator to be used by the next element, if any, in both cases.
Note: this function is equivalent to `Enum.flat_map_reduce/3`, except this
function does not return the accumulator once the stream is processed.
## Examples
`Stream.transform/3` is useful as it can be used as the basis to implement
many of the functions defined in this module. For example, we can implement
`Stream.take(enum, n)` as follows:
iex> enum = 1001..9999
iex> n = 3
iex> stream = Stream.transform(enum, 0, fn i, acc ->
...> if acc < n, do: {[i], acc + 1}, else: {:halt, acc}
...> end)
iex> Enum.to_list(stream)
[1001, 1002, 1003]
"""
@spec transform(Enumerable.t(), acc, fun) :: Enumerable.t()
when fun: (element, acc -> {Enumerable.t(), acc} | {:halt, acc}),
acc: any
def transform(enum, acc, reducer) when is_function(reducer, 2) do
&do_transform(enum, fn -> acc end, reducer, &1, &2, nil)
end
@doc """
Transforms an existing stream with function-based start and finish.
The accumulator is only calculated when transformation starts. It also
allows an after function to be given which is invoked when the stream
halts or completes.
This function can be seen as a combination of `Stream.resource/3` with
`Stream.transform/3`.
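## Examples
A minimal sketch computing a running sum, where `after_fun` simply
discards the final accumulator:
iex> stream = Stream.transform(1..5, fn -> 0 end, fn i, sum -> {[sum + i], sum + i} end, fn _sum -> :ok end)
iex> Enum.to_list(stream)
[1, 3, 6, 10, 15]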
"""
@spec transform(Enumerable.t(), (() -> acc), fun, (acc -> term)) :: Enumerable.t()
when fun: (element, acc -> {Enumerable.t(), acc} | {:halt, acc}),
acc: any
def transform(enum, start_fun, reducer, after_fun)
when is_function(start_fun, 0) and is_function(reducer, 2) and is_function(after_fun, 1) do
&do_transform(enum, start_fun, reducer, &1, &2, after_fun)
end
defp do_transform(enumerables, user_acc, user, inner_acc, fun, after_fun) do
inner = &do_transform_each(&1, &2, fun)
step = &do_transform_step(&1, &2)
next = &Enumerable.reduce(enumerables, &1, step)
funs = {user, fun, inner, after_fun}
do_transform(user_acc.(), :cont, next, inner_acc, funs)
end
defp do_transform(user_acc, _next_op, next, {:halt, inner_acc}, funs) do
{_, _, _, after_fun} = funs
next.({:halt, []})
do_after(after_fun, user_acc)
{:halted, inner_acc}
end
defp do_transform(user_acc, next_op, next, {:suspend, inner_acc}, funs) do
{:suspended, inner_acc, &do_transform(user_acc, next_op, next, &1, funs)}
end
defp do_transform(user_acc, :halt, _next, {_, inner_acc}, funs) do
{_, _, _, after_fun} = funs
do_after(after_fun, user_acc)
{:halted, inner_acc}
end
defp do_transform(user_acc, :cont, next, inner_acc, funs) do
{_, _, _, after_fun} = funs
try do
next.({:cont, []})
catch
kind, reason ->
do_after(after_fun, user_acc)
:erlang.raise(kind, reason, __STACKTRACE__)
else
{:suspended, vals, next} ->
do_transform_user(:lists.reverse(vals), user_acc, :cont, next, inner_acc, funs)
{_, vals} ->
do_transform_user(:lists.reverse(vals), user_acc, :halt, next, inner_acc, funs)
end
end
defp do_transform_user([], user_acc, next_op, next, inner_acc, funs) do
do_transform(user_acc, next_op, next, inner_acc, funs)
end
defp do_transform_user([val | vals], user_acc, next_op, next, inner_acc, funs) do
{user, fun, inner, after_fun} = funs
try do
user.(val, user_acc)
catch
kind, reason ->
next.({:halt, []})
do_after(after_fun, user_acc)
:erlang.raise(kind, reason, __STACKTRACE__)
else
{[], user_acc} ->
do_transform_user(vals, user_acc, next_op, next, inner_acc, funs)
{list, user_acc} when is_list(list) ->
reduce = &Enumerable.List.reduce(list, &1, fun)
do_list_transform(vals, user_acc, next_op, next, inner_acc, reduce, funs)
{:halt, user_acc} ->
next.({:halt, []})
do_after(after_fun, user_acc)
{:halted, elem(inner_acc, 1)}
{other, user_acc} ->
reduce = &Enumerable.reduce(other, &1, inner)
do_enum_transform(vals, user_acc, next_op, next, inner_acc, reduce, funs)
end
end
defp do_list_transform(vals, user_acc, next_op, next, inner_acc, reduce, funs) do
{_, _, _, after_fun} = funs
try do
reduce.(inner_acc)
catch
kind, reason ->
next.({:halt, []})
do_after(after_fun, user_acc)
:erlang.raise(kind, reason, __STACKTRACE__)
else
{:done, acc} ->
do_transform_user(vals, user_acc, next_op, next, {:cont, acc}, funs)
{:halted, acc} ->
next.({:halt, []})
do_after(after_fun, user_acc)
{:halted, acc}
{:suspended, acc, continuation} ->
resume = &do_list_transform(vals, user_acc, next_op, next, &1, continuation, funs)
{:suspended, acc, resume}
end
end
defp do_enum_transform(vals, user_acc, next_op, next, {op, inner_acc}, reduce, funs) do
{_, _, _, after_fun} = funs
try do
reduce.({op, [:outer | inner_acc]})
catch
kind, reason ->
next.({:halt, []})
do_after(after_fun, user_acc)
:erlang.raise(kind, reason, __STACKTRACE__)
else
# Only take into account outer halts when the op is not halt itself.
# Otherwise, we were the ones wishing to halt, so we should just stop.
{:halted, [:outer | acc]} when op != :halt ->
do_transform_user(vals, user_acc, next_op, next, {:cont, acc}, funs)
{:halted, [_ | acc]} ->
next.({:halt, []})
do_after(after_fun, user_acc)
{:halted, acc}
{:done, [_ | acc]} ->
do_transform_user(vals, user_acc, next_op, next, {:cont, acc}, funs)
{:suspended, [_ | acc], continuation} ->
resume = &do_enum_transform(vals, user_acc, next_op, next, &1, continuation, funs)
{:suspended, acc, resume}
end
end
defp do_after(nil, _user_acc), do: :ok
defp do_after(fun, user_acc), do: fun.(user_acc)
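# While reducing an enumerable returned by the user function, the inner
# accumulator is tagged :outer/:inner: a halt requested by the downstream
# consumer flips the tag to :inner, so callers can tell it apart from a
# halt the transform itself initiated.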
defp do_transform_each(x, [:outer | acc], f) do
case f.(x, acc) do
{:halt, res} -> {:halt, [:inner | res]}
{op, res} -> {op, [:outer | res]}
end
end
defp do_transform_step(x, acc) do
{:suspend, [x | acc]}
end
@doc """
Creates a stream that only emits elements if they are unique.
Keep in mind that, in order to know if an element is unique
or not, this function needs to store all unique values emitted
by the stream. Therefore, if the stream is infinite, the number
of elements stored will grow infinitely, never being garbage-collected.
## Examples
iex> Stream.uniq([1, 2, 3, 3, 2, 1]) |> Enum.to_list()
[1, 2, 3]
"""
@spec uniq(Enumerable.t()) :: Enumerable.t()
def uniq(enum) do
uniq_by(enum, fn x -> x end)
end
@doc false
@deprecated "Use Stream.uniq_by/2 instead"
def uniq(enum, fun) do
uniq_by(enum, fun)
end
@doc """
Creates a stream that only emits elements if they are unique, by removing the
elements for which function `fun` returned a duplicate value.
The function `fun` maps every element to a term which is used to
determine if two elements are duplicates.
Keep in mind that, in order to know if an element is unique
or not, this function needs to store all unique values emitted
by the stream. Therefore, if the stream is infinite, the number
of elements stored will grow infinitely, never being garbage-collected.
## Example
iex> Stream.uniq_by([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) |> Enum.to_list()
[{1, :x}, {2, :y}]
iex> Stream.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end) |> Enum.to_list()
[a: {:tea, 2}, c: {:coffee, 1}]
"""
@spec uniq_by(Enumerable.t(), (element -> term)) :: Enumerable.t()
def uniq_by(enum, fun) when is_function(fun, 1) do
lazy(enum, %{}, fn f1 -> R.uniq_by(fun, f1) end)
end
@doc """
Creates a stream where each element in the enumerable will
be wrapped in a tuple alongside its index.
If an `offset` is given, we will index from the given offset instead of from zero.
## Examples
iex> stream = Stream.with_index([1, 2, 3])
iex> Enum.to_list(stream)
[{1, 0}, {2, 1}, {3, 2}]
iex> stream = Stream.with_index([1, 2, 3], 3)
iex> Enum.to_list(stream)
[{1, 3}, {2, 4}, {3, 5}]
"""
@spec with_index(Enumerable.t(), integer) :: Enumerable.t()
def with_index(enum, offset \\ 0) when is_integer(offset) do
lazy(enum, offset, fn f1 -> R.with_index(f1) end)
end
## Combiners
@doc """
Creates a stream that enumerates each enumerable in an enumerable.
## Examples
iex> stream = Stream.concat([1..3, 4..6, 7..9])
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5, 6, 7, 8, 9]
"""
@spec concat(Enumerable.t()) :: Enumerable.t()
def concat(enumerables) do
flat_map(enumerables, & &1)
end
@doc """
Creates a stream that enumerates the first argument, followed by the second.
## Examples
iex> stream = Stream.concat(1..3, 4..6)
iex> Enum.to_list(stream)
[1, 2, 3, 4, 5, 6]
iex> stream1 = Stream.cycle([1, 2, 3])
iex> stream2 = Stream.cycle([4, 5, 6])
iex> stream = Stream.concat(stream1, stream2)
iex> Enum.take(stream, 6)
[1, 2, 3, 1, 2, 3]
"""
@spec concat(Enumerable.t(), Enumerable.t()) :: Enumerable.t()
def concat(first, second) do
flat_map([first, second], & &1)
end
@doc """
Zips two collections together, lazily.
The zipping finishes as soon as any enumerable completes.
## Examples
iex> concat = Stream.concat(1..3, 4..6)
iex> cycle = Stream.cycle([:a, :b, :c])
iex> Stream.zip(concat, cycle) |> Enum.to_list()
[{1, :a}, {2, :b}, {3, :c}, {4, :a}, {5, :b}, {6, :c}]
"""
@spec zip(Enumerable.t(), Enumerable.t()) :: Enumerable.t()
def zip(left, right), do: zip([left, right])
@doc """
Zips corresponding elements from a finite collection of enumerables
into one stream of tuples.
The zipping finishes as soon as any enumerable in the given collection completes.
## Examples
iex> concat = Stream.concat(1..3, 4..6)
iex> cycle = Stream.cycle(["foo", "bar", "baz"])
iex> Stream.zip([concat, [:a, :b, :c], cycle]) |> Enum.to_list()
[{1, :a, "foo"}, {2, :b, "bar"}, {3, :c, "baz"}]
"""
@doc since: "1.4.0"
@spec zip(enumerables) :: Enumerable.t() when enumerables: [Enumerable.t()] | Enumerable.t()
def zip(enumerables) do
&prepare_zip(enumerables, &1, &2)
end
defp prepare_zip(enumerables, acc, fun) do
step = &do_zip_step(&1, &2)
enum_funs =
Enum.map(enumerables, fn enum ->
{&Enumerable.reduce(enum, &1, step), [], :cont}
end)
do_zip(enum_funs, acc, fun)
end
# This implementation of do_zip/3 works for any number of
# streams to zip, even if right now zip/2 only zips two streams.
defp do_zip(zips, {:halt, acc}, _fun) do
do_zip_close(zips)
{:halted, acc}
end
defp do_zip(zips, {:suspend, acc}, fun) do
{:suspended, acc, &do_zip(zips, &1, fun)}
end
defp do_zip([], {:cont, acc}, _callback) do
{:done, acc}
end
defp do_zip(zips, {:cont, acc}, callback) do
try do
do_zip_next_tuple(zips, acc, callback, [], [])
catch
kind, reason ->
do_zip_close(zips)
:erlang.raise(kind, reason, __STACKTRACE__)
else
{:next, buffer, acc} ->
do_zip(buffer, acc, callback)
{:done, _acc} = other ->
other
end
end
# do_zip_next_tuple/5 computes the next tuple formed by
# the next element of each zipped stream.
defp do_zip_next_tuple([{_, [], :halt} | zips], acc, _callback, _yielded_elems, buffer) do
do_zip_close(:lists.reverse(buffer, zips))
{:done, acc}
end
defp do_zip_next_tuple([{fun, [], :cont} | zips], acc, callback, yielded_elems, buffer) do
case fun.({:cont, []}) do
{:suspended, [elem | next_acc], fun} ->
next_buffer = [{fun, next_acc, :cont} | buffer]
do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], next_buffer)
{_, [elem | next_acc]} ->
next_buffer = [{fun, next_acc, :halt} | buffer]
do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], next_buffer)
{_, []} ->
# The current zipped stream terminated, so we close all the streams
# and return {:halted, acc} (which is returned as is by do_zip/3).
do_zip_close(:lists.reverse(buffer, zips))
{:done, acc}
end
end
defp do_zip_next_tuple([{fun, zip_acc, zip_op} | zips], acc, callback, yielded_elems, buffer) do
[elem | rest] = zip_acc
next_buffer = [{fun, rest, zip_op} | buffer]
do_zip_next_tuple(zips, acc, callback, [elem | yielded_elems], next_buffer)
end
defp do_zip_next_tuple([] = _zips, acc, callback, yielded_elems, buffer) do
# "yielded_elems" is a reversed list of results for the current iteration of
# zipping: it needs to be reversed and converted to a tuple to have the next
# tuple in the list resulting from zipping.
zipped = List.to_tuple(:lists.reverse(yielded_elems))
{:next, :lists.reverse(buffer), callback.(zipped, acc)}
end
defp do_zip_close(zips) do
:lists.foreach(fn {fun, _, _} -> fun.({:halt, []}) end, zips)
end
defp do_zip_step(x, acc) do
{:suspend, :lists.reverse([x | acc])}
end
## Sources
@doc """
Creates a stream that cycles through the given enumerable,
infinitely.
## Examples
iex> stream = Stream.cycle([1, 2, 3])
iex> Enum.take(stream, 5)
[1, 2, 3, 1, 2]
"""
@spec cycle(Enumerable.t()) :: Enumerable.t()
def cycle(enumerable)
def cycle([]) do
raise ArgumentError, "cannot cycle over empty enumerable"
end
def cycle(enumerable) when is_list(enumerable) do
unfold({enumerable, enumerable}, fn
{source, [h | t]} -> {h, {source, t}}
{source = [h | t], []} -> {h, {source, t}}
end)
end
def cycle(enumerable) do
fn acc, fun ->
inner = &do_cycle_each(&1, &2, fun)
outer = &Enumerable.reduce(enumerable, &1, inner)
reduce = check_cycle_first_element(outer)
do_cycle(reduce, outer, acc)
end
end
defp do_cycle(_reduce, _cycle, {:halt, acc}) do
{:halted, acc}
end
defp do_cycle(reduce, cycle, {:suspend, acc}) do
{:suspended, acc, &do_cycle(reduce, cycle, &1)}
end
defp do_cycle(reduce, cycle, acc) do
try do
reduce.(acc)
catch
{:stream_cycle, acc} ->
{:halted, acc}
else
{state, acc} when state in [:done, :halted] ->
do_cycle(cycle, cycle, {:cont, acc})
{:suspended, acc, continuation} ->
{:suspended, acc, &do_cycle(continuation, cycle, &1)}
end
end
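# Enumerable.reduce offers no way to break out of an infinite cycle from
# the inside, so a consumer-requested {:halt, acc} is turned into a throw
# that do_cycle catches above.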
defp do_cycle_each(x, acc, f) do
case f.(x, acc) do
{:halt, h} -> throw({:stream_cycle, h})
{_, _} = o -> o
end
end
defp check_cycle_first_element(reduce) do
fn acc ->
case reduce.(acc) do
{state, []} when state in [:done, :halted] ->
raise ArgumentError, "cannot cycle over empty enumerable"
other ->
other
end
end
end
@doc """
Emits a sequence of values, starting with `start_value`. Successive
values are generated by calling `next_fun` on the previous value.
## Examples
iex> Stream.iterate(0, &(&1 + 1)) |> Enum.take(5)
[0, 1, 2, 3, 4]
"""
@spec iterate(element, (element -> element)) :: Enumerable.t()
def iterate(start_value, next_fun) when is_function(next_fun, 1) do
unfold({:ok, start_value}, fn
{:ok, value} ->
{value, {:next, value}}
{:next, value} ->
next = next_fun.(value)
{next, {:next, next}}
end)
end
@doc """
Returns a stream generated by calling `generator_fun` repeatedly.
## Examples
# Although not necessary, let's seed the random algorithm
iex> :rand.seed(:exrop, {1, 2, 3})
iex> Stream.repeatedly(&:rand.uniform/0) |> Enum.take(3)
[0.7498295129076106, 0.06161655489244533, 0.7924073127680873]
"""
@spec repeatedly((() -> element)) :: Enumerable.t()
def repeatedly(generator_fun) when is_function(generator_fun, 0) do
&do_repeatedly(generator_fun, &1, &2)
end
defp do_repeatedly(generator_fun, {:suspend, acc}, fun) do
{:suspended, acc, &do_repeatedly(generator_fun, &1, fun)}
end
defp do_repeatedly(_generator_fun, {:halt, acc}, _fun) do
{:halted, acc}
end
defp do_repeatedly(generator_fun, {:cont, acc}, fun) do
do_repeatedly(generator_fun, fun.(generator_fun.(), acc), fun)
end
@doc """
Emits a sequence of values for the given resource.
Similar to `transform/3` but the initial accumulated value is
computed lazily via `start_fun` and executes an `after_fun` at
the end of enumeration (both in cases of success and failure).
Successive values are generated by calling `next_fun` with the
previous accumulator (the initial value being the result returned
by `start_fun`) and it must return a tuple containing a list
of elements to be emitted and the next accumulator. The enumeration
finishes if it returns `{:halt, acc}`.
As the name says, this function is useful to stream values from
resources.
## Examples
Stream.resource(
fn -> File.open!("sample") end,
fn file ->
case IO.read(file, :line) do
data when is_binary(data) -> {[data], file}
_ -> {:halt, file}
end
end,
fn file -> File.close(file) end
)
iex> Stream.resource(
...> fn ->
...> {:ok, pid} = StringIO.open("string")
...> pid
...> end,
...> fn pid ->
...> case IO.getn(pid, "", 1) do
...> :eof -> {:halt, pid}
...> char -> {[char], pid}
...> end
...> end,
...> fn pid -> StringIO.close(pid) end
...> ) |> Enum.to_list()
["s", "t", "r", "i", "n", "g"]
"""
@spec resource((() -> acc), (acc -> {[element], acc} | {:halt, acc}), (acc -> term)) ::
Enumerable.t()
def resource(start_fun, next_fun, after_fun)
when is_function(start_fun, 0) and is_function(next_fun, 1) and is_function(after_fun, 1) do
&do_resource(start_fun.(), next_fun, &1, &2, after_fun)
end
defp do_resource(next_acc, next_fun, {:suspend, acc}, fun, after_fun) do
{:suspended, acc, &do_resource(next_acc, next_fun, &1, fun, after_fun)}
end
defp do_resource(next_acc, _next_fun, {:halt, acc}, _fun, after_fun) do
after_fun.(next_acc)
{:halted, acc}
end
defp do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun) do
try do
next_fun.(next_acc)
catch
kind, reason ->
after_fun.(next_acc)
:erlang.raise(kind, reason, __STACKTRACE__)
else
{:halt, next_acc} ->
do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun)
{[], next_acc} ->
do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
{[v], next_acc} ->
do_element_resource(next_acc, next_fun, acc, fun, after_fun, v)
{list, next_acc} when is_list(list) ->
reduce = &Enumerable.List.reduce(list, &1, fun)
do_list_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun, reduce)
{enum, next_acc} ->
inner = &do_resource_each(&1, &2, fun)
reduce = &Enumerable.reduce(enum, &1, inner)
do_enum_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun, reduce)
end
end
defp do_element_resource(next_acc, next_fun, acc, fun, after_fun, v) do
try do
fun.(v, acc)
catch
kind, reason ->
after_fun.(next_acc)
:erlang.raise(kind, reason, __STACKTRACE__)
else
acc ->
do_resource(next_acc, next_fun, acc, fun, after_fun)
end
end
defp do_list_resource(next_acc, next_fun, acc, fun, after_fun, reduce) do
try do
reduce.(acc)
catch
kind, reason ->
after_fun.(next_acc)
:erlang.raise(kind, reason, __STACKTRACE__)
else
{:done, acc} ->
do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
{:halted, acc} ->
do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun)
{:suspended, acc, c} ->
{:suspended, acc, &do_list_resource(next_acc, next_fun, &1, fun, after_fun, c)}
end
end
defp do_enum_resource(next_acc, next_fun, {op, acc}, fun, after_fun, reduce) do
try do
reduce.({op, [:outer | acc]})
catch
kind, reason ->
after_fun.(next_acc)
:erlang.raise(kind, reason, __STACKTRACE__)
else
{:halted, [:outer | acc]} ->
do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
{:halted, [:inner | acc]} ->
do_resource(next_acc, next_fun, {:halt, acc}, fun, after_fun)
{:done, [_ | acc]} ->
do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun)
{:suspended, [_ | acc], c} ->
{:suspended, acc, &do_enum_resource(next_acc, next_fun, &1, fun, after_fun, c)}
end
end
defp do_resource_each(x, [:outer | acc], f) do
case f.(x, acc) do
{:halt, res} -> {:halt, [:inner | res]}
{op, res} -> {op, [:outer | res]}
end
end
@doc """
Emits a sequence of values for the given accumulator.
Successive values are generated by calling `next_fun` with the previous
accumulator and it must return a tuple with the current value and next
accumulator. The enumeration finishes if it returns `nil`.
## Examples
iex> Stream.unfold(5, fn
...> 0 -> nil
...> n -> {n, n - 1}
...> end) |> Enum.to_list()
[5, 4, 3, 2, 1]
"""
@spec unfold(acc, (acc -> {element, acc} | nil)) :: Enumerable.t()
def unfold(next_acc, next_fun) when is_function(next_fun, 1) do
&do_unfold(next_acc, next_fun, &1, &2)
end
defp do_unfold(next_acc, next_fun, {:suspend, acc}, fun) do
{:suspended, acc, &do_unfold(next_acc, next_fun, &1, fun)}
end
defp do_unfold(_next_acc, _next_fun, {:halt, acc}, _fun) do
{:halted, acc}
end
defp do_unfold(next_acc, next_fun, {:cont, acc}, fun) do
case next_fun.(next_acc) do
nil -> {:done, acc}
{v, next_acc} -> do_unfold(next_acc, next_fun, fun.(v, acc), fun)
end
end
@doc """
Lazily intersperses `intersperse_element` between each element of the enumeration.
## Examples
iex> Stream.intersperse([1, 2, 3], 0) |> Enum.to_list()
[1, 0, 2, 0, 3]
iex> Stream.intersperse([1], 0) |> Enum.to_list()
[1]
iex> Stream.intersperse([], 0) |> Enum.to_list()
[]
"""
@doc since: "1.6.0"
@spec intersperse(Enumerable.t(), any) :: Enumerable.t()
def intersperse(enumerable, intersperse_element) do
Stream.transform(enumerable, false, fn
element, true -> {[intersperse_element, element], true}
element, false -> {[element], true}
end)
end
## Helpers
@compile {:inline, lazy: 2, lazy: 3, lazy: 4}
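# lazy/2..4 compose one more reducer stage (plus optional accumulator and
# done callback) onto an existing %Stream{}, or wrap any other enumerable
# in a fresh %Stream{}. Stages are prepended here and folded back together
# in Enumerable.reduce/3 below, which is what keeps composition lazy.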
defp lazy(%Stream{done: nil, funs: funs} = lazy, fun), do: %{lazy | funs: [fun | funs]}
defp lazy(enum, fun), do: %Stream{enum: enum, funs: [fun]}
defp lazy(%Stream{done: nil, funs: funs, accs: accs} = lazy, acc, fun),
do: %{lazy | funs: [fun | funs], accs: [acc | accs]}
defp lazy(enum, acc, fun), do: %Stream{enum: enum, funs: [fun], accs: [acc]}
defp lazy(%Stream{done: nil, funs: funs, accs: accs} = lazy, acc, fun, done),
do: %{lazy | funs: [fun | funs], accs: [acc | accs], done: done}
defp lazy(enum, acc, fun, done), do: %Stream{enum: enum, funs: [fun], accs: [acc], done: done}
end
defimpl Enumerable, for: Stream do
@compile :inline_list_funs
def count(_lazy), do: {:error, __MODULE__}
def member?(_lazy, _value), do: {:error, __MODULE__}
def slice(_lazy), do: {:error, __MODULE__}
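# The user's accumulator is wrapped in a single-element list so it can be
# threaded through reduction alongside the per-stage accumulators.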
def reduce(lazy, acc, fun) do
do_reduce(lazy, acc, fn x, [acc] ->
{reason, acc} = fun.(x, acc)
{reason, [acc]}
end)
end
defp do_reduce(%Stream{enum: enum, funs: funs, accs: accs, done: done}, acc, fun) do
composed = :lists.foldl(fn entry_fun, acc -> entry_fun.(acc) end, fun, funs)
reduce = &Enumerable.reduce(enum, &1, composed)
do_each(reduce, done && {done, fun}, :lists.reverse(accs), acc)
end
defp do_each(reduce, done, accs, {command, acc}) do
case reduce.({command, [acc | accs]}) do
{:suspended, [acc | accs], continuation} ->
{:suspended, acc, &do_each(continuation, done, accs, &1)}
{:halted, accs} ->
do_done({:halted, accs}, done)
{:done, accs} ->
do_done({:done, accs}, done)
end
end
defp do_done({reason, [acc | _]}, nil), do: {reason, acc}
defp do_done({reason, [acc | t]}, {done, fun}) do
[h | _] = Enum.reverse(t)
case done.([acc, h], fun) do
{:cont, [acc | _]} -> {reason, acc}
{:halt, [acc | _]} -> {:halted, acc}
{:suspend, [acc | _]} -> {:suspended, acc, &{:done, elem(&1, 1)}}
end
end
end
defimpl Inspect, for: Stream do
import Inspect.Algebra
def inspect(%{enum: enum, funs: funs}, opts) do
inner = [enum: enum, funs: Enum.reverse(funs)]
concat(["#Stream<", to_doc(inner, opts), ">"])
end
end
defmodule Stellar.Base.StrKey do
@moduledoc false
# Logic copied from https://github.com/stellar/js-stellar-base/blob/master/src/strkey.js
import Bitwise
@version_bytes %{
# G
ed25519PublicKey: 6 <<< 3,
# S
ed25519SecretSeed: 18 <<< 3,
# T
preAuthTx: 19 <<< 3,
# X
sha256Hash: 23 <<< 3
}
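# A sketch of the expected round-trip (the 32 bytes of key material here
# are hypothetical):
#
# raw = :crypto.strong_rand_bytes(32)
# encoded = Stellar.Base.StrKey.encode_ed25519_public_key(raw)
# # => a 56-character Base32 string starting with "G"
# ^raw = Stellar.Base.StrKey.decode_ed25519_public_key(encoded)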
def is_valid_ed25519_public_key(public_key) do
is_valid(:ed25519PublicKey, public_key)
end
def encode_ed25519_public_key(data) do
encode_check(:ed25519PublicKey, data)
end
def decode_ed25519_public_key(data) do
decode_check(:ed25519PublicKey, data)
end
def is_valid_ed25519_secret_seed(secret_seed) do
is_valid(:ed25519SecretSeed, secret_seed)
end
def encode_ed25519_secret_seed(data) do
encode_check(:ed25519SecretSeed, data)
end
def decode_ed25519_secret_seed(data) do
decode_check(:ed25519SecretSeed, data)
end
def encode_pre_auth_tx(data) do
encode_check(:preAuthTx, data)
end
def decode_pre_auth_tx(data) do
decode_check(:preAuthTx, data)
end
def encode_sha256_hash(data) do
encode_check(:sha256Hash, data)
end
def decode_sha256_hash(data) do
decode_check(:sha256Hash, data)
end
def is_valid(_, encoded) when byte_size(encoded) != 56, do: false
def is_valid(_, nil), do: false
def is_valid(version_byte_name, encoded) do
case decode_check(version_byte_name, encoded) do
{:error, _} -> false
decoded when byte_size(decoded) == 32 -> true
_ -> false
end
end
def encode_check(_, nil) do
{:error, "cannot encode nil data"}
end
def encode_check(version_byte_name, _)
when version_byte_name not in [
:ed25519PublicKey,
:ed25519SecretSeed,
:preAuthTx,
:sha256Hash
] do
{:error,
"#{version_byte_name} is not a valid version byte name. expected one of :ed25519PublicKey, :ed25519SecretSeed, :preAuthTx, :sha256Hash"}
end
def encode_check(version_byte_name, data) do
version_byte = @version_bytes[version_byte_name]
payload = <<version_byte>> <> data
checksum = CRC.crc(:crc_16_xmodem, payload)
unencoded = payload <> <<checksum::little-16>>
Base.encode32(unencoded, padding: false)
end
def decode_check(version_byte_name, encoded) do
case Base.decode32(encoded) do
{:ok, decoded} ->
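# A well-formed value decodes to exactly 35 bytes: 1 version byte,
# 32 bytes of payload and a 2-byte little-endian CRC16 checksum;
# anything else raises a MatchError here.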
<<version_byte::size(8), data::binary-size(32), checksum::little-integer-size(16)>> =
decoded
expected_version = @version_bytes[version_byte_name]
cond do
is_nil(expected_version) ->
{:error, "#{version_byte_name} is not a valid version byte name"}
version_byte != expected_version ->
{:error, "invalid version byte. expected #{expected_version}, got #{version_byte}"}
true ->
expected_checksum = CRC.crc(:crc_16_xmodem, <<version_byte>> <> data)
if checksum != expected_checksum, do: {:error, "invalid checksum"}, else: data
end
_ ->
{:error, "invalid encoded string"}
end
end
end
defmodule Azalea.Zipper do
@moduledoc """
A zipper is an omni-directionally traversable wrapper around a tree that focuses on a single node, but stores enough data
to be able to reconstruct the entire tree from that point.
`Azalea.Zipper` provides such a wrapper around `Azalea.Tree`, using a stack of `Azalea.Zipper.Crumb` data structures to retain a
history of navigation through the tree. Since a tree holds references to its own children, and a crumb holds references
to the tree's parent and siblings, this allows traversal through the tree in any direction by pushing or popping crumbs
on to/from the stack.
See [Huet](https://www.st.cs.uni-saarland.de/edu/seminare/2005/advanced-fp/docs/huet-zipper.pdf) for a more complete definition.
"""
alias Azalea.{Tree, Zipper, Zipper.Crumb}
defstruct [:focus, :crumbs]
@type t :: %Azalea.Zipper{focus: Tree.t, crumbs: [Crumb.t]}
@type no_sibling_error :: {:error, :root_has_no_siblings}
@no_sibling_error {:error, :root_has_no_siblings}
@doc """
Creates a zipper focused on the given tree
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> Azalea.Zipper.from_tree(tree)
%Azalea.Zipper{
focus: %Azalea.Tree{
value: :a,
children: [
%Azalea.Tree{value: :b, children: []},
%Azalea.Tree{value: :c, children: []},
%Azalea.Tree{value: :d, children: []}
]
},
crumbs: []
}
"""
@spec from_tree(Tree.t) :: Zipper.t
def from_tree(tree = %Tree{}) do
%Zipper{
focus: tree,
crumbs: []
}
end
@doc """
Returns true if the zipper is currently focused on the root of the tree
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> Azalea.Zipper.is_root?(zipper)
true
iex> down_zipper = Azalea.Zipper.down(zipper)
iex> Azalea.Zipper.is_root?(down_zipper)
false
"""
@spec is_root?(Zipper.t) :: boolean
def is_root?(%Zipper{crumbs: []}), do: true
def is_root?(%Zipper{crumbs: [%{parent: nil}|_]}), do: true
def is_root?(%Zipper{}), do: false
@doc """
Returns true if the zipper is currently focused on the last (depth-first) node in the tree
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> Azalea.Zipper.is_end?(zipper)
false
iex> end_zipper = zipper |> Azalea.Zipper.down |> Azalea.Zipper.rightmost
iex> Azalea.Zipper.is_end?(end_zipper)
true
"""
@spec is_end?(Zipper.t) :: boolean
def is_end?(%Zipper{focus: %{children: []}, crumbs: [%{right: []}|_]}), do: true
def is_end?(%Zipper{}), do: false
@doc """
Returns a zipper all the way back up to the tree's root.
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> end_zipper = zipper |> Azalea.Zipper.down |> Azalea.Zipper.rightmost
iex> end_zipper.focus.value
:d
iex> root_zipper = Azalea.Zipper.to_root(end_zipper)
iex> root_zipper.focus.value
:a
"""
@spec to_root(Zipper.t) :: Zipper.t
def to_root(zipper = %Zipper{}) do
case is_root?(zipper) do
true -> zipper
false -> zipper |> up |> to_root
end
end
@doc """
Moves to the leftmost child of the current focus, or returns an error tuple if there are no children
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> Azalea.Zipper.down(zipper).focus.value
:b
iex> zipper |> Azalea.Zipper.down |> Azalea.Zipper.down
{:error, :no_children}
"""
@spec down(Zipper.t) :: Zipper.t | {:error, :no_children}
def down(%Zipper{focus: %Tree{children: []}}) do
{:error, :no_children}
end
def down(zipper = %Zipper{focus: focus = %Tree{children: [c|r]}}) do
%Zipper{
focus: c,
crumbs: [%Crumb{
parent: focus,
left: [],
right: r
}|zipper.crumbs]
}
end
@doc """
Moves focus to the immediate right of the current focus, or returns an error tuple if there is no right sibling
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> Azalea.Zipper.right(zipper)
{:error, :no_right_sibling}
iex> zipper = zipper |> Azalea.Zipper.down |> Azalea.Zipper.right
iex> zipper.focus.value
:c
"""
@spec right(Zipper.t) :: Zipper.t | {:error, :no_right_sibling}
def right(%Zipper{crumbs: []}) do
{:error, :no_right_sibling}
end
def right(%Zipper{crumbs: [%Zipper.Crumb{right: []}|_]}) do
{:error, :no_right_sibling}
end
def right(zipper = %Zipper{}) do
with crumbs = [crumb|_] <- zipper.crumbs do
new_left = (crumb.left ++ [zipper.focus])
[new_focus|new_right] = crumb.right
new_crumb = %Zipper.Crumb{
left: new_left,
right: new_right,
parent: crumb.parent
}
%Zipper{
focus: new_focus,
crumbs: [new_crumb|crumbs]
}
end
end
@doc """
Moves focus to the parent of the current focus, or returns an error tuple if there is no parent
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> Azalea.Zipper.up(zipper)
{:error, :no_parent}
iex> zipper = zipper |> Azalea.Zipper.down
iex> zipper.focus.value
:b
iex> Azalea.Zipper.up(zipper).focus.value
:a
"""
@spec up(Zipper.t) :: Zipper.t | {:error, :no_parent}
def up(%Zipper{crumbs: []}) do
{:error, :no_parent}
end
def up(%Zipper{crumbs: [%Zipper.Crumb{parent: nil}|_]}) do
{:error, :no_parent}
end
def up(zipper = %Zipper{}) do
with [crumb|crumbs] <- zipper.crumbs do
%Zipper{
focus: crumb.parent,
crumbs: Enum.drop_while(crumbs, fn c -> c.parent == crumb.parent end)
}
end
end
@doc """
Moves focus to the immediate left of the current focus, or returns an error tuple if there is no left sibling
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree) |> Azalea.Zipper.down
iex> Azalea.Zipper.left(zipper)
{:error, :no_left_sibling}
iex> zipper = zipper |> Azalea.Zipper.right |> Azalea.Zipper.right
iex> zipper.focus.value
:d
iex> Azalea.Zipper.left(zipper).focus.value
:c
"""
@spec left(Zipper.t) :: Zipper.t | {:error, :no_left_sibling}
def left(%Zipper{crumbs: []}) do
{:error, :no_left_sibling}
end
def left(%Zipper{crumbs: [%Zipper.Crumb{left: []}|_]}) do
{:error, :no_left_sibling}
end
def left(zipper = %Zipper{}) do
with [crumb|crumbs] <- zipper.crumbs do
new_right = [zipper.focus|crumb.right]
{new_focus, new_left} = List.pop_at(crumb.left, -1)
new_crumb = %Zipper.Crumb{
left: new_left,
right: new_right,
parent: crumb.parent
}
%Zipper{
focus: new_focus,
crumbs: [new_crumb|crumbs]
}
end
end
@doc """
Moves focus to the rightmost sibling of the current focus, or returns the current focus if it is the rightmost
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> Azalea.Zipper.rightmost(zipper).focus.value
:a
iex> zipper = zipper |> Azalea.Zipper.down |> Azalea.Zipper.rightmost
iex> zipper.focus.value
:d
iex> Azalea.Zipper.rightmost(zipper).focus.value
:d
"""
@spec rightmost(Zipper.t) :: Zipper.t
def rightmost(zipper = %Zipper{crumbs: []}), do: zipper
def rightmost(zipper = %Zipper{crumbs: [%{right: []}]}), do: zipper
def rightmost(zipper = %Zipper{}) do
with [crumb|crumbs] <- zipper.crumbs do
{new_focus, new_left} = List.pop_at(crumb.right, -1)
new_crumb = %Zipper.Crumb{
parent: crumb.parent,
right: [],
left: crumb.left ++ [zipper.focus|new_left]
}
%Zipper{
focus: new_focus,
crumbs: [new_crumb|crumbs]
}
end
end
@doc """
Moves focus to the leftmost sibling of the current focus, or returns the current focus if it is the leftmost
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> Azalea.Zipper.leftmost(zipper).focus.value
:a
iex> zipper = zipper |> Azalea.Zipper.down |> Azalea.Zipper.leftmost
iex> zipper.focus.value
:b
"""
@spec leftmost(Zipper.t) :: Zipper.t
def leftmost(zipper = %Zipper{crumbs: []}), do: zipper
def leftmost(zipper = %Zipper{crumbs: [%{left: []}]}), do: zipper
def leftmost(zipper = %Zipper{}) do
with [crumb|crumbs] <- zipper.crumbs do
[new_focus|new_right] = crumb.left
new_crumb = %Zipper.Crumb{
parent: crumb.parent,
right: new_right ++ [zipper.focus|crumb.right],
left: []
}
%Zipper{
focus: new_focus,
crumbs: [new_crumb|crumbs]
}
end
end
@doc """
Adds a new child as the rightmost child of the current focus, without changing focus
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> zipper = Azalea.Zipper.append_child(zipper, :e)
iex> zipper.focus.value
:a
iex> zipper = zipper |> Azalea.Zipper.down |> Azalea.Zipper.rightmost
iex> zipper.focus.value
:e
"""
@spec append_child(Zipper.t, any) :: Zipper.t
def append_child(zipper = %Zipper{focus: %Tree{}}, child) do
%Zipper{
focus: Tree.insert_child(zipper.focus, child, -1),
crumbs: zipper.crumbs
}
end
@doc """
Adds a new child as the leftmost child of the current focus, without changing focus
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> zipper = Azalea.Zipper.insert_child(zipper, :e)
iex> zipper.focus.value
:a
iex> zipper = zipper |> Azalea.Zipper.down |> Azalea.Zipper.leftmost
iex> zipper.focus.value
:e
"""
@spec insert_child(Zipper.t, any) :: Zipper.t
def insert_child(zipper = %Zipper{focus: %Tree{}}, child) do
%Zipper{
focus: Tree.insert_child(zipper.focus, child, 0),
crumbs: zipper.crumbs
}
end
@doc """
Adds a new sibling immediately to the left of the current focus, without changing focus
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> zipper = Azalea.Zipper.down(zipper)
iex> zipper.focus.value
:b
iex> zipper = zipper |> Azalea.Zipper.insert_left(:e)
iex> zipper.focus.value
:b
iex> Azalea.Zipper.left(zipper).focus.value
:e
"""
@spec insert_left(Zipper.t, any) :: Zipper.t | no_sibling_error
def insert_left(%Zipper{crumbs: []}, _), do: @no_sibling_error
def insert_left(%Zipper{crumbs: [%{parent: nil}|_]}, _), do: @no_sibling_error
def insert_left(zipper = %Zipper{}, sibling) do
[crumb|crumbs] = zipper.crumbs
new_parent = Tree.insert_child(crumb.parent, sibling, length(crumb.left))
new_left = crumb.left ++ [Enum.at(new_parent.children, length(crumb.left))]
new_crumb = %{crumb | parent: new_parent, left: new_left}
new_crumbs = Enum.map(crumbs, fn c ->
case c.parent == crumb.parent do
true -> %{c | parent: new_parent}
false -> c
end
end)
%{zipper | crumbs: [new_crumb|new_crumbs]}
end
@doc """
Adds a new sibling immediately to the right of the current focus, without changing focus
iex> tree = Azalea.Tree.new(:a, [:b, :c, :d])
iex> zipper = Azalea.Zipper.from_tree(tree)
iex> zipper = Azalea.Zipper.down(zipper)
iex> zipper.focus.value
:b
iex> zipper = zipper |> Azalea.Zipper.insert_right(:e)
iex> zipper.focus.value
:b
iex> Azalea.Zipper.right(zipper).focus.value
:e
"""
@spec insert_right(Zipper.t, any) :: Zipper.t | no_sibling_error
def insert_right(%Zipper{crumbs: []}, _), do: @no_sibling_error
def insert_right(%Zipper{crumbs: [%{parent: nil}|_]}, _), do: @no_sibling_error
def insert_right(zipper = %Zipper{}, sibling) do
[crumb|crumbs] = zipper.crumbs
new_sibling_index = length(crumb.left) + 1
new_parent = Tree.insert_child(crumb.parent, sibling, new_sibling_index)
new_right = [Enum.at(new_parent.children, new_sibling_index)|crumb.right]
new_crumb = %{crumb | parent: new_parent, right: new_right}
new_crumbs = Enum.map(crumbs, fn c ->
case c.parent == crumb.parent do
true -> %{c | parent: new_parent}
false -> c
end
end)
%{zipper | crumbs: [new_crumb|new_crumbs]}
end
end
defmodule Easypost.Client do
@moduledoc """
Access the Easypost API from Elixir using maps and returning structs
##Usage:
First, add your test key to config/test.exs and config/dev.exs like this:
config :myapp, easypost_endpoint: "https://api.easypost.com/v2/",
easypost_key: "your test key"
Then, define endpoint and key in module where you will use client:
defmodule Myapp.Mymodule do
use Easypost.Client, endpoint: Application.get_env(:my_app, :easypost_endpoint),
key: Application.get_env(:my_app, :easypost_key)
#omitted...
end
Now, the Easypost functions will be available in the module:
#add an address by passing a binary key map (%{"foo" => "bar"})
create_address(user.address)
# Create and verify an address
create_and_verify_address(user.address)
#gets a list of rate quotes
create_shipment(shipment)
#purchases a shipment using a particular rate where shipment_id is the Easypost id of the shipment and rate is a map containing the Easypost rate id
buy_shipment(shipment_id, rate)
#creates a batch of shipments using either a list of previously created shipping ids or shipment maps
create_batch(shipments)
All functions return either {:ok, (struct)} or {:error, %Easypost.Error{}}, so you should pattern match the result of the functions.
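For example (`handle_error/1` stands in for your own error handling):
case create_address(user.address) do
{:ok, address} -> address
{:error, %Easypost.Error{} = error} -> handle_error(error)
end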
For more examples, see tests.
"""
defmacro __using__(config) do
quote do
def conf, do: unquote(config)
def create_address(address) do
unquote(Easypost.Address).create_address(conf(), address)
end
def create_and_verify_address(address) do
unquote(Easypost.Address).create_and_verify_address(conf(), address)
end
def create_parcel(parcel) do
unquote(Easypost.Parcel).create_parcel(conf(), parcel)
end
def create_shipment(shipment) do
unquote(Easypost.Shipment).create_shipment(conf(), shipment)
end
def create_batch(shipments) do
unquote(Easypost.Batch).create_batch(conf(), shipments)
end
def create_and_buy_batch(shipments) do
unquote(Easypost.Batch).create_and_buy_batch(conf(), shipments)
end
def batch_labels(batch_id, label) do
unquote(Easypost.Batch).batch_labels(conf(), batch_id, label)
end
def add_to_batch(batch_id, shipments) do
unquote(Easypost.Batch).add_to_batch(conf(), batch_id, shipments)
end
def remove_from_batch(batch_id, shipments) do
unquote(Easypost.Batch).remove_from_batch(conf(), batch_id, shipments)
end
def insure_shipment(shipment_id, insurance) do
unquote(Easypost.Shipment).insure_shipment(conf(), shipment_id, insurance)
end
def buy_shipment(shipment_id, rate) do
unquote(Easypost.Shipment).buy_shipment(conf(), shipment_id, rate)
end
def create_customs_info(customs_info) do
unquote(Easypost.CustomsInfo).create_customs_info(conf(), customs_info)
end
def create_pickup(pickup) do
unquote(Easypost.Pickup).create_pickup(conf(), pickup)
end
def buy_pickup(pickup_id, pickup) do
unquote(Easypost.Pickup).buy_pickup(conf(), pickup_id, pickup)
end
def cancel_pickup(pickup_id) do
unquote(Easypost.Pickup).cancel_pickup(conf(), pickup_id)
end
def track(tracking) do
unquote(Easypost.Tracker).track(conf(), tracking)
end
def create_user(user) do
unquote(Easypost.User).create_user(conf(), user)
end
def get_child_api_keys() do
unquote(Easypost.User).get_child_api_keys(conf())
end
def add_carrier_account(carrier) do
unquote(Easypost.User).add_carrier_account(conf(), carrier)
end
def refund_usps_label(shipment_id) do
unquote(Easypost.Shipment).refund_usps_label(conf(), shipment_id)
end
end
end
end
defmodule Pair2.Matcher do
@moduledoc """
Rules-based matcher for finding optimal 1:1 matches between two lists of maps.
"""
alias Pair2.{
Comparer,
Index
}
@doc """
Performs 1:1 match of two lists of maps, list_l and list_r, by applying
rules from a list of rule structs. For two maps to match, their match score
must be >= min_score.
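## Example
A sketch only: apart from `:indexed` and `:right_attr`, which this module
reads directly, the rule struct and its remaining fields are assumptions:
lefts = [%{id: 1, phone: "555-0100"}]
rights = [%{id: 9, phone: "555-0100"}]
rules = [%Pair2.Rule{left_attr: :phone, right_attr: :phone, indexed: true, weight: 1.0}]
Pair2.Matcher.match(lefts, rights, rules, 1.0)
#=> {:ok, [{1, 9, 1.0}]}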
"""
def match(list_l, list_r, rules, min_score) do
with {:ok, indexed_attrs} <- get_indexed_rule_attrs(rules),
{:ok, index_map} <- Index.load_indexes(list_r, indexed_attrs),
{:ok, all_matches} <- get_all_matches(index_map, list_l, indexed_attrs, rules, min_score)
do
{:ok, resolve(all_matches)}
else
{:error, reason} -> raise reason
end
end
defp get_indexed_rule_attrs(rules) do
indexed_attrs = rules
|> Enum.filter(&(&1.indexed))
|> Enum.map(&(&1.right_attr))
case Enum.count(indexed_attrs) do
0 -> {:error, "At least one attribute must be indexed"}
_ -> {:ok, indexed_attrs}
end
end
defp get_all_matches(index_map, list_l, indexed_attrs, rules, min_score) do
matches = list_l
|> Enum.map(fn left_map ->
right_matches = get_right_matches(left_map, index_map, indexed_attrs, rules, min_score)
{left_map.id, right_matches}
end)
|> Enum.filter(fn {_left, rights} -> Enum.count(rights) > 0 end) # drop lefts with no matches
|> Enum.reduce(%{}, fn {left, rights}, map -> # convert to map of form %{left => [{right, score}, ...]}
Map.put(map, left, rights)
end)
{:ok, matches}
end
defp get_right_matches(left_map, index_map, indexed_attrs, rules, min_score) do
case Index.get_potential_matches(left_map, index_map, indexed_attrs) do
[nil] -> []
rights ->
rights
|> Enum.map(fn right_map ->
{right_map.id, Comparer.compare_maps(left_map, right_map, rules)}
end)
|> Enum.filter(fn {_rm, score} -> score >= min_score end)
|> Enum.sort(&(elem(&1, 1) >= elem(&2, 1))) # sort by best score desc
end
end
@doc """
Resolves conflicts between left and right sides. Conflicts occur when a single
right map is the highest-scoring match to more than one left map.
Returns a list of finalized match tuples of form:
{left, right, score}
For each left map:
1) Add all to an "unresolved" list.
2) For each left map in the unresolved list, choose the highest available match
if it hasn't already been assigned. If it has been assigned, award the match
to the pair with the highest score. Add the losing map back onto the unresolved list.
3) Continue until the unresolved list is empty.
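For example, when two lefts compete for the same right (candidates are
`{right_id, score}` pairs, sorted best-first):

    resolve(%{1 => [{10, 0.9}, {11, 0.5}], 2 => [{10, 0.8}]})
    #=> [{1, 10, 0.9}]

Left 2 loses the conflict over right 10 and has no other candidate, so it ends
up unmatched.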
"""
def resolve(matches) do
unresolved = Map.keys(matches)
resolve(unresolved, matches, %{})
end
defp resolve([], _all, matched_rights) do
# Return list of form { left, right, score }
matched_rights
|> Map.keys
|> Enum.reduce([], fn right, list ->
{left, score} = Map.fetch!(matched_rights, right)
[{left, right, score}] ++ list
end)
|> Enum.reverse
end
defp resolve([uh|ut], all, matched_rights) do
rights = Map.fetch!(all, uh)
if Enum.empty?(rights) do
resolve(ut, all, matched_rights)
else
{right_match, score, new_rights, unresolved} = best_match(uh, rights, matched_rights)
# Update the list of all matches with a reduced list of right match
# options. All options are retained until conflict resolution is
# complete because a given left map may be temporarily matched to
# multiple right maps during the process.
new_all = Map.put(all, uh, new_rights)
new_unresolved = case unresolved do
nil ->
ut
_ ->
[unresolved] ++ ut
end
case right_match do
nil ->
resolve(new_unresolved, new_all, matched_rights)
_ ->
resolve(new_unresolved, new_all, Map.put(matched_rights, right_match, {uh, score}))
end
end
end
# For a given left map, find the highest-scoring right map
# that is available for matching. If a previously-existing matched pair
# has a lower score, replace it and add that previous left map back to
# the unresolved list.
defp best_match(_l, [], _mr), do: {nil, 0.0, [], nil} # no candidates remain for this left
defp best_match(left, [rh|rt], matched_rights) do
{right, score} = rh
case Map.fetch(matched_rights, right) do
{:ok, {previous_left, previous_score}} ->
if score > previous_score do
# Replace the previous winner with this left.
{right, score, rt, previous_left}
else
# Previous winner remains. Keep searching.
best_match(left, rt, matched_rights)
end
:error ->
# No previous match so this left is the winner.
{right, score, rt, nil}
end
end
end | lib/matcher.ex | 0.84966 | 0.580471 | matcher.ex | starcoder |
defmodule Artificery.Console.Table do
@moduledoc """
A printer for tabular data.
"""
@doc """
Given a title, header, and rows, prints the data as a table.
Takes the same options as `format/4`.
"""
def print(title, header, rows, opts \\ []) do
IO.write(format(title, header, rows, opts))
end
@doc """
Given a title, header, and rows, formats the data as a table.
Takes an optional keyword list of options:
* `:padding` - (integer) sets the padding around columns
This function formats the data as iodata; it is up to the caller to print it.
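For example:

    Artificery.Console.Table.print("Users", ["name", "age"], [["alice", 34], ["bob", 27]])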
"""
def format(title, header, rows, opts \\ []) do
padding = Keyword.get(opts, :padding, 1)
header = [header]
rows = stringify(rows)
widths = rows |> transpose() |> column_widths()
widths =
header
|> transpose()
|> column_widths()
|> Enum.with_index()
|> Enum.map(fn {c, i} -> max(c, Enum.at(widths, i)) end)
head =
header
|> pad_cells(widths, padding)
|> Enum.map(&[&1, ?\n])
head_len =
head
|> IO.iodata_to_binary()
|> String.length()
separator = [String.duplicate("-", head_len)]
tail =
rows
|> pad_cells(widths, padding)
|> Enum.map(&[&1, ?\n, separator, ?\n])
[IO.ANSI.bright(), title, IO.ANSI.reset(), ?\n, ?\n, head, separator, ?\n, tail]
end
defp stringify(rows, acc \\ [])
defp stringify([], acc), do: Enum.reverse(acc)
defp stringify([row | rest], acc), do: stringify(rest, [Enum.map(row, &to_string/1) | acc])
defp transpose(rows), do: rows |> List.zip() |> Enum.map(&Tuple.to_list/1)
defp column_widths(cols) do
Enum.map(cols, fn c -> c |> Enum.map(&byte_size/1) |> Enum.max() end)
end
defp pad_cells(rows, widths, padding) do
for r <- rows do
last_i = length(r) - 1
for {{val, width}, i} <- r |> Enum.zip(widths) |> Enum.with_index() do
if i == last_i do
# Don't pad last col
val
else
calculated_padding = max(width - byte_size(val) + padding, padding)
[val, String.pad_leading("", calculated_padding), ?|, ?\s]
end
end
end
end
end | lib/console/table.ex | 0.756807 | 0.599895 | table.ex | starcoder |
defmodule Boundary.Classifier do
@moduledoc false
@type t :: %{boundaries: %{Boundary.name() => Boundary.t()}, modules: %{module() => Boundary.name()}}
@spec new :: t
def new, do: %{boundaries: %{}, modules: %{}}
@spec delete(t, atom) :: t
def delete(classifier, app) do
boundaries_to_delete =
classifier.boundaries
|> Map.values()
|> Stream.filter(&(&1.app == app))
|> Enum.map(& &1.name)
boundaries = Map.drop(classifier.boundaries, boundaries_to_delete)
modules =
for {_, boundary} = entry <- classifier.modules,
Map.has_key?(boundaries, boundary),
do: entry,
into: %{}
%{classifier | boundaries: boundaries, modules: modules}
end
@spec classify(t, [module], [Boundary.t()]) :: t
def classify(classifier, modules, boundaries) do
trie = build_trie(boundaries)
classifier = %{
classifier
| boundaries:
trie
|> boundaries()
|> Stream.map(fn
%{top_level?: true} = boundary -> %{boundary | ancestors: []}
%{top_level?: false} = boundary -> boundary
end)
|> Stream.map(&Map.delete(&1, :top_level?))
|> Enum.into(classifier.boundaries, &{&1.name, &1})
}
for module <- modules,
boundary = find_boundary(trie, module),
reduce: classifier do
classifier -> Map.update!(classifier, :modules, &Map.put(&1, module, boundary.name))
end
end
defp boundaries(trie, ancestors \\ []) do
ancestors = if is_nil(trie.boundary), do: ancestors, else: [trie.boundary.name | ancestors]
child_boundaries =
trie.children
|> Map.values()
|> Enum.flat_map(&boundaries(&1, ancestors))
if is_nil(trie.boundary),
do: child_boundaries,
else: [Map.put(trie.boundary, :ancestors, tl(ancestors)) | child_boundaries]
end
defp build_trie(boundaries), do: Enum.reduce(boundaries, new_trie(), &add_boundary(&2, &1))
defp new_trie, do: %{boundary: nil, children: %{}}
defp find_boundary(trie, module) when is_atom(module) do
case Boundary.Definition.classified_to(module) do
nil ->
if Boundary.protocol_impl?(module),
do: nil,
else: find_boundary(trie, Module.split(module))
classified_to ->
boundary = find_boundary(trie, classified_to.boundary)
unless boundary do
message = "invalid boundary #{inspect(classified_to.boundary)}"
raise Boundary.Error, message: message, file: classified_to.file, line: classified_to.line
end
boundary
end
end
defp find_boundary(_trie, []), do: nil
defp find_boundary(trie, [part | rest]) do
case Map.fetch(trie.children, part) do
{:ok, child_trie} -> find_boundary(child_trie, rest) || child_trie.boundary
:error -> nil
end
end
defp add_boundary(trie, boundary),
do: add_boundary(trie, Module.split(boundary.name), boundary)
defp add_boundary(trie, [], boundary), do: %{trie | boundary: boundary}
defp add_boundary(trie, [part | rest], boundary) do
Map.update!(
trie,
:children,
fn children ->
children
|> Map.put_new_lazy(part, &new_trie/0)
|> Map.update!(part, &add_boundary(&1, rest, boundary))
end
)
end
end | lib/boundary/classifier.ex | 0.806167 | 0.4206 | classifier.ex | starcoder |
defmodule Phoenix.Router.Route do
# This module defines the Route struct that is used
# throughout Phoenix's router. This struct is private
# as it contains internal routing information.
@moduledoc false
alias Phoenix.Router.Route
@doc """
The `Phoenix.Router.Route` struct. It stores:
* :verb - the HTTP verb as an upcased string
* :kind - the kind of route, one of `:match`, `:forward`
* :path - the normalized path as string
* :host - the request host or host prefix
* :plug - the plug module
* :opts - the plug options
* :helper - the name of the helper as a string (may be nil)
* :private - the private route info
* :assigns - the route info
* :pipe_through - the pipeline names as a list of atoms
"""
defstruct [:verb, :kind, :path, :host, :plug, :opts,
:helper, :private, :pipe_through, :assigns]
@type t :: %Route{}
@doc """
Receives the verb, path, plug, options and helper
and returns a `Phoenix.Router.Route` struct.
"""
@spec build(:match | :forward, String.t, String.t, String.t | nil, atom, atom, atom | nil, atom, %{}, %{}) :: t
def build(kind, verb, path, host, plug, opts, helper, pipe_through, private, assigns)
when is_atom(verb) and (is_binary(host) or is_nil(host)) and
is_atom(plug) and (is_binary(helper) or is_nil(helper)) and
is_list(pipe_through) and is_map(private) and is_map(assigns)
and kind in [:match, :forward] do
%Route{kind: kind, verb: verb, path: path, host: host, private: private,
plug: plug, opts: opts, helper: helper,
pipe_through: pipe_through, assigns: assigns}
end
@doc """
Builds the expressions used by the route.
"""
def exprs(route) do
{path, binding} = build_path_and_binding(route)
%{path: path,
host: build_host(route.host),
verb_match: verb_match(route.verb),
binding: binding,
dispatch: build_dispatch(route, binding)}
end
defp verb_match(:*), do: Macro.var(:_verb, nil)
defp verb_match(verb), do: verb |> to_string() |> String.upcase()
defp build_path_and_binding(%Route{path: path} = route) do
{params, segments} = case route.kind do
:forward -> Plug.Router.Utils.build_path_match(path <> "/*_forward_path_info")
:match -> Plug.Router.Utils.build_path_match(path)
end
binding = for var <- params, var != :_forward_path_info do
{Atom.to_string(var), Macro.var(var, nil)}
end
{segments, binding}
end
defp build_host(host) do
cond do
is_nil(host) -> quote do: _
String.last(host) == "." -> quote do: unquote(host) <> _
true -> host
end
end
defp build_dispatch(route, binding) do
exprs =
[maybe_binding(binding),
maybe_merge(:private, route.private),
maybe_merge(:assigns, route.assigns),
build_pipes(route)]
{:__block__, [], Enum.filter(exprs, & &1 != nil)}
end
defp maybe_merge(key, data) do
if map_size(data) > 0 do
quote do
var!(conn) =
update_in var!(conn).unquote(key), &Map.merge(&1, unquote(Macro.escape(data)))
end
end
end
defp maybe_binding([]), do: nil
defp maybe_binding(binding) do
quote do
var!(conn) =
update_in var!(conn).params, &Map.merge(&1, unquote({:%{}, [], binding}))
end
end
defp build_pipes(%Route{kind: :forward} = route) do
{_params, fwd_segments} = Plug.Router.Utils.build_path_match(route.path)
quote do
var!(conn)
|> Plug.Conn.put_private(:phoenix_pipelines, unquote(route.pipe_through))
|> Plug.Conn.put_private(:phoenix_route, fn conn ->
opts = unquote(route.plug).init(unquote(route.opts))
Phoenix.Router.Route.forward(conn, unquote(fwd_segments), unquote(route.plug), opts)
end)
end |> pipe_through(route)
end
defp build_pipes(route) do
quote do
var!(conn)
|> Plug.Conn.put_private(:phoenix_pipelines, unquote(route.pipe_through))
|> Plug.Conn.put_private(:phoenix_route, fn conn ->
opts = unquote(route.plug).init(unquote(route.opts))
unquote(route.plug).call(conn, opts)
end)
end |> pipe_through(route)
end
defp pipe_through(initial, route) do
Enum.reduce(route.pipe_through, initial, &{&1, [], [&2, []]})
end
@doc """
Forwards requests to another Plug at a new path.
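For instance (illustrative; `AdminApp.Router` is a hypothetical plug), a conn with
`path_info: ["admin", "users"]` forwarded over the `["admin"]` segments reaches the
target with `path_info: ["users"]`:

    Phoenix.Router.Route.forward(conn, ["admin"], AdminApp.Router, [])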
"""
def forward(%Plug.Conn{path_info: path, script_name: script} = conn, fwd_segments, target, opts) do
new_path = path -- fwd_segments
{base, ^new_path} = Enum.split(path, length(path) - length(new_path))
conn = %{conn | path_info: new_path, script_name: script ++ base} |> target.call(opts)
%{conn | path_info: path, script_name: script}
end
@doc """
Validates and returns the list of forward path segments.
Raises `ArgumentError` if the plug has already been forwarded to or the path
contains a dynamic segment.
"""
def forward_path_segments(path, plug, phoenix_forwards) do
case Plug.Router.Utils.build_path_match(path) do
{[], path_segments} ->
if phoenix_forwards[plug] do
raise ArgumentError, "`#{inspect plug}` has already been forwarded to. A module can only be forwarded a single time."
end
path_segments
_ ->
raise ArgumentError, "Dynamic segment `\"#{path}\"` not allowed when forwarding. Use a static path instead."
end
end
end | lib/phoenix/router/route.ex | 0.790207 | 0.478224 | route.ex | starcoder |
defmodule Mix.Triplex do
@moduledoc """
Useful functions for any triplex mix task.
Here is the list of tasks we have for now:
- [`mix triplex.gen.migration`](./Mix.Tasks.Triplex.Gen.Migration.html) -
generates a tenant migration for the repo
- [`mix triplex.migrate`](./Mix.Tasks.Triplex.Migrate.html) -
runs the repository tenant migrations
- [`mix triplex.migrations`](./Mix.Tasks.Triplex.Migrations.html) -
displays the repository migration status
- [`mix triplex.rollback`](./Mix.Tasks.Triplex.Rollback.html) -
rolls back the repository tenant migrations
"""
alias Mix.Project
import Mix.Ecto, only: [source_repo_priv: 1]
import Triplex, only: [config: 0]
@doc """
Returns the path for your tenant migrations.
"""
def migrations_path(repo \\ config().repo)
def migrations_path(nil) do
""
end
def migrations_path(repo) do
repo
|> source_repo_priv()
|> Path.join(config().migrations_path)
end
@doc """
Ensures the migrations path exists for the given `repo`.
You can optionally give us the project `config` keyword list, the options we
use are:
- `apps_path` - this will be used to decide if it is an umbrella project, in
this case it never fails, because umbrellas does not have migrations and
that's right
- `app_path` - and this will be used to get the full path to the migrations
directory, which is relative to this path
Returns the unchanged `repo` on success, or raises via `Mix.raise` on failure.
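A hypothetical invocation (`MyApp.Repo` is assumed):

    Mix.Triplex.ensure_tenant_migrations_path(MyApp.Repo)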
"""
def ensure_tenant_migrations_path(repo, config \\ Project.config()) do
with false <- Project.umbrella?(config),
path = relative_migrations_path(repo, config),
false <- File.dir?(path) do
Mix.raise """
Could not find tenant migrations directory #{inspect path} for
repo #{inspect repo}
"""
end
repo
end
defp relative_migrations_path(repo, config) do
repo
|> migrations_path()
|> Path.relative_to(Project.app_path(config))
end
end | lib/mix/triplex.ex | 0.859147 | 0.55097 | triplex.ex | starcoder |
defmodule Matrex.IDX do
@moduledoc false
# IDX format data types
@unsigned_byte 0x08
@signed_byte 0x09
@short 0x0B
@integer 0x0C
@float 0x0D
@double 0x0E
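# `load/1` and `read!/1` convert IDX-encoded data into the Matrex binary layout:
# a <<rows::unsigned-integer-little-32, cols::unsigned-integer-little-32>> header
# followed by every element as a little-endian 32-bit float.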
@spec load(binary) :: binary
def load(data) when is_binary(data) do
<<0, 0, data_type, dimensions_count>> = binary_part(data, 0, 4)
dimensions = binary_part(data, 4, dimensions_count * 4) |> binary_to_list_of_integers()
[rows | other] = dimensions
cols = Enum.reduce(other, 1, &(&1 * &2))
initial = <<rows::unsigned-integer-little-32, cols::unsigned-integer-little-32>>
idx_data =
binary_part(data, 4 + dimensions_count * 4, byte_size(data) - (4 + dimensions_count * 4))
idx_to_float_binary(initial, idx_data, data_type)
end
def read!(file_name) do
{:ok, file} = File.open(file_name)
<<0, 0, data_type, dimensions>> = IO.binread(file, 4)
dimensions = IO.binread(file, dimensions * 4) |> binary_to_list_of_integers()
[rows | other] = dimensions
cols = Enum.reduce(other, 1, &(&1 * &2))
initial = <<rows::unsigned-integer-little-32, cols::unsigned-integer-little-32>>
idx_data = IO.binread(file, :all)
File.close(file)
idx_to_float_binary(initial, idx_data, data_type)
end
def idx_to_float_binary(result, <<>>, _), do: result
def idx_to_float_binary(result, <<elem::unsigned-integer-8, rest::binary>>, @unsigned_byte),
do: idx_to_float_binary(<<result::binary, elem::float-little-32>>, rest, @unsigned_byte)
def idx_to_float_binary(result, <<elem::signed-integer-8, rest::binary>>, @signed_byte),
do: idx_to_float_binary(<<result::binary, elem::float-little-32>>, rest, @signed_byte)
def idx_to_float_binary(result, <<elem::unsigned-integer-big-16, rest::binary>>, @short),
do: idx_to_float_binary(<<result::binary, elem::float-little-32>>, rest, @short)
def idx_to_float_binary(result, <<elem::integer-big-32, rest::binary>>, @integer),
do: idx_to_float_binary(<<result::binary, elem::float-little-32>>, rest, @integer)
def idx_to_float_binary(result, <<elem::float-big-32, rest::binary>>, @float),
do: idx_to_float_binary(<<result::binary, elem::float-little-32>>, rest, @float)
def idx_to_float_binary(result, <<elem::float-big-64, rest::binary>>, @double),
do: idx_to_float_binary(<<result::binary, elem::float-little-32>>, rest, @double)
defp binary_to_list_of_integers(binary, init \\ [])
defp binary_to_list_of_integers(<<>>, list), do: Enum.reverse(list)
defp binary_to_list_of_integers(<<value::unsigned-integer-big-32, rest::binary>>, list),
do: binary_to_list_of_integers(rest, [value | list])
end | lib/matrex/idx.ex | 0.584034 | 0.420957 | idx.ex | starcoder |
defmodule AWS.KinesisAnalyticsV2 do
@moduledoc """
Amazon Kinesis Data Analytics is a fully managed service that you can use to
process and analyze streaming data using Java, SQL, or Scala.
The service enables you to quickly author and run Java, SQL, or Scala code
against streaming sources to perform time series analytics, feed real-time
dashboards, and create real-time metrics.
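A minimal call sketch (the `AWS.Client.create/3` constructor and the shape of the
success tuple are assumptions about the surrounding `aws-elixir` library):

    client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
    {:ok, result, _http_response} = AWS.KinesisAnalyticsV2.list_applications(client, %{})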
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "Kinesis Analytics V2",
api_version: "2018-05-23",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "kinesisanalytics",
global?: false,
protocol: "json",
service_id: "Kinesis Analytics V2",
signature_version: "v4",
signing_name: "kinesisanalytics",
target_prefix: "KinesisAnalytics_20180523"
}
end
@doc """
Adds an Amazon CloudWatch log stream to monitor application configuration
errors.
"""
def add_application_cloud_watch_logging_option(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"AddApplicationCloudWatchLoggingOption",
input,
options
)
end
@doc """
Adds a streaming source to your SQL-based Kinesis Data Analytics application.
You can add a streaming source when you create an application, or you can use
this operation to add a streaming source after you create an application. For
more information, see `CreateApplication`.
Any configuration update, including adding a streaming source using this
operation, results in a new version of the application. You can use the
`DescribeApplication` operation to find the current application version.
"""
def add_application_input(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddApplicationInput", input, options)
end
@doc """
Adds an `InputProcessingConfiguration` to a SQL-based Kinesis Data Analytics
application.
An input processor pre-processes records on the input stream before the
application's SQL code executes. Currently, the only input processor available
is [AWS Lambda](https://docs.aws.amazon.com/lambda/).
"""
def add_application_input_processing_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"AddApplicationInputProcessingConfiguration",
input,
options
)
end
@doc """
Adds an external destination to your SQL-based Kinesis Data Analytics
application.
If you want Kinesis Data Analytics to deliver data from an in-application stream
within your application to an external destination (such as an Kinesis data
stream, a Kinesis Data Firehose delivery stream, or an AWS Lambda function), you
add the relevant configuration to your application using this operation. You can
configure one or more outputs for your application. Each output configuration
maps an in-application stream and an external destination.
You can use one of the output configurations to deliver data from your
in-application error stream to an external destination so that you can analyze
the errors.
Any configuration update, including adding a streaming source using this
operation, results in a new version of the application. You can use the
`DescribeApplication` operation to find the current application version.
"""
def add_application_output(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddApplicationOutput", input, options)
end
@doc """
Adds a reference data source to an existing SQL-based Kinesis Data Analytics
application.
Kinesis Data Analytics reads reference data (that is, an Amazon S3 object) and
creates an in-application table within your application. In the request, you
provide the source (S3 bucket name and object key name), name of the
in-application table to create, and the necessary mapping information that
describes how data in an Amazon S3 object maps to columns in the resulting
in-application table.
"""
def add_application_reference_data_source(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddApplicationReferenceDataSource", input, options)
end
@doc """
Adds a Virtual Private Cloud (VPC) configuration to the application.
Applications can use VPCs to store and access resources securely.
Note the following about VPC configurations for Kinesis Data Analytics
applications:
* VPC configurations are not supported for SQL applications.
* When a VPC is added to a Kinesis Data Analytics application, the
application can no longer be accessed from the Internet directly. To enable
Internet access to the application, add an Internet gateway to your VPC.
"""
def add_application_vpc_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddApplicationVpcConfiguration", input, options)
end
@doc """
Creates a Kinesis Data Analytics application.
For information about creating a Kinesis Data Analytics application, see
[Creating an Application](https://docs.aws.amazon.com/kinesisanalytics/latest/java/getting-started.html).
"""
def create_application(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateApplication", input, options)
end
@doc """
Creates and returns a URL that you can use to connect to an application's
extension.
Currently, the only available extension is the Apache Flink dashboard.
The IAM role or user used to call this API defines the permissions to access the
extension. After the presigned URL is created, no additional permission is
required to access this URL. IAM authorization policies for this API are also
enforced for every HTTP request that attempts to connect to the extension.
You control the amount of time that the URL will be valid using the
`SessionExpirationDurationInSeconds` parameter. If you do not provide this
parameter, the returned URL is valid for twelve hours.
The URL that you get from a call to CreateApplicationPresignedUrl must be used
within 3 minutes to be valid. If you first try to use the URL after the 3-minute
limit expires, the service returns an HTTP 403 Forbidden error.
"""
def create_application_presigned_url(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateApplicationPresignedUrl", input, options)
end
@doc """
Creates a snapshot of the application's state data.
"""
def create_application_snapshot(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateApplicationSnapshot", input, options)
end
@doc """
Deletes the specified application.
Kinesis Data Analytics halts application execution and deletes the application.
"""
def delete_application(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteApplication", input, options)
end
@doc """
Deletes an Amazon CloudWatch log stream from an Kinesis Data Analytics
application.
"""
def delete_application_cloud_watch_logging_option(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DeleteApplicationCloudWatchLoggingOption",
input,
options
)
end
@doc """
Deletes an `InputProcessingConfiguration` from an input.
"""
def delete_application_input_processing_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DeleteApplicationInputProcessingConfiguration",
input,
options
)
end
@doc """
Deletes the output destination configuration from your SQL-based Kinesis Data
Analytics application's configuration.
Kinesis Data Analytics will no longer write data from the corresponding
in-application stream to the external output destination.
"""
def delete_application_output(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteApplicationOutput", input, options)
end
@doc """
Deletes a reference data source configuration from the specified SQL-based
Kinesis Data Analytics application's configuration.
If the application is running, Kinesis Data Analytics immediately removes the
in-application table that you created using the
`AddApplicationReferenceDataSource` operation.
"""
def delete_application_reference_data_source(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DeleteApplicationReferenceDataSource",
input,
options
)
end
@doc """
Deletes a snapshot of application state.
"""
def delete_application_snapshot(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteApplicationSnapshot", input, options)
end
@doc """
Removes a VPC configuration from a Kinesis Data Analytics application.
"""
def delete_application_vpc_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteApplicationVpcConfiguration", input, options)
end
@doc """
Returns information about a specific Kinesis Data Analytics application.
If you want to retrieve a list of all applications in your account, use the
`ListApplications` operation.
"""
def describe_application(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeApplication", input, options)
end
@doc """
Returns information about a snapshot of application state data.
"""
def describe_application_snapshot(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeApplicationSnapshot", input, options)
end
@doc """
Provides a detailed description of a specified version of the application.
To see a list of all the versions of an application, invoke the
`ListApplicationVersions` operation.
This operation is supported only for Amazon Kinesis Data Analytics for Apache
Flink.
"""
def describe_application_version(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeApplicationVersion", input, options)
end
@doc """
Infers a schema for a SQL-based Kinesis Data Analytics application by evaluating
sample records on the specified streaming source (Kinesis data stream or Kinesis
Data Firehose delivery stream) or Amazon S3 object.
In the response, the operation returns the inferred schema and also the sample
records that the operation used to infer the schema.
You can use the inferred schema when configuring a streaming source for your
application. When you create an application using the Kinesis Data Analytics
console, the console uses this operation to infer a schema and show it in the
console user interface.
"""
def discover_input_schema(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DiscoverInputSchema", input, options)
end
@doc """
Lists information about the current application snapshots.
"""
def list_application_snapshots(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListApplicationSnapshots", input, options)
end
@doc """
Lists all the versions for the specified application, including versions that
were rolled back.
The response also includes a summary of the configuration associated with each
version.
To get the complete description of a specific application version, invoke the
`DescribeApplicationVersion` operation.
This operation is supported only for Amazon Kinesis Data Analytics for Apache
Flink.
"""
def list_application_versions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListApplicationVersions", input, options)
end
@doc """
Returns a list of Kinesis Data Analytics applications in your account.
For each application, the response includes the application name, Amazon
Resource Name (ARN), and status.
If you want detailed information about a specific application, use
`DescribeApplication`.
"""
def list_applications(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListApplications", input, options)
end
@doc """
Retrieves the list of key-value tags assigned to the application.
For more information, see [Using Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Reverts the application to the previous running version.
You can roll back an application if you suspect it is stuck in a transient
status.
You can roll back an application only if it is in the `UPDATING` or
`AUTOSCALING` status.
When you rollback an application, it loads state data from the last successful
snapshot. If the application has no snapshots, Kinesis Data Analytics rejects
the rollback request.
This action is not supported for Kinesis Data Analytics for SQL applications.
"""
def rollback_application(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RollbackApplication", input, options)
end
@doc """
Starts the specified Kinesis Data Analytics application.
After creating an application, you must exclusively call this operation to start
your application.
"""
def start_application(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartApplication", input, options)
end
@doc """
Stops the application from processing data.
You can stop an application only if it is in the running status, unless you set
the `Force` parameter to `true`.
You can use the `DescribeApplication` operation to find the application status.
Kinesis Data Analytics takes a snapshot when the application is stopped, unless
`Force` is set to `true`.
"""
def stop_application(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopApplication", input, options)
end
@doc """
Adds one or more key-value tags to a Kinesis Data Analytics application.
Note that the maximum number of application tags includes system tags. The
maximum number of user-defined application tags is 50. For more information, see
[Using Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes one or more tags from a Kinesis Data Analytics application.
For more information, see [Using Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates an existing Kinesis Data Analytics application.
Using this operation, you can update application code, input configuration, and
output configuration.
Kinesis Data Analytics updates the `ApplicationVersionId` each time you update
your application.
You cannot update the `RuntimeEnvironment` of an existing application. If you
need to update an application's `RuntimeEnvironment`, you must delete the
application and create it again.
"""
def update_application(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateApplication", input, options)
end
@doc """
Updates the maintenance configuration of the Kinesis Data Analytics application.
You can invoke this operation on an application that is in one of the two
following states: `READY` or `RUNNING`. If you invoke it when the application is
in a state other than these two states, it throws a `ResourceInUseException`.
The service makes use of the updated configuration the next time it schedules
maintenance for the application. If you invoke this operation after the service
schedules maintenance, the service will apply the configuration update the next
time it schedules maintenance for the application. This means that you might not
see the maintenance configuration update applied to the maintenance process that
follows a successful invocation of this operation, but to the following
maintenance process instead.
To see the current maintenance configuration of your application, invoke the
`DescribeApplication` operation.
For information about application maintenance, see [Kinesis Data Analytics for Apache Flink
Maintenance](https://docs.aws.amazon.com/kinesisanalytics/latest/java/maintenance.html).
This operation is supported only for Amazon Kinesis Data Analytics for Apache
Flink.
"""
def update_application_maintenance_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"UpdateApplicationMaintenanceConfiguration",
input,
options
)
end
end | lib/aws/generated/kinesis_analytics_v2.ex | 0.895154 | 0.57344 | kinesis_analytics_v2.ex | starcoder |
defexception Dynamo.Router.InvalidSpecError, message: "invalid route specification"
defmodule Dynamo.Router.Utils do
@moduledoc false
@doc """
Convert a given verb to its connection representation.
"""
def normalize_verb(verb) do
String.upcase(to_string(verb))
end
@doc """
Generates a representation that will only match routes
according to the given `spec`.
## Examples
generate_match("/foo/:id") => ["foo", { :id, [], nil }]
"""
def generate_match(spec) when is_binary(spec) do
generate_match list_split(spec), [], []
end
def generate_match(match) do
{ [], match }
end
@doc """
Generates a fowarding representation that will match any
route starting with the given `spec`.
## Examples
generate_forward("/foo/:id") => ["foo", { :id, [], nil } | _glob]
"""
def generate_forward({ :_, _, _ }) do
generate_forward ""
end
def generate_forward(list) when is_list(list) do
[h|t] = Enum.reverse(list)
glob = { :glob, [], nil }
{ [], Enum.reverse [ { :|, [], [h, glob] } | t ] }
end
def generate_forward(spec) when is_binary(spec) do
generate_match list_split(spec) ++ ['*glob'], [], []
end
@doc """
Splits the given path into several segments.
It ignores both leading and trailing slashes in the path.
## Examples
split("/foo/bar") #=> ['foo', 'bar']
"""
def split(bin) do
for segment <- String.split(bin, "/"), segment != "", do: segment
end
## Helpers
# Loops each segment checking for matches.
defp generate_match([h|t], vars, acc) do
handle_segment_match segment_match(h, []), t, vars, acc
end
defp generate_match([], vars, acc) do
{ vars |> Enum.uniq |> Enum.reverse, Enum.reverse(acc) }
end
# Handle each segment match. They can either be a
# :literal ('foo'), an identifier (':bar') or a glob ('*path')
def handle_segment_match({ :literal, literal }, t, vars, acc) do
generate_match t, vars, [literal|acc]
end
def handle_segment_match({ :identifier, identifier, expr }, t, vars, acc) do
generate_match t, [identifier|vars], [expr|acc]
end
def handle_segment_match({ :glob, identifier, expr }, t, vars, acc) do
if t != [] do
raise(Dynamo.Router.InvalidSpecError, message: "cannot have a *glob followed by other segments")
end
case acc do
[hs|ts] ->
acc = [{ :|, [], [hs, expr] } | ts]
generate_match([], [identifier|vars], acc)
_ ->
{ vars, expr } = generate_match([], [identifier|vars], [expr])
{ vars, hd(expr) }
end
end
# In a given segment, checks if there is a match.
defp segment_match([?:|argument], []) do
identifier = list_to_atom(argument)
{ :identifier, identifier, { identifier, [], nil } }
end
defp segment_match([?*|argument], []) do
identifier = list_to_atom(argument)
{ :glob, identifier, { identifier, [], nil } }
end
defp segment_match([?:|argument], buffer) do
identifier = list_to_atom(argument)
var = { identifier, [], nil }
expr = quote do
unquote(binary_from_buffer(buffer)) <> unquote(var)
end
{ :identifier, identifier, expr }
end
defp segment_match([?*|argument], buffer) do
identifier = list_to_atom(argument)
var = { identifier, [], nil }
expr = quote [hygiene: [vars: false]] do
[unquote(binary_from_buffer(buffer)) <> _ | _] = unquote(var)
end
{ :glob, identifier, expr }
end
defp segment_match([h|t], buffer) do
segment_match t, [h|buffer]
end
defp segment_match([], buffer) do
{ :literal, binary_from_buffer(buffer) }
end
defp list_split(bin) do
for segment <- String.split(bin, "/"), segment != "", do: List.from_char_data!(segment)
end
defp binary_from_buffer(buffer) do
iodata_to_binary(Enum.reverse(buffer))
end
def is_function_exported?(module, function, arity) do
case is_tuple(module) do
true ->
function_exported?(elem(module, 0), function, arity + 1)
false ->
function_exported?(module, function, arity)
end
end
end | lib/dynamo/router/utils.ex | 0.845544 | 0.531088 | utils.ex | starcoder |
defmodule Numerix.Tensor do
@moduledoc """
Defines a data structure for a tensor and its operations.
You can construct a `Tensor` by calling `Tensor.new/1` and passing it a list, or a list of lists, or a list of lists of...you get the idea.
Example
use Numerix.Tensor
x = Tensor.new([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
Once you have a `Tensor` (or three), you can then use it in normal math operations, e.g. elementwise matrix operations.
Example
x = Tensor.new([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
y = Tensor.new([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
assert x / y == Tensor.new([[1, 1, 1], [1, 1, 1], [1, 1, 1]])
As it turns out, this is very handy when you need to implement complex math formulae as the code looks more like math functions than noisy code with a bunch of calls to `Enum.map/2`, `Enum.zip/2` and the like.
Example
x = Tensor.new([[0, 0.1, 0.5, 0.9, 1.0]])
m = max(x)
e = exp(x - m)
s = sum(e)
assert e / s == Tensor.new([[0.1119598021340303, 0.12373471731203411, 0.18459050724175335, 0.2753766776533774, 0.30433829565880477]])
Oh, I should also mention that this API uses `Flow` to parallelize independent pieces of computation to speed things up! Depending on the type of calculations you're doing, the bigger the data and the more cores you have, the faster it gets.
"""
alias Numerix.Tensor
import Kernel, except: [+: 1, -: 1, abs: 1, +: 2, -: 2, *: 2, /: 2, max: 2]
@max_long_value 9_223_372_036_854_775_807
defstruct items: [], dims: 1, shape: {0}
defmacro __using__(_opts) do
quote do
import Kernel, except: [+: 1, -: 1, abs: 1, +: 2, -: 2, *: 2, /: 2, max: 2]
import Numerix.Tensor
alias Numerix.Tensor
end
end
@doc """
Creates a new tensor from the given scalar, list or nested list.
"""
@spec new(number | [number]) :: %Tensor{} | no_return()
def new(x) when is_number(x) do
%Tensor{items: x, dims: 0, shape: {}}
end
def new(x) when is_list(x) do
shape = x |> calculate_shape |> Enum.reverse()
%Tensor{items: x, dims: length(shape), shape: List.to_tuple(shape)}
end
def new(x) do
raise "Tensor must be a numeric scalar, list or nested list, but got #{inspect(x)} instead"
end
@doc """
Returns the biggest element in the given tensor.
"""
@spec max(%Tensor{}) :: number
def max(x = %Tensor{}) do
x.items
|> List.flatten()
|> Enum.max()
end
@doc """
Compares the given scalar with each element of the given tensor and returns the biggest of the two.
"""
@spec max(number, %Tensor{}) :: %Tensor{}
def max(s, x = %Tensor{}) when is_number(s) do
fn i -> max(s, i) end
|> t_apply(x)
end
@doc """
Compares each element of the two given tensors element-wise and returns the biggest value.
"""
@spec max(%Tensor{}, %Tensor{}) :: %Tensor{}
def max(x = %Tensor{}, y = %Tensor{}) do
fn i, j -> max(i, j) end
|> t_apply(x, y)
end
def max(first, second) do
# Falls back to Elixir's default `max` function.
Kernel.max(first, second)
end
@doc """
Calculates the sum of all the elements in the given tensor.
"""
@spec sum(%Tensor{}) :: number
def sum(x) do
x.items
|> List.flatten()
|> Enum.sum()
end
Enum.each([:exp, :log, :sqrt, :tanh], fn fun ->
@doc """
Returns the `#{fun}` of the given tensor element-wise.
"""
def unquote(:"#{fun}")(x) do
fn i -> apply(:math, unquote(fun), [i]) end
|> t_apply(x)
end
end)
Enum.each([:pow], fn fun ->
@doc """
Returns the result of applying `#{fun}` to the given tensor element-wise.
"""
def unquote(:"#{fun}")(x = %Tensor{}, s) when is_number(s) do
fn i -> apply(:math, unquote(fun), [i, s]) end
|> t_apply(x)
end
end)
Enum.each([:+, :-, :abs], fn op ->
@doc """
Returns the result of applying `#{op}` to the given tensor element-wise.
"""
def unquote(:"#{op}")(x = %Tensor{}) do
fn i -> apply(Kernel, unquote(op), [i]) end
|> t_apply(x)
end
def unquote(:"#{op}")(x) do
# Falls back to Elixir's default `#{op}/1` function.
apply(Kernel, unquote(op), [x])
end
end)
Enum.each([:+, :-, :*, :/], fn op ->
@doc """
Returns the result of applying `#{op}` to the given tensor and scalar element-wise.
"""
def unquote(:"#{op}")(x = %Tensor{}, s) when is_number(s) do
fn i -> apply(Kernel, unquote(op), [i, s]) end
|> t_apply(x)
end
@doc """
Returns the result of applying `#{op}` to the given scalar and tensor element-wise.
"""
def unquote(:"#{op}")(s, x = %Tensor{}) when is_number(s) do
fn i -> apply(Kernel, unquote(op), [s, i]) end
|> t_apply(x)
end
@doc """
Returns the result of applying `#{op}` to the given tensors element-wise.
"""
def unquote(:"#{op}")(x = %Tensor{}, y = %Tensor{}) do
fn i, j -> apply(Kernel, unquote(op), [i, j]) end
|> t_apply(x, y)
end
def unquote(:"#{op}")(x, y) do
# Falls back to Elixir's default `#{op}/2` function.
apply(Kernel, unquote(op), [x, y])
end
end)
@doc """
Applies the given function to the tensor element-wise and returns the result as a new tensor.
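For example:

    iex> t = Tensor.new([1, 2, 3])
    iex> Tensor.t_apply(fn i -> i * 2 end, t)
    %Numerix.Tensor{items: [2, 4, 6], dims: 1, shape: {3}}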
"""
@spec t_apply(fun(), %Tensor{}) :: %Tensor{}
def t_apply(fun, x) do
fun
|> do_apply(x.items, x.dims)
|> Tensor.new()
end
@doc """
Applies the given function to the two tensors element-wise and returns the result as a new tensor.
"""
@spec t_apply(fun(), %Tensor{}, %Tensor{}) :: %Tensor{}
def t_apply(fun, x, y) do
fun
|> do_apply(x.items, y.items, x.dims)
|> Tensor.new()
end
defp calculate_shape(x, shape \\ [])
defp calculate_shape([], shape), do: [0 | shape]
defp calculate_shape(x = [y | _], shape) when is_number(y) do
[length(x) | shape]
end
defp calculate_shape(x = [y | _], shape) do
# credo:disable-for-next-line
Enum.reduce(x, fn curr, prev ->
if length(curr) != length(prev) do
raise "Unexpected tensor shape, make sure every dimension has consistent lengths"
end
curr
end)
calculate_shape(y, [length(x) | shape])
end
defp calculate_shape(_, _) do
raise "Tensor must be a numeric scalar, list or nested list"
end
defp do_apply(fun, x, 0) do
fun.(x)
rescue
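# An ArithmeticError (e.g. overflow from :math.exp/1 on a large input) is
# clamped to the largest 64-bit signed integer.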
ArithmeticError -> @max_long_value
end
defp do_apply(fun, items, dim) do
items
|> Enum.with_index()
|> Flow.from_enumerable()
|> Flow.map(fn {a, i} -> {i, do_apply(fun, a, dim - 1)} end)
|> to_ordered_values()
end
defp do_apply(fun, x, y, 0) do
fun.(x, y)
end
defp do_apply(fun, x, y, dim) do
x
|> Stream.zip(y)
|> Stream.with_index()
|> Flow.from_enumerable()
|> Flow.map(fn {{a, b}, i} ->
{i, do_apply(fun, a, b, dim - 1)}
end)
|> to_ordered_values()
end
defp to_ordered_values(flow) do
flow
|> Enum.to_list()
|> List.keysort(0)
|> Keyword.values()
end
end | lib/tensor.ex | 0.949389 | 0.880026 | tensor.ex | starcoder |
defmodule StateMachine.Ecto do
@moduledoc """
This addition makes StateMachine fully compatible with Ecto.
State setter and getter are abstracted in order to provide a way to update a state
in the middle of transition for a various types of models. With Ecto, we call `change() |> Repo.update`.
We also wrap every event in transaction, which is rolled back if transition fails to finish.
This unlocks a lot of beautiful effects. For example, you can enqueue some tasks into db-powered queue in callbacks,
and if transition failes, those tasks will naturally disappear.
### Usage
To use Ecto, simply pass `repo` param to `defmachine`, you can optionally pass a name of the `Ecto.Type`
implementation, that will be generated automatically under state machine namespace:
defmodule EctoMachine do
use StateMachine
defmachine field: :state, repo: TestApp.Repo, ecto_type: CustomMod do
state :resting
state :working
# ...
end
end
In your schema you can refer to the state type as `EctoMachine.CustomMod`; with `ecto_type` omitted
it would generate `EctoMachine.StateType`. This custom type is needed to transparently use atoms as states.
"""
@doc """
This macro defines an Ecto.Type implementation inside of StateMachine namespace.
The default name is `StateType`, but you can supply any module name.
The purpose of this is to be able to cast strings into atoms and back safely,
validating them against the StateMachine definition.
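In a schema, the generated type might then be used like this (an illustrative
sketch; the module and table names are assumptions):

    defmodule TestApp.Worker do
      use Ecto.Schema

      schema "workers" do
        field :state, EctoMachine.StateType
      end
    end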
"""
defmacro define_ecto_type(kind) do
quote do
variants = Module.get_attribute(__MODULE__, :"#{unquote(kind)}_names")
name = Module.get_attribute(__MODULE__, :"#{unquote(kind)}_type")
unless variants do
raise CompileError, [
file: __ENV__.file,
line: __ENV__.line,
description: "Ecto type should be declared inside of state machine definition"
]
end
defmodule Module.concat(__MODULE__, name) do
@variants variants
StateMachine.Ecto.define_enum(@variants)
end
end
end
defmacro define_enum(variants) do
quote do
@behaviour Ecto.Type
def type, do: :string
def cast(value) do
if s = Enum.find(unquote(variants), &to_string(&1) == to_string(value)) do
{:ok, s}
else
:error
end
end
def load(value) do
{:ok, String.to_atom(value)}
end
def dump(value) when value in unquote(variants) do
{:ok, to_string(value)}
end
def dump(_), do: :error
def equal?(s1, s2), do: to_string(s1) == to_string(s2)
def embed_as(_), do: :self
end
end
@behaviour StateMachine.State
@impl true
def get(ctx) do
Map.get(ctx.model, ctx.definition.field)
end
@impl true
def set(ctx, state) do
Ecto.Changeset.change(ctx.model, [{ctx.definition.field, state}])
|> ctx.definition.misc[:repo].update()
|> case do
{:ok, model} ->
%{ctx | model: model}
{:error, e} ->
%{ctx | status: :failed, message: {:set_state, e}}
end
end
end | lib/state_machine/ecto.ex | 0.81637 | 0.545588 | ecto.ex | starcoder |
defmodule OpcUA.NodeId do
use IsEnumerable
use IsAccessible
@moduledoc """
An identifier for a node in the address space of an OPC UA Server.
An OPC UA information model is made up of nodes and references between nodes.
Every node has a unique NodeId. NodeIds refer to a namespace with an additional
identifier value that can be an integer, a string, a guid or a bytestring.
"""
alias OpcUA.NodeId
@enforce_keys [:ns_index, :identifier_type, :identifier]
@identifier_types ["integer", "string", "guid", "bytestring"]
defstruct ns_index: nil,
identifier_type: nil,
identifier: nil
@doc """
Creates a structure for a node in the address space of an OPC UA Server.
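For example:

    NodeId.new(ns_index: 2, identifier_type: "string", identifier: "temperature")
    #=> %NodeId{ns_index: 2, identifier_type: 1, identifier: "temperature"}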
"""
@spec new(list()) :: %NodeId{}
def new(ns_index: ns_index, identifier_type: id_type, identifier: identifier) when is_integer(ns_index) and id_type in @identifier_types do
new_node_id(ns_index, id_type, identifier)
end
def new(_invalid_data), do: raise("Invalid Namespace index or identifier type")
defp new_node_id(ns_index, "integer", identifier) when is_integer(identifier),
do: %NodeId{ns_index: ns_index, identifier_type: 0, identifier: identifier}
defp new_node_id(ns_index, "string", identifier) when is_binary(identifier),
do: %NodeId{ns_index: ns_index, identifier_type: 1, identifier: identifier}
defp new_node_id(ns_index, "guid", {_data1, _data2, _data3, data4} = identifier) when is_tuple(identifier) and is_binary(data4),
do: %NodeId{ns_index: ns_index, identifier_type: 2, identifier: identifier}
defp new_node_id(ns_index, "bytestring", identifier) when is_binary(identifier),
do: %NodeId{ns_index: ns_index, identifier_type: 3, identifier: identifier}
defp new_node_id(_ns_index, _id_type, _identifier), do: raise("Identifier type does not match with identifier data_type")
end
defmodule OpcUA.ExpandedNodeId do
use IsEnumerable
use IsAccessible
@moduledoc """
A NodeId that allows the namespace URI to be specified instead of an index.
"""
alias OpcUA.{ExpandedNodeId, NodeId}
@enforce_keys [:node_id, :name_space_uri, :server_index]
defstruct node_id: nil,
name_space_uri: nil,
server_index: nil
@doc """
Creates a structure for an expanded node in the address space of an OPC UA Server.
"""
@spec new(list()) :: %ExpandedNodeId{}
def new(node_id: %NodeId{} = node_id, name_space_uri: name_space_uri, server_index: server_index) when is_binary(name_space_uri) and is_integer(server_index) do
%ExpandedNodeId{node_id: node_id, name_space_uri: name_space_uri, server_index: server_index}
end
def new(_invalid_data), do: raise("Invalid Namespace index or identifier type")
end | lib/opc_ua/nodestore/node_id.ex | 0.619932 | 0.433862 | node_id.ex | starcoder |
defmodule Cog.Queries.User do
import Ecto.Query, only: [from: 2, where: 3]
alias Cog.Models.Permission
alias Cog.Models.User
@doc """
Given a `token`, find the User it belongs to. `ttl_in_seconds` is
the current amount of time that a token can be considered valid; if
it was inserted more than `ttl_in_seconds` seconds in the past, it
is considered expired.
If the token exists in the system, this query will return a tuple of
the type `{%User{}, boolean}`, where the boolean value indicates
whether the token is still valid.
If the token does not exist in the system, the query returns nothing.
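A hypothetical call site (the repo module is an assumption):

    Cog.Queries.User.for_token(token, 3600) |> Cog.Repo.one()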
"""
def for_token(token, ttl_in_seconds) do
from u in User,
join: t in assoc(u, :tokens),
where: t.value == ^token,
select: {u, datetime_add(t.inserted_at, ^ttl_in_seconds, "second") > ^Ecto.DateTime.utc}
end
@doc """
Chainable query fragment that selects all users that have a
given permission, whether directly, by role, or by (recursive) group
membership, or any combination thereof.
The queryable that is given must ultimately resolve to a user, and
if not given, defaults to the `User` model for maximum flexibility.
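A hypothetical pipeline (the repo module is an assumption):

    User |> Cog.Queries.User.with_permission(permission) |> Cog.Repo.all()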
"""
def with_permission(queryable \\ User, %Permission{}=permission) do
id = Cog.UUID.uuid_to_bin(permission.id)
from u in queryable,
# TODO: Use a fragment join instead?
where: u.id in fragment("SELECT * FROM users_with_permission(?)", ^id)
end
@doc """
Chainable query fragment that selects all users that have a
chat handle for a given chat provider.
The queryable that is given must ultimately resolve to a user, and
if not given, defaults to the `User` model for maximum flexibility.
"""
def for_chat_provider(queryable \\ User, chat_provider_name) when is_binary(chat_provider_name) do
from u in queryable,
join: ch in assoc(u, :chat_handles),
join: cp in assoc(ch, :chat_provider),
where: cp.name == ^chat_provider_name,
preload: [chat_handles: ch]
end
def for_chat_provider_user_id(chat_provider_user_id, chat_provider_name) do
chat_provider_user_id = to_string(chat_provider_user_id)
chat_provider_name
|> for_chat_provider
|> where([_u, ch], ch.chat_provider_user_id == ^chat_provider_user_id)
end
end | lib/cog/queries/user.ex | 0.604983 | 0.438304 | user.ex | starcoder |
defmodule Graphmath.Mat44 do
@moduledoc """
This is the 3D mathematics library for graphmath.
This submodule handles 4x4 matrices using tuples of floats.
"""
@type mat44 ::
{float, float, float, float, float, float, float, float, float, float, float, float,
float, float, float, float}
@type vec4 :: {float, float, float, float}
@type vec3 :: {float, float, float}
@doc """
`identity()` creates an identity `mat44`.
This returns an identity `mat44`.
"""
@spec identity() :: mat44
def identity() do
{1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0}
end
@doc """
`zero()` creates a zeroed `mat44`.
This returns a zeroed `mat44`.
"""
@spec zero() :: mat44
def zero() do
{0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0}
end
@doc """
`add(a,b)` adds one `mat44` to another `mat44`.
`a` is the first `mat44`.
`b` is the second `mat44`.
This returns a `mat44` which is the element-wise sum of `a` and `b`.
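For example:

    iex> Graphmath.Mat44.add(Graphmath.Mat44.identity(), Graphmath.Mat44.identity())
    {2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0}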
"""
@spec add(mat44, mat44) :: mat44
def add(a, b) do
{a11, a12, a13, a14, a21, a22, a23, a24, a31, a32, a33, a34, a41, a42, a43, a44} = a
{b11, b12, b13, b14, b21, b22, b23, b24, b31, b32, b33, b34, b41, b42, b43, b44} = b
{a11 + b11, a12 + b12, a13 + b13, a14 + b14, a21 + b21, a22 + b22, a23 + b23, a24 + b24,
a31 + b31, a32 + b32, a33 + b33, a34 + b34, a41 + b41, a42 + b42, a43 + b43, a44 + b44}
end
@doc """
`subtract(a,b)` subtracts one `mat44` from another `mat44`.
`a` is the minuend.
`b` is the subtrahend.
This returns a `mat44` formed by the element-wise subtraction of `b` from `a`.
"""
@spec subtract(mat44, mat44) :: mat44
def subtract(a, b) do
{a11, a12, a13, a14, a21, a22, a23, a24, a31, a32, a33, a34, a41, a42, a43, a44} = a
{b11, b12, b13, b14, b21, b22, b23, b24, b31, b32, b33, b34, b41, b42, b43, b44} = b
{a11 - b11, a12 - b12, a13 - b13, a14 - b14, a21 - b21, a22 - b22, a23 - b23, a24 - b24,
a31 - b31, a32 - b32, a33 - b33, a34 - b34, a41 - b41, a42 - b42, a43 - b43, a44 - b44}
end
@doc """
`scale( a, k )` scales every element in a `mat44` by a coefficient k.
`a` is the `mat44` to scale.
`k` is the float to scale by.
This returns a `mat44` `a` scaled element-wise by `k`.
"""
@spec scale(mat44, float) :: mat44
def scale(a, k) do
{a11, a12, a13, a14, a21, a22, a23, a24, a31, a32, a33, a34, a41, a42, a43, a44} = a
{a11 * k, a12 * k, a13 * k, a14 * k, a21 * k, a22 * k, a23 * k, a24 * k, a31 * k, a32 * k,
a33 * k, a34 * k, a41 * k, a42 * k, a43 * k, a44 * k}
end
@doc """
`make_scale( k )` creates a `mat44` that uniformly scales.
`k` is the float value to scale by.
This returns a `mat44` whose diagonal is all `k`s.
"""
@spec make_scale(float) :: mat44
def make_scale(k) do
{k, 0.0, 0.0, 0.0, 0.0, k, 0.0, 0.0, 0.0, 0.0, k, 0.0, 0.0, 0.0, 0.0, k}
end
@doc """
`make_scale( sx, sy, sz, sw )` creates a `mat44` that scales each axis independently.
`sx` is a float for scaling the x-axis.
`sy` is a float for scaling the y-axis.
`sz` is a float for scaling the z-axis.
`sw` is a float for scaling the w-axis.
This returns a `mat44` whose diagonal is `{ sx, sy, sz, sw }`.
Note that, when used with `vec3`s via the *transform* methods, `sw` will have no effect.
"""
@spec make_scale(float, float, float, float) :: mat44
def make_scale(sx, sy, sz, sw) do
{sx, 0.0, 0.0, 0.0, 0.0, sy, 0.0, 0.0, 0.0, 0.0, sz, 0.0, 0.0, 0.0, 0.0, sw}
end
@doc """
`make_translate( tx, ty, tz )` creates a mat44 that translates a point by tx, ty, and tz.
`make_translate( tx, ty, tz )` creates a mat44 that translates a vec3 by (tx, ty, tz).
`tx` is a float for translating along the x-axis.
`ty` is a float for translating along the y-axis.
`tz` is a float for translating along the z-axis.
This returns a `mat44` which translates by a `vec3` `{ tx, ty, tz }`.
"""
@spec make_translate(float, float, float) :: mat44
def make_translate(tx, ty, tz) do
{1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, tx, ty, tz, 1.0}
end
@doc """
`make_rotate_x( theta )` creates a `mat44` that rotates a `vec3` by `theta` radians about the +X axis.
`theta` is the float of the number of radians of rotation the matrix will provide.
This returns a `mat44` which rotates by `theta` radians about the +X axis.
"""
@spec make_rotate_x(float) :: mat44
def make_rotate_x(theta) do
st = :math.sin(theta)
ct = :math.cos(theta)
{1.0, 0.0, 0.0, 0.0, 0.0, ct, st, 0.0, 0.0, -st, ct, 0.0, 0.0, 0.0, 0.0, 1.0}
end
@doc """
`make_rotate_y( theta )` creates a `mat44` that rotates a `vec3` by `theta` radians about the +Y axis.
`theta` is the float of the number of radians of rotation the matrix will provide.
This returns a `mat44` which rotates by `theta` radians about the +Y axis.
"""
@spec make_rotate_y(float) :: mat44
def make_rotate_y(theta) do
st = :math.sin(theta)
ct = :math.cos(theta)
{ct, 0.0, st, 0.0, 0.0, 1.0, 0.0, 0.0, -st, 0.0, ct, 0.0, 0.0, 0.0, 0.0, 1.0}
end
@doc """
`make_rotate_z( theta )` creates a `mat44` that rotates a `vec3` by `theta` radians about the +Z axis.
`theta` is the float of the number of radians of rotation the matrix will provide.
This returns a `mat44` which rotates by `theta` radians about the +Z axis.
"""
@spec make_rotate_z(float) :: mat44
def make_rotate_z(theta) do
st = :math.sin(theta)
ct = :math.cos(theta)
{ct, st, 0.0, 0.0, -st, ct, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0}
end
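# Illustrative usage (a sketch; `transform_vector/2` is defined later in this
# module, and the result carries ordinary floating-point error):
#
#     transform_vector(make_rotate_z(:math.pi() / 2), {1.0, 0.0, 0.0})
#     # => approximately {0.0, 1.0, 0.0}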
@doc """
`round( a, sigfigs )` rounds every element of a `mat44` to a given number of decimal places.
`a` is the `mat44` to round.
`sigfigs` is an integer in [0,15] giving the number of decimal places to round to.
This returns a `mat44` which is the result of rounding `a`.
"""
@spec round(mat44, 0..15) :: mat44
def round(a, sigfigs) do
{a11, a12, a13, a14, a21, a22, a23, a24, a31, a32, a33, a34, a41, a42, a43, a44} = a
{
Float.round(1.0 * a11, sigfigs),
Float.round(1.0 * a12, sigfigs),
Float.round(1.0 * a13, sigfigs),
Float.round(1.0 * a14, sigfigs),
Float.round(1.0 * a21, sigfigs),
Float.round(1.0 * a22, sigfigs),
Float.round(1.0 * a23, sigfigs),
Float.round(1.0 * a24, sigfigs),
Float.round(1.0 * a31, sigfigs),
Float.round(1.0 * a32, sigfigs),
Float.round(1.0 * a33, sigfigs),
Float.round(1.0 * a34, sigfigs),
Float.round(1.0 * a41, sigfigs),
Float.round(1.0 * a42, sigfigs),
Float.round(1.0 * a43, sigfigs),
Float.round(1.0 * a44, sigfigs)
}
end
@doc """
`multiply( a, b )` multiplies two matrices `a` and `b` together.
`a` is the `mat44` multiplicand.
`b` is the `mat44` multiplier.
This returns the `mat44` product of `a` and `b`.
"""
@spec multiply(mat44, mat44) :: mat44
def multiply(a, b) do
{a11, a12, a13, a14, a21, a22, a23, a24, a31, a32, a33, a34, a41, a42, a43, a44} = a
{b11, b12, b13, b14, b21, b22, b23, b24, b31, b32, b33, b34, b41, b42, b43, b44} = b
{
a11 * b11 + a12 * b21 + a13 * b31 + a14 * b41,
a11 * b12 + a12 * b22 + a13 * b32 + a14 * b42,
a11 * b13 + a12 * b23 + a13 * b33 + a14 * b43,
a11 * b14 + a12 * b24 + a13 * b34 + a14 * b44,
a21 * b11 + a22 * b21 + a23 * b31 + a24 * b41,
a21 * b12 + a22 * b22 + a23 * b32 + a24 * b42,
a21 * b13 + a22 * b23 + a23 * b33 + a24 * b43,
a21 * b14 + a22 * b24 + a23 * b34 + a24 * b44,
a31 * b11 + a32 * b21 + a33 * b31 + a34 * b41,
a31 * b12 + a32 * b22 + a33 * b32 + a34 * b42,
a31 * b13 + a32 * b23 + a33 * b33 + a34 * b43,
a31 * b14 + a32 * b24 + a33 * b34 + a34 * b44,
a41 * b11 + a42 * b21 + a43 * b31 + a44 * b41,
a41 * b12 + a42 * b22 + a43 * b32 + a44 * b42,
a41 * b13 + a42 * b23 + a43 * b33 + a44 * b43,
a41 * b14 + a42 * b24 + a43 * b34 + a44 * b44
}
end
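# Illustrative usage (a sketch): under this module's row-vector convention,
# multiplying two translation matrices composes the translations.
#
#     multiply(make_translate(1.0, 0.0, 0.0), make_translate(0.0, 2.0, 0.0))
#     # => the same matrix as make_translate(1.0, 2.0, 0.0)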
@doc """
`multiply_transpose( a, b )` multiplies matrix `a` by the transpose of matrix `b`.
`a` is the `mat44` multiplicand.
`b` is the `mat44` multiplier.
This returns the `mat44` product of `a` and `b`<sup>T</sup>.
"""
@spec multiply_transpose(mat44, mat44) :: mat44
def multiply_transpose(a, b) do
{a11, a12, a13, a14, a21, a22, a23, a24, a31, a32, a33, a34, a41, a42, a43, a44} = a
{b11, b21, b31, b41, b12, b22, b32, b42, b13, b23, b33, b43, b14, b24, b34, b44} = b
{
a11 * b11 + a12 * b21 + a13 * b31 + a14 * b41,
a11 * b12 + a12 * b22 + a13 * b32 + a14 * b42,
a11 * b13 + a12 * b23 + a13 * b33 + a14 * b43,
a11 * b14 + a12 * b24 + a13 * b34 + a14 * b44,
a21 * b11 + a22 * b21 + a23 * b31 + a24 * b41,
a21 * b12 + a22 * b22 + a23 * b32 + a24 * b42,
a21 * b13 + a22 * b23 + a23 * b33 + a24 * b43,
a21 * b14 + a22 * b24 + a23 * b34 + a24 * b44,
a31 * b11 + a32 * b21 + a33 * b31 + a34 * b41,
a31 * b12 + a32 * b22 + a33 * b32 + a34 * b42,
a31 * b13 + a32 * b23 + a33 * b33 + a34 * b43,
a31 * b14 + a32 * b24 + a33 * b34 + a34 * b44,
a41 * b11 + a42 * b21 + a43 * b31 + a44 * b41,
a41 * b12 + a42 * b22 + a43 * b32 + a44 * b42,
a41 * b13 + a42 * b23 + a43 * b33 + a44 * b43,
a41 * b14 + a42 * b24 + a43 * b34 + a44 * b44
}
end
@doc """
`column0( a )` selects the first column of a `mat44`.
`a` is the `mat44` to take the first column of.
This returns a `vec4` representing the first column of `a`.
"""
@spec column0(mat44) :: vec4
def column0(a) do
{a11, _, _, _, a21, _, _, _, a31, _, _, _, a41, _, _, _} = a
{a11, a21, a31, a41}
end
@doc """
`column1( a )` selects the second column of a `mat44`.
`a` is the `mat44` to take the second column of.
This returns a `vec4` representing the second column of `a`.
"""
@spec column1(mat44) :: vec4
def column1(a) do
{_, a12, _, _, _, a22, _, _, _, a32, _, _, _, a42, _, _} = a
{a12, a22, a32, a42}
end
@doc """
`column2( a )` selects the third column of a `mat44`.
`a` is the `mat44` to take the third column of.
This returns a `vec4` representing the third column of `a`.
"""
@spec column2(mat44) :: vec4
def column2(a) do
{_, _, a13, _, _, _, a23, _, _, _, a33, _, _, _, a43, _} = a
{a13, a23, a33, a43}
end
@doc """
`column3( a )` selects the fourth column of a `mat44`.
`a` is the `mat44` to take the fourth column of.
This returns a `vec4` representing the fourth column of `a`.
"""
@spec column3(mat44) :: vec4
def column3(a) do
{_, _, _, a14, _, _, _, a24, _, _, _, a34, _, _, _, a44} = a
{a14, a24, a34, a44}
end
@doc """
`row0( a )` selects the first row of a `mat44`.
`a` is the `mat44` to take the first row of.
This returns a `vec4` representing the first row of `a`.
"""
@spec row0(mat44) :: vec4
def row0(a) do
{a11, a12, a13, a14, _, _, _, _, _, _, _, _, _, _, _, _} = a
{a11, a12, a13, a14}
end
@doc """
`row1( a )` selects the second row of a `mat44`.
`a` is the `mat44` to take the second row of.
This returns a `vec4` representing the second row of `a`.
"""
@spec row1(mat44) :: vec4
def row1(a) do
{_, _, _, _, a21, a22, a23, a24, _, _, _, _, _, _, _, _} = a
{a21, a22, a23, a24}
end
@doc """
`row2( a )` selects the third row of a `mat44`.
`a` is the `mat44` to take the third row of.
This returns a `vec4` representing the third row of `a`.
"""
@spec row2(mat44) :: vec4
def row2(a) do
{_, _, _, _, _, _, _, _, a31, a32, a33, a34, _, _, _, _} = a
{a31, a32, a33, a34}
end
@doc """
`row3( a )` selects the fourth row of a `mat44`.
`a` is the `mat44` to take the fourth row of.
This returns a `vec4` representing the fourth row of `a`.
"""
@spec row3(mat44) :: vec4
def row3(a) do
{_, _, _, _, _, _, _, _, _, _, _, _, a41, a42, a43, a44} = a
{a41, a42, a43, a44}
end
@doc """
`diag( a )` selects the diagonal of a `mat44`.
`a` is the `mat44` to take the diagonal of.
This returns a `vec4` representing the diagonal of `a`.
"""
@spec diag(mat44) :: vec4
def diag(a) do
{a11, _, _, _, _, a22, _, _, _, _, a33, _, _, _, _, a44} = a
{a11, a22, a33, a44}
end
@doc """
`at( a, i, j)` selects an element of a `mat44`.
`a` is the `mat44` to index.
`i` is the row integer index [0,3].
`j` is the column integer index [0,3].
This returns a float from the matrix at row `i` and column `j`.
"""
@spec at(mat44, non_neg_integer, non_neg_integer) :: float
def at(a, i, j) do
elem(a, 4 * i + j)
end
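# Illustrative usage (a sketch): the matrix is stored row-major, so `at/3`
# reads element (i, j) from tuple position 4 * i + j.
#
#     at(make_translate(1.0, 2.0, 3.0), 3, 0)
#     # => 1.0 (row 3 holds the translation in this layout)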
@doc """
`apply( a, v )` transforms a `vec4` by a `mat44`.
`a` is the `mat44` to transform by.
`v` is the `vec4` to be transformed.
This returns a `vec4` representing **A****v**.
This is the "full" application of a matrix, and uses all elements.
"""
@spec apply(mat44, vec4) :: vec4
def apply(a, v) do
{a11, a12, a13, a14, a21, a22, a23, a24, a31, a32, a33, a34, a41, a42, a43, a44} = a
{x, y, z, w} = v
{
a11 * x + a12 * y + a13 * z + a14 * w,
a21 * x + a22 * y + a23 * z + a24 * w,
a31 * x + a32 * y + a33 * z + a34 * w,
a41 * x + a42 * y + a43 * z + a44 * w
}
end
@doc """
`apply_transpose( a, v )` transforms a `vec4` by a transposed `mat44`.
`a` is the `mat44` to transform by.
`v` is the `vec4` to be transformed.
This returns a `vec4` representing **A**<sup>T</sup>**v**.
This is the "full" application of a matrix, and uses all elements.
"""
@spec apply_transpose(mat44, vec4) :: vec4
def apply_transpose(a, v) do
{a11, a21, a31, a41, a12, a22, a32, a42, a13, a23, a33, a43, a14, a24, a34, a44} = a
{x, y, z, w} = v
{
a11 * x + a12 * y + a13 * z + a14 * w,
a21 * x + a22 * y + a23 * z + a24 * w,
a31 * x + a32 * y + a33 * z + a34 * w,
a41 * x + a42 * y + a43 * z + a44 * w
}
end
@doc """
`apply_left( v, a )` transforms a `vec4` by a `mat44`, applied on the left.
`a` is the `mat44` to transform by.
`v` is the `vec4` to be transformed.
This returns a `vec4` representing **v****A**.
This is the "full" application of a matrix, and uses all elements.
"""
@spec apply_left(vec4, mat44) :: vec4
def apply_left(v, a) do
{a11, a12, a13, a14, a21, a22, a23, a24, a31, a32, a33, a34, a41, a42, a43, a44} = a
{x, y, z, w} = v
{
a11 * x + a21 * y + a31 * z + a41 * w,
a12 * x + a22 * y + a32 * z + a42 * w,
a13 * x + a23 * y + a33 * z + a43 * w,
a14 * x + a24 * y + a34 * z + a44 * w
}
end
@doc """
`apply_left_transpose( v, a )` transforms a `vec4` by a transposed `mat44`, applied on the left.
`a` is the `mat44` to transform by.
`v` is the `vec4` to be transformed.
This returns a `vec4` representing **v****A**<sup>T</sup>.
This is the "full" application of a matrix, and uses all elements.
"""
@spec apply_left_transpose(vec4, mat44) :: vec4
def apply_left_transpose(v, a) do
{a11, a21, a31, a41, a12, a22, a32, a42, a13, a23, a33, a43, a14, a24, a34, a44} = a
{x, y, z, w} = v
{
a11 * x + a21 * y + a31 * z + a41 * w,
a12 * x + a22 * y + a32 * z + a42 * w,
a13 * x + a23 * y + a33 * z + a43 * w,
a14 * x + a24 * y + a34 * z + a44 * w
}
end
@doc """
`transform_point( a, v )` transforms a `vec3` point by a `mat44`.
`a` is a `mat44` used to transform the point.
`v` is a `vec3` to be transformed.
This returns a `vec3` representing the application of `a` to `v`.
The point `v` is internally treated as having a fourth coordinate equal to 1.0.
Note that transforming a point will work for all transforms.
"""
@spec transform_point(mat44, vec3) :: vec3
def transform_point(a, v) do
{a11, a21, a31, _, a12, a22, a32, _, a13, a23, a33, _, a14, a24, a34, _} = a
{x, y, z} = v
{
a11 * x + a12 * y + a13 * z + a14,
a21 * x + a22 * y + a23 * z + a24,
a31 * x + a32 * y + a33 * z + a34
}
end
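# Illustrative usage (a sketch): a point picks up the matrix's translation.
#
#     transform_point(make_translate(1.0, 2.0, 3.0), {0.0, 0.0, 0.0})
#     # => {1.0, 2.0, 3.0}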
@doc """
`transform_vector( a, v )` transforms a `vec3` vector by a `mat44`.
`a` is a `mat44` used to transform the vector.
`v` is a `vec3` to be transformed.
This returns a `vec3` representing the application of `a` to `v`.
The vector `v` is internally treated as having a fourth coordinate equal to 0.0.
Note that transforming a vector will work for only rotations, scales, and shears.
"""
@spec transform_vector(mat44, vec3) :: vec3
def transform_vector(a, v) do
{a11, a21, a31, _, a12, a22, a32, _, a13, a23, a33, _, _, _, _, _} = a
{x, y, z} = v
{
a11 * x + a12 * y + a13 * z,
a21 * x + a22 * y + a23 * z,
a31 * x + a32 * y + a33 * z
}
end
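# Illustrative usage (a sketch): unlike a point, a vector ignores translation.
#
#     transform_vector(make_translate(1.0, 2.0, 3.0), {1.0, 0.0, 0.0})
#     # => {1.0, 0.0, 0.0}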
@doc """
`inverse( a )` calculates the inverse matrix.
`a` is the `mat44` to be inverted.
This returns a `mat44` representing `a`<sup>-1</sup>.
Raises an error when the matrix is not invertible, i.e. when its determinant is zero.
"""
@spec inverse(mat44) :: mat44
def inverse(a) do
{m00, m01, m02, m03, m10, m11, m12, m13, m20, m21, m22, m23, m30, m31, m32, m33} = a
v0 = m20 * m31 - m21 * m30
v1 = m20 * m32 - m22 * m30
v2 = m20 * m33 - m23 * m30
v3 = m21 * m32 - m22 * m31
v4 = m21 * m33 - m23 * m31
v5 = m22 * m33 - m23 * m32
t00 = +(v5 * m11 - v4 * m12 + v3 * m13)
t10 = -(v5 * m10 - v2 * m12 + v1 * m13)
t20 = +(v4 * m10 - v2 * m11 + v0 * m13)
t30 = -(v3 * m10 - v1 * m11 + v0 * m12)
f_det = t00 * m00 + t10 * m01 + t20 * m02 + t30 * m03
if f_det == 0.0, do: raise("Matrices with a determinant equal to zero do not have an inverse")
inv_det = 1.0 / f_det
d00 = t00 * inv_det
d10 = t10 * inv_det
d20 = t20 * inv_det
d30 = t30 * inv_det
d01 = -(v5 * m01 - v4 * m02 + v3 * m03) * inv_det
d11 = +(v5 * m00 - v2 * m02 + v1 * m03) * inv_det
d21 = -(v4 * m00 - v2 * m01 + v0 * m03) * inv_det
d31 = +(v3 * m00 - v1 * m01 + v0 * m02) * inv_det
v0 = m10 * m31 - m11 * m30
v1 = m10 * m32 - m12 * m30
v2 = m10 * m33 - m13 * m30
v3 = m11 * m32 - m12 * m31
v4 = m11 * m33 - m13 * m31
v5 = m12 * m33 - m13 * m32
d02 = +(v5 * m01 - v4 * m02 + v3 * m03) * inv_det
d12 = -(v5 * m00 - v2 * m02 + v1 * m03) * inv_det
d22 = +(v4 * m00 - v2 * m01 + v0 * m03) * inv_det
d32 = -(v3 * m00 - v1 * m01 + v0 * m02) * inv_det
v0 = m21 * m10 - m20 * m11
v1 = m22 * m10 - m20 * m12
v2 = m23 * m10 - m20 * m13
v3 = m22 * m11 - m21 * m12
v4 = m23 * m11 - m21 * m13
v5 = m23 * m12 - m22 * m13
d03 = -(v5 * m01 - v4 * m02 + v3 * m03) * inv_det
d13 = +(v5 * m00 - v2 * m02 + v1 * m03) * inv_det
d23 = -(v4 * m00 - v2 * m01 + v0 * m03) * inv_det
d33 = +(v3 * m00 - v1 * m01 + v0 * m02) * inv_det
{d00, d01, d02, d03, d10, d11, d12, d13, d20, d21, d22, d23, d30, d31, d32, d33}
end
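# Illustrative usage (a sketch): inverting a uniform scale yields the
# reciprocal scale.
#
#     inverse(make_scale(2.0))
#     # => the same matrix as make_scale(0.5)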
end | lib/graphmath/Mat44.ex | 0.956022 | 0.902481 | Mat44.ex | starcoder |
defmodule AWS.OpsWorksCM do
@moduledoc """
AWS OpsWorks CM
AWS OpsWorks for configuration management (CM) is a service that runs and
manages configuration management servers.
You can use AWS OpsWorks CM to create and manage AWS OpsWorks for Chef Automate
and AWS OpsWorks for Puppet Enterprise servers, and add or remove nodes for the
servers to manage.
## Glossary of terms
* **Server**: A configuration management server that can be
highly-available. The configuration management server runs on an Amazon Elastic
Compute Cloud (EC2) instance, and may use various other AWS services, such as
Amazon Relational Database Service (RDS) and Elastic Load Balancing. A server is
a generic abstraction over the configuration manager that you want to use, much
like Amazon RDS. In AWS OpsWorks CM, you do not start or stop servers. After you
create servers, they continue to run until they are deleted.
* **Engine**: The engine is the specific configuration manager that
you want to use. Valid values in this release include `ChefAutomate` and
`Puppet`.
* **Backup**: This is an application-level backup of the data that
the configuration manager stores. AWS OpsWorks CM creates an S3 bucket for
backups when you launch the first server. A backup maintains a snapshot of a
server's configuration-related attributes at the time the backup starts.
* **Events**: Events are always related to a server. Events are
written during server creation, when health checks run, when backups are
created, when system maintenance is performed, etc. When you delete a server,
the server's events are also deleted.
* **Account attributes**: Every account has attributes that are
assigned in the AWS OpsWorks CM database. These attributes store information
about configuration limits (servers, backups, etc.) and your customer account.
## Endpoints
AWS OpsWorks CM supports the following endpoints, all HTTPS. You must connect to
one of the following endpoints. Your servers can only be accessed or managed
within the endpoint in which they are created.
* opsworks-cm.us-east-1.amazonaws.com
* opsworks-cm.us-east-2.amazonaws.com
* opsworks-cm.us-west-1.amazonaws.com
* opsworks-cm.us-west-2.amazonaws.com
* opsworks-cm.ap-northeast-1.amazonaws.com
* opsworks-cm.ap-southeast-1.amazonaws.com
* opsworks-cm.ap-southeast-2.amazonaws.com
* opsworks-cm.eu-central-1.amazonaws.com
* opsworks-cm.eu-west-1.amazonaws.com
For more information, see [AWS OpsWorks endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/opsworks-service.html) in
the AWS General Reference.
## Throttling limits
All API operations allow for five requests per second with a burst of 10
requests per second.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "OpsWorksCM",
api_version: "2016-11-01",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "opsworks-cm",
global?: false,
protocol: "json",
service_id: "OpsWorksCM",
signature_version: "v4",
signing_name: "opsworks-cm",
target_prefix: "OpsWorksCM_V2016_11_01"
}
end
@doc """
Associates a new node with the server.
For more information about how to disassociate a node, see `DisassociateNode`.
On a Chef server: This command is an alternative to `knife bootstrap`.
Example (Chef): `aws opsworks-cm associate-node --server-name *MyServer*
--node-name *MyManagedNode* --engine-attributes
"Name=*CHEF_ORGANIZATION*,Value=default"
"Name=*CHEF_NODE_PUBLIC_KEY*,Value=*public-key-pem*"`
On a Puppet server, this command is an alternative to the `puppet cert sign`
command that signs a Puppet node CSR.
Example (Puppet): `aws opsworks-cm associate-node --server-name *MyServer*
--node-name *MyManagedNode* --engine-attributes
"Name=*PUPPET_NODE_CSR*,Value=*csr-pem*"`
A node can only be associated with servers that are in a `HEALTHY` state.
Otherwise, an `InvalidStateException` is thrown. A `ResourceNotFoundException`
is thrown when the server does not exist. A `ValidationException` is raised when
parameters of the request are not valid. The AssociateNode API call can be
integrated into Auto Scaling configurations, AWS CloudFormation templates, or
the user data of a server's instance.
"""
def associate_node(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateNode", input, options)
end
@doc """
Creates an application-level backup of a server.
While the server is in the `BACKING_UP` state, the server cannot be changed, and
no additional backup can be created.
Backups can be created for servers in `RUNNING`, `HEALTHY`, and `UNHEALTHY`
states. By default, you can create a maximum of 50 manual backups.
This operation is asynchronous.
A `LimitExceededException` is thrown when the maximum number of manual backups
is reached. An `InvalidStateException` is thrown when the server is not in any
of the following states: RUNNING, HEALTHY, or UNHEALTHY. A
`ResourceNotFoundException` is thrown when the server is not found. A
`ValidationException` is thrown when parameters of the request are not valid.
"""
def create_backup(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateBackup", input, options)
end
@doc """
Creates and immediately starts a new server.
The server is ready to use when it is in the `HEALTHY` state. By default, you
can create a maximum of 10 servers.
This operation is asynchronous.
A `LimitExceededException` is thrown when you have created the maximum number of
servers (10). A `ResourceAlreadyExistsException` is thrown when a server with
the same name already exists in the account. A `ResourceNotFoundException` is
thrown when you specify a backup ID that is not valid or is for a backup that
does not exist. A `ValidationException` is thrown when parameters of the request
are not valid.
If you do not specify a security group by adding the `SecurityGroupIds`
parameter, AWS OpsWorks creates a new security group.
*Chef Automate:* The default security group opens the Chef server to the world
on TCP port 443. If a KeyName is present, AWS OpsWorks enables SSH access. SSH
is also open to the world on TCP port 22.
*Puppet Enterprise:* The default security group opens TCP ports 22, 443, 4433,
8140, 8142, 8143, and 8170. If a KeyName is present, AWS OpsWorks enables SSH
access. SSH is also open to the world on TCP port 22.
By default, your server is accessible from any IP address. We recommend that you
update your security group rules to allow access from known IP addresses and
address ranges only. To edit security group rules, open Security Groups in the
navigation pane of the EC2 management console.
To specify your own domain for a server, and provide your own self-signed or
CA-signed certificate and private key, specify values for `CustomDomain`,
`CustomCertificate`, and `CustomPrivateKey`.
"""
def create_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateServer", input, options)
end
@doc """
Deletes a backup.
You can delete both manual and automated backups. This operation is
asynchronous.
An `InvalidStateException` is thrown when a backup deletion is already in
progress. A `ResourceNotFoundException` is thrown when the backup does not
exist. A `ValidationException` is thrown when parameters of the request are not
valid.
"""
def delete_backup(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteBackup", input, options)
end
@doc """
Deletes the server and the underlying AWS CloudFormation stacks (including the
server's EC2 instance).
When you run this command, the server state is updated to `DELETING`. After the
server is deleted, it is no longer returned by `DescribeServer` requests. If the
AWS CloudFormation stack cannot be deleted, the server cannot be deleted.
This operation is asynchronous.
An `InvalidStateException` is thrown when a server deletion is already in
progress. A `ResourceNotFoundException` is thrown when the server does not
exist. A `ValidationException` is raised when parameters of the request are not
valid.
"""
def delete_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteServer", input, options)
end
@doc """
Describes your OpsWorks-CM account attributes.
This operation is synchronous.
"""
def describe_account_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAccountAttributes", input, options)
end
@doc """
Describes backups.
The results are ordered by time, with newest backups first. If you do not
specify a BackupId or ServerName, the command returns all backups.
This operation is synchronous.
A `ResourceNotFoundException` is thrown when the backup does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def describe_backups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeBackups", input, options)
end
@doc """
Describes events for a specified server.
Results are ordered by time, with newest events first.
This operation is synchronous.
A `ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def describe_events(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeEvents", input, options)
end
@doc """
Returns the current status of an existing association or disassociation request.
A `ResourceNotFoundException` is thrown when no recent association or
disassociation request with the specified token is found, or when the server
does not exist. A `ValidationException` is raised when parameters of the request
are not valid.
"""
def describe_node_association_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeNodeAssociationStatus", input, options)
end
@doc """
Lists all configuration management servers that are identified with your
account.
Only the stored results from Amazon DynamoDB are returned. AWS OpsWorks CM does
not query other services.
This operation is synchronous.
A `ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def describe_servers(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeServers", input, options)
end
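# Illustrative call (a sketch; `client` is an `%AWS.Client{}` configured
# elsewhere with credentials and a region; construction is not shown here):
#
#     AWS.OpsWorksCM.describe_servers(client, %{})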
@doc """
Disassociates a node from an AWS OpsWorks CM server, and removes the node from
the server's managed nodes.
After a node is disassociated, the node key pair is no longer valid for
accessing the configuration manager's API. For more information about how to
associate a node, see `AssociateNode`.
A node can only be disassociated from a server that is in a `HEALTHY` state.
Otherwise, an `InvalidStateException` is thrown. A `ResourceNotFoundException`
is thrown when the server does not exist. A `ValidationException` is raised when
parameters of the request are not valid.
"""
def disassociate_node(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisassociateNode", input, options)
end
@doc """
Exports a specified server engine attribute as a base64-encoded string.
For example, you can export user data that you can use in EC2 to associate nodes
with a server.
This operation is synchronous.
A `ValidationException` is raised when parameters of the request are not valid.
A `ResourceNotFoundException` is thrown when the server does not exist. An
`InvalidStateException` is thrown when the server is in any of the following
states: CREATING, TERMINATED, FAILED or DELETING.
"""
def export_server_engine_attribute(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ExportServerEngineAttribute", input, options)
end
@doc """
Returns a list of tags that are applied to the specified AWS OpsWorks for Chef
Automate or AWS OpsWorks for Puppet Enterprise servers or backups.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Restores a backup to a server that is in a `CONNECTION_LOST`, `HEALTHY`,
`RUNNING`, `UNHEALTHY`, or `TERMINATED` state.
When you run RestoreServer, the server's EC2 instance is deleted, and a new EC2
instance is configured. RestoreServer maintains the existing server endpoint, so
configuration management of the server's client devices (nodes) should continue
to work.
Restoring from a backup is performed by creating a new EC2 instance. If
restoration is successful, and the server is in a `HEALTHY` state, AWS OpsWorks
CM switches traffic over to the new instance. After restoration is finished, the
old EC2 instance is maintained in a `Running` or `Stopped` state, but is
eventually terminated.
This operation is asynchronous.
An `InvalidStateException` is thrown when the server is not in a valid state. A
`ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def restore_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RestoreServer", input, options)
end
@doc """
Manually starts server maintenance.
This command can be useful if an earlier maintenance attempt failed, and the
underlying cause of maintenance failure has been resolved. The server is in an
`UNDER_MAINTENANCE` state while maintenance is in progress.
Maintenance can only be started on servers in `HEALTHY` and `UNHEALTHY` states.
Otherwise, an `InvalidStateException` is thrown. A `ResourceNotFoundException`
is thrown when the server does not exist. A `ValidationException` is raised when
parameters of the request are not valid.
"""
def start_maintenance(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartMaintenance", input, options)
end
@doc """
Applies tags to an AWS OpsWorks for Chef Automate or AWS OpsWorks for Puppet
Enterprise server, or to server backups.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes specified tags from an AWS OpsWorks-CM server or backup.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates settings for a server.
This operation is synchronous.
"""
def update_server(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateServer", input, options)
end
@doc """
Updates engine-specific attributes on a specified server.
The server enters the `MODIFYING` state when this operation is in progress. Only
one update can occur at a time. You can use this command to reset a Chef
server's public key (`CHEF_PIVOTAL_KEY`) or a Puppet server's admin password
(`<PASSWORD>`).
This operation is asynchronous.
This operation can only be called for servers in `HEALTHY` or `UNHEALTHY`
states. Otherwise, an `InvalidStateException` is raised. A
`ResourceNotFoundException` is thrown when the server does not exist. A
`ValidationException` is raised when parameters of the request are not valid.
"""
def update_server_engine_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateServerEngineAttributes", input, options)
end
end | lib/aws/generated/ops_works_cm.ex | 0.892363 | 0.435541 | ops_works_cm.ex | starcoder |
defmodule Segment.Analytics.Sender do
@moduledoc """
The `Segment.Analytics.Sender` service implementation is an alternative to the default Batcher that sends every event as it is called.
The HTTP call is made with an async `Task` so as not to block the GenServer. This does not guarantee ordering.
The `Segment.Analytics.Batcher` should be preferred in production, but this module emulates the implementation of the original library if
you need that, or if you need events to be as real-time as possible.
"""
use GenServer
alias Segment.Analytics.{Track, Identify, Screen, Alias, Group, Page}
@doc """
Start the `Segment.Analytics.Sender` GenServer with an Segment HTTP Source API Write Key
"""
@spec start_link(String.t()) :: GenServer.on_start()
def start_link(api_key) do
client = Segment.Http.client(api_key)
GenServer.start_link(__MODULE__, client, name: __MODULE__)
end
@doc """
Start the `Segment.Analytics.Sender` GenServer with an Segment HTTP Source API Write Key and a Tesla Adapter. This is mainly used
for testing purposes to override the Adapter with a Mock.
"""
@spec start_link(String.t(), Tesla.adapter()) :: GenServer.on_start()
def start_link(api_key, adapter) do
client = Segment.Http.client(api_key, adapter)
GenServer.start_link(__MODULE__, client, name: __MODULE__)
end
# client
@doc """
Make a call to Segment with an event. Should be of type `Track, Identify, Screen, Alias, Group or Page`.
The event is sent immediately and asynchronously.
"""
@spec call(Segment.segment_event()) :: :ok
def call(%{__struct__: mod} = event)
when mod in [Track, Identify, Screen, Alias, Group, Page] do
callp(event)
end
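# Illustrative usage (a sketch; the `Track` fields shown follow the structs
# aliased above and may differ between versions of the library):
#
#     {:ok, _pid} = Segment.Analytics.Sender.start_link("my-write-key")
#     Segment.Analytics.Sender.call(%Segment.Analytics.Track{userId: "user-1", event: "Signed Up"})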
# GenServer Callbacks
@impl true
def init(client) do
{:ok, client}
end
@impl true
def handle_cast({:send, event}, client) do
Task.start_link(fn -> Segment.Http.send(client, event) end)
{:noreply, client}
end
# Helpers
defp callp(event) do
GenServer.cast(__MODULE__, {:send, event})
end
end | lib/segment/sender.ex | 0.823754 | 0.433802 | sender.ex | starcoder |
defmodule Esperanto.Walker do
@moduledoc """
Walker is used to go through the input, counting lines and columns.
Every parser is responsible for walking and leaving the walker in a state from which it can continue.
"""
alias Esperanto.Barriers.NeverMatchBarrier
require Logger
defstruct [:input, rest: "", line: 1, column: 1, barriers: [NeverMatchBarrier], barriered: ""]
@type t :: %__MODULE__{
input: String.t(),
rest: String.t() | atom(),
line: integer(),
barriers: list(),
barriered: String.t(),
column: integer()
}
@doc ~S"""
Starts the walker by splitting the given string into input and rest.
## Examples
iex> Esperanto.Walker.start("abc")
%Esperanto.Walker{input: "a", rest: "bc"}
iex> Esperanto.Walker.start("")
%Esperanto.Walker{input: "", rest: ""}
iex> Esperanto.Walker.start("a")
%Esperanto.Walker{input: "a", rest: ""}
"""
@spec start(String.t()) :: __MODULE__.t()
def start(input) do
{input, rest} = String.split_at(input, 1)
%__MODULE__{
input: input,
rest: rest
}
end
@doc ~S"""
Walks one character further through the input.
## Examples
iex> Esperanto.Walker.start("abc") |> Esperanto.Walker.walk()
%Esperanto.Walker{input: "ab", rest: "c", column: 2}
iex> Esperanto.Walker.start("a\nc") |> Esperanto.Walker.walk()
%Esperanto.Walker{input: "a\n", rest: "c", column: 1, line: 2}
"""
@spec walk(__MODULE__.t()) :: __MODULE__.t()
def walk(walker) do
cond do
is_barried(walker) ->
walker
List.first(walker.barriers).should_bar(walker) ->
%__MODULE__{
walker
| rest: :barried,
barriered: walker.rest
}
true ->
do_walk(walker)
end
end
def walk_until(walker, regex) do
cond do
String.match?(walker.input, regex) -> walker
walker.rest == "" -> walker
true -> walk_until(do_walk(walker), regex)
end
end
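# Illustrative usage (a sketch): `walk_until/2` advances until the consumed
# input matches the regex or the rest is exhausted.
#
#     Esperanto.Walker.start("hello world")
#     |> Esperanto.Walker.walk_until(~r/o/)
#     # => %Esperanto.Walker{input: "hello", rest: " world", ...}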
@spec is_barried(__MODULE__.t()) :: boolean()
def is_barried(walker), do: walker.rest == :barried
@doc ~S"""
Prevents the walker from fetching more content once the rest matches the barrier,
until the barrier is destroyed.
"""
@spec with_barrier(__MODULE__.t(), any()) :: __MODULE__.t()
def with_barrier(walker, barrier) do
Logger.debug("Creating barrier #{barrier}")
%__MODULE__{
walker
| barriers: [barrier] ++ walker.barriers
}
end
@spec destroy_barrier(__MODULE__.t()) :: __MODULE__.t()
def destroy_barrier(walker) do
barrier = List.first(walker.barriers)
Logger.debug("Destroyng barrier #{barrier}")
barrier.destroy_barrier(walker)
end
@doc ~S"""
Consume the current walk input
## Examples
iex> Esperanto.Walker.consume_input(Esperanto.Walker.start("abc"))
%Esperanto.Walker{input: "", rest: "bc", column: 1}
"""
@spec consume_input(__MODULE__.t(), length :: integer()) :: __MODULE__.t()
def consume_input(walker, length \\ 0)
def consume_input(walker, 0) do
%__MODULE__{
walker
| input: ""
}
end
def consume_input(walker, length) do
case walker.rest do
:barried ->
%__MODULE__{
walker
| input: "",
rest: String.slice(walker.input, length..-1) <> walker.barriered
}
_ ->
%__MODULE__{
walker
| input: "",
rest: String.slice(walker.input, length..-1) <> walker.rest
}
end
end
def consume_input_matching_regex(walker, regex) do
length = String.length(strip_from_regex(walker.input, regex))
consume_input(walker, length)
end
def strip_from_regex(input, regex) do
Regex.scan(regex, input)
|> List.flatten()
|> Enum.filter(fn s -> String.length(s) > 0 end)
|> List.first()
end
defp do_walk(walker) do
{next, rest} = String.split_at(walker.rest, 1)
{line, column} = increment_line_and_column(next, walker.line, walker.column)
%__MODULE__{
walker
| input: walker.input <> next,
rest: rest,
line: line,
column: column
}
end
def increment_line_and_column(<<input::utf8, rest::binary>>, line, column) do
{line, column} = increment_line_and_column(input, line, column)
increment_line_and_column(rest, line, column)
end
def increment_line_and_column(input, current_line, current_column) do
line = increment_line([input])
column = increment_column([input])
if line != current_line do
{line + current_line, 1 + column}
else
{line + current_line, current_column + column}
end
end
defp increment_line('\n'), do: 1
defp increment_line(_), do: 0
defp increment_column('\n'), do: 1
defp increment_column(_), do: 0
end | apps/esperanto/lib/trybe/esperanto/walker.ex | 0.826362 | 0.569523 | walker.ex | starcoder |
defmodule Openstex.Keystone.V2.Helpers do
@moduledoc ~s"""
A module that provides helper functions for executing more complex multi-step queries
for Keystone authentication.
See the `ExOvh` library for an example usage of the helpers module.
"""
alias Openstex.Request
alias Openstex.Keystone.V2
alias Openstex.Keystone.V2.Helpers.Identity
alias Openstex.Keystone.V2.Helpers.Identity.{Endpoint, Metadata, Service, Trust, Token, User}
@doc ~s"""
Helper function to authenticate openstack using keystone (identity) api. Returns a
`Openstex.Helpers.V2.Keystone.Identity` struct.
## Arguments
- ```endpoint```: the endpoint to which the http request should be sent for accessing keystone authentication.
- ```username```: openstack username
- ```password```: <PASSWORD>
- ```tenant```: A Keyword list as follows: [tenant_id: tenant_id, tenant_name: tenant_name].
One or the other should be present or {:error, message} is returned.
"""
@spec authenticate(String.t(), String.t(), String.t(), Keyword.t()) ::
{:ok, Identity.t()} | {:error, HTTPipe.Conn.t()} | {:error, any}
def authenticate(endpoint, username, password, tenant) do
token_request = V2.get_token(endpoint, username, password)
identity_request = fn token, endpoint, tenant ->
V2.get_identity(token, endpoint, tenant)
end
with {:ok, conn} <- Request.request(token_request, nil),
token =
conn.response.body
|> Map.get("access")
|> Map.get("token")
|> Map.get("id"),
{:ok, conn} <- Request.request(identity_request.(token, endpoint, tenant), nil) do
{:ok, parse_nested_map_into_identity_struct(conn.response.body)}
else
{:error, conn} -> {:error, conn}
end
end
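# Illustrative usage (a sketch; the endpoint and credentials are placeholders):
#
#     Openstex.Keystone.V2.Helpers.authenticate(
#       "https://keystone.example.com/v2.0",
#       "username",
#       "password",
#       tenant_name: "my-tenant"
#     )
#     # => {:ok, %Openstex.Keystone.V2.Helpers.Identity{...}} on success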
@doc ~s"""
Helper function to authenticate openstack using keystone (identity) api. Returns a
`Openstex.Helpers.V2.Keystone.Identity` struct.
## Arguments
- ```endpoint```: the endpoint to which the http request should be sent for accessing keystone authentication.
- ```token```: the x-auth token
- ```tenant```: A Keyword list as follows: [tenant_id: tenant_id, tenant_name: tenant_name].
One or the other should be present or {:error, message} is returned.
"""
@spec authenticate(String.t(), String.t(), Keyword.t()) ::
{:ok, Identity.t()} | {:error, HTTPipe.Conn.t()} | {:error, any}
def authenticate(endpoint, token, tenant) do
identity_request = fn token, endpoint, tenant ->
V2.get_identity(token, endpoint, tenant)
end
case Request.request(identity_request.(token, endpoint, tenant), nil) do
{:ok, conn} -> {:ok, parse_nested_map_into_identity_struct(conn.response.body)}
{:error, conn} -> {:error, conn}
end
end
@doc ~s"""
Defaults to authenticate(endpoint, token, []). See `authenticate/3`.
"""
@spec authenticate(String.t(), String.t()) ::
{:ok, Identity.t()} | {:error, HTTPipe.Conn.t()} | {:error, any}
def authenticate(endpoint, token) do
authenticate(endpoint, token, [])
end
@doc ~s"""
Helper function to authenticate openstack using keystone (identity) api. Returns a
`Openstex.Helpers.V2.Keystone.Identity` struct or raises an error. See `authenticate/3`.
"""
@spec authenticate!(String.t(), String.t()) :: Identity.t() | no_return
def authenticate!(endpoint, token) do
case authenticate(endpoint, token) do
{:ok, identity} -> identity
{:error, conn} -> raise(Openstex.ResponseError, conn: conn)
end
end
@doc ~s"""
Helper function to authenticate openstack using keystone (identity) api. Returns a
`Openstex.Helpers.V2.Keystone.Identity` struct or raises an error. See `authenticate/4`.
"""
@spec authenticate!(String.t(), String.t(), String.t(), Keyword.t()) :: Identity.t() | no_return
def authenticate!(endpoint, username, password, tenant) do
case authenticate(endpoint, username, password, tenant) do
{:ok, identity} -> identity
{:error, conn} -> raise(Openstex.ResponseError, conn: conn)
end
end
@doc false
def parse_nested_map_into_identity_struct(identity_map) do
identity = Map.fetch!(identity_map, "access")
tenant =
identity
|> Map.fetch!("token")
|> Map.fetch!("tenant")
|> Token.Tenant.build()
token =
identity
|> Map.fetch!("token")
|> Map.delete("tenant")
|> Map.put("tenant", tenant)
|> Token.build()
user =
identity
|> Map.get("user", %{})
|> User.build()
metadata =
identity
|> Map.get("metadata", %{})
|> Metadata.build()
trust =
identity
|> Map.get("trust", %{})
|> Trust.build()
service_catalog =
identity
|> Map.fetch!("serviceCatalog")
|> Enum.map(fn service ->
endpoints =
service
|> Map.get("endpoints", [])
|> Enum.map(&Endpoint.build/1)
service =
service
|> Map.delete("endpoints")
|> Map.put("endpoints", endpoints)
Service.build(service)
end)
%{
"token" => token,
"service_catalog" => service_catalog,
"user" => user,
"metadata" => metadata,
"trust" => trust
}
|> Identity.build()
end
defmodule Identity.Token.Tenant do
@moduledoc false
defstruct [:description, :enabled, :id, :name]
def build(map) do
opts = [rest: :merge, transformations: [:snake_case]]
Mapail.map_to_struct!(map, __MODULE__, opts)
end
end
defmodule Identity.Token do
@moduledoc false
defstruct [:audit_ids, :issued_at, :expires, :id, tenant: %Identity.Token.Tenant{}]
def build(map) do
opts = [rest: :merge, transformations: [:snake_case]]
Mapail.map_to_struct!(map, __MODULE__, opts)
end
end
defmodule Identity.Service do
@moduledoc false
defstruct endpoints: [], endpoints_links: [], type: "", name: ""
def build(map) do
opts = [rest: :merge, transformations: [:snake_case]]
Mapail.map_to_struct!(map, __MODULE__, opts)
end
end
defmodule Identity.Endpoint do
@moduledoc false
defstruct [:admin_url, :region, :internal_url, :id, :public_url]
def build(map) do
opts = [rest: :merge, transformations: [:snake_case]]
Mapail.map_to_struct!(map, __MODULE__, opts)
end
end
defmodule Identity.User do
@moduledoc false
defstruct [:username, :roles_links, :id, :roles, :name]
def build(map) do
opts = [rest: :merge, transformations: [:snake_case]]
Mapail.map_to_struct!(map, __MODULE__, opts)
end
end
defmodule Identity.Metadata do
@moduledoc false
defstruct [:metadata, :is_admin, :roles]
def build(map) do
opts = [rest: :merge, transformations: [:snake_case]]
Mapail.map_to_struct!(map, __MODULE__, opts)
end
end
defmodule Identity.Trust do
@moduledoc false
defstruct [:trust, :id, :trustee_user_id, :trustor_user_id, :impersonation]
def build(map) do
opts = [rest: :merge, transformations: [:snake_case]]
Mapail.map_to_struct!(map, __MODULE__, opts)
end
end
defmodule Identity do
@moduledoc false
defstruct token: %Token{},
service_catalog: [],
user: %User{},
metadata: %Metadata{},
trust: %Trust{}
def build(map) do
opts = [rest: :merge, transformations: [:snake_case]]
Mapail.map_to_struct!(map, __MODULE__, opts)
end
end
end | lib/openstex/keystone/v2/helpers.ex | 0.941005 | 0.729631 | helpers.ex | starcoder |
defprotocol Timex.Protocol do
@moduledoc """
This protocol defines the API for functions which take a `Date`,
`NaiveDateTime`, or `DateTime` as input.
"""
@doc """
Convert a date/time value to a Julian calendar date number
"""
def to_julian(datetime)
@doc """
Convert a date/time value to gregorian seconds (seconds since start of year zero)
"""
def to_gregorian_seconds(datetime)
@doc """
Convert a date/time value to gregorian microseconds (microseconds since the start of year zero)
"""
def to_gregorian_microseconds(datetime)
@doc """
Convert a date/time value to seconds since the UNIX Epoch
"""
def to_unix(datetime)
@doc """
Convert a date/time value to a Date
"""
def to_date(datetime)
@doc """
Convert a date/time value to a DateTime.
An optional timezone can be provided, UTC will be assumed if one is not provided.
"""
def to_datetime(datetime, timezone \\ :utc)
@doc """
Convert a date/time value to a NaiveDateTime
"""
def to_naive_datetime(datetime)
@doc """
Convert a date/time value to it's Erlang tuple variant
i.e. Date becomes `{y,m,d}`, DateTime/NaiveDateTime become
`{{y,m,d},{h,mm,s}}`
"""
def to_erl(datetime)
@doc """
Get the century a date/time value is in
"""
def century(datetime)
@doc """
Return a boolean indicating whether the date/time value is in a leap year
"""
def is_leap?(datetime)
@doc """
Shift a date/time value using a list of shift unit/value pairs
"""
def shift(datetime, options)
@doc """
Set fields on a date/time value using a list of unit/value pairs
"""
def set(datetime, options)
@doc """
Get a new version of the date/time value representing the beginning of the day
"""
def beginning_of_day(datetime)
@doc """
Get a new version of the date/time value representing the end of the day
"""
def end_of_day(datetime)
@doc """
Get a new version of the date/time value representing the beginning of its week,
providing a weekday name (as an atom) for the day which starts the week, e.g. `:mon`.
"""
def beginning_of_week(datetime, weekstart)
@doc """
Get a new version of the date/time value representing the ending of its week,
providing a weekday name (as an atom) for the day which starts the week, e.g. `:mon`.
"""
def end_of_week(datetime, weekstart)
@doc """
Get a new version of the date/time value representing the beginning of its year
"""
def beginning_of_year(datetime)
@doc """
Get a new version of the date/time value representing the ending of its year
"""
def end_of_year(datetime)
@doc """
Get a new version of the date/time value representing the beginning of its quarter
"""
def beginning_of_quarter(datetime)
@doc """
Get a new version of the date/time value representing the ending of its quarter
"""
def end_of_quarter(datetime)
@doc """
Get a new version of the date/time value representing the beginning of its month
"""
def beginning_of_month(datetime)
@doc """
Get a new version of the date/time value representing the ending of its month
"""
def end_of_month(datetime)
@doc """
Get the quarter for the given date/time value
"""
def quarter(datetime)
@doc """
Get the number of days in the month for the given date/time value
"""
def days_in_month(datetime)
@doc """
Get the week number of the given date/time value, starting at 1
"""
def week_of_month(datetime)
@doc """
Get the ordinal weekday number of the given date/time value
"""
def weekday(datetime)
@doc """
Get the ordinal day number of the given date/time value
"""
def day(datetime)
@doc """
Determine if the provided date/time value is valid.
"""
def is_valid?(datetime)
@doc """
Return a pair {year, week number} (as defined by ISO 8601) that the given date/time value falls on.
"""
def iso_week(datetime)
@doc """
Shifts the given date/time value to the ISO day given
"""
def from_iso_day(datetime, day)
end
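# Illustrative dispatch (a sketch; the Date/DateTime/NaiveDateTime
# implementations of this protocol live elsewhere in Timex):
#
#     Timex.Protocol.days_in_month(~D[2020-02-01])
#     # => 29 (2020 is a leap year)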
defimpl Timex.Protocol, for: Any do
def to_julian(_datetime), do: {:error, :invalid_date}
def to_gregorian_seconds(_datetime), do: {:error, :invalid_date}
def to_gregorian_microseconds(_datetime), do: {:error, :invalid_date}
def to_unix(_datetime), do: {:error, :invalid_date}
def to_date(_datetime), do: {:error, :invalid_date}
def to_datetime(_datetime, _timezone), do: {:error, :invalid_date}
def to_naive_datetime(_datetime), do: {:error, :invalid_date}
def to_erl(_datetime), do: {:error, :invalid_date}
def century(_datetime), do: {:error, :invalid_date}
def is_leap?(_datetime), do: {:error, :invalid_date}
def shift(_datetime, _options), do: {:error, :invalid_date}
def set(_datetime, _options), do: {:error, :invalid_date}
def beginning_of_day(_datetime), do: {:error, :invalid_date}
def end_of_day(_datetime), do: {:error, :invalid_date}
def beginning_of_week(_datetime, _weekstart), do: {:error, :invalid_date}
def end_of_week(_datetime, _weekstart), do: {:error, :invalid_date}
def beginning_of_year(_datetime), do: {:error, :invalid_date}
def end_of_year(_datetime), do: {:error, :invalid_date}
def beginning_of_quarter(_datetime), do: {:error, :invalid_date}
def end_of_quarter(_datetime), do: {:error, :invalid_date}
def beginning_of_month(_datetime), do: {:error, :invalid_date}
def end_of_month(_datetime), do: {:error, :invalid_date}
def quarter(_datetime), do: {:error, :invalid_date}
def days_in_month(_datetime), do: {:error, :invalid_date}
def week_of_month(_datetime), do: {:error, :invalid_date}
def weekday(_datetime), do: {:error, :invalid_date}
def day(_datetime), do: {:error, :invalid_date}
def is_valid?(_datetime), do: {:error, :invalid_date}
def iso_week(_datetime), do: {:error, :invalid_date}
def from_iso_day(_datetime, _day), do: {:error, :invalid_date}
end | elixir/codes-from-books/little-elixir/cap8/blitzy/deps/timex/lib/protocol.ex | 0.908911 | 0.746139 | protocol.ex | starcoder |