defmodule ApiWeb.EventStream.DiffServer do
@moduledoc """
GenServer responsible for sending EventStream diffs back to the process holding the conn.
The original implementation of this feature had each client doing the diff
itself. However, when multiple clients are subscribed to the same data,
this results in a lot of duplicated diffing effort.
This new implementation does the diffing once, and sends the diffs to each client.
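A rough usage sketch (the controller module and the receive loop are
illustrative; `start_link/1` expects a controller that defines `state_module/0`):
```
{:ok, pid} = ApiWeb.EventStream.DiffServer.start_link({conn, ApiWeb.VehicleController, []})
:ok = ApiWeb.EventStream.DiffServer.subscribe(pid)
receive do
  {:events, events} -> push_events(events)
  {:error, rendered_error} -> close_stream(rendered_error)
end
```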
"""
use GenServer
alias ApiWeb.EventStream.Diff
def start_link({conn, module, opts}) do
if module.state_module() do
GenServer.start_link(__MODULE__, {conn, module}, opts)
else
{:error, :no_state_module}
end
end
def stop(pid) do
GenServer.stop(pid)
end
def subscribe(pid) do
GenServer.cast(pid, {:subscribe, self()})
end
def unsubscribe(pid) do
GenServer.cast(pid, {:unsubscribe, self()})
end
defmodule State do
@moduledoc false
defstruct [
:conn,
:view_module,
:opts,
:module,
last_data: nil,
last_rendered: nil,
refs: %{},
subscribed?: false
]
end
alias __MODULE__.State
@impl GenServer
def init({conn, module}) do
state = %State{
conn: conn,
view_module: Phoenix.Controller.view_module(conn),
opts: ApiWeb.ApiControllerHelpers.opts_for_params(conn, conn.params),
module: module
}
{:ok, state}
end
@impl GenServer
def handle_cast({:subscribe, parent}, state) do
case state.refs do
%{^parent => _} ->
{:noreply, state}
refs ->
ref = Process.monitor(parent)
state = %{state | refs: Map.put(refs, parent, ref)}
state =
cond do
not is_nil(state.last_rendered) ->
send(parent, {:events, [{"reset", Jason.encode_to_iodata!(state.last_rendered)}]})
state
state.subscribed? ->
state
true ->
Events.subscribe({:new_state, state.module.state_module()})
send(self(), {:event, :initial, :timeout, :event})
%{state | subscribed?: true}
end
{:noreply, state}
end
end
def handle_cast({:unsubscribe, parent}, state) do
{parent_ref, refs} = Map.pop(state.refs, parent)
Process.demonitor(parent_ref, [:flush])
state = %{state | refs: refs}
if refs == %{} do
{:stop, :normal, state}
else
{:noreply, state}
end
end
@impl GenServer
def handle_info({:event, _, _, _}, state) do
:ok = receive_all_events()
case state.module.index_data(state.conn, state.conn.params) do
{:error, error} ->
respond_with_error(state, error)
{data, _} ->
respond_with_data(state, data)
data ->
respond_with_data(state, data)
end
end
def handle_info(
{:DOWN, parent_ref, :process, parent, _},
%{refs: refs} = state
) do
{^parent_ref, refs} = Map.pop(refs, parent)
state = %{state | refs: refs}
if refs == %{} do
{:stop, :normal, state}
else
{:noreply, state}
end
end
def respond_with_error(state, error) do
for parent <- Map.keys(state.refs) do
send(parent, {:error, render_error(error)})
end
{:stop, :normal, state}
end
def respond_with_data(%{last_data: data} = state, data) do
{:noreply, state}
end
def respond_with_data(state, data) do
rendered = render_data(state, data)
events = diff_events(state.last_rendered, rendered)
for parent <- Map.keys(state.refs) do
send(parent, {:events, events})
end
{:noreply, %{state | last_data: data, last_rendered: rendered}}
end
defp render_data(state, data) do
json_api = JaSerializer.format(state.view_module, data, state.conn, state.opts)
Map.get(json_api, "included", []) ++ Map.get(json_api, "data", [])
end
@doc """
JaSerializer renders into a map that looks like:
```
%{
"data" => [
%{"type" => "vehicle", "id" => "y1234", ...},
...
],
"included" => [ # optional
%{"type" => "route", "id" => "5", ...}
]
}
```
`data` is a list of items from the primary source (here,
vehicles). `included` is an optional array of related items (here, route)
that have relationships to the primary data or other included items.
`diff_events/2` takes two of those rendered outputs, and returns a list of {event_name, json}
pairs, where event_name is "add", "update", "remove", or "reset",
indicating whether the item was added, updated, or removed relative to the
previous JSON-API output, or that the full data set was reset.
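For example, if one vehicle was added and another removed since the last
render, the result is shaped like this (JSON shown symbolically; the actual
values are iodata):
```
[{"add", added_vehicle_json}, {"remove", removed_vehicle_json}]
```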
"""
def diff_events(previous, current) do
diff = Diff.diff(previous, current)
for type <- ~w(add update remove reset)a,
items = Map.get(diff, type, []),
items != [],
type = Atom.to_string(type),
item <- items do
json = Jason.encode_to_iodata!(item)
{type, json}
end
end
defp render_error(error) do
Phoenix.View.render_to_iodata(ApiWeb.ErrorView, "400.json-api", error: error)
end
defp receive_all_events do
# pull any extra {:event, _, _, _} off the message queue
receive do
{:event, _, _, _} ->
receive_all_events()
after
0 -> :ok
end
end
end

# source file: apps/api_web/lib/api_web/event_stream/diff_server.ex
defmodule EctoMnesia.Record.Context.MatchSpec do
@moduledoc """
This module provides a context that is able to rebuild a Mnesia `match_spec` from the `Ecto.Query` AST whenever a new query
is assigned to the context.
Specs:
- [QLC](http://erlang.org/doc/man/qlc.html)
- [Match Specification](http://erlang.org/doc/apps/erts/match_spec.html)
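For example, a query along the lines of `from t in "my_table", where: t.value > 5, select: t.id`
could dump to a match spec shaped roughly like this (the `:"$n"` field placeholders are illustrative):
[{{:my_table, :"$1", :"$2"}, [{:>, :"$2", 5}], [[:"$1"]]}]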
"""
alias EctoMnesia.Record.Context
defstruct head: [], conditions: [], body: []
def update(%Context{query: %Context.Query{sources: sources}} = context, %Ecto.Query{} = query) do
%{
context
| match_spec: %{
context.match_spec
| body: match_body(context, sources),
head: match_head(context),
conditions: match_conditions(query, sources, context)
}
}
end
def dump(%Context.MatchSpec{head: head, conditions: conditions, body: body}) do
[{head, conditions, [body]}]
end
# Build match_spec head part (data placeholders)
defp match_head(%Context{table: %Context.Table{name: table_name}} = context) do
context
|> Context.get_fields_placeholders()
|> Enum.into([table_name])
|> List.to_tuple()
end
# Select was present
defp match_body(%Context{query: %Context.Query{select: %Ecto.Query.SelectExpr{fields: expr}}} = context, sources) do
expr
|> select_fields(sources)
|> Enum.map(&Context.find_field_placeholder!(&1, context))
end
# Select wasn't present, so we select everything
defp match_body(%Context{query: %Context.Query{select: select}} = context, _sources) when is_list(select) do
Enum.map(select, &Context.find_field_placeholder!(&1, context))
end
defp select_fields({:&, [], [0, fields, _]}, _sources), do: fields
defp select_fields({{:., [], [{:&, [], [0]}, field]}, _, []}, _sources), do: [field]
defp select_fields({:^, [], [_]} = expr, sources), do: [unbind(expr, sources)]
defp select_fields(exprs, sources) when is_list(exprs) do
Enum.flat_map(exprs, &select_fields(&1, sources))
end
# Resolve params
defp match_conditions(%Ecto.Query{wheres: wheres}, sources, context),
do: match_conditions(wheres, sources, context, [])
defp match_conditions([], _sources, _context, acc),
do: acc
defp match_conditions([%{expr: expr, params: params} | tail], sources, context, acc) do
condition = condition_expression(expr, merge_sources(sources, params), context)
match_conditions(tail, sources, context, [condition | acc])
end
# `expr.params` seems to be always empty, but we need to deal with cases when it's not
defp merge_sources(sources1, sources2) when is_list(sources1) and is_list(sources2), do: sources1 ++ sources2
defp merge_sources(sources, nil), do: sources
# Unbinding parameters
def condition_expression({:^, [], [_]} = binding, sources, _context), do: unbind(binding, sources)
def condition_expression({op, [], [field, {:^, [], [_]} = binding]}, sources, context) do
parameters = unbind(binding, sources)
condition_expression({op, [], [field, parameters]}, sources, context)
end
# `is_nil` is a special case when we need to :== with nil value
def condition_expression({:is_nil, [], [field]}, sources, context) do
{:==, condition_expression(field, sources, context), nil}
end
# `:in` is a special case when we need to expand it to multiple `:or`'s
def condition_expression({:in, [], [field, parameters]}, sources, context) when is_list(parameters) do
field = condition_expression(field, sources, context)
expr =
parameters
|> unbind(sources)
|> Enum.map(fn parameter ->
{:==, field, condition_expression(parameter, sources, context)}
end)
if expr == [] do
# Hack to return zero values
{:==, true, false}
else
expr
|> List.insert_at(0, :or)
|> List.to_tuple()
end
end
def condition_expression({:in, [], [_field, _parameters]}, _sources, _context) do
raise RuntimeError, "Complex :in queries are not supported by the Mnesia adapter."
end
# Conditions that have one argument. Functions (is_nil, not).
def condition_expression({op, [], [field]}, sources, context) do
{guard_function_operation(op), condition_expression(field, sources, context)}
end
# Other conditions with multiple arguments (<, >, ==, !=, etc)
def condition_expression({op, [], [field, parameter]}, sources, context) do
{
guard_function_operation(op),
condition_expression(field, sources, context),
condition_expression(parameter, sources, context)
}
end
# Fields
def condition_expression({{:., [], [{:&, [], [0]}, field]}, _, []}, _sources, context) do
Context.find_field_placeholder!(field, context)
end
# Recursively expand ecto query expressions and build conditions
def condition_expression({op, [], [left, right]}, sources, context) do
{
guard_function_operation(op),
condition_expression(left, sources, context),
condition_expression(right, sources, context)
}
end
# Another part of this function is to use bound variable values
def condition_expression(%Ecto.Query.Tagged{value: value}, _sources, _context), do: value
def condition_expression(raw_value, sources, _context), do: unbind(raw_value, sources)
def unbind({:^, [], [start_at, end_at]}, sources) do
Enum.slice(sources, Range.new(start_at, end_at))
end
def unbind({:^, [], [index]}, sources) do
sources
|> Enum.at(index)
|> get_binded()
end
def unbind(value, _sources), do: value
# Bound variable value
defp get_binded({value, {_, _}}), do: value
defp get_binded(value), do: value
# Convert Ecto.Query operations to MatchSpec analogs (only the ones that don't already match).
defp guard_function_operation(:!=), do: :"/="
defp guard_function_operation(:<=), do: :"=<"
defp guard_function_operation(op), do: op
end

# source file: lib/ecto_mnesia/record/context/match_spec.ex
defmodule SBoM.CycloneDX.Xml do
@moduledoc false
alias SBoM.{CycloneDX, License}
def bom(components, options) do
bom =
case options[:schema] do
"1.1" ->
{:bom,
[
serialNumber: options[:serial] || CycloneDX.uuid(),
xmlns: "http://cyclonedx.org/schema/bom/1.1"
], [{:components, [], Enum.map(components, &component/1)}]}
_ ->
{:bom,
[
serialNumber: options[:serial] || CycloneDX.uuid(),
xmlns: "http://cyclonedx.org/schema/bom/1.2"
],
[
{:metadata, [],
[
{:timestamp, [], [[DateTime.utc_now() |> DateTime.to_iso8601()]]},
{:tools, [], [tool: [name: [["SBoM Mix task for Elixir"]]]]}
]},
{:components, [], Enum.map(components, &component/1)}
]}
end
:xmerl.export_simple([bom], :xmerl_xml)
end
defp component(component) do
{:component, [type: component.type], component_fields(component)}
end
defp component_fields(component) do
component |> Enum.map(&component_field/1) |> Enum.reject(&is_nil/1)
end
@simple_fields [:name, :version, :purl, :cpe, :description]
defp component_field({field, value}) when field in @simple_fields and not is_nil(value) do
{field, [], [[value]]}
end
defp component_field({:hashes, hashes}) when is_map(hashes) do
{:hashes, [], Enum.map(hashes, &hash/1)}
end
defp component_field({:licenses, [_ | _] = licenses}) do
{:licenses, [], Enum.map(licenses, &license/1)}
end
defp component_field(_other), do: nil
defp hash({algorithm, hash}) do
{:hash, [alg: algorithm], [[hash]]}
end
defp license(name) do
# If the name is a recognized SPDX license ID, or if we can turn it into
# one, we return a bom:license with a bom:id element
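# For example (illustrative; the actual ID mapping lives in SBoM.License):
#   "Apache License 2.0" -> {:license, [], [{:id, [], [["Apache-2.0"]]}]}
#   "My Custom License"  -> {:license, [], [{:name, [], [["My Custom License"]]}]}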
case License.spdx_id(name) do
nil ->
{:license, [],
[
{:name, [], [[name]]}
]}
id ->
{:license, [],
[
{:id, [], [[id]]}
]}
end
end
end

# source file: lib/sbom/cyclonedx/xml.ex
defmodule Timex.Calendar.Julian do
@moduledoc """
This module contains functions for working with dates in the Julian calendar.
"""
require Bitwise
import Timex.Macros
alias Timex.Types
@doc """
Returns the Julian day number for the given Erlang date (gregorian)
The Julian date (JD) is a continuous count of days from 1 January 4713 BC (= -4712 January 1),
Greenwich mean noon (= 12h UT). For example, AD 1978 January 1, 0h UT is JD 2443509.5
and AD 1978 July 21, 15h UT, is JD 2443711.125.
This algorithm assumes a proleptic Gregorian calendar (i.e. dates back to year 0),
unlike the NASA or US Naval Observatory algorithm - however they align perfectly
for dates back to October 15th, 1582, which is where it starts to differ, which is
due to the fact that their algorithm assumes there is no Gregorian calendar before that
date.
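For example, matching the AD 1978 figure above:
iex> Timex.Calendar.Julian.julian_date({{1978, 1, 1}, {0, 0, 0}})
2443509.5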
"""
@spec julian_date(Types.date) :: integer
def julian_date({year, month, day}),
do: julian_date(year, month, day)
# Same as julian_date/1, except takes an Erlang datetime, and returns a more precise Julian date number
@spec julian_date(Types.datetime) :: float
def julian_date({{year, month, day}, {hour, minute, second}}) do
julian_date(year, month, day, hour, minute, second)
end
def julian_date(_), do: {:error, :invalid_date}
@doc """
Same as julian_date/1, except takes year/month/day as distinct arguments
"""
@spec julian_date(Types.year, Types.month, Types.day) :: integer
def julian_date(year, month, day) when is_date(year, month, day) do
a = div(14 - month, 12)
y = year + 4800 - a
m = month + (12 * a) - 3
jdn = day + trunc(((153 * m) + 2) / 5) +
(365*y) +
div(y, 4) - div(y, 100) + div(y, 400) -
32045
jdn
end
def julian_date(_,_,_), do: {:error, :invalid_date}
@doc """
Same as julian_date/1, except takes year/month/day/hour/minute/second as distinct arguments
"""
@spec julian_date(Types.year, Types.month, Types.day, Types.hour, Types.minute, Types.second) :: float
def julian_date(year, month, day, hour, minute, second)
when is_datetime(year, month, day, hour, minute, second) do
jdn = julian_date(year, month, day)
jdn + ((hour - 12) / 24) + (minute / 1440) + (second / 86400)
end
def julian_date(_,_,_,_,_,_), do: {:error, :invalid_datetime}
@doc """
Returns the day of the week, starting with 0 for Sunday, or 1 for Monday
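For example (2017-01-01 was a Sunday):
iex> Timex.Calendar.Julian.day_of_week({2017, 1, 1}, :sun)
0
iex> Timex.Calendar.Julian.day_of_week({2017, 1, 1}, :mon)
7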
"""
@spec day_of_week(Types.date, :sun | :mon) :: Types.weekday
def day_of_week({year, month, day}, weekstart),
do: day_of_week(year, month, day, weekstart)
@doc """
Same as day_of_week/1, except takes year/month/day as distinct arguments
"""
@spec day_of_week(Types.year, Types.month, Types.day, :sun | :mon) :: Types.weekday
def day_of_week(year, month, day, weekstart) when is_date(year, month, day) and weekstart in [:sun, :mon] do
cardinal = mod((trunc(julian_date(year, month, day)) + 1), 7)
case weekstart do
:sun -> cardinal
:mon -> mod(cardinal + 6, 7) + 1
end
end
def day_of_week(_, _, _, weekstart) do
case weekstart in [:sun, :mon] do
true -> {:error, :invalid_date}
false -> {:error, {:bad_weekstart_value, expected: [:sun, :mon], got: weekstart}}
end
end
defp mod(a, b), do: rem(rem(a, b) + b, b)
end

# source file: lib/calendar/julian.ex
defmodule Temple do
defmacro __using__(_) do
quote location: :keep do
import Temple
import Temple.Tags
import Temple.Form
import Temple.Link
end
end
@doc """
Creates a markup context.
All tags must be called inside of a `Temple.temple/1` block.
Returns a safe result of the form `{:safe, result}`
## Example
```
team = ["Alice", "Bob", "Carol"]
temple do
for name <- team do
div class: "text-bold" do
text name
end
end
end
# {:safe, "<div class=\"text-bold\">Alice</div><div class=\"text-bold\">Bob</div><div class=\"text-bold\">Carol</div>"}
```
"""
defmacro temple([do: block] = _block) do
quote location: :keep do
import Kernel, except: [div: 2]
with {:ok, var!(buff, Temple.Tags)} <- Temple.Utils.start_buffer([]) do
unquote(block)
markup = Temple.Utils.get_buffer(var!(buff, Temple.Tags))
:ok = Temple.Utils.stop_buffer(var!(buff, Temple.Tags))
Temple.Utils.join_and_escape(markup)
end
end
end
@doc """
Emits a text node into the markup.
```
temple do
div do
text "Hello, world!"
end
end
# {:safe, "<div>Hello, world!</div>"}
```
"""
defmacro text(text) do
quote location: :keep do
Temple.Utils.put_buffer(
var!(buff, Temple.Tags),
unquote(text) |> Temple.Utils.escape_content()
)
end
end
@doc """
Emits a Phoenix partial into the markup.
```
temple do
html lang: "en" do
head do
title "MyApp"
link rel: "stylesheet", href: Routes.static_path(@conn, "/css/app.css")
end
body do
main role: "main", class: "container" do
p get_flash(@conn, :info), class: "alert alert-info", role: "alert"
p get_flash(@conn, :error), class: "alert alert-danger", role: "alert"
partial render(@view_module, @view_template, assigns)
end
script type: "text/javascript", src: Routes.static_path(@conn, "/js/app.js")
end
end
end
```
"""
defmacro partial(partial) do
quote location: :keep do
Temple.Utils.put_buffer(
var!(buff, Temple.Tags),
unquote(partial) |> Temple.Utils.from_safe()
)
end
end
@doc """
Defines a custom component.
Components are the primary way to extract partials and markup helpers.
## Assigns
Components accept a keyword list or a map of assigns and can be referenced in the body of the component by a module attribute of the same name.
This works exactly the same as EEx templates.
## Children
If a block is passed to the component, it can be referenced by a special assign called `@children`.
## Example
```
defcomponent :flex do
div id: @id, class: "flex" do
@children
end
end
temple do
flex id: "my-flex" do
div "Item 1"
div "Item 2"
div "Item 3"
end
end
# {:safe, "<div id=\"my-flex\" class=\"flex\">
# <div>Item 1</div>
# <div>Item 2</div>
# <div>Item 3</div>
# </div>"}
```
"""
defmacro defcomponent(name, [do: _] = block) do
quote location: :keep do
defmacro unquote(name)() do
outer = unquote(Macro.escape(block))
Temple.Utils.__quote__(outer)
end
defmacro unquote(name)(props_or_block)
defmacro unquote(name)([{:do, inner}]) do
outer =
unquote(Macro.escape(block))
|> Temple.Utils.__insert_props__([], inner)
Temple.Utils.__quote__(outer)
end
defmacro unquote(name)(props) do
outer =
unquote(Macro.escape(block))
|> Temple.Utils.__insert_props__(props, nil)
Temple.Utils.__quote__(outer)
end
defmacro unquote(name)(props, inner) do
outer =
unquote(Macro.escape(block))
|> Temple.Utils.__insert_props__(props, inner)
Temple.Utils.__quote__(outer)
end
end
end
end

# source file: lib/temple.ex
defmodule VendingMachine.CurrencyData do
alias VendingMachine.QueryHelper
def add_currency_note(currency_note) do
[
node_name: "CurrencyNote",
param: ~s({
backgroundColor: "#{Keyword.fetch!(currency_note, :background_color)}",
fluorescentStripColor: "#{Keyword.fetch!(currency_note, :fluorescent_strip_color)}",
obverse: "#{Keyword.fetch!(currency_note, :obverse)}",
reverse: "#{Keyword.fetch!(currency_note, :reverse)}",
value: #{Keyword.fetch!(currency_note, :value)}
}),
on_create: [quantity: Keyword.fetch!(currency_note, :quantity)]
]
|> QueryHelper.merge_node_query()
end
def add_currency_coin(currency_coin) do
[
node_name: "CurrencyCoin",
param: ~s({
diameter: #{Keyword.fetch!(currency_coin, :diameter)},
thickness: #{Keyword.fetch!(currency_coin, :thickness)},
mass: #{Keyword.fetch!(currency_coin, :mass)},
value: #{Keyword.fetch!(currency_coin, :value)}
}),
on_create: [quantity: Keyword.fetch!(currency_coin, :quantity)]
]
|> QueryHelper.merge_node_query()
end
def update_currency_note(currency_note) do
[
node_name: "CurrencyNote",
param: ~s({
backgroundColor: "#{Keyword.fetch!(currency_note, :background_color)}",
fluorescentStripColor: "#{Keyword.fetch!(currency_note, :fluorescent_strip_color)}",
obverse: "#{Keyword.fetch!(currency_note, :obverse)}",
reverse: "#{Keyword.fetch!(currency_note, :reverse)}",
value: #{Keyword.fetch!(currency_note, :value)}
}),
on_match: [quantity: Keyword.fetch!(currency_note, :quantity)]
]
|> QueryHelper.merge_node_query()
end
def update_currency_coin(currency_coin) do
[
node_name: "CurrencyCoin",
param: ~s({
diameter: #{Keyword.fetch!(currency_coin, :diameter)},
thickness: #{Keyword.fetch!(currency_coin, :thickness)},
mass: #{Keyword.fetch!(currency_coin, :mass)},
value: #{Keyword.fetch!(currency_coin, :value)}
}),
on_match: [quantity: Keyword.fetch!(currency_coin, :quantity)]
]
|> QueryHelper.merge_node_query()
end
def check_available_currency_note(currency_note) do
[
node_name: "CurrencyNote",
param: ~s({
backgroundColor: "#{Keyword.fetch!(currency_note, :background_color)}",
fluorescentStripColor: "#{Keyword.fetch!(currency_note, :fluorescent_strip_color)}",
obverse: "#{Keyword.fetch!(currency_note, :obverse)}",
reverse: "#{Keyword.fetch!(currency_note, :reverse)}",
value: #{Keyword.fetch!(currency_note, :value)}
})
# ,
# where: [quantity: Keyword.fetch!(currency_note, :quantity)]
]
|> QueryHelper.match_node_query()
end
def check_available_currency_coin(currency_coin) do
[
node_name: "CurrencyCoin",
param: ~s({
diameter: #{Keyword.fetch!(currency_coin, :diameter)},
thickness: #{Keyword.fetch!(currency_coin, :thickness)},
mass: #{Keyword.fetch!(currency_coin, :mass)},
value: #{Keyword.fetch!(currency_coin, :value)}
})
# ,
# where: [quantity: Keyword.fetch!(currency_coin, :quantity)]
]
|> QueryHelper.match_node_query()
end
end

# source file: lib/vending_machine/data/currency_data.ex
defmodule Infer do
@moduledoc """
This is the main entry for using the Infer API.
- `get/3` evaluates the given predicate(s) using only the (pre)loaded data available, and returns the result(s)
- `load/3` is like `get`, but loads any additional data as needed
- `put/3` is like `load`, but puts the results into the `:inferred` field
(or virtual schema field) of the subject(s) as a map, and returns the subject(s)
These functions return a tuple, either `{:ok, result}`, `{:error, error}`, or `{:not_loaded, data_reqs}` (only `get`).
The corresponding `get!/3`, `load!/3` and `put!/3` functions return `result`
directly, or otherwise raise an exception.
Arguments:
- **subjects** can either be an individual subject (with the given predicates defined on it), or a list of subjects.
Passing an individual subject will return the predicates for the subject, passing a list will return a list of them.
- **predicates** can either be a single predicate, or a list of predicates.
Passing a single predicate will return the resulting value, passing a list will return a **map**
of the predicates and their resulting values.
- **options** (optional) See below.
Options:
- **args** (list or map) can be used to pass in data from the caller's context that can be used in
rules (see *Arguments* below). A classic example is the `current_user`, e.g.
```elixir
put!(project, :can_edit?, args: [user: current_user])
```
- **extra_rules** (module or list of modules) can be used to add context-specific rules that are
not defined directly on the subject. This can be used to structure rules into their own modules
and use them only where needed.
- **debug** (boolean) makes Infer print additional information to the console as rules are evaluated.
Should only be used while debugging.
- **return_cache** (boolean) makes non-bang functions return `{:ok, result, cache}` instead of
`{:ok, result}` on success. This `cache` can be passed to other Infer functions (see `cache` option)
- **cache** (`Dataloader` struct) can be used to pass in an existing cache, so data already loaded
doesn't need to be loaded again. Can be initialized using `Loaders.Dataloader.init/0`.
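For example, carrying the cache across calls (a sketch; `offer` and `:active?`
are illustrative, and the loader module is assumed to live under `Infer`):
```elixir
cache = Infer.Loaders.Dataloader.init()
{:ok, result, cache} = Infer.load(offer, :active?, return_cache: true, cache: cache)
{:ok, _other} = Infer.load(other_offer, :active?, cache: cache)
```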
"""
alias Infer.{Engine, Result, Util}
alias Infer.Evaluation, as: Eval
@doc """
Evaluates one or multiple predicates for one or multiple records and returns the results.
Does not load any additional data.
"""
def get(records, predicates, opts \\ []) do
eval = Eval.from_options(opts)
do_get(records, predicates, eval)
|> Result.to_simple_if(not eval.return_cache?)
end
defp do_get(records, predicates, eval) when is_list(records) do
Result.map(records, &do_get(&1, predicates, eval))
end
defp do_get(record, predicates, eval) when is_list(predicates) do
Result.map(predicates, &Engine.resolve_predicate(&1, record, eval))
|> Result.transform(&Util.Map.zip(predicates, &1))
end
defp do_get(record, predicate, eval) when is_atom(predicate) do
Engine.resolve_predicate(predicate, record, eval)
end
defp do_get(record, result, eval) do
Engine.map_result(result, %{eval | root_subject: record})
end
@doc """
Like `get/3` but returns the result value, or raises an error.
"""
def get!(records, predicates, opts \\ []) do
get(records, predicates, opts)
|> Result.unwrap!()
end
@doc """
Like `get/3`, but loads additional data if needed.
"""
def load(records, predicates, opts \\ []) do
eval = Eval.from_options(opts)
do_load(records, predicates, eval)
|> Result.to_simple_if(not eval.return_cache?)
end
defp do_load(records, predicates, eval) do
load_all_data_reqs(eval, fn eval ->
do_get(records, predicates, eval)
end)
end
defp load_all_data_reqs(eval, fun) do
case fun.(eval) do
{:not_loaded, data_reqs} -> Eval.load_data_reqs(eval, data_reqs) |> load_all_data_reqs(fun)
{:ok, result, _binds} -> {:ok, result, eval.cache}
other -> other
end
end
@doc """
Like `get!/3`, but loads additional data if needed.
"""
def load!(records, predicates, opts \\ []) do
load(records, predicates, opts)
|> Result.unwrap!()
end
@doc """
Loads the given predicate(s) for the given record(s) and merges the
results into the `inferred` map field of the record(s), returning them.
## Options
Same as for `get/3`.
"""
def put(records, predicates, opts \\ []) do
eval = Eval.from_options(opts)
do_load(records, List.wrap(predicates), eval)
|> Result.transform(&do_put(records, &1))
|> Result.to_simple_if(not eval.return_cache?)
end
defp do_put(records, results) when is_list(records) do
Util.Enum.zip(records, results, &do_put/2)
end
defp do_put(record, results) do
Map.update!(record, :inferred, &Util.Map.maybe_merge(&1, results))
end
def put!(records, predicates, opts \\ []) do
put(records, predicates, opts)
|> Result.unwrap!()
end
defp get_type(%Ecto.Query{from: %{source: {_, type}}}), do: type
defp get_type(%type{}), do: type
defp get_type([%type{} | _]), do: type
defp get_type(type) when is_atom(type) do
Code.ensure_compiled(type)
if Util.Module.has_function?(type, :infer_rules_for, 2) do
type
else
raise ArgumentError, "Could not derive type from " <> inspect(type, pretty: true)
end
end
@doc "Removes all elements not matching the given condition from the given list."
def filter(records, condition, opts \\ []) when is_list(records) do
eval = Eval.from_options(opts)
do_filter(records, condition, eval)
end
defp do_filter(records, true, _eval) do
records
end
defp do_filter(records, condition, eval) do
if eval.debug?, do: IO.puts("Infer Filter: #{inspect(condition, pretty: true)}")
load_all_data_reqs(eval, fn eval ->
Result.filter_map(
records,
&Engine.evaluate_condition(condition, &1, %{eval | root_subject: &1})
)
end)
|> Result.unwrap!()
end
@doc "Removes all elements matching the given condition from the given list."
def reject(records, condition, opts \\ []) when is_list(records) do
filter(records, {:not, condition}, opts)
end
@doc """
Returns all records matching the given condition.
## Caveat
In general, as much work as possible is done in the database.
If possible, the condition is completely translated to an `Ecto.Query`
so the database only returns matching records.
All condition parts that cannot be translated to an `Ecto.Query` will be
evaluated by **loading all remaining records**, and associations as needed,
and evaluating the rules on them.
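## Example
```elixir
# `Offer` and `:deleted?` are illustrative placeholders
Infer.query_all(Offer, {:not, :deleted?}, args: [user: current_user])
```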
"""
def query_all(queryable, condition, opts \\ []) do
{queryable, condition, repo, eval} = build_query(queryable, condition, opts)
queryable
|> repo.all()
|> do_filter(condition, eval)
|> apply_select(eval)
end
@doc """
Returns the first record matching the given condition.
## Options
Same as for `query_all/3`.
"""
def query_one(queryable, condition, opts \\ []) do
{queryable, condition, repo, eval} = build_query(queryable, condition, opts)
queryable
|> repo.one!()
|> do_filter(condition, eval)
|> apply_select(eval)
end
defp build_query(queryable, condition, opts) do
type = get_type(queryable)
query_mod = type.infer_query_module()
repo = type.infer_repo()
{queryable, opts} = query_mod.apply_options(queryable, opts)
eval = Eval.from_options(opts)
{queryable, condition} = query_mod.apply_condition(queryable, condition, eval)
if eval.debug? do
sql = query_mod.to_sql(repo, queryable)
IO.puts("Infer SQL: #{sql}")
end
{queryable, condition, repo, eval}
end
defp apply_select(records, %{select: nil}), do: records
defp apply_select(records, %{select: mapping} = eval) do
load_all_data_reqs(eval, fn eval ->
Result.map(records, &Engine.map_result(mapping, %{eval | root_subject: &1}))
end)
|> Result.unwrap!()
end
end

# source file: lib/infer.ex
defmodule AWS.Support do
@moduledoc """
AWS Support
The AWS Support API reference is intended for programmers who need detailed
information about the AWS Support operations and data types.
This service enables you to manage your AWS Support cases programmatically. It
uses HTTP methods that return results in JSON format.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
The AWS Support service also exposes a set of [AWS Trusted Advisor](http://aws.amazon.com/premiumsupport/trustedadvisor/) features. You can
retrieve a list of checks and their descriptions, get check results, specify
checks to refresh, and get the refresh status of checks.
The following list describes the AWS Support case management operations:
* **Service names, issue categories, and available severity levels.**
The `DescribeServices` and `DescribeSeverityLevels` operations return AWS
service names, service codes, service categories, and problem severity levels.
You use these values when you call the `CreateCase` operation.
* **Case creation, case details, and case resolution.** The
`CreateCase`, `DescribeCases`, `DescribeAttachment`, and `ResolveCase`
operations create AWS Support cases, retrieve information about cases, and
resolve cases.
* **Case communication.** The `DescribeCommunications`,
`AddCommunicationToCase`, and `AddAttachmentsToSet` operations retrieve and add
communications and attachments to AWS Support cases.
The following list describes the operations available from the AWS Support
service for Trusted Advisor:
* `DescribeTrustedAdvisorChecks` returns the list of checks that run
against your AWS resources.
* Using the `checkId` for a specific check returned by
`DescribeTrustedAdvisorChecks`, you can call `DescribeTrustedAdvisorCheckResult`
to obtain the results for the check that you specified.
* `DescribeTrustedAdvisorCheckSummaries` returns summarized results
for one or more Trusted Advisor checks.
* `RefreshTrustedAdvisorCheck` requests that Trusted Advisor rerun a
specified check.
* `DescribeTrustedAdvisorCheckRefreshStatuses` reports the refresh
status of one or more checks.
For authentication of requests, AWS Support uses [Signature Version 4 Signing Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
See [About the AWS Support API](https://docs.aws.amazon.com/awssupport/latest/user/Welcome.html) in the
*AWS Support User Guide* for information about how to use this service to create
and manage your support cases, and how to call Trusted Advisor for results of
checks on your resources.
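A minimal call sketch (credentials and field values are illustrative, and the
exact success tuple can vary by library version):
```
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
{:ok, result, _http_response} =
  AWS.Support.create_case(client, %{
    "subject" => "Example subject",
    "communicationBody" => "Example description of the issue"
  })
```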
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2013-04-15",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "support",
global?: false,
protocol: "json",
service_id: "Support",
signature_version: "v4",
signing_name: "support",
target_prefix: "AWSSupport_20130415"
}
end
@doc """
Adds one or more attachments to an attachment set.
An attachment set is a temporary container for attachments that you add to a
case or case communication. The set is available for 1 hour after it's created.
The `expiryTime` returned in the response is when the set expires.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def add_attachments_to_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddAttachmentsToSet", input, options)
end
@doc """
Adds additional customer communication to an AWS Support case.
Use the `caseId` parameter to identify the case to which to add communication.
You can list a set of email addresses to copy on the communication by using the
`ccEmailAddresses` parameter. The `communicationBody` value contains the text of
the communication.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def add_communication_to_case(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddCommunicationToCase", input, options)
end
@doc """
Creates a case in the AWS Support Center.
This operation is similar to how you create a case in the AWS Support Center
[Create Case](https://console.aws.amazon.com/support/home#/case/create) page.

The AWS Support API doesn't support requesting service limit increases. You can
submit a service limit increase in the following ways:
* Submit a request from the AWS Support Center [Create
Case](https://console.aws.amazon.com/support/home#/case/create) page.
* Use the Service Quotas
[RequestServiceQuotaIncrease](https://docs.aws.amazon.com/servicequotas/2019-06-24/apireference/API_RequestServiceQuotaIncrease.html)
operation.

A successful `CreateCase` request returns an AWS Support case number. You can
A successful `CreateCase` request returns an AWS Support case number. You can
use the `DescribeCases` operation and specify the case number to get existing
AWS Support cases. After you create a case, use the `AddCommunicationToCase`
operation to add additional communication or attachments to an existing case.
The `caseId` is separate from the `displayId` that appears in the [AWS Support
Center](https://console.aws.amazon.com/support). Use the `DescribeCases`
operation to get the `displayId`.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def create_case(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateCase", input, options)
end
@doc """
Returns the attachment that has the specified ID.
Attachments can include screenshots, error logs, or other files that describe
your issue. Attachment IDs are generated by the case management system when you
add an attachment to a case or case communication. Attachment IDs are returned
in the `AttachmentDetails` objects that are returned by the
`DescribeCommunications` operation.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def describe_attachment(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAttachment", input, options)
end
@doc """
Returns a list of cases that you specify by passing one or more case IDs.
You can use the `afterTime` and `beforeTime` parameters to filter the cases by
date. You can set values for the `includeResolvedCases` and
`includeCommunications` parameters to specify how much information to return.
The response returns the following in JSON format:
* One or more
[CaseDetails](https://docs.aws.amazon.com/awssupport/latest/APIReference/API_CaseDetails.html) data types.
* One or more `nextToken` values, which specify where to paginate
the returned records represented by the `CaseDetails` objects.
Case data is available for 12 months after creation. If a case was created more
than 12 months ago, a request might return an error.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS
Support](http://aws.amazon.com/premiumsupport/).
"""
def describe_cases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeCases", input, options)
end
@doc """
Returns communications and attachments for one or more support cases.
Use the `afterTime` and `beforeTime` parameters to filter by date. You can use
the `caseId` parameter to restrict the results to a specific case.
Case data is available for 12 months after creation. If a case was created more
than 12 months ago, a request for data might cause an error.
You can use the `maxResults` and `nextToken` parameters to control the
pagination of the results. Set `maxResults` to the number of cases that you want
to display on each page, and use `nextToken` to specify the resumption of
pagination.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def describe_communications(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeCommunications", input, options)
end
@doc """
Returns the current list of AWS services and a list of service categories for
each service.
You then use service names and categories in your `CreateCase` requests. Each
AWS service has its own set of categories.
The service codes and category codes correspond to the values that appear in the
**Service** and **Category** lists on the AWS Support Center [Create Case](https://console.aws.amazon.com/support/home#/case/create) page. The values
in those fields don't necessarily match the service codes and categories
returned by the `DescribeServices` operation. Always use the service codes and
categories that the `DescribeServices` operation returns, so that you have the
most recent set of service and category codes.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def describe_services(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeServices", input, options)
end
@doc """
Returns the list of severity levels that you can assign to an AWS Support case.
The severity level for a case is also a field in the `CaseDetails` data type
that you include for a `CreateCase` request.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def describe_severity_levels(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSeverityLevels", input, options)
end
@doc """
Returns the refresh status of the AWS Trusted Advisor checks that have the
specified check IDs.
You can get the check IDs by calling the `DescribeTrustedAdvisorChecks`
operation.
Some checks are refreshed automatically, and you can't return their refresh
statuses by using the `DescribeTrustedAdvisorCheckRefreshStatuses` operation. If
you call this operation for these checks, you might see an
`InvalidParameterValue` error.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def describe_trusted_advisor_check_refresh_statuses(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeTrustedAdvisorCheckRefreshStatuses",
input,
options
)
end
@doc """
Returns the results of the AWS Trusted Advisor check that has the specified
check ID.
You can get the check IDs by calling the `DescribeTrustedAdvisorChecks`
operation.
The response contains a `TrustedAdvisorCheckResult` object, which contains these
three objects:
* `TrustedAdvisorCategorySpecificSummary`
* `TrustedAdvisorResourceDetail`
* `TrustedAdvisorResourcesSummary`
In addition, the response contains these fields:
* **status** - The alert status of the check: "ok" (green),
"warning" (yellow), "error" (red), or "not_available".
* **timestamp** - The time of the last refresh of the check.
* **checkId** - The unique identifier for the check.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def describe_trusted_advisor_check_result(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTrustedAdvisorCheckResult", input, options)
end
@doc """
Returns the results for the AWS Trusted Advisor check summaries for the check
IDs that you specified.
You can get the check IDs by calling the `DescribeTrustedAdvisorChecks`
operation.
The response contains an array of `TrustedAdvisorCheckSummary` objects.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def describe_trusted_advisor_check_summaries(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeTrustedAdvisorCheckSummaries",
input,
options
)
end
@doc """
Returns information about all available AWS Trusted Advisor checks, including
the name, ID, category, description, and metadata.
You must specify a language code. The AWS Support API currently supports English
("en") and Japanese ("ja"). The response contains a
`TrustedAdvisorCheckDescription` object for each check. You must set the AWS
Region to us-east-1.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def describe_trusted_advisor_checks(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTrustedAdvisorChecks", input, options)
end
@doc """
Refreshes the AWS Trusted Advisor check that you specify using the check ID.
You can get the check IDs by calling the `DescribeTrustedAdvisorChecks`
operation.
Some checks are refreshed automatically. If you call the
`RefreshTrustedAdvisorCheck` operation to refresh them, you might see the
`InvalidParameterValue` error.
The response contains a `TrustedAdvisorCheckRefreshStatus` object.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def refresh_trusted_advisor_check(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RefreshTrustedAdvisorCheck", input, options)
end
@doc """
Resolves a support case.
This operation takes a `caseId` and returns the initial and final state of the
case.
You must have a Business or Enterprise support plan to use the AWS
Support API.
If you call the AWS Support API from an account that does not have
a Business or Enterprise support plan, the `SubscriptionRequiredException` error
message appears. For information about changing your support plan, see [AWS Support](http://aws.amazon.com/premiumsupport/).
"""
def resolve_case(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ResolveCase", input, options)
end
end

# source file: lib/aws/generated/support.ex
defmodule Game.Format.Players do
@moduledoc """
Format functions related to players
"""
import Game.Format.Context
alias Data.Save
alias Data.User
alias Game.Color
alias Game.Format
alias Game.Format.Table
@doc """
Colorize a player's name
"""
@spec player_name(User.t()) :: String.t()
def player_name(player) do
context()
|> assign(:name, player.name)
|> Format.template("{player}[name]{/player}")
end
@doc """
Format the player's prompt
Example:
iex> stats = %{health_points: 50, max_health_points: 75, skill_points: 9, max_skill_points: 10, endurance_points: 4, max_endurance_points: 10}
...> config = %{prompt: "%h/%Hhp %s/%Ssp %e/%Eep %xxp"}
...> Players.prompt(%{experience_points: 1010, stats: stats, config: config})
"\\\\[50/75hp 9/10sp 4/10ep 10xp\\\\] > "
"""
@spec prompt(Save.t()) :: String.t()
def prompt(save)
def prompt(%{experience_points: exp, stats: stats, config: config}) do
exp = rem(exp, 1000)
"\\[#{config.prompt}\\] > "
|> String.replace("%h", to_string(stats.health_points))
|> String.replace("%H", to_string(stats.max_health_points))
|> String.replace("%s", to_string(stats.skill_points))
|> String.replace("%S", to_string(stats.max_skill_points))
|> String.replace("%e", to_string(stats.endurance_points))
|> String.replace("%E", to_string(stats.max_endurance_points))
|> String.replace("%x", to_string(exp))
end
def prompt(_save), do: "> "
@doc """
Look at a Player
"""
@spec player_full(User.t()) :: String.t()
def player_full(player) do
context()
|> assign(:name, player_name(player))
|> Format.template("[name] is here.")
end
@doc """
Format your info sheet
"""
@spec info(Character.t()) :: String.t()
def info(character = %{save: save}) do
%{stats: stats} = save
rows = [
["Level", save.level],
["XP", save.experience_points],
["Spent XP", save.spent_experience_points],
["Health Points", "#{stats.health_points}/#{stats.max_health_points}"],
["Skill Points", "#{stats.skill_points}/#{stats.max_skill_points}"],
["Stamina Points", "#{stats.endurance_points}/#{stats.max_endurance_points}"],
["Strength", stats.strength],
["Agility", stats.agility],
["Intelligence", stats.intelligence],
["Awareness", stats.awareness],
["Vitality", stats.vitality],
["Willpower", stats.willpower],
["Play Time", play_time(character.seconds_online)]
]
Table.format(
"#{player_name(character)} - #{character.race.name} - #{character.class.name}",
rows,
[16, 15]
)
end
@doc """
View information about another player
"""
def short_info(player = %{save: save}) do
rows = [
["Level", save.level],
["Flags", player_flags(player)]
]
Table.format("#{player_name(player)} - #{player.race.name} - #{player.class.name}", rows, [
12,
15
])
end
@doc """
Format player flags
iex> Players.player_flags(%{flags: ["admin"]})
"{red}(Admin){/red}"
iex> Players.player_flags(%{flags: []})
"none"
"""
def player_flags(player, opts \\ [none: true])
def player_flags(%{flags: []}, none: true), do: "none"
def player_flags(%{flags: []}, none: false), do: ""
def player_flags(%{flags: flags}, _opts) do
flags
|> Enum.map(fn flag ->
"{red}(#{String.capitalize(flag)}){/red}"
end)
|> Enum.join(" ")
end
@doc """
Format number of seconds online into a human readable string
iex> Players.play_time(125)
"00h 02m 05s"
iex> Players.play_time(600)
"00h 10m 00s"
iex> Players.play_time(3670)
"01h 01m 10s"
iex> Players.play_time(36700)
"10h 11m 40s"
"""
@spec play_time(integer()) :: String.t()
def play_time(seconds) do
hours = seconds |> div(3600) |> to_string |> String.pad_leading(2, "0")
minutes = seconds |> div(60) |> rem(60) |> to_string |> String.pad_leading(2, "0")
seconds = seconds |> rem(60) |> to_string |> String.pad_leading(2, "0")
context()
|> assign(:hours, hours)
|> assign(:minutes, minutes)
|> assign(:seconds, seconds)
|> Format.template("[hours]h [minutes]m [seconds]s")
end
@doc """
Format the player's config
"""
@spec config(Save.t()) :: String.t()
def config(save) do
rows =
save.config
|> Enum.map(fn {key, value} ->
[to_string(key), value]
end)
rows = [["Name", "Value"] | rows]
max_size =
rows
|> Enum.map(fn row ->
row
|> Enum.at(1)
|> to_string()
|> Color.strip_color()
|> String.length()
end)
|> Enum.max()
Table.format("Config", rows, [20, max_size])
end
end

# source file: lib/game/format/players.ex
defmodule AWS.Workspaces do
@moduledoc """
Amazon WorkSpaces Service
This reference provides detailed information about the Amazon WorkSpaces
operations.
"""
@doc """
Creates tags for a WorkSpace.
"""
def create_tags(client, input, options \\ []) do
request(client, "CreateTags", input, options)
end
@doc """
Creates one or more WorkSpaces.
Note: this operation is asynchronous and returns before the WorkSpaces are
created.
"""
def create_workspaces(client, input, options \\ []) do
request(client, "CreateWorkspaces", input, options)
end
@doc """
Deletes tags from a WorkSpace.
"""
def delete_tags(client, input, options \\ []) do
request(client, "DeleteTags", input, options)
end
@doc """
Describes tags for a WorkSpace.
"""
def describe_tags(client, input, options \\ []) do
request(client, "DescribeTags", input, options)
end
@doc """
Obtains information about the WorkSpace bundles that are available to your
account in the specified region.
You can filter the results with either the `BundleIds` parameter, or the
`Owner` parameter, but not both.
This operation supports pagination with the use of the `NextToken` request
and response parameters. If more results are available, the `NextToken`
response member contains a token that you pass in the next call to this
operation to retrieve the next set of items.
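For example, when more results are available (the response keys follow the AWS
API shape):
```
{:ok, %{"Bundles" => bundles, "NextToken" => next_token}, _response} =
  AWS.Workspaces.describe_workspace_bundles(client, %{})
{:ok, %{"Bundles" => more_bundles}, _response} =
  AWS.Workspaces.describe_workspace_bundles(client, %{"NextToken" => next_token})
```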
"""
def describe_workspace_bundles(client, input, options \\ []) do
request(client, "DescribeWorkspaceBundles", input, options)
end
@doc """
Retrieves information about the AWS Directory Service directories in the
region that are registered with Amazon WorkSpaces and are available to your
account.
This operation supports pagination with the use of the `NextToken` request
and response parameters. If more results are available, the `NextToken`
response member contains a token that you pass in the next call to this
operation to retrieve the next set of items.
"""
def describe_workspace_directories(client, input, options \\ []) do
request(client, "DescribeWorkspaceDirectories", input, options)
end
@doc """
Obtains information about the specified WorkSpaces.
Only one of the filter parameters, such as `BundleId`, `DirectoryId`, or
`WorkspaceIds`, can be specified at a time.
This operation supports pagination with the use of the `NextToken` request
and response parameters. If more results are available, the `NextToken`
response member contains a token that you pass in the next call to this
operation to retrieve the next set of items.
"""
def describe_workspaces(client, input, options \\ []) do
request(client, "DescribeWorkspaces", input, options)
end
@doc """
Describes the connection status of a specified WorkSpace.
"""
def describe_workspaces_connection_status(client, input, options \\ []) do
request(client, "DescribeWorkspacesConnectionStatus", input, options)
end
@doc """
Modifies the WorkSpace properties, including the RunningMode and AutoStop
time.
"""
def modify_workspace_properties(client, input, options \\ []) do
request(client, "ModifyWorkspaceProperties", input, options)
end
@doc """
Reboots the specified WorkSpaces.
To be able to reboot a WorkSpace, the WorkSpace must have a **State** of
`AVAILABLE`, `IMPAIRED`, or `INOPERABLE`.
Note: this operation is asynchronous and returns before the WorkSpaces
have rebooted.
"""
def reboot_workspaces(client, input, options \\ []) do
request(client, "RebootWorkspaces", input, options)
end
@doc """
Rebuilds the specified WorkSpaces.
Rebuilding a WorkSpace is a potentially destructive action that can result
in the loss of data. Rebuilding a WorkSpace causes the following to occur:
* The system is restored to the image of the bundle that the
WorkSpace is created from. Any applications that have been installed, or
system settings that have been made since the WorkSpace was created will be
lost.
* The data drive (D drive) is re-created from the last automatic
snapshot taken of the data drive. The current contents of the data drive
are overwritten. Automatic snapshots of the data drive are taken every 12
hours, so the snapshot can be as much as 12 hours old.
To be able to rebuild a WorkSpace, the WorkSpace must have a
**State** of `AVAILABLE` or `ERROR`.
Note: this operation is asynchronous and returns before the WorkSpaces
have been completely rebuilt.
"""
def rebuild_workspaces(client, input, options \\ []) do
request(client, "RebuildWorkspaces", input, options)
end
@doc """
Starts the specified WorkSpaces. The API only works with WorkSpaces that
have RunningMode configured as AutoStop and the State set to `STOPPED`.
"""
def start_workspaces(client, input, options \\ []) do
request(client, "StartWorkspaces", input, options)
end
@doc """
Stops the specified WorkSpaces. The API only works with WorkSpaces that
have RunningMode configured as AutoStop and the State set to AVAILABLE,
IMPAIRED, UNHEALTHY, or ERROR.
"""
def stop_workspaces(client, input, options \\ []) do
request(client, "StopWorkspaces", input, options)
end
@doc """
Terminates the specified WorkSpaces.
Terminating a WorkSpace is a permanent action and cannot be undone. The
user's data is not maintained and will be destroyed. If you need to archive
any user data, contact Amazon Web Services before terminating the
WorkSpace.
You can terminate a WorkSpace that is in any state except `SUSPENDED`.
Note: this operation is asynchronous and returns before the WorkSpaces
have been completely terminated.
"""
def terminate_workspaces(client, input, options \\ []) do
request(client, "TerminateWorkspaces", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "workspaces"}
host = get_host("workspaces", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "WorkspacesService.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end

# source: lib/aws/workspaces.ex
defmodule Poison.DecodeError do
@type t :: %__MODULE__{message: String.t(), value: any}
defexception message: nil, value: nil
def message(%{message: nil, value: value}) do
"unable to decode value: #{inspect(value)}"
end
def message(%{message: message}) do
message
end
end
defmodule Poison.Decode do
@moduledoc false
alias Poison.Decoder
def transform(value, options) when is_map(value) or is_list(value) do
case Map.get(options, :as) do
nil -> value
as -> transform(value, Map.get(options, :keys), as, options)
end
end
def transform(value, _options) do
value
end
defp transform(nil, _keys, _as, _options), do: nil
defp transform(value, keys, %{__struct__: _} = as, options) do
transform_struct(value, keys, as, options)
end
defp transform(value, keys, as, options) when is_map(as) do
transform_map(value, keys, as, options)
end
defp transform(value, keys, [as], options) do
for v <- value, do: transform(v, keys, as, options)
end
defp transform(value, _keys, _as, _options) do
value
end
defp transform_map(value, keys, as, options) do
Enum.reduce(as, value, fn {key, as}, acc ->
case Map.get(acc, key) do
value when is_map(value) or is_list(value) ->
Map.put(acc, key, transform(value, keys, as, options))
_value ->
acc
end
end)
end
defp transform_struct(value, keys, as, options)
when keys in [:atoms, :atoms!] do
as
|> Map.from_struct()
|> Map.merge(value)
|> do_transform_struct(keys, as, options)
end
defp transform_struct(value, keys, as, options) do
as
|> Map.from_struct()
|> Enum.reduce(%{}, fn {key, default}, acc ->
Map.put(acc, key, Map.get(value, Atom.to_string(key), default))
end)
|> do_transform_struct(keys, as, options)
end
defp do_transform_struct(value, keys, as, options) do
default = struct(as.__struct__)
as
|> Map.from_struct()
|> Enum.reduce(%{}, fn {key, as}, acc ->
new_value =
case Map.fetch(value, key) do
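# `{:ok, ^as}` matches when the field still holds the placeholder taken
# from the `as` spec (i.e. the input carried no data for this key), so the
# struct default is used instead of transforming the spec itself.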
{:ok, ^as} when is_map(as) or is_list(as) ->
Map.get(default, key)
{:ok, value} when is_map(value) or is_list(value) ->
transform(value, keys, as, options)
{:ok, value} ->
value
:error ->
Map.get(default, key)
end
Map.put(acc, key, new_value)
end)
|> Map.put(:__struct__, as.__struct__)
|> Decoder.decode(options)
end
end
defprotocol Poison.Decoder do
@fallback_to_any true
@typep as :: map | struct | [as]
@type options :: %{
optional(:keys) => :atoms | :atoms!,
optional(:decimal) => boolean,
optional(:as) => as
}
@spec decode(t, options) :: any
def decode(value, options)
end
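# Example (a sketch): customizing decoding for one struct. In practice the
# `:as` option of `Poison.decode!/2` is what drives these callbacks.
#
#     defmodule User do
#       defstruct [:name, :joined]
#     end
#
#     defimpl Poison.Decoder, for: User do
#       def decode(%{joined: joined} = user, _options) when is_binary(joined) do
#         %{user | joined: NaiveDateTime.from_iso8601!(joined)}
#       end
#
#       def decode(user, _options), do: user
#     end
#
#     Poison.decode!(~s({"name": "Jane", "joined": "2020-01-01T00:00:00"}), as: %User{})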
defimpl Poison.Decoder, for: Any do
def decode(value, _options) do
value
end
end

# source: lib/poison/decoder.ex
defmodule RedshiftEcto do
@moduledoc """
Ecto adapter for [AWS Redshift](https://aws.amazon.com/redshift/).
It uses `Postgrex` for communicating to the database and a connection pool,
such as `DBConnection.Poolboy`.
This adapter is based on Ecto's builtin `Ecto.Adapters.Postgres` adapter. It
delegates some functions to it but changes the implementation of most that
are incompatible with Redshift. The differences are detailed in this
documentation.
We also recommend developers to consult the documentation of the
[Postgres adapter](https://hexdocs.pm/ecto/Ecto.Adapters.Postgres.html).
## Notable differences
* no array type
* maps are stored as json in `varchar(max)` columns
* the `:binary_id` and `:uuid` Ecto types are stored in `char(36)` and
generated as text
* no binary type and literal support
* no aliases in `UPDATE` and `DELETE FROM` statements
* no `RETURNING`
* no support for `on_conflict` (except for the default `:raise`)
* no support for `on_delete` and `on_update` on foreign key definitions
* no support for `ALTER COLUMN`
* no support for `CHECK` and `EXCLUDE` constraints
* since Redshift doesn't enforce uniqueness and foreign key constraints the
adapter can't report violations
## Migrations
RedshiftEcto supports migrations with the exceptions of features that are not
supported by Redshift (see above). There are also some extra features in
migrations to help specify table attributes and column options available in
Redshift.
We highly recommend reading the
[Designing Tables](https://docs.aws.amazon.com/redshift/latest/dg/t_Creating_tables.html)
section from the AWS Redshift documentation.
### Table options
While similarly to other adapters RedshiftEcto accepts table options as an
opaque string, it also supports a keyword list with the following options:
* `:diststyle`: data distribution style, possible values: `:even`, `:key`, `:all`
* `:distkey`: specify the column to be used as the distribution key
* `:sortkey`: specify one or more sort keys, the value can be a single column
name, a list of columns, or a 2-tuple where the first element is a sort
style specifier (`:compound` or `:interleaved`) and the second is a single
column name or a list of columns
#### Examples
create table("posts", options: [distkey: :id, sortkey: :title])
create table("categories", options: [diststyle: :all, sortkey: {:interleaved, [:name, :parent_id]}])
create table("reports", options: [diststyle: :even, sortkey: [:department, :year, :month]])
### Column options
In addition to the column options accepted by `Ecto.Migration.add/3`
RedshiftEcto also accepts the following Redshift specific column options:
* `:identity`: specifies that the column is an identity column. The value must
be a tuple of two integers where the first is the `seed` and the second the
`step`. For example, `identity: {0, 1}` specifies that the values start from
`0` and increments by `1`. It's worth noting that identity columns may
behave differently in Redshift that one might be used to. See the [AWS
Redshift docs][identity] for more details.
* `:encode`: compression encoding for the column, possible values are lower
case atom version of the compression encodings supported by Redshift. Some
common values: `:zstd`, `:lzo`, `:delta`, `:bytedict`, `:raw`. See the [AWS
Redshift docs][encodings] for more.
* `:distkey`: specify the column as the distribution key (value must be `true`)
* `:sortkey`: specify the column as the single (compound) sort key of the table
(value must be `true`)
* `:unique`: specify that the column can contain only unique values. Note that
Redshift won't enforce uniqueness.
[identity]: https://docs.aws.amazon.com/redshift/latest/dg/r_CREATE_TABLE_NEW.html#identity-clause
[encodings]: https://docs.aws.amazon.com/redshift/latest/dg/c_Compression_encodings.html
#### Examples
create table("posts") do
add :id, :serial, primary_key: true, distkey: true, encode: :delta
add :title, :string, size: 765, null: false, unique: true, sortkey: true, encode: :lzo
add :counter, :serial, identity: {0, 1}, encode: :delta
add :views, :smallint, default: 0, encode: :mostly8
add :author, :string, default: "anonymous", encode: :text255
add :created_at, :naive_datetime, encode: :zstd
end
"""
# Inherit all behaviour from Ecto.Adapters.SQL
use Ecto.Adapters.SQL,
driver: :postgrex,
migration_lock: "FOR UPDATE"
alias Ecto.Adapters.Postgres
# And provide a custom storage implementation
@behaviour Ecto.Adapter.Storage
@behaviour Ecto.Adapter.Structure
defdelegate extensions, to: Postgres
## Custom Redshift types
@doc false
def autogenerate(:id), do: nil
def autogenerate(:embed_id), do: Ecto.UUID.generate()
def autogenerate(:binary_id), do: Ecto.UUID.generate()
@doc false
def loaders(:map, type), do: [&json_decode/1, type]
def loaders({:map, _}, type), do: [&json_decode/1, type]
def loaders({:embed, _} = type, _) do
[&json_decode/1, &Ecto.Adapters.SQL.load_embed(type, &1)]
end
def loaders(:binary_id, _type), do: [&{:ok, &1}]
def loaders(:uuid, Ecto.UUID), do: [&{:ok, &1}]
def loaders(_, type), do: [type]
defp json_decode(x) when is_binary(x) do
{:ok, Ecto.Adapter.json_library().decode!(x)}
end
defp json_decode(x), do: {:ok, x}
@doc false
def dumpers(:map, type), do: [type, &json_encode/1]
def dumpers({:map, _}, type), do: [type, &json_encode/1]
def dumpers({:embed, _} = type, _) do
[&Ecto.Adapters.SQL.dump_embed(type, &1), &json_encode/1]
end
def dumpers(:binary_id, _type), do: [&Ecto.UUID.cast/1]
def dumpers(:uuid, Ecto.UUID), do: [&Ecto.UUID.cast/1]
def dumpers(_, type), do: [type]
defp json_encode(%{} = x) do
{:ok, Ecto.Adapter.json_library().encode!(x)}
end
defp json_encode(x), do: {:ok, x}
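# A quick round-trip sketch of the two helpers above (exact JSON formatting
# depends on the configured json library):
#
#     json_encode(%{"a" => 1})  # => {:ok, ~s({"a":1})}
#     json_decode(~s({"a":1}))  # => {:ok, %{"a" => 1}}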
## Storage API
@doc false
@impl true
def storage_up(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
encoding = opts[:encoding] || "UTF8"
opts = Keyword.put(opts, :database, "template1")
command = ~s(CREATE DATABASE "#{database}" ENCODING '#{encoding}')
case run_query(command, opts) do
{:ok, _} ->
:ok
{:error, %{postgres: %{code: :duplicate_database}}} ->
{:error, :already_up}
{:error, error} ->
{:error, Exception.message(error)}
end
end
@doc false
@impl true
def storage_down(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
command = "DROP DATABASE \"#{database}\""
opts = Keyword.put(opts, :database, "template1")
case run_query(command, opts) do
{:ok, _} ->
:ok
{:error, %{postgres: %{code: :invalid_catalog_name}}} ->
{:error, :already_down}
{:error, error} ->
{:error, Exception.message(error)}
end
end
@impl Ecto.Adapter.Storage
def storage_status(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
opts = Keyword.put(opts, :database, "template1")
check_database_query = "SELECT datname FROM pg_catalog.pg_database WHERE datname = '#{database}'"
case run_query(check_database_query, opts) do
{:ok, %{num_rows: 0}} -> :down
{:ok, %{num_rows: _num_rows}} -> :up
other -> {:error, other}
end
end
@doc false
def supports_ddl_transaction? do
true
end
defdelegate structure_dump(default, config), to: Postgres
defdelegate structure_load(default, config), to: Postgres
## Helpers
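# Runs a one-off SQL command on a dedicated connection. The query runs in a
# supervised task so that a crashing connection process takes down only the
# task (whose exit is translated into an error tuple below), never the caller.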
defp run_query(sql, opts) do
{:ok, _} = Application.ensure_all_started(:postgrex)
opts =
opts
|> Keyword.drop([:name, :log])
|> Keyword.put(:pool, DBConnection.Connection)
|> Keyword.put(:backoff_type, :stop)
{:ok, pid} = Task.Supervisor.start_link()
task =
Task.Supervisor.async_nolink(pid, fn ->
{:ok, conn} = Postgrex.start_link(opts)
value = RedshiftEcto.Connection.execute(conn, sql, [], opts)
GenServer.stop(conn)
value
end)
timeout = Keyword.get(opts, :timeout, 15_000)
case Task.yield(task, timeout) || Task.shutdown(task) do
{:ok, {:ok, result}} ->
{:ok, result}
{:ok, {:error, error}} ->
{:error, error}
{:exit, {%{__struct__: struct} = error, _}}
when struct in [Postgrex.Error, DBConnection.Error] ->
{:error, error}
{:exit, reason} ->
{:error, RuntimeError.exception(Exception.format_exit(reason))}
nil ->
{:error, RuntimeError.exception("command timed out")}
end
end
end

# source: lib/redshift_ecto.ex
defmodule Momento do
require Momento.Guards
@moduledoc """
Momento is an Elixir port of [Moment.js](https://github.com/moment/moment) for the purpose of parsing, validating,
manipulating, and formatting dates.
"""
@doc """
Provides a DateTime struct representing the current date and time.
## Examples
iex> Momento.date
{:ok,
%DateTime{calendar: Calendar.ISO, day: 1, hour: 21, microsecond: {827272, 6},
minute: 27, month: 7, second: 19, std_offset: 0, time_zone: "Etc/UTC",
utc_offset: 0, year: 2016, zone_abbr: "UTC"}}
"""
@spec date :: {:ok, DateTime.t}
def date, do: Momento.Date.date
@doc """
Provides a DateTime struct from any recognizable form of input, such as an ISO string or UNIX timestamp.
## Examples
Momento.date(1467408735)
{:ok,
 %DateTime{calendar: Calendar.ISO, day: 1, hour: 21, microsecond: {0, 0},
  minute: 32, month: 7, second: 15, std_offset: 0, time_zone: "Etc/UTC",
  utc_offset: 0, year: 2016, zone_abbr: "UTC"}}
"""
@spec date(any) :: {:ok, DateTime.t}
def date(arg), do: Momento.Date.date(arg)
@doc """
Shortcut to get a `DateTime` struct representing now.
## Examples
iex> Momento.date!
%DateTime{calendar: Calendar.ISO, day: 1, hour: 21, microsecond: {0, 0},
minute: 32, month: 7, second: 15, std_offset: 0, time_zone: "Etc/UTC",
utc_offset: 0, year: 2016, zone_abbr: "UTC"}
"""
@spec date! :: DateTime.t
def date!, do: Momento.Date.date!
@doc """
Provides a DateTime struct from any recognizable form of input, such as an ISO string or UNIX timestamp.
## Examples
iex> Momento.date!(1467408735)
%DateTime{calendar: Calendar.ISO, day: 1, hour: 21, microsecond: {0, 0},
minute: 32, month: 7, second: 15, std_offset: 0, time_zone: "Etc/UTC",
utc_offset: 0, year: 2016, zone_abbr: "UTC"}
"""
@spec date!(any) :: DateTime.t
def date!(arg), do: Momento.Date.date!(arg)
@doc """
Add a specified amount of time to a given DateTime struct.
## Examples
iex> Momento.date! |> Momento.add(2, :years)
%DateTime{calendar: Calendar.ISO, day: 1, hour: 21, microsecond: {796482, 6},
minute: 38, month: 7, second: 18, std_offset: 0, time_zone: "Etc/UTC",
utc_offset: 0, year: 2018, zone_abbr: "UTC"}
"""
@spec add(DateTime.t, integer, atom) :: DateTime.t
def add(datetime, num, time), do: Momento.Add.add(datetime, num, time)
@doc """
Subtract a specified amount of time from a given DateTime struct.
## Examples
iex> Momento.date! |> Momento.subtract(2, :years)
%DateTime{calendar: Calendar.ISO, day: 1, hour: 21, microsecond: {19292, 6},
minute: 39, month: 7, second: 11, std_offset: 0, time_zone: "Etc/UTC",
utc_offset: 0, year: 2014, zone_abbr: "UTC"}
"""
@spec subtract(DateTime.t, integer, atom) :: DateTime.t
def subtract(datetime, num, time), do: Momento.Subtract.subtract(datetime, num, time)
@doc """
Format a given DateTime struct to a desired date string.
## Examples
iex> Momento.date! |> Momento.format("YYYY-MM-DD")
"2016-07-01"
"""
@spec format(DateTime.t, String.t) :: String.t
def format(datetime, tokens), do: Momento.Format.format(datetime, tokens)
end

# source: lib/momento.ex
defimpl Timex.Protocol, for: Tuple do
alias Timex.Types
import Timex.Macros
@epoch :calendar.datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}})
@spec to_julian(Types.date | Types.datetime | Types.microsecond_datetime) :: float | {:error, term}
def to_julian(date) do
with {:ok, {{y, m, d}, _time}} <- to_erl_datetime(date),
do: Timex.Calendar.Julian.julian_date(y, m, d)
end
@spec to_gregorian_seconds(Types.date | Types.datetime | Types.microsecond_datetime) :: non_neg_integer
def to_gregorian_seconds(date) do
with {:ok, date} <- to_erl_datetime(date),
do: :calendar.datetime_to_gregorian_seconds(date)
end
@spec to_gregorian_microseconds(Types.date | Types.datetime | Types.microsecond_datetime) :: non_neg_integer
def to_gregorian_microseconds(date) do
with {:ok, erl_date} <- to_erl_datetime(date),
do: :calendar.datetime_to_gregorian_seconds(erl_date) * 1_000 * 1_000 + get_microseconds(date)
end
@spec to_unix(Types.date | Types.datetime | Types.microsecond_datetime) :: non_neg_integer
def to_unix(date) do
with {:ok, date} <- to_erl_datetime(date),
do: :calendar.datetime_to_gregorian_seconds(date) - @epoch
end
@spec to_date(Types.date | Types.datetime | Types.microsecond_datetime) :: Date.t
def to_date(date) do
with {:ok, {{y, m, d}, _}} <- to_erl_datetime(date),
do: %Date{year: y, month: m, day: d}
end
@spec to_datetime(Types.date | Types.datetime | Types.microsecond_datetime, Types.valid_timezone) ::
DateTime.t | {:error, term}
def to_datetime({y,m,d} = date, timezone) when is_date(y,m,d) do
Timex.DateTime.Helpers.construct({date, {0,0,0}}, timezone)
end
def to_datetime({{y,m,d},{h,mm,s}} = dt, timezone) when is_datetime(y,m,d,h,mm,s) do
Timex.DateTime.Helpers.construct(dt, timezone)
end
def to_datetime({{y,m,d},{h,mm,s,_us}} = dt, timezone) when is_datetime(y,m,d,h,mm,s) do
Timex.DateTime.Helpers.construct(dt, timezone)
end
def to_datetime(_, _), do: {:error, :invalid_date}
@spec to_naive_datetime(Types.date | Types.datetime | Types.microsecond_datetime) :: NaiveDateTime.t
def to_naive_datetime({{y,m,d},{h,mm,s,us}}) when is_datetime(y,m,d,h,mm,s) do
us = Timex.DateTime.Helpers.construct_microseconds(us, -1)
%NaiveDateTime{year: y, month: m, day: d, hour: h, minute: mm, second: s, microsecond: us}
end
def to_naive_datetime(date) do
with {:ok, {{y,m,d},{h,mm,s}}} <- to_erl_datetime(date),
do: %NaiveDateTime{year: y, month: m, day: d, hour: h, minute: mm, second: s, microsecond: {0,0}}
end
@spec to_erl(Types.date | Types.datetime | Types.microsecond_datetime) :: Types.date | Types.datetime | Types.microsecond_datetime
def to_erl({y,m,d} = date) when is_date(y,m,d), do: date
def to_erl(date) do
with {:ok, date} <- to_erl_datetime(date),
do: date
end
@spec century(Types.date | Types.datetime | Types.microsecond_datetime) :: non_neg_integer
def century({y,m,d}) when is_date(y,m,d), do: Timex.century(y)
def century({{y,m,d},_}) when is_date(y,m,d), do: Timex.century(y)
def century(_), do: {:error, :invalid_date}
@spec is_leap?(Types.date | Types.datetime | Types.microsecond_datetime) :: boolean
def is_leap?({y,m,d}) when is_date(y,m,d), do: :calendar.is_leap_year(y)
def is_leap?({{y,m,d},_}) when is_date(y,m,d), do: :calendar.is_leap_year(y)
def is_leap?(_), do: {:error, :invalid_date}
@spec beginning_of_day(Types.date | Types.datetime | Types.microsecond_datetime) :: Types.date | Types.datetime | Types.microsecond_datetime
def beginning_of_day({y,m,d} = date) when is_date(y,m,d), do: date
def beginning_of_day({{y,m,d}=date,_}) when is_date(y,m,d),
do: {date, {0,0,0}}
def beginning_of_day(_), do: {:error, :invalid_date}
@spec end_of_day(Types.date | Types.datetime | Types.microsecond_datetime) :: Types.date | Types.datetime | Types.microsecond_datetime
def end_of_day({y,m,d} = date) when is_date(y,m,d), do: date
def end_of_day({{y,m,d}=date,_}) when is_date(y,m,d),
do: {date, {23,59,59}}
def end_of_day(_), do: {:error, :invalid_date}
@spec beginning_of_week(Types.date | Types.datetime | Types.microsecond_datetime, Types.weekstart) :: Types.date | Types.datetime | Types.microsecond_datetime
def beginning_of_week({y,m,d} = date, weekstart) when is_date(y,m,d) do
case Timex.days_to_beginning_of_week(date, weekstart) do
{:error, _} = err -> err
days -> shift(date, [days: -days])
end
end
def beginning_of_week({{y,m,d} = date,_}, weekstart) when is_date(y,m,d) do
case Timex.days_to_beginning_of_week({date,{0,0,0}}, weekstart) do
{:error, _} = err -> err
days -> beginning_of_day(shift({date,{0,0,0}}, [days: -days]))
end
end
def beginning_of_week(_,_), do: {:error, :invalid_date}
@spec end_of_week(Types.date | Types.datetime | Types.microsecond_datetime, Types.weekstart) :: Types.date | Types.datetime | Types.microsecond_datetime
def end_of_week({y,m,d} = date, weekstart) when is_date(y,m,d) do
case Timex.days_to_end_of_week(date, weekstart) do
{:error, _} = err -> err
days_to_end ->
shift(date, [days: days_to_end])
end
end
def end_of_week({{y,m,d},_} = date, weekstart) when is_date(y,m,d) do
case Timex.days_to_end_of_week(date, weekstart) do
{:error, _} = err -> err
days_to_end ->
end_of_day(shift(date, [days: days_to_end]))
end
end
def end_of_week(_,_), do: {:error, :invalid_date}
@spec beginning_of_year(Types.date | Types.datetime | Types.microsecond_datetime) :: Types.date | Types.datetime | Types.microsecond_datetime
def beginning_of_year({y,m,d}) when is_date(y,m,d),
do: {y,1,1}
def beginning_of_year({{y,m,d},_}) when is_date(y,m,d),
do: {{y,1,1},{0,0,0}}
def beginning_of_year(_), do: {:error, :invalid_date}
@spec end_of_year(Types.date | Types.datetime | Types.microsecond_datetime) :: Types.date | Types.datetime | Types.microsecond_datetime
def end_of_year({y,m,d}) when is_date(y,m,d),
do: {y,12,31}
def end_of_year({{y,m,d},_}) when is_date(y,m,d),
do: {{y,12,31},{23,59,59}}
def end_of_year(_), do: {:error, :invalid_date}
@spec beginning_of_quarter(Types.date | Types.datetime | Types.microsecond_datetime) :: Types.date | Types.datetime | Types.microsecond_datetime
def beginning_of_quarter({y,m,d}) when is_date(y,m,d) do
month = 1 + (3 * (Timex.quarter(m) - 1))
{y,month,1}
end
def beginning_of_quarter({{y,m,d},{h,mm,s} = _time}) when is_datetime(y,m,d,h,mm,s) do
month = 1 + (3 * (Timex.quarter(m) - 1))
{{y,month,1},{0,0,0}}
end
def beginning_of_quarter({{y,m,d},{h,mm,s,_us} = _time}) when is_datetime(y,m,d,h,mm,s) do
month = 1 + (3 * (Timex.quarter(m) - 1))
{{y,month,1},{0,0,0,0}}
end
def beginning_of_quarter(_), do: {:error, :invalid_date}
@spec end_of_quarter(Types.date | Types.datetime | Types.microsecond_datetime) :: Types.date | Types.datetime | Types.microsecond_datetime
def end_of_quarter({y,m,d}) when is_date(y,m,d) do
month = 3 * Timex.quarter(m)
end_of_month({y,month,d})
end
def end_of_quarter({{y,m,d},{h,mm,s} = time}) when is_datetime(y,m,d,h,mm,s) do
month = 3 * Timex.quarter(m)
end_of_month({{y,month,d}, time})
end
def end_of_quarter({{y,m,d},{h,mm,s,_us}}) when is_datetime(y,m,d,h,mm,s) do
month = 3 * Timex.quarter(m)
end_of_month({{y,month,d}, {h,mm,s}})
end
def end_of_quarter(_), do: {:error, :invalid_date}
@spec beginning_of_month(Types.date | Types.datetime | Types.microsecond_datetime) :: Types.date | Types.datetime | Types.microsecond_datetime
def beginning_of_month({y,m,d}) when is_date(y,m,d),
do: {y,m,1}
def beginning_of_month({{y,m,d},_}) when is_date(y,m,d),
do: {{y,m,1},{0,0,0}}
def beginning_of_month(_), do: {:error, :invalid_date}
@spec end_of_month(Types.date | Types.datetime | Types.microsecond_datetime) :: Types.date | Types.datetime | Types.microsecond_datetime
def end_of_month({y,m,d} = date) when is_date(y,m,d),
do: {y,m,days_in_month(date)}
def end_of_month({{y,m,d},_} = date) when is_date(y,m,d),
do: {{y,m,days_in_month(date)},{23,59,59}}
def end_of_month(_), do: {:error, :invalid_date}
@spec quarter(Types.date | Types.datetime | Types.microsecond_datetime) :: 1..4
def quarter({y,m,d}) when is_date(y,m,d), do: Timex.quarter(m)
def quarter({{y,m,d},_}) when is_date(y,m,d), do: Timex.quarter(m)
def quarter(_), do: {:error, :invalid_date}
def days_in_month({y,m,d}) when is_date(y,m,d), do: Timex.days_in_month(y, m)
def days_in_month({{y,m,d},_}) when is_date(y,m,d), do: Timex.days_in_month(y, m)
def days_in_month(_), do: {:error, :invalid_date}
def week_of_month({y,m,d}) when is_date(y,m,d), do: Timex.week_of_month(y,m,d)
def week_of_month({{y,m,d},_}) when is_date(y,m,d), do: Timex.week_of_month(y,m,d)
def week_of_month(_), do: {:error, :invalid_date}
def weekday({y,m,d} = date) when is_date(y,m,d), do: :calendar.day_of_the_week(date)
def weekday({{y,m,d} = date,_}) when is_date(y,m,d), do: :calendar.day_of_the_week(date)
def weekday(_), do: {:error, :invalid_date}
def day({y,m,d} = date) when is_date(y,m,d),
do: 1 + Timex.diff(date, {y,1,1}, :days)
def day({{y,m,d} = date,_}) when is_date(y,m,d),
do: 1 + Timex.diff(date, {y,1,1}, :days)
def day(_), do: {:error, :invalid_date}
def is_valid?({y,m,d}) when is_date(y,m,d), do: true
def is_valid?({{y,m,d},{h,mm,s}}) when is_datetime(y,m,d,h,mm,s), do: true
def is_valid?({{y,m,d},{h,mm,s,_us}}) when is_datetime(y,m,d,h,mm,s), do: true
def is_valid?(_), do: false
def iso_week({y,m,d}) when is_date(y,m,d),
do: Timex.iso_week(y, m, d)
def iso_week({{y,m,d}, _}) when is_date(y,m,d),
do: Timex.iso_week(y, m, d)
def iso_week(_), do: {:error, :invalid_date}
def from_iso_day({y,m,d}, day) when is_day_of_year(day) and is_date(y,m,d) do
{year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(y, day)
{year, month, day_of_month}
end
def from_iso_day({{y,m,d},{_,_,_}=time}, day) when is_day_of_year(day) and is_date(y,m,d) do
{year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(y, day)
{{year, month, day_of_month}, time}
end
def from_iso_day({{y,m,d},{_,_,_,_}=time}, day) when is_day_of_year(day) and is_date(y,m,d) do
{year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(y, day)
{{year, month, day_of_month}, time}
end
def from_iso_day(_,_), do: {:error, :invalid_date}
@spec set(Types.date | Types.datetime | Types.microsecond_datetime, list({atom(), term})) :: Types.date | Types.datetime | Types.microsecond_datetime | {:error, term}
def set({y,m,d} = date, options) when is_date(y,m,d),
do: do_set({date,{0,0,0}}, options, :date)
def set({{y,m,d},{h,mm,s}} = datetime, options) when is_datetime(y,m,d,h,mm,s),
do: do_set(datetime, options, :datetime)
def set({{y,m,d},{h,mm,s,us}}, options) when is_datetime(y,m,d,h,mm,s) do
{date,{h,mm,s}} = do_set({{y,m,d},{h,mm,s}}, options, :datetime)
{date,{h,mm,s,us}}
end
def set(_,_), do: {:error, :invalid_date}
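# Applies each option in turn, threading the (possibly reshaped) tuple
# through the reduction. With `validate: true` (the default) every field is
# normalized into range instead of producing an invalid tuple.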
defp do_set(date, options, datetime_type) do
validate? = Keyword.get(options, :validate, true)
Enum.reduce(options, date, fn
_option, {:error, _} = err ->
err
option, result ->
case option do
{:validate, _} ->
result
{:datetime, {{_,_,_} = date, {_,_,_} = time} = dt} ->
if validate? do
case datetime_type do
:date -> Timex.normalize(:date, date)
:datetime ->
{Timex.normalize(:date, date), Timex.normalize(:time, time)}
end
else
case datetime_type do
:date -> date
:datetime -> dt
end
end
{:date, {_, _, _} = d} ->
if validate? do
case result do
{_,_,_} -> Timex.normalize(:date, d)
{{_,_,_}, {_,_,_} = t} -> {Timex.normalize(:date, d), t}
end
else
case result do
{_,_,_} -> d
{{_,_,_}, {_,_,_} = t} -> {d,t}
end
end
{:time, {_,_,_} = t} ->
if validate? do
case result do
{_,_,_} -> result
{{_,_,_}=d,{_,_,_}} -> {d, Timex.normalize(:time, t)}
end
else
case result do
{_,_,_} -> result
{{_,_,_}=d,{_,_,_}} -> {d,t}
end
end
{:day, d} ->
if validate? do
case result do
{y,m,_} -> {y,m, Timex.normalize(:day, {y,m,d})}
{{y,m,_},{_,_,_}=t} -> {{y,m, Timex.normalize(:day, {y,m,d})}, t}
end
else
case result do
{y,m,_} -> {y,m,d}
{{y,m,_}, {_,_,_} = t} -> {{y,m,d}, t}
end
end
{:year, year} ->
if validate? do
case result do
{_,m,d} -> {Timex.normalize(:year, year), m, d}
{{_,m,d},{_,_,_} = t} -> {{Timex.normalize(:year, year),m,d}, t}
end
else
case result do
{_,m,d} -> {year,m,d}
{{_,m,d},{_,_,_} = t} -> {{year,m,d}, t}
end
end
{:month, month} ->
if validate? do
case result do
{y,_,d} ->
{y, Timex.normalize(:month, month), Timex.normalize(:day, {y, month, d})}
{{y,_,d},{_,_,_} = t} ->
{{y, Timex.normalize(:month, month),Timex.normalize(:day, {y, month, d})}, t}
end
else
case result do
{y,_,d} -> {y,month,d}
{{y,_,d},{_,_,_} = t} -> {{y,month,d}, t}
end
end
{:hour, hour} ->
if validate? do
case result do
{_,_,_} -> result
{{_,_,_} = d,{_,m,s}} -> {d, {Timex.normalize(:hour, hour),m,s}}
end
else
case result do
{_,_,_} -> result
{{_,_,_} = d,{_,m,s}} -> {d, {hour,m,s}}
end
end
{:minute, min} ->
if validate? do
case result do
{_,_,_} -> result
{{_,_,_} = d,{h,_,s}} -> {d, {h, Timex.normalize(:minute, min),s}}
end
else
case result do
{_,_,_} -> result
{{_,_,_} = d,{h,_,s}} -> {d, {h,min,s}}
end
end
{:second, sec} ->
if validate? do
case result do
{_,_,_} -> result
{{_,_,_} = d,{h,m,_}} -> {d, {h, m, Timex.normalize(:second, sec)}}
end
else
case result do
{_,_,_} -> result
{{_,_,_} = d,{h,m,_}} -> {d, {h,m,sec}}
end
end
{name, _} when name in [:timezone, :microsecond] ->
result
{option_name, _} ->
{:error, {:bad_option, option_name}}
end
end)
end
@spec shift(Types.date | Types.datetime | Types.microsecond_datetime, list({atom(), term})) ::
Types.date | Types.datetime | Types.microsecond_datetime | {:error, term}
def shift(date, [{_, 0}]),
do: date
def shift({y,m,d}=date, options) when is_date(y,m,d),
do: do_shift(date, options, :date)
def shift({{y,m,d},{h,mm,s}}=datetime, options) when is_datetime(y,m,d,h,mm,s),
do: do_shift(datetime, options, :datetime)
def shift({{y,m,d},{h,mm,s,_us}}=datetime, options) when is_datetime(y,m,d,h,mm,s),
do: do_shift(datetime, options, :datetime)
def shift(_, _), do: {:error, :invalid_date}
defp to_erl_datetime({y,m,d} = date) when is_date(y,m,d),
do: {:ok, {date,{0,0,0}}}
defp to_erl_datetime({{y,m,d},{h,mm,s}} = dt) when is_datetime(y,m,d,h,mm,s),
do: {:ok, dt}
defp to_erl_datetime({{y,m,d},{h,mm,s,_us}}) when is_datetime(y,m,d,h,mm,s),
do: {:ok, {{y,m,d},{h,mm,s}}}
defp to_erl_datetime(_),
do: {:error, :invalid_date}
defp get_microseconds({_, _, _,us}) when is_integer(us),
do: us
defp get_microseconds({_, _, _, {us, _precision}}) when is_integer(us),
do: us
defp get_microseconds({_, _, _}),
do: 0
defp get_microseconds({date, time}) when is_tuple(date) and is_tuple(time),
do: get_microseconds(time)
defp do_shift(date, options, type) do
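# As written, only :weeks, :days and sub-day amounts smaller than one day
# survive this rejection: day-sized or larger sub-day amounts and any other
# unit are dropped before delegating to Timex.shift/2 on a naive datetime.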
allowed_options = Enum.reject(options, fn
{:weeks, _} -> false
{:days, _} -> false
{:hours, value} when value >= 24 or value <= -24 -> true
{:hours, _} -> false
{:minutes, value} when value >= 24*60 or value <= -24*60 -> true
{:minutes, _} -> false
{:seconds, value} when value >= 24*60*60 or value <= -24*60*60 -> true
{:seconds, _} -> false
{:milliseconds, value} when value >= 24*60*60*1000 or value <= -24*60*60*1000 -> true
{:milliseconds, _} -> false
{:microseconds, {value, _}} when value >= 24*60*60*1000*1000 or value <= -24*60*60*1000*1000 -> true
{:microseconds, value} when value >= 24*60*60*1000*1000 or value <= -24*60*60*1000*1000 -> true
{:microseconds, _} -> false
{_type, _value} -> true
end)
case Timex.shift(to_naive_datetime(date), allowed_options) do
{:error, _} = err -> err
%NaiveDateTime{} = nd when type == :date ->
{nd.year,nd.month,nd.day}
%NaiveDateTime{} = nd when type == :datetime ->
{{nd.year,nd.month,nd.day}, {nd.hour,nd.minute,nd.second}}
end
end
end

# source: lib/datetime/erlang.ex
defmodule Engine.Callbacks.Deposit do
@moduledoc """
Contains the business logic of persisting a deposit event and creating the
appropriate UTXO.
When you deposit into the network, you send a 'deposit' transaction to the
contract directly. Upon success, the contract generates a block just for that
single transaction (incrementing blknum by `1` vs `1000` in blocks submitted
by the childchain). Example:
- blknum 1000, this is submitted by the childchain and contains non-deposit transactions
- blknum 1001-1999, these would be deposits from the contract, in its own blocks (upto 999 deposits)
- blknum 2000, this is the next submitted childchain block, containing non-deposit transactions
"""
@behaviour Engine.Callback
use Spandex.Decorators
alias Ecto.Multi
alias Engine.Callback
alias Engine.DB.Output
alias Engine.Repo
require Logger
@doc """
Inserts deposit events, forming the associated UTXOs.
This wraps all the built deposits into one DB transaction.
"""
@impl Callback
@decorate trace(service: :ecto, type: :backend)
# TODO: it seems we need to call update_listener_height/3 here as well!
def callback([], _listener), do: {:ok, :noop}
def callback(events, listener) do
Multi.new()
|> Callback.update_listener_height(events, listener)
|> do_callback(events)
|> Repo.transaction()
end
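# The event shape consumed below, sketched with assumed field values (the
# exact binaries come from the Ethereum event listener):
#
#     %{data: %{"blknum" => 1001, "depositor" => <<1::160>>,
#               "token" => <<0::160>>, "amount" => 1_000_000_000}}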
defp do_callback(multi, [event | tail]), do: multi |> build_deposit(event) |> do_callback(tail)
defp do_callback(multi, []), do: multi
defp build_deposit(multi, event) do
deposit_blknum = event.data["blknum"]
changeset = Output.deposit(deposit_blknum, event.data["depositor"], event.data["token"], event.data["amount"])
_ =
Logger.info(
"Recognized deposit blknum #{deposit_blknum} event from #{inspect(event.data["depositor"])} of #{
inspect(event.data["token"])
} amount #{event.data["amount"]}"
)
Multi.insert(multi, "deposit-#{deposit_blknum}", changeset,
on_conflict: :nothing,
conflict_target: :position
)
end
end

# source: apps/engine/lib/engine/callbacks/deposit.ex
defmodule Unicode.Transform.Rule.Definition do
@moduledoc """
#### 10.3.7 [Variable Definition Rules](https://unicode.org/reports/tr35/tr35-general.html#Variable_Definition_Rules)
Each variable definition is of the following form:
```
$variableName = contents ;
```
The variable name can contain letters and digits, but must start with a letter. More precisely, the variable names use Unicode identifiers as defined by [[UAX31](https://www.unicode.org/reports/tr41/#UAX31)]. The identifier properties allow for the use of foreign letters and numbers.
The contents of a variable definition is any sequence of Unicode sets and characters or characters. For example:
```
$mac = M [aA] [cC] ;
```
Variables are only replaced within other variable definition rules and within conversion rules. They have no effect on transliteration rules.
"""
@fields [:variable, :value, :comment]
defstruct @fields
alias Unicode.Transform.Rule.Comment
alias Unicode.Transform.Utils
@regex ~r/(?<variable>[a-zA-Z][a-zA-Z0-9]*)\s*=\s*(?<value>[^;]*)\s*;(\s*\#\s*(?<comment>.*))?/u
def parse(<<"$">> <> rule) do
if Regex.match?(~r/(?<!\\)=/, rule) do
parsed =
@regex
|> Regex.named_captures(rule)
|> unescape_value()
|> maybe_nilify_comment()
|> Utils.atomize_keys()
struct(__MODULE__, parsed)
else
nil
end
end
def parse(_other) do
nil
end
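# Example (a sketch of the happy path):
#
#     parse(~S($mac = M [aA] [cC] ;))
#     # => %__MODULE__{variable: "mac", value: "M [aA] [cC]", comment: nil}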
defp unescape_value(%{"value" => value} = rule) do
value =
value
|> String.trim()
|> Utils.unescape_string()
%{rule | "value" => value}
end
defp maybe_nilify_comment(%{"comment" => ""} = rule) do
%{rule | "comment" => nil}
end
defp maybe_nilify_comment(%{"comment" => comment} = rule) do
%{rule | "comment" => String.trim(comment)}
end
defimpl Unicode.Transform.Rule do
def to_forward_code(rule) do
[
Comment.comment_from(rule),
"define(",
inspect("$" <> rule.variable),
", ",
inspect(rule.value),
")",
"\n"
]
end
def to_backward_code(rule) do
[
Comment.comment_from(rule),
"define(",
inspect("$" <> rule.variable),
", ",
inspect(rule.value),
")",
"\n"
]
end
end
end

# source: lib/unicode/transform/rule/definition.ex
defmodule Unicode.String.Segment do
@moduledoc """
Implements the compilation of the Unicode
segment rules.
"""
import SweetXml
require Unicode.Set
@suppressions_variable "$Suppressions"
# This is the formal definition but it takes a while to compile
# and all of the known variable names are in the Latin-1 set
# defguard is_id_start(char) when Unicode.Set.match?(char, "\\p{ID_start}")
# defguard is_id_continue(char) when Unicode.Set.match?(char, "\\p{ID_continue}")
@doc "Identifies if a codepoint is a valid start of an indentifier"
defguard is_id_start(char)
when char in ?A..?Z
@doc "Identifies if a codepoint is a valid identifier character"
defguard is_id_continue(char)
when char in ?a..?z or char in ?A..?Z or char in ?0..?9 or char == ?_
@doc "Returns a list of the locales known to `Unicode.String.Break`"
def known_locales do
locale_map()
|> Map.keys
end
@doc """
Return the rules as defined by CLDR for a given
locale and break type.
"""
def rules(locale, segment_type, additional_variables \\ []) do
with {:ok, segment} <- segments(locale, segment_type) do
variables = Map.fetch!(segment, :variables) |> expand_variables(additional_variables)
rules = Map.fetch!(segment, :rules)
rules
|> compile_rules(variables)
|> wrap(:ok)
end
end
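# For example (a sketch; segment types are underscored atoms derived from
# the CLDR data, e.g. :word_break):
#
#     {:ok, rules} = Unicode.String.Segment.rules("en", :word_break)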
@doc """
Return the rules as defined by CLDR for a given
locale and break type and raises on error.
"""
def rules!(locale, segment_type, additional_variables \\ []) do
case rules(locale, segment_type, additional_variables) do
{:ok, rules} -> rules
{:error, reason} -> raise ArgumentError, reason
end
end
def compile_rules(rules, variables) when is_list(rules) do
rules
|> expand_rules(variables)
|> compile_rules
end
@doc """
Compiles a segment rule in the context of a list
of variables.
The compile rule can then be inserted into a
rule set.
"""
def compile_rule(rule, variables) when is_map(rule) do
compile_rules([rule], variables)
|> hd
end
# These options set unicode mode: interpret certain
# codes like \B and \w in the unicode space, and ignore
# unescaped whitespace in regexes.
@regex_options [:unicode, :extended, :ucp, :dollar_endonly, :dotall, :bsr_unicode]
@rule_splitter ~r/[×÷]/u
defp compile_rules(rules) do
Enum.map(rules, fn {sequence, rule} ->
[left, operator, right] = Regex.split(@rule_splitter, rule, include_captures: true)
operator = if operator == "×", do: :no_break, else: :break
left = if left != "", do: left <> "$", else: left
right = if right != "", do: "^" <> right, else: right
{sequence, {operator, compile_regex!(left), compile_regex!(right)}}
end)
end
@doc false
def suppressions_variable(locale, segment_type) do
variable =
locale
|> suppressions!(segment_type)
|> suppressions_regex
if variable do
%{name: @suppressions_variable, value: variable}
else
nil
end
end
defp suppressions_regex([]) do
nil
end
defp suppressions_regex(suppressions) do
suppression_regex =
suppressions
|> Enum.map(&String.replace(&1, ".", "\\."))
|> Enum.join("|")
"(" <> suppression_regex <> ")"
end
@doc """
Returns a list of the suppressions for a given
locale and segment type.
"""
def suppressions(locale, segment_type) do
with {:ok, segment} <- segments(locale, segment_type) do
{:ok, Map.get(segment, :suppressions, [])}
end
end
@doc """
Returns a list of the suppressions for a given
locale and segment type and raises on error.
"""
def suppressions!(locale, segment_type) do
case suppressions(locale, segment_type) do
{:ok, suppressions} -> suppressions
{:error, reason} -> raise ArgumentError, reason
end
end
defp compile_regex!("") do
:any
end
defp compile_regex!(string) do
string
|> String.trim
|> Unicode.Regex.compile!(@regex_options)
end
@doc """
Evaluates a list of rules against a given
string.
"""
def evaluate_rules(string, rules) when is_binary(string) do
evaluate_rules({"", string}, rules)
end
def evaluate_rules({string_before, string_after}, rules) do
Enum.reduce_while(rules, [], fn rule, _acc ->
{_sequence, {operator, _fore, _aft}} = rule
case evaluate_rule({string_before, string_after}, rule) do
{:pass, result} -> {:halt, {:pass, operator, result}}
{:fail, string} -> {:cont, {:fail, string}}
end
end)
|> return_break_or_no_break
end
# The final implicit rule is
# to break, i.e.: :any ÷ :any
defp return_break_or_no_break({:fail, {before_string, ""}}) do
{:break, {before_string, {"", ""}}}
end
defp return_break_or_no_break({:fail, {before_string, after_string}}) do
<< char :: utf8, rest :: binary >> = after_string
{:break, {before_string, {<< char :: utf8 >>, rest}}}
end
defp return_break_or_no_break({:pass, operator, result}) do
{operator, result}
end
@split_options [parts: 2, include_captures: true, trim: true]
# Process an `:any op regex` rule at end of string
defp evaluate_rule({string_before, <<_::utf8>> = string_after}, {_seq, {_operator, :any, aft}}) do
if Regex.match?(aft, string_after) do
{:pass, {string_before, {string_after, ""}}}
else
{:fail, {string_before, string_after}}
end
end
defp evaluate_rule({string_before, string_after}, {_seq, {_operator, :any, aft}}) do
case Regex.split(aft, string_after, @split_options) do
[match, rest] -> {:pass, {string_before, {match, rest}}}
_other -> {:fail, {string_before, string_after}}
end
end
# :any matches end of string
defp evaluate_rule({string_before, "" = string_after}, {_seq, {_operator, fore, :any}}) do
if Regex.match?(fore, string_before) do
{:pass, {string_before, {"", ""}}}
else
{:fail, {string_before, string_after}}
end
end
defp evaluate_rule({string_before, string_after}, {_seq, {_operator, fore, :any}}) do
if Regex.match?(fore, string_before) do
<< char :: utf8, rest :: binary >> = string_after
{:pass, {string_before, {<< char :: utf8 >>, rest}}}
else
{:fail, {string_before, string_after}}
end
end
defp evaluate_rule({string_before, string_after}, {_seq, {_operator, fore, aft}}) do
if Regex.match?(fore, string_before) && Regex.match?(aft, string_after) do
case Regex.split(aft, string_after, @split_options) do
[match, rest] -> {:pass, {string_before, {match, rest}}}
[match] -> {:pass, {string_before, {match, ""}}}
end
else
{:fail, {string_before, string_after}}
end
end
defp expand_rules(rules, variables) do
Enum.reduce(rules, [], fn %{id: sequence, value: rule}, acc ->
rule =
rule
|> String.trim
|> substitute_variables(variables)
[{sequence, rule} | acc]
end)
|> Enum.sort
end
def expand_variables(variables, additional_variables)
when is_list(variables) and is_list(additional_variables) do
Enum.reduce variables ++ additional_variables, %{}, fn
%{name: << "$", name :: binary >>, value: value}, variables ->
new_value = substitute_variables(value, variables)
Map.put(variables, name, new_value)
end
end
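# For example (sketch):
#
#     substitute_variables("$AHLetter $Extend", %{"AHLetter" => "[:L:]", "Extend" => "[:M:]"})
#     # => "[:L:] [:M:]"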
defp substitute_variables("", _variables) do
""
end
defp substitute_variables(<< "$", char :: utf8, rest :: binary >>, variables)
when is_id_start(char) do
{name, rest} = extract_variable_name(<< char :: utf8 >> <> rest)
Map.fetch!(variables, name) <> substitute_variables(rest, variables)
end
defp substitute_variables(<< char :: binary-1, rest :: binary >>, variables) do
char <> substitute_variables(rest, variables)
end
defp extract_variable_name("" = string) do
{string, ""}
end
defp extract_variable_name(<< char :: utf8, rest :: binary >>)
when is_id_continue(char) do
{string, rest} = extract_variable_name(rest)
{<< char :: utf8 >> <> string, rest}
end
defp extract_variable_name(rest) do
{"", rest}
end
@app_name Mix.Project.config[:app]
@segments_dir Path.join(:code.priv_dir(@app_name), "/segments")
@doctype "<!DOCTYPE ldml SYSTEM \"../../common/dtd/ldml.dtd\">"
@doc false
def segments_dir do
@segments_dir
end
@doc false
def locale_map do
@segments_dir
|> File.ls!()
|> Enum.map(fn locale_file ->
locale =
locale_file
|> String.split(".xml")
|> hd
|> String.replace("_", "-")
{locale, locale_file}
end)
|> Map.new
end
@doc """
Returns a list of the ancestor locales
of a given locale.
The list includes the given locale.
"""
def ancestors(locale_name) do
if Map.get(locale_map(), locale_name) do
case String.split(locale_name, "-") do
[locale] -> [locale, "root"]
[locale, _territory] -> [locale_name, locale, "root"]
[locale, script, _territory] -> [locale_name, "#{locale}-#{script}", locale, "root"]
end
|> wrap(:ok)
else
{:error, unknown_locale_error(locale_name)}
end
end
@doc false
def merge_ancestors("root") do
raw_segments!("root")
|> wrap(:ok)
end
def merge_ancestors(locale) when is_binary(locale) do
with {:ok, ancestors} <- ancestors(locale) do
merge_ancestors(ancestors)
|> wrap(:ok)
end
end
@doc false
def merge_ancestors([locale, root]) do
merge_ancestor(locale, raw_segments!(root))
end
def merge_ancestors([locale | rest]) do
merge_ancestor(locale, merge_ancestors(rest))
end
# For each segment type, add the variables, rules and
# suppressions from locale to other
defp merge_ancestor(locale, other) do
locale_segments = raw_segments!(locale)
Enum.map(other, fn {segment_type, content} ->
variables = Map.fetch!(content, :variables) ++
(get_in(locale_segments, [segment_type, :variables]) || [])
rules = Map.fetch!(content, :rules) ++
(get_in(locale_segments, [segment_type, :rules]) || [])
suppressions = Map.fetch!(content, :suppressions) ++
(get_in(locale_segments, [segment_type, :suppressions]) || [])
{segment_type, %{content | variables: variables, rules: rules, suppressions: suppressions}}
end)
|> Map.new
end
defp raw_segments(locale) do
if file = Map.get(locale_map(), locale) do
content =
@segments_dir
|> Path.join(file)
|> File.read!()
|> String.replace(@doctype, "")
|> xpath(~x"//segmentation"l,
type: ~x"./@type"s,
variables: [
~x".//variable"l,
name: ~x"./@id"s,
value: ~x"./text()"s
],
rules: [
~x".//rule"l,
id: ~x"./@id"f,
value: ~x"./text()"s
],
suppressions: ~x".//suppression/text()"ls
)
Enum.map(content, fn c ->
type = c.type
|> Macro.underscore()
|> String.replace("__", "_")
|> String.to_atom
{type, %{rules: c.rules, variables: c.variables, suppressions: c.suppressions}}
end)
|> Map.new
|> wrap(:ok)
else
{:error, unknown_locale_error(locale)}
end
end
defp raw_segments!(locale) do
case raw_segments(locale) do
{:ok, segments} -> segments
{:error, reason} -> raise ArgumentError, reason
end
end
@doc false
def segments(locale) do
merge_ancestors(locale)
end
@doc false
def segments(locale, segment_type) when is_binary(locale) do
with {:ok, segments} <- segments(locale) do
if segment = Map.get(segments, segment_type) do
{:ok, segment}
else
{:error, unknown_segment_type_error(segment_type)}
end
end
end
defp wrap(term, atom) do
{atom, term}
end
@doc false
def unknown_locale_error(locale) do
"Unknown locale #{inspect locale}"
end
@doc false
def unknown_segment_type_error(segment_type) do
"Unknown segment type #{inspect segment_type}"
end
end

# source: lib/unicode/segment.ex
defmodule Day11 do
def part1(file_name \\ "test1.txt") do
file_name
|> parse()
|> grid()
|> steps(100)
|> total_flashes()
end
def part2(file_name \\ "test2.txt") do
file_name
|> parse()
|> grid()
|> find_step_all_flashing()
end
def find_step_all_flashing(grid, step \\ 1)
def find_step_all_flashing(grid, step) do
%{grid: flashed_grid} = perform_step(step, %{grid: grid, flashes: 0})
all_flashing = all_flashing?(flashed_grid)
if all_flashing do
step
else
find_step_all_flashing(flashed_grid, step + 1)
end
end
def all_flashing?(grid) do
Enum.all?(grid, fn {_coord, %{level: level}} -> level == 0 end)
end
def total_flashes(%{flashes: flashes}) do
flashes
end
def steps(grid, target_steps) do
Enum.reduce(1..target_steps, %{grid: grid, flashes: 0}, &perform_step/2)
end
def perform_step(_current_step, %{grid: ready_grid, flashes: flashes_so_far}) do
leveled_up_grid = increase_level(ready_grid)
%{grid: flashed_grid, flashes: flashes} = flash(leveled_up_grid)
%{grid: flashed_grid, flashes: flashes_so_far + flashes}
end
def flash(grid) do
do_flash(%{grid: grid, flashes: 0})
end
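# Flash to a fixpoint: each pass flashes every currently :flashable octopus
# and bumps its neighbours, which can make new octopuses flashable; once a
# pass finds none, levels above 9 are reset to 0 for the next step.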
def do_flash(%{grid: grid, flashes: total_flashes}) do
flashable = flashable(grid)
if Enum.empty?(flashable) do
new_grid =
Enum.reduce(grid, %{}, fn
{coord, %{level: level}}, acc when level > 9 -> Map.put(acc, coord, %{level: 0, state: :not_ready})
{coord, octopus}, acc -> Map.put(acc, coord, octopus)
end)
%{grid: new_grid, flashes: total_flashes}
else
flash_grid(grid, flashable)
|> Map.update!(:flashes, & &1 + total_flashes)
|> do_flash()
end
end
def flash_grid(grid, flashable) do
flashes = length(flashable)
new_grid =
Enum.reduce(flashable, grid, fn coord, acc ->
flashed_grid = put_in(acc[coord][:state], :flashed)
neighboring_coords = neighboring_coords(flashed_grid, coord)
Enum.reduce(neighboring_coords, flashed_grid, fn neighboring_coord, flashed_acc ->
octopus = Map.get(flashed_acc, neighboring_coord) |> Map.update!(:level, & &1 + 1)
if octopus.level == 10 do
new_octopus = %{octopus | state: :flashable}
Map.put(flashed_acc, neighboring_coord, new_octopus)
else
Map.put(flashed_acc, neighboring_coord, octopus)
end
end)
end)
%{grid: new_grid, flashes: flashes}
end
def neighboring_coords(grid, {x, y}) do
Enum.reduce(deltas(), [], fn {dx, dy}, acc ->
new_coord = {x + dx, y + dy}
neighbor = Map.get(grid, new_coord)
case neighbor do
nil -> acc
_value -> [new_coord | acc]
end
end)
end
def deltas() do
[
{-1, -1}, {0, -1}, {1, -1},
{-1, 0}, {1, 0},
{-1, 1}, {0, 1}, {1, 1}
]
end
def flashable(grid) do
grid
|> Enum.filter(fn {_coord, %{state: state}} -> state == :flashable end)
|> Enum.map(fn {coord, _state} -> coord end)
end
def increase_level(grid) do
Map.new(grid, fn {coord, %{level: level} = octopus} ->
new_level = level + 1
if new_level == 10 do
{coord, %{octopus | level: new_level, state: :flashable}}
else
{coord, %{octopus | level: new_level}}
end
end)
end
def parse(file_name) do
"priv/" <> file_name
|> File.read!()
|> String.split("\n")
|> Enum.map(fn line ->
line
|> String.graphemes()
|> Enum.map(&String.to_integer/1)
end)
end
def grid(parsed) do
for {lines, y} <- Enum.with_index(parsed),
{level, x} <- Enum.with_index(lines),
into: %{},
do: {{x, y}, %{level: level, state: :not_ready}}
end
end

# source: jpcarver+elixir/day11/lib/day11.ex
defmodule Kino.Input do
@moduledoc """
Various input elements for entering data.
## Examples
First, create an input and make sure it is rendered,
either by placing it at the end of a code cell or by
explicitly rendering it with `Kino.render/1`.
input = Kino.Input.text("Name")
Then read the value at any later point:
name = Kino.Input.read(input)
"""
defstruct [:attrs]
@type t :: %__MODULE__{attrs: Kino.Output.input_attrs()}
defp new(attrs) do
token = Kino.Bridge.generate_token()
persistent_id = {token, attrs} |> :erlang.phash2() |> Integer.to_string()
ref = Kino.Output.random_ref()
subscription_manager = Kino.SubscriptionManager.cross_node_name()
attrs =
Map.merge(attrs, %{
ref: ref,
id: persistent_id,
destination: subscription_manager
})
Kino.Bridge.reference_object(ref, self())
Kino.Bridge.monitor_object(ref, subscription_manager, {:clear_topic, ref})
%__MODULE__{attrs: attrs}
end
@doc false
def duplicate(input) do
input.attrs
|> Map.drop([:ref, :id, :destination])
|> new()
end
@doc """
Creates a new text input.
## Options
* `:default` - the initial input value. Defaults to `""`
"""
@spec text(String.t(), keyword()) :: t()
def text(label, opts \\ []) when is_binary(label) and is_list(opts) do
default = Keyword.get(opts, :default, "")
new(%{type: :text, label: label, default: default})
end
@doc """
Creates a new multiline text input.
## Options
* `:default` - the initial input value. Defaults to `""`
"""
@spec textarea(String.t(), keyword()) :: t()
def textarea(label, opts \\ []) when is_binary(label) and is_list(opts) do
default = Keyword.get(opts, :default, "")
new(%{type: :textarea, label: label, default: default})
end
@doc """
Creates a new password input.
This is similar to text input, except the content is not
visible by default.
## Options
* `:default` - the initial input value. Defaults to `""`
"""
@spec password(String.t(), keyword()) :: t()
def password(label, opts \\ []) when is_binary(label) and is_list(opts) do
default = Keyword.get(opts, :default, "")
new(%{type: :password, label: label, default: default})
end
@doc """
Creates a new number input.
The input value can be either a number or `nil`.
## Options
* `:default` - the initial input value. Defaults to `nil`
"""
@spec number(String.t(), keyword()) :: t()
def number(label, opts \\ []) when is_binary(label) and is_list(opts) do
default = Keyword.get(opts, :default, nil)
new(%{type: :number, label: label, default: default})
end
@doc """
Creates a new URL input.
The input value can be either a valid URL string or `nil`.
## Options
* `:default` - the initial input value. Defaults to `nil`
"""
@spec url(String.t(), keyword()) :: t()
def url(label, opts \\ []) when is_binary(label) and is_list(opts) do
default = Keyword.get(opts, :default, nil)
new(%{type: :url, label: label, default: default})
end
@doc """
Creates a new select input.
The input expects a list of options in the form `[{value, label}]`,
where `value` is an arbitrary term and `label` is a descriptive
string.
## Options
* `:default` - the initial input value. Defaults to the first
value from the given list of options
## Examples
Kino.Input.select("Language", [en: "English", fr: "Français"])
Kino.Input.select("Language", [{1, "One"}, {2, "Two"}, {3, "Three"}])
"""
@spec select(String.t(), list({value :: term(), label :: String.t()}), keyword()) :: t()
def select(label, options, opts \\ [])
when is_binary(label) and is_list(options) and is_list(opts) do
if options == [] do
raise ArgumentError, "expected at least one option, got: []"
end
options = Enum.map(options, fn {key, val} -> {key, to_string(val)} end)
values = Enum.map(options, &elem(&1, 0))
default = Keyword.get_lazy(opts, :default, fn -> hd(values) end)
if default not in values do
raise ArgumentError,
"expected :default to be either of #{Enum.map_join(values, ", ", &inspect/1)}, got: #{inspect(default)}"
end
new(%{type: :select, label: label, options: options, default: default})
end
@doc """
Creates a new checkbox.
The input value can be either `true` or `false`.
## Options
* `:default` - the initial input value. Defaults to `false`
"""
@spec checkbox(String.t(), keyword()) :: t()
def checkbox(label, opts \\ []) when is_binary(label) and is_list(opts) do
default = Keyword.get(opts, :default, false)
new(%{type: :checkbox, label: label, default: default})
end
@doc """
Creates a new slider input.
The input value is a number in the configured range.
## Options
* `:default` - the initial input value. Defaults to the
minimum value
* `:min` - the minimum value
* `:max` - the maximum value
* `:step` - the slider increment
"""
@spec range(String.t(), keyword()) :: t()
def range(label, opts \\ []) when is_binary(label) and is_list(opts) do
min = Keyword.get(opts, :min, 0)
max = Keyword.get(opts, :max, 100)
step = Keyword.get(opts, :step, 1)
default = Keyword.get(opts, :default, min)
if min >= max do
raise ArgumentError,
"expected :min to be less than :max, got: #{inspect(min)} and #{inspect(max)}"
end
if step <= 0 do
raise ArgumentError, "expected :step to be positive, got: #{inspect(step)}"
end
if default < min or default > max do
raise ArgumentError,
"expected :default to be between :min and :max, got: #{inspect(default)}"
end
new(%{
type: :range,
label: label,
default: default,
min: min,
max: max,
step: step
})
end
@doc """
Creates a new color input.
The input value can be a hex color string.
## Options
* `:default` - the initial input value. Defaults to `#6583FF`
"""
@spec color(String.t(), keyword()) :: t()
def color(label, opts \\ []) when is_binary(label) and is_list(opts) do
default = Keyword.get(opts, :default, "#6583FF")
new(%{type: :color, label: label, default: default})
end
@doc """
Synchronously reads the current input value.
Note that to retrieve the value, the input must be rendered first,
otherwise an error is raised.
## Examples
input = Kino.Input.text("Name")
Kino.Input.read(input)
"""
@spec read(t()) :: term()
def read(%Kino.Input{} = input) do
case Kino.Bridge.get_input_value(input.attrs.id) do
{:ok, value} ->
value
{:error, reason} ->
raise "failed to read input value, reason: #{inspect(reason)}"
end
end
@doc """
Subscribes the calling process to input changes.
The events are sent as `{tag, info}`.
See `Kino.Control.subscribe/2` for more details.
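
## Examples

A sketch (the exact event payload follows `Kino.Control.subscribe/2`):

    input = Kino.Input.text("Name")
    Kino.Input.subscribe(input, :name)

    # in the subscribed process
    receive do
      {:name, %{value: value}} -> value
    end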
"""
@spec subscribe(t(), term()) :: :ok
def subscribe(%Kino.Input{} = input, tag) do
Kino.SubscriptionManager.subscribe(input.attrs.ref, self(), tag)
end
@doc """
Unsubscribes the calling process from input events.
"""
@spec unsubscribe(t()) :: :ok
def unsubscribe(%Kino.Input{} = input) do
Kino.SubscriptionManager.unsubscribe(input.attrs.ref, self())
end
end

# source: lib/kino/input.ex
defmodule Haversine do
@moduledoc ~S"""
Calculate great circle distances (shortest travel distance on the surface of
a spherical Earth) given a two longitude-latitude pairs. This is an implementation
of the [Haversine formula](https://en.wikipedia.org/wiki/Haversine_formula)
"""
@pi_over_180 :math.pi() / 180.0
@radius_of_earth_meters 6_371_008.8
@type coordinates :: {float(), float()}
@spec distance(coordinates(), coordinates()) :: float()
@doc """
Returns the great circle distance in meters between two points in the form of
`{longitude, latitude}`.
## Examples
iex> Haversine.distance({-105.343, 39.984}, {-105.534, 39.123})
97129.22118967834
iex> Haversine.distance({-74.00597, 40.71427}, {-70.56656, -33.42628})
8251609.780264794
"""
def distance({lon1, lat1}, {lon2, lat2}) do
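# sin(Δlat / 2) and sin(Δlon / 2), with the deltas converted to radians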
radial_arcs_latitude = :math.sin((lat2 - lat1) * @pi_over_180 / 2)
radial_arcs_longitude = :math.sin((lon2 - lon1) * @pi_over_180 / 2)
factor =
radial_arcs_latitude * radial_arcs_latitude +
radial_arcs_longitude * radial_arcs_longitude *
:math.cos(lat1 * @pi_over_180) * :math.cos(lat2 * @pi_over_180)
2 * :math.atan2(:math.sqrt(factor), :math.sqrt(1 - factor)) * @radius_of_earth_meters
end
@spec distance([coordinates()]) :: float()
@doc """
Returns the great circle distance in meters along a linestring defined by the
list of `{longitude, latitude}` pairs.
## Examples
iex> Haversine.distance([
...> {-96.796667, 32.775833},
...> {126.967583, 37.566776},
...> {151.215158, -33.857406},
...> {55.274180, 25.197229},
...> {6.942661, 50.334057},
...> {-97.635926, 30.134442}
...> ])
44728827.84910634
iex> Haversine.distance([])
0.0
iex> Haversine.distance([{-96.796667, 32.775833}])
0.0
"""
def distance([]), do: 0.0
def distance([_]), do: 0.0
def distance([point_1, point_2 | tail]),
do: distance(point_1, point_2) + distance([point_2 | tail])
end

# source: lib/haversine.ex
defmodule MerkleTree.Proof do
@moduledoc """
Generate and verify merkle proofs
## Usage Example
iex> proof = MerkleTree.new(~w/a b c d/) |>
...> MerkleTree.Proof.prove(1)
["40e2511a6323177e537acb2e90886e0da1f84656fd6334b89f60d742a3967f09",
"022a6979e6dab7aa5ae4c3e5e45f7e977112a7e63593820dbec1ec738a24f93c"]
iex> MerkleTree.Proof.proven?({"b", 1}, "9dc1674ae1ee61c90ba50b6261e8f9a47f7ea07d92612158edfe3c2a37c6d74c", &MerkleTree.Crypto.sha256/1, proof)
true
"""
@leaf_salt <<0>>
@node_salt <<1>>
defstruct [:hashes, :hash_function]
@type proof_t() :: list(String.t())
@doc """
Generates proof for a block at a specific index
"""
@spec prove(MerkleTree.t() | MerkleTree.Node.t(), non_neg_integer) :: proof_t()
def prove(%MerkleTree{root: root}, index),
do: prove(root, index)
def prove(%MerkleTree.Node{height: height} = root, index),
do: _prove(root, binarize(index, height))
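# The proof is generated by walking from the root down to the leaf at
# `index`: each bit of the binarized index selects which child to descend
# into (0 = left, 1 = right), and the sibling's hash at every level is
# collected into the proof.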
defp _prove(_, ""), do: []
defp _prove(
%MerkleTree.Node{children: children},
index_binary
) do
{path_head, path_tail} = path_from_binary(index_binary)
[child, sibling] =
case path_head do
1 -> Enum.reverse(children)
0 -> children
end
[sibling.value] ++ _prove(child, path_tail)
end
@doc """
Verifies proof for a block at a specific index
"""
@spec proven?({String.t(), non_neg_integer}, String.t(), MerkleTree.hash_function(), proof_t()) :: boolean
def proven?({block, index}, root_hash, hash_function, proof) do
height = length(proof)
root_hash == _hash_proof(block, binarize(index, height), proof, hash_function)
end
defp _hash_proof(block, "", [], hash_function) do
hash_function.(@leaf_salt <> block)
end
defp _hash_proof(block, index_binary, [proof_head | proof_tail], hash_function) do
{path_head, path_tail} = path_from_binary(index_binary)
case path_head do
1 -> hash_function.(@node_salt <> proof_head <> _hash_proof(block, path_tail, proof_tail, hash_function))
0 -> hash_function.(@node_salt <> _hash_proof(block, path_tail, proof_tail, hash_function) <> proof_head)
end
end
# Encodes `index` as a `height`-bit big-endian bitstring, i.e. the
# root-to-leaf path through the tree.
@spec binarize(integer, integer) :: bitstring
defp binarize(index, height) do
<<index_binary::binary-unit(1)>> = <<index::unsigned-big-integer-size(height)>>
index_binary
end
# Splits off the leading path bit (0 = left, 1 = right) from the rest of the path.
@spec path_from_binary(bitstring) :: {0 | 1, bitstring}
defp path_from_binary(index_binary) do
<<path_head::unsigned-big-integer-unit(1)-size(1), path_tail::binary-unit(1)>> = index_binary
{path_head, path_tail}
end
end

# End of lib/merkle_tree/proof.ex
defmodule Tyx.Traversal.Lookup do
@moduledoc false
use Boundary, deps: [Tyx.Traversal.Typemap, Tyx.Traversal.Preset]
alias Tyx.Traversal.{Preset, Typemap}
require Logger
# FIXME introduce an easy way to plug in the functionality to custom lookup elements
@lookup_plugins [Preset]
@behaviour Tyx.Traversal
@spec get(module(), atom(), [module()] | non_neg_integer()) ::
{:error, {module, atom(), non_neg_integer()}} | {:ok, atom()}
def get(mod, fun, args) do
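# Try each lookup plugin in order; the first plugin that recognizes the MFA
# wins. A plugin may return {:alias, mfa} to redirect the lookup to another
# function; if no plugin matches, fall back to the typespec-based lookup.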
@lookup_plugins
|> Enum.reduce_while(nil, fn preset, nil ->
case preset.lookup(mod, fun, args) do
{:ok, result} -> {:halt, {:ok, result}}
_ -> {:cont, nil}
end
end)
|> case do
{:ok, {:alias, {mod, fun, args}}} -> lookup(mod, fun, args)
{:ok, result} -> {:ok, result}
nil -> lookup(mod, fun, args)
end
end
@impl Tyx.Traversal
def lookup(mod, fun, args) do
arity =
case args do
list when is_list(list) -> length(list)
arity when is_integer(arity) -> arity
end
with ^mod <- Code.ensure_compiled!(mod),
{:ok, specs} <- Code.Typespec.fetch_specs(mod),
{:ok, spec} <- to_spec(specs, {mod, fun, arity}),
{:ok, tyx} <- to_tyx_fn(spec) do
{:ok, tyx}
else
{:error, error} -> {:error, error}
_ -> {:error, {mod, fun, arity}}
end
end
end
@spec to_spec([tuple()], {module(), atom(), non_neg_integer()}) ::
{:ok, {[module()], module()}} | :error
defp to_spec(specs, {mod, fun, arity}) when is_list(specs) do
signature = {fun, arity}
specs
|> Enum.filter(&match?({^signature, _}, &1))
# FIXME HANDLE ALL THE TYPES, NOT ONLY THE FIRST ONE
|> Enum.find(&match?({^signature, [{:type, _, _fun, _spec} | _]}, &1))
|> spec_to_tyx({mod, fun, arity})
end
@spec spec_to_tyx(nil | Macro.t(), {module(), atom(), [module()] | non_neg_integer()}) ::
{:ok, {[module()], module()}} | :error
defp spec_to_tyx(nil, _mfa), do: :error
defp spec_to_tyx({{fun, arity}, [{type, _, f, spec} | _]}, {mod, fun, arity})
when type in ~w|type remote_type|a and f in ~w|fun bounded_fun|a do
case spec do
[{:type, _, :product, args}, ret] ->
{:ok, {Enum.map(args, &Typemap.from_spec(mod, &1)), Typemap.from_spec(mod, ret)}}
[{:type, _, :fun, _}, _] ->
{:error, not_implemented: [type: fun]}
unexpected ->
Logger.warn("Unhandled spec: " <> inspect(unexpected))
{:error, unexpected: unexpected}
end
end
# Builds a keyword list by pairing freshly generated unique argument names
# with the given list of types.
defmacrop list_to_kw(list) do
quote bind_quoted: [list: list] do
list
|> length()
|> Macro.generate_unique_arguments(nil)
|> Enum.map(&elem(&1, 0))
|> Enum.zip(list)
end
end
@spec to_tyx_fn({[module()], module()}) :: {:ok, Tyx.Fn.t()}
defp to_tyx_fn({args, ret}) do
{:ok, %Tyx.Fn{<~: list_to_kw(args), ~>: ret}}
end
end

# End of lib/tyx/traversal/lookup.ex
defmodule Game.Format.Quests do
@moduledoc """
Format function for quests
"""
import Game.Format.Context
alias Game.Format
alias Game.Format.Table
alias Game.Quest
@doc """
Format a quest name
iex> Game.Format.quest_name(%{name: "Into the Dungeon"})
"{quest}Into the Dungeon{/quest}"
"""
def quest_name(quest) do
context()
|> assign(:name, quest.name)
|> Format.template("{quest}[name]{/quest}")
end
@doc """
Format the status of a player's quests
"""
@spec quest_progress([QuestProgress.t()]) :: String.t()
def quest_progress(quests) do
rows =
quests
|> Enum.map(fn %{status: status, quest: quest} ->
[to_string(quest.id), quest.name, quest.giver.name, status]
end)
Table.format("You have #{length(quests)} active quests.", rows, [5, 30, 20, 10])
end
@doc """
Format the status of a player's quest
"""
@spec quest_detail(QuestProgress.t(), Save.t()) :: String.t()
def quest_detail(progress, save) do
%{quest: quest} = progress
steps = quest.quest_steps |> Enum.map(&quest_step(&1, progress, save))
context()
|> assign(:name, quest_name(quest))
|> assign(:progress, progress.status)
|> assign(:underline, Format.underline("#{quest.name} - #{progress.status}"))
|> assign(:description, quest.description)
|> assign(:steps, Enum.join(steps, "\n"))
|> Format.template(template("quest"))
|> Format.resources()
end
def quest_step(step = %{type: "item/collect"}, progress, save) do
current_step_progress = Quest.current_step_progress(step, progress, save)
context()
|> assign(:item_name, Format.item_name(step.item))
|> assign(:progress, current_step_progress)
|> assign(:total, step.count)
|> Format.template(" - Collect [item_name] - [progress]/[total]")
end
def quest_step(step = %{type: "item/give"}, progress, save) do
current_step_progress = Quest.current_step_progress(step, progress, save)
context()
|> assign(:item_name, Format.item_name(step.item))
|> assign(:npc_name, Format.npc_name(step.npc))
|> assign(:progress, current_step_progress)
|> assign(:total, step.count)
|> Format.template(" - Give [item_name] to [npc_name] - [progress]/[total]")
end
def quest_step(step = %{type: "item/have"}, progress, save) do
current_step_progress = Quest.current_step_progress(step, progress, save)
context()
|> assign(:item_name, Format.item_name(step.item))
|> assign(:progress, current_step_progress)
|> assign(:total, step.count)
|> Format.template(" - Have [item_name] - [progress]/[total]")
end
def quest_step(step = %{type: "npc/kill"}, progress, save) do
current_step_progress = Quest.current_step_progress(step, progress, save)
context()
|> assign(:npc_name, Format.npc_name(step.npc))
|> assign(:progress, current_step_progress)
|> assign(:total, step.count)
|> Format.template(" - Kill [npc_name] - [progress]/[total]")
end
def quest_step(step = %{type: "room/explore"}, progress, save) do
current_step_progress = Quest.current_step_progress(step, progress, save)
context()
|> assign(:room_name, Format.room_name(step.room))
|> assign(:progress, current_step_progress)
|> Format.template(" - Explore [room_name] - [progress]")
end
def template("quest") do
"""
[name] - [progress]
[underline]
[description]
[steps]
"""
end
end

# End of lib/game/format/quests.ex
defmodule Crawler.Linker.PathFinder do
@moduledoc """
Finds different components of a given URL, e.g. its domain name, directory
path, or full path.
The `safe` option in some of the functions indicates whether the return value
should be transformed in order to be safely used as folder and file names.
"""
@doc """
Finds the URL scheme (e.g. `https://`).
## Examples
iex> PathFinder.find_scheme("http://hi.hello")
"http://"
iex> PathFinder.find_scheme("https://hi.hello:8888/")
"https://"
"""
def find_scheme(url) do
scheme =
url
|> String.split("://", parts: 2)
|> hd()
scheme <> "://"
end
@doc """
Finds the domain name with port number (e.g. `example.org:8080`).
## Examples
iex> PathFinder.find_domain("http://hi.hello")
"hi.hello"
iex> PathFinder.find_domain("https://hi.hello:8888/world")
"hi.hello-8888"
iex> PathFinder.find_domain("https://hi.hello:8888/world", false)
"hi.hello:8888"
"""
def find_domain(url, safe \\ true) do
url
|> find_path(safe)
|> String.split("/", parts: 2)
|> Kernel.hd()
end
@doc """
Finds the base path of a given page.
## Examples
iex> PathFinder.find_base_path("http://hi.hello")
"hi.hello"
iex> PathFinder.find_base_path("https://hi.hello:8888/dir/world")
"hi.hello-8888/dir"
iex> PathFinder.find_base_path("https://hi.hello:8888/dir/world", false)
"hi.hello:8888/dir"
"""
def find_base_path(url, safe \\ true) do
url
|> find_path(safe)
|> String.split("/")
|> base_path
end
defp base_path([path]), do: path
defp base_path(list) do
[_head | tail] = Enum.reverse(list)
tail
|> Enum.reverse()
|> Path.join()
end
@doc """
Finds the full path of a given page.
## Examples
iex> PathFinder.find_path("http://hi.hello")
"hi.hello"
iex> PathFinder.find_path("https://hi.hello:8888/world")
"hi.hello-8888/world"
iex> PathFinder.find_path("https://hi.hello:8888/world", false)
"hi.hello:8888/world"
"""
def find_path(url, safe \\ true)
def find_path(url, false) do
url
|> String.split("://", parts: 2)
|> Enum.at(-1)
end
def find_path(url, true) do
url
|> find_path(false)
|> String.replace(":", "-")
end
end

# End of lib/crawler/linker/path_finder.ex
defmodule Godfist.DataDragon do
@moduledoc """
Module to interact with the static data provided by Data Dragon instead of
the default one by Riot.
The names provided for champions in this Module are case sensitive because of
the way that Data Dragon handles it's files. For now it's up to you to get the
champion's names correctly.
"""
@v "7.24.2"
@endpoint "https://ddragon.leagueoflegends.com/cdn"
@doc """
Get image of a profile icon by it's id.
## Example
```elixir
iex> Godfist.DataDragon.profile_icon(588)
```
"""
@spec profile_icon(integer) :: String.t()
def profile_icon(id) do
@endpoint <> "/#{@v}/img/profileicon/#{id}.png"
end
@doc """
Get champion splash arts by name and splash art number.
## Example
```elixir
iex> Godfist.DataDragon.champ_splash("Aatrox", 0)
```
"""
@spec champ_splash(String.t(), integer) :: String.t()
def champ_splash(name, number) do
@endpoint <> "/img/champion/splash/#{get_name(name)}_#{number}.jpg"
end
@doc """
Get a champion loading screen art by it's name and splash id.
## Example
```elixir
iex> Godfist.DataDragon.champ_loading("LeeSin", 1)
```
"""
@spec champ_loading(String.t(), integer) :: String.t()
def champ_loading(name, number \\ 0) do
@endpoint <> "/img/champion/loading/#{get_name(name)}_#{number}.jpg"
end
@doc """
Get the square image of a champion by it's name and splash id.
## Example
```elixir
iex> Godfist.DataDragon.champ_square("LeeSin")
```
"""
@spec champ_square(String.t()) :: String.t()
def champ_square(name) do
@endpoint <> "/#{@v}/img/champion/#{get_name(name)}.png"
end
@doc """
Get an ability from a champion by it's name.
Refer to `passive/1` on how to get spell name.
## Example
```elixir
iex> Godfist.DataDragon.ability("FlashFrost")
```
"""
@spec ability(String.t()) :: String.t()
def ability(name) do
rep = String.replace(name, " ", "")
@endpoint <> "/#{@v}/img/spell/#{rep}.png"
end
@doc """
Get summoner spells.
## Example
```elixir
iex> Godfist.DataDragon.summ_spell("Heal")
```
"""
@spec summ_spell(String.t()) :: String.t()
def summ_spell(name) do
@endpoint <> "/#{@v}/img/spell/Summoner#{String.capitalize(name)}.png"
end
@doc """
Get an item by it's id.
## Example
```elixir
iex> Godfist.DataDragon.item(1001)
```
"""
@spec item(integer) :: String.t()
def item(id) do
@endpoint <> "/#{@v}/img/item/#{id}.png"
end
@doc """
Get a mastery by it's id.
## Example
```elixir
iex> Godfist.DataDragon.mastery(6111)
```
"""
@spec mastery(integer) :: String.t()
def mastery(id) do
@endpoint <> "/#{@v}/img/mastery/#{id}.png"
end
@doc """
Get a rune by it's id.
## Example
```elixir
iex> Godfist.DataDragon.rune(8001)
```
"""
@spec rune(integer) :: String.t()
def rune(id) do
@endpoint <> "/#{@v}/img/rune/#{id}.png"
end
# Get Data Dragon champion name from in-game champion name.
defp get_name(champion) do
{name, _map} = Godfist.champion_by_name(champion)
name
end
end

# End of lib/godfist/requests/data_dragon/data_dragon.ex
defmodule AWS.CloudWatch.Events do
@moduledoc """
Amazon CloudWatch Events helps you to respond to state changes in your AWS
resources. When your resources change state they automatically send events
into an event stream. You can create rules that match selected events in
the stream and route them to targets to take action. You can also use rules
to take action on a pre-determined schedule. For example, you can configure
rules to:
* Automatically invoke an AWS Lambda function to update DNS entries when an
event notifies you that an Amazon EC2 instance enters the running state.
* Direct specific API records from CloudTrail to an Amazon Kinesis stream
for detailed analysis of potential security or availability risks.
* Periodically invoke a built-in target to create a snapshot of an Amazon
EBS volume.
For more information about Amazon CloudWatch Events features, see the
[Amazon CloudWatch Developer
Guide](http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide).
"""
@doc """
Deletes a rule. You must remove all targets from a rule using
`RemoveTargets` before you can delete the rule.
**Note:** When you delete a rule, incoming events might still continue to
match to the deleted rule. Please allow a short period of time for changes
to take effect.
"""
def delete_rule(client, input, options \\ []) do
request(client, "DeleteRule", input, options)
end
@doc """
Describes the details of the specified rule.
"""
def describe_rule(client, input, options \\ []) do
request(client, "DescribeRule", input, options)
end
@doc """
Disables a rule. A disabled rule won't match any events, and won't
self-trigger if it has a schedule expression.
**Note:** When you disable a rule, incoming events might still continue to
match to the disabled rule. Please allow a short period of time for changes
to take effect.
"""
def disable_rule(client, input, options \\ []) do
request(client, "DisableRule", input, options)
end
@doc """
Enables a rule. If the rule does not exist, the operation fails.
**Note:** When you enable a rule, incoming events might not immediately
start matching to a newly enabled rule. Please allow a short period of time
for changes to take effect.
"""
def enable_rule(client, input, options \\ []) do
request(client, "EnableRule", input, options)
end
@doc """
Lists the names of the rules that the given target is put to. You can see
which of the rules in Amazon CloudWatch Events can invoke a specific target
in your account. If you have more rules in your account than the given
limit, the results will be paginated. In that case, use the next token
returned in the response and repeat ListRulesByTarget until the NextToken
in the response is returned as null.
"""
def list_rule_names_by_target(client, input, options \\ []) do
request(client, "ListRuleNamesByTarget", input, options)
end
@doc """
Lists the Amazon CloudWatch Events rules in your account. You can either
list all the rules or you can provide a prefix to match to the rule names.
If you have more rules in your account than the given limit, the results
will be paginated. In that case, use the next token returned in the
response and repeat ListRules until the NextToken in the response is
returned as null.
"""
def list_rules(client, input, options \\ []) do
request(client, "ListRules", input, options)
end
@doc """
Lists of targets assigned to the rule.
"""
def list_targets_by_rule(client, input, options \\ []) do
request(client, "ListTargetsByRule", input, options)
end
@doc """
Sends custom events to Amazon CloudWatch Events so that they can be matched
to rules.
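## Example

    # A minimal sketch: the entry fields follow the PutEvents API, and how
    # `client` is constructed depends on your AWS credentials setup.
    input = %{"Entries" => [%{"Source" => "my.app", "DetailType" => "example", "Detail" => "{}"}]}
    {:ok, _result, _response} = AWS.CloudWatch.Events.put_events(client, input)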
"""
def put_events(client, input, options \\ []) do
request(client, "PutEvents", input, options)
end
@doc """
Creates or updates a rule. Rules are enabled by default, or based on value
of the State parameter. You can disable a rule using `DisableRule`.
**Note:** When you create or update a rule, incoming events might not
immediately start matching to new or updated rules. Please allow a short
period of time for changes to take effect.
A rule must contain at least an EventPattern or ScheduleExpression. Rules
with EventPatterns are triggered when a matching event is observed. Rules
with ScheduleExpressions self-trigger based on the given schedule. A rule
can have both an EventPattern and a ScheduleExpression, in which case the
rule will trigger on matching events as well as on a schedule.
**Note:** Most services in AWS treat : or / as the same character in Amazon
Resource Names (ARNs). However, CloudWatch Events uses an exact match in
event patterns and rules. Be sure to use the correct ARN characters when
creating event patterns so that they match the ARN syntax in the event you
want to match.
"""
def put_rule(client, input, options \\ []) do
request(client, "PutRule", input, options)
end
@doc """
Adds target(s) to a rule. Targets are the resources that can be invoked
when a rule is triggered. For example, AWS Lambda functions, Amazon Kinesis
streams, and built-in targets. Updates the target(s) if they are already
associated with the role. In other words, if there is already a target with
the given target ID, then the target associated with that ID is updated.
In order to be able to make API calls against the resources you own, Amazon
CloudWatch Events needs the appropriate permissions. For AWS Lambda and
Amazon SNS resources, CloudWatch Events relies on resource-based policies.
For Amazon Kinesis streams, CloudWatch Events relies on IAM roles. For more
information, see [Permissions for Sending Events to
Targets](http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/EventsTargetPermissions.html)
in the ***Amazon CloudWatch Developer Guide***.
**Input** and **InputPath** are mutually-exclusive and optional parameters
of a target. When a rule is triggered due to a matched event, if for a
target:
* Neither **Input** nor **InputPath** is specified, then the entire event is
passed to the target in JSON form.
* **InputPath** is specified in the form of JSONPath (e.g. **$.detail**),
then only the part of the event specified in the path is passed to the
target (e.g. only the detail part of the event is passed).
* **Input** is specified in the form of a valid JSON, then the matched event
is overridden with this constant.
**Note:** When you add targets to a rule, when the associated rule triggers,
new or updated targets might not be immediately invoked. Please allow a
short period of time for changes to take effect.
"""
def put_targets(client, input, options \\ []) do
request(client, "PutTargets", input, options)
end
@doc """
Removes target(s) from a rule so that when the rule is triggered, those
targets will no longer be invoked.
**Note:** When you remove a target, when the associated rule triggers,
removed targets might still continue to be invoked. Please allow a short
period of time for changes to take effect.
"""
def remove_targets(client, input, options \\ []) do
request(client, "RemoveTargets", input, options)
end
@doc """
Tests whether an event pattern matches the provided event.
**Note:** Most services in AWS treat : or / as the same character in Amazon
Resource Names (ARNs). However, CloudWatch Events uses an exact match in
event patterns and rules. Be sure to use the correct ARN characters when
creating event patterns so that they match the ARN syntax in the event you
want to match.
"""
def test_event_pattern(client, input, options \\ []) do
request(client, "TestEventPattern", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "events"}
host = get_host("events", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AWSEvents.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end

# End of lib/aws/cloudwatch_events.ex
defmodule Openflow.Action.NxResubmit do
@moduledoc """
Searches the flow table again, using a flow that is slightly modified from the original lookup:
Following the lookup, the original in_port is restored.
If the modified flow matched in the flow table, then the corresponding
actions are executed. Afterward, actions following NXAST_RESUBMIT in
the original set of actions, if any, are executed; any changes made to
the packet (e.g. changes to VLAN) by secondary actions persist when
those actions are executed, although the original in_port is restored.
"""
defstruct(in_port: :in_port)
@experimenter 0x00002320
@nxast 1
alias __MODULE__
alias Openflow.Action.Experimenter
@type t :: %NxResubmit{in_port: port_no()}
@type port_no ::
:max
| :in_port
| :table
| :normal
| :flood
| :all
| :controller
| :local
| :none
| 1..0xFFFF
@doc """
Creates a new nx_resubmit action struct
## Options:
- in_port: New in_port for checking flow table in the one of the `port_no()` type
## Note:
If the modified flow matched in the flow table, then the corresponding actions are executed,\\
Afterward, actions following the resubmit in the original set of actions, if any, are executed;\\
any changes made to the packet by secondary actions persist when those actions are executed,
although the original in_port is restored
## Example:
```elixir
iex> %NxResubmit{in_port: :in_port} = NxResubmit.new()
iex> %NxResubmit{in_port: 1} = NxResubmit.new(1)
```
"""
@spec new(port_no()) :: t()
def new(in_port \\ :in_port) do
%NxResubmit{in_port: in_port}
end
@spec to_binary(t()) :: binary()
def to_binary(%NxResubmit{in_port: in_port}) do
in_port_int = Openflow.Utils.get_enum(in_port, :openflow10_port_no)
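# Nicira extension actions are encoded as OpenFlow experimenter actions:
# the Nicira experimenter id (0x2320), the NXAST subtype (1 for resubmit),
# then the action-specific payload and padding.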
Experimenter.pack_exp_header(<<
@experimenter::32,
@nxast::16,
in_port_int::16,
0::size(4)-unit(8)
>>)
end
@spec read(binary()) :: t()
def read(<<@experimenter::32, @nxast::16, in_port_int::16, _::size(4)-unit(8)>>) do
in_port = Openflow.Utils.get_enum(in_port_int, :openflow10_port_no)
%NxResubmit{in_port: in_port}
end
end

# End of lib/openflow/actions/nx_resubmit.ex
defmodule ESpec.To do
@moduledoc """
Defines `to`, `to_not` and `not_to` helper functions.
The functions implement syntax: 'expect 1 |> to eq 1'
These functions wrap arguments for `ESpec.ExpectTo` module.
Also defines a `to` helper function for mocking without parentheses:
`allow SomeModule |> to accept(:f, fn(a) -> "mock" end)`
These functions wrap arguments for `ESpec.AllowTo` module.
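## Examples

    # Expectation syntax (matchers such as `eq` come from ESpec):
    expect 1 |> to eq 1
    expect 1 |> to_not eq 2

    # Mocking without parentheses:
    allow SomeModule |> to accept(:f, fn(_a) -> "mock" end)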
"""
alias ESpec.AllowTo
alias ESpec.ExpectTo
@doc "Wrappers for `ESpec.AllowTo.to`."
def to(module, {:accept, name, function}) when is_atom(name) do
AllowTo.to({:accept, name, function}, {AllowTo, module})
end
@doc false
def to(module, {:accept, name, function, meck_options})
when is_atom(name) and is_list(meck_options) do
AllowTo.to({:accept, name, function, meck_options}, {AllowTo, module})
end
@doc false
def to(module, {:accept, list}) when is_list(list) do
AllowTo.to({:accept, list}, {AllowTo, module})
end
@doc false
def to(module, {:accept, list, meck_options}) when is_list(list) and is_list(meck_options) do
AllowTo.to({:accept, list, meck_options}, {AllowTo, module})
end
@doc false
def to(module, {:accept, name}) when is_atom(name) do
AllowTo.to({:accept, name}, {AllowTo, module})
end
@doc "Special case for `is_expected` when `subject` present."
def to({ExpectTo, subject, stacktrace}, {module, data}) do
ExpectTo.to({module, data}, {ExpectTo, subject, stacktrace})
end
@doc "Wrapper for `ESpec.ExpectTo.to`."
def to(subject, {module, data}) do
ExpectTo.to({module, data}, {ExpectTo, subject, ESpec.Expect.pruned_stacktrace()})
end
@doc "Special case for `is_expected` when `subject` present."
def to_not({ExpectTo, subject, stacktrace}, {module, data}) do
ExpectTo.to_not({module, data}, {ExpectTo, subject, stacktrace})
end
@doc "Wrapper for `ESpec.ExpectTo.to_not`."
def to_not(subject, {module, data}) do
ExpectTo.to_not({module, data}, {ExpectTo, subject, ESpec.Expect.pruned_stacktrace()})
end
@doc "Special case for `is_expected` when `subject` present."
def not_to({ExpectTo, subject, stacktrace}, {module, data}) do
to_not({ExpectTo, subject, stacktrace}, {module, data})
end
@doc "Wrapper for `ESpec.ExpectTo.not_to`."
def not_to(subject, {module, data}) do
to_not(subject, {module, data})
end
end

# End of lib/espec/to.ex
defmodule Cluster.Strategy.KubernetesPods do
@moduledoc """
This clustering strategy works by loading all pods in the current Kubernetes
namespace with the configured tag. It will fetch the addresses of all pods with
that tag and attempt to connect. It will continually monitor and update its
connections every 5s.
It assumes that all nodes share a base name, are using longnames, and are unique
based on their FQDN, rather than the base hostname. In other words, in the following
longname, `<basename>@<domain>`, `basename` is taken from the name of the
matching Kubernetes endpoints resource (see `parse_response/2` below).
`<domain>` can be either of type `:ip` (the pod's IP, which can be obtained by
setting an env variable to `status.podIP`) or `:dns`, which is the pod's
internal A record. This A record has the format
`<ip-with-dashes>.<namespace>.pod.cluster.local`, e.g.
`1-2-3-4.default.pod.cluster.local`.
Getting :ip to work requires a bit fiddling in the container's CMD, for example:
```yaml
# deployment.yaml
command: ["sh", -c"]
args: ["POD_A_RECORD"]
args: ["export POD_A_RECORD=${POD_IP//./-} && /app/bin/app foreground"]
```
```
# vm.args
-name app@<%= "${POD_A_RECORD}.${NAMESPACE}.pod.cluster.local" %>
```
(in an app running as a Distillery release).
The benefit of using :dns over :ip is that you can establish a remote shell (as well as
run observer) by using `kubectl port-forward` in combination with some entries in `/etc/hosts`.
Defaults to :ip.
An example configuration is below:
config :libcluster,
topologies: [
k8s_example: [
strategy: #{__MODULE__},
config: [
mode: :ip,
kubernetes_selector: "cluster=myapp-cluster",
polling_interval: 10_000]]]
"""
use GenServer
use Cluster.Strategy
import Cluster.Logger
alias Cluster.Strategy.State
@default_polling_interval 5_000
@kubernetes_master "kubernetes.default.svc.cluster.local"
@service_account_path "/var/run/secrets/kubernetes.io/serviceaccount"
def start_link(opts), do: GenServer.start_link(__MODULE__, opts)
def init(opts) do
state = %State{
topology: Keyword.fetch!(opts, :topology),
connect: Keyword.fetch!(opts, :connect),
disconnect: Keyword.fetch!(opts, :disconnect),
list_nodes: Keyword.fetch!(opts, :list_nodes),
config: Keyword.fetch!(opts, :config),
meta: MapSet.new([])
}
{:ok, state, 0}
end
def handle_info(:timeout, state) do
handle_info(:load, state)
end
def handle_info(:load, %State{topology: topology, connect: connect, disconnect: disconnect, list_nodes: list_nodes} = state) do
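# Diff the freshly fetched pod list against the previously known nodes so
# we only connect to newly discovered pods and disconnect from removed ones.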
new_nodelist = MapSet.new(get_nodes(state))
added = MapSet.difference(new_nodelist, state.meta)
removed = MapSet.difference(state.meta, new_nodelist)
new_nodelist = case Cluster.Strategy.disconnect_nodes(topology, disconnect, list_nodes, MapSet.to_list(removed)) do
:ok ->
new_nodelist
{:error, bad_nodes} ->
# Add back the nodes which should have been removed, but which couldn't be for some reason
Enum.reduce(bad_nodes, new_nodelist, fn {n, _}, acc ->
MapSet.put(acc, n)
end)
end
new_nodelist = case Cluster.Strategy.connect_nodes(topology, connect, list_nodes, MapSet.to_list(added)) do
:ok ->
new_nodelist
{:error, bad_nodes} ->
# Remove the nodes which should have been added, but couldn't be for some reason
Enum.reduce(bad_nodes, new_nodelist, fn {n, _}, acc ->
MapSet.delete(acc, n)
end)
end
Process.send_after(self(), :load, Keyword.get(state.config, :polling_interval, @default_polling_interval))
{:noreply, %{state | :meta => new_nodelist}}
end
def handle_info(_, state) do
{:noreply, state}
end
@spec get_token() :: String.t
defp get_token() do
path = Path.join(@service_account_path, "token")
case File.exists?(path) do
true -> path |> File.read! |> String.trim()
false -> ""
end
end
@spec get_namespace() :: String.t
defp get_namespace() do
path = Path.join(@service_account_path, "namespace")
case File.exists?(path) do
true -> path |> File.read! |> String.trim()
false -> ""
end
end
@spec get_nodes(State.t) :: [atom()]
defp get_nodes(%State{topology: topology, config: config}) do
token = get_token()
namespace = get_namespace()
selector = Keyword.fetch!(config, :kubernetes_selector)
cond do
selector != nil ->
selector = URI.encode(selector)
endpoints_path = "api/v1/namespaces/#{namespace}/endpoints?labelSelector=#{selector}"
headers = [{'authorization', 'Bearer #{token}'}]
http_options = [ssl: [verify: :verify_none]]
case :httpc.request(:get, {'https://#{@kubernetes_master}/#{endpoints_path}', headers}, http_options, []) do
{:ok, {{_version, 200, _status}, _headers, body}} ->
parse_response(Keyword.get(config, :mode, :ip), Poison.decode!(body))
{:ok, {{_version, 403, _status}, _headers, body}} ->
%{"message" => msg} = Poison.decode!(body)
warn topology, "cannot query kubernetes (unauthorized): #{msg}"
[]
{:ok, {{_version, code, status}, _headers, body}} ->
warn topology, "cannot query kubernetes (#{code} #{status}): #{inspect body}"
[]
{:error, reason} ->
error topology, "request to kubernetes failed!: #{inspect reason}"
[]
end
selector == nil ->
warn topology, "kubernetes strategy is selected, but :kubernetes_selector is not configured!"
[]
end
end
defp parse_response(:ip, resp) do
case resp do
%{"items" => []} ->
[]
%{"items" => items} ->
Enum.reduce(items, [], fn
%{"subsets" => []}, acc ->
acc
%{"subsets" => subsets, "metadata" => %{"name" => app_name}}, acc ->
addrs = Enum.flat_map(subsets, fn
%{"addresses" => addresses} ->
Enum.map(addresses, fn %{"ip" => ip} -> :"#{app_name}@#{ip}" end)
_ ->
[]
end)
acc ++ addrs
_, acc ->
acc
end)
_ ->
[]
end
end
defp parse_response(:dns, resp) do
case resp do
%{"items" => []} ->
[]
%{"items" => items} ->
Enum.reduce(items, [], fn
%{"subsets" => []}, acc ->
acc
%{"subsets" => subsets, "metadata" => %{"name" => app_name}}, acc ->
addrs = Enum.flat_map(subsets, fn
%{"addresses" => addresses} -> Enum.map(addresses, fn
%{"ip" => ip, "targetRef" => %{"namespace" => namespace}} ->
format_dns_record(app_name, ip, namespace) end)
_ ->
[]
end)
acc ++ addrs
_, acc ->
acc
end)
_ ->
[]
end
end
defp format_dns_record(app_name, ip, namespace) do
ip = String.replace(ip, ".", "-")
:"#{app_name}@#{ip}.#{namespace}.pod.cluster.local"
end
end

# End of lib/strategy/kubernetes_pods.ex
defmodule Commanded.Aggregate.Multi.BankAccount do
defstruct [:account_number, :status, balance: 0]
alias Commanded.Aggregate.Multi
alias Commanded.Aggregate.Multi.BankAccount
defmodule Commands do
defmodule OpenAccount do
defstruct [:account_number, :initial_balance]
end
defmodule WithdrawMoney do
defstruct [:account_number, :transfer_uuid, :amount]
end
end
defmodule Events do
defmodule BankAccountOpened do
@derive Jason.Encoder
defstruct [:account_number, :balance]
end
defmodule MoneyWithdrawn do
@derive Jason.Encoder
defstruct [:account_number, :transfer_uuid, :amount, :balance]
end
end
alias Commands.{OpenAccount, WithdrawMoney}
alias Events.{BankAccountOpened, MoneyWithdrawn}
# Public command functions
def execute(%BankAccount{status: nil}, %OpenAccount{initial_balance: initial_balance} = command)
when is_number(initial_balance) and initial_balance > 0 do
%OpenAccount{account_number: account_number} = command
%BankAccountOpened{account_number: account_number, balance: initial_balance}
end
def execute(%BankAccount{status: :active} = account, %WithdrawMoney{amount: amount})
when is_number(amount) and amount > 0 do
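# The withdrawal is modelled as a two-step Multi: first emit the
# MoneyWithdrawn event, then validate the resulting balance. A negative
# balance makes the whole command fail with an error.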
account
|> Multi.new()
|> Multi.execute(&withdraw_money(&1, amount))
|> Multi.execute(&check_balance/1)
end
# State mutators
def apply(%BankAccount{} = state, %BankAccountOpened{} = event) do
%BankAccountOpened{account_number: account_number, balance: balance} = event
%BankAccount{state | account_number: account_number, balance: balance, status: :active}
end
def apply(%BankAccount{} = state, %MoneyWithdrawn{} = event) do
%MoneyWithdrawn{balance: balance} = event
%BankAccount{state | balance: balance}
end
# Private helpers
defp withdraw_money(%BankAccount{} = state, amount) do
%BankAccount{account_number: account_number, balance: balance} = state
%MoneyWithdrawn{
account_number: account_number,
amount: amount,
balance: balance - amount
}
end
defp check_balance(%BankAccount{balance: balance}) when balance < 0 do
{:error, :insufficient_funds_available}
end
defp check_balance(%BankAccount{}), do: []
end

# End of test/aggregates/support/multi_bank_account.ex
defmodule BlueHeronScan do
@moduledoc """
A scanner to collect Manufacturer Specific Data from AdvertisingReport packets.
A useful reference:
[Overview of BLE device identification](https://reelyactive.github.io/ble-identifier-reference.html)
Tested with:
- [Raspberry Pi Model Zero W](https://github.com/nerves-project/nerves_system_rpi0)
- /dev/ttyS0 is the BLE controller transport interface
- [BlueHeronTransportUART](https://github.com/blue-heron/blue_heron_transport_uart)
- [Govee H5102](https://fccid.io/2AQA6-H5102) Thermo-Hygrometer
- [Govee H5074](https://fccid.io/2AQA6-H5074) Thermo-Hygrometer
- Random devices from neighbors and passing cars.😉
## Examples
iex> {:ok, pid} = BlueHeronScan.start_link(:uart, %{device: "ttyS0"})
{:ok, #PID<0.10860.0>}
iex> {:ok, devices} = BlueHeronScan.devices(pid)
{:ok,
%{
9049270267450 => %{name: "SS3", time: ~U[2021-12-09 15:59:01.392458Z]},
48660401950223 => %{
784 => <<64, 16, 2, 48>>,
:time => ~U[2021-12-09 15:59:09.606645Z]
},
181149778439893 => %{
1 => <<1, 1, 3, 112, 82, 73>>,
:name => "GVH5102_EED5",
:time => ~U[2021-12-09 15:59:09.457780Z]
},
181149781445015 => %{
name: "ihoment_H6182_C997",
time: ~U[2021-12-09 15:59:09.545683Z]
},
209497230420943 => %{
name: "ELK-BLEDOM ",
time: ~U[2021-12-09 15:59:09.631200Z]
},
246390811914386 => %{
60552 => <<0, 81, 2, 189, 25, 100, 2>>,
:name => "Govee_H5074_F092",
:time => ~U[2021-12-09 15:59:09.450767Z]
}
}}
iex> BlueHeronScan.ignore_cids(pid, MapSet.new([6, 76, 117, 784]))
{:ok, #MapSet<[6, 76, 117, 784]>}
iex> BlueHeronScan.clear_devices(pid)
:ok
iex> {:ok, devices} = BlueHeronScan.devices(pid)
{:ok,
%{
181149778439893 => %{
1 => <<1, 1, 3, 108, 106, 73>>,
:name => "GVH5102_EED5",
:time => ~U[2021-12-09 16:02:01.800281Z]
},
181149781445015 => %{
name: "ihoment_H6182_C997",
time: ~U[2021-12-09 16:02:02.458660Z]
},
209497230420943 => %{
name: "ELK-BLEDOM ",
time: ~U[2021-12-09 16:02:02.337530Z]
},
210003231250023 => %{
name: "ELK-BLEDOM ",
time: ~U[2021-12-09 16:01:50.546539Z]
},
246390811914386 => %{
60552 => <<0, 84, 2, 182, 25, 100, 2>>,
:name => "Govee_H5074_F092",
:time => ~U[2021-12-09 16:02:01.408051Z]
}
}}
iex> BleAdMfgData.print(devices)
[
["6.0˚C 65.8% RH 100%🔋", "Govee_H5074_F092"],
["22.4˚C 36.2% RH 73%🔋", "GVH5102_EED5"]
]
iex> BlueHeronScan.disable(pid)
:scan_disable
iex> BlueHeronScan.clear_devices(pid)
:ok
iex> {:ok, devices} = BlueHeronScan.devices(pid)
{:ok, %{}}
iex> BlueHeronScan.enable(pid)
:ok
"""
use GenServer
require Logger
alias BlueHeron.HCI.Command.{
ControllerAndBaseband.WriteLocalName,
LEController.SetScanEnable
}
alias BlueHeron.HCI.Event.{
LEMeta.AdvertisingReport,
LEMeta.AdvertisingReport.Device
}
@init_commands [%WriteLocalName{name: "BlueHeronScan"}]
@default_uart_config %{
device: "ttyACM0",
uart_opts: [speed: 115_200],
init_commands: @init_commands
}
@default_usb_config %{
vid: 0x0BDA,
pid: 0xB82C,
init_commands: @init_commands
}
@doc """
Start a linked connection to the Bluetooth module and enable active scanning.
## UART
iex> {:ok, pid} = BlueHeronScan.start_link(:uart, %{device: "ttyS0"})
{:ok, #PID<0.111.0>}
## USB
iex> {:ok, pid} = BlueHeronScan.start_link(:usb)
{:ok, #PID<0.111.0>}
"""
def start_link(transport_type, config \\ %{})
def start_link(:uart, config) do
config = struct(BlueHeronTransportUART, Map.merge(@default_uart_config, config))
GenServer.start_link(__MODULE__, config, name: __MODULE__)
end
def start_link(:usb, config) do
config = struct(BlueHeronTransportUSB, Map.merge(@default_usb_config, config))
GenServer.start_link(__MODULE__, config, name: __MODULE__)
end
@doc """
Enable BLE scanning. This will deliver messages to the process mailbox
when other devices broadcast.
Returns `:ok` or `{:error, :not_working}` if uninitialized.
"""
def enable(pid) do
GenServer.call(pid, :scan_enable)
end
@doc """
Disable BLE scanning.
"""
def disable(pid) do
send(pid, :scan_disable)
end
@doc """
Get devices.
iex> BlueHeronScan.devices(pid)
{:ok, %{}}
"""
def devices(pid) do
GenServer.call(pid, :devices)
end
@doc """
Clear devices from the state.
iex> BlueHeronScan.clear_devices(pid)
:ok
"""
def clear_devices(pid) when is_pid(pid) do
GenServer.call(pid, :clear_devices)
end
@doc """
Get or set the company IDs to ignore.
https://www.bluetooth.com/specifications/assigned-numbers/company-identifiers
Apple and Microsoft beacons, 76 & 6, are noisy.
## Examples
iex> BlueHeronScan.ignore_cids(pid)
{:ok, [6, 76]}
iex> BlueHeronScan.ignore_cids(pid, [6, 76, 117])
{:ok, [6, 76, 117]}
"""
def ignore_cids(pid, cids \\ nil) do
GenServer.call(pid, {:ignore_cids, cids})
end
@impl GenServer
def init(config) do
# Create a context for BlueHeron to operate with.
{:ok, ctx} = BlueHeron.transport(config)
# Subscribe to HCI and ACL events.
BlueHeron.add_event_handler(ctx)
{:ok, %{ctx: ctx, working: false, devices: %{}, ignore_cids: [6, 76]}}
end
# Sent when a transport connection is established.
@impl GenServer
def handle_info({:BLUETOOTH_EVENT_STATE, :HCI_STATE_WORKING}, state) do
# Enable BLE Scanning. This will deliver messages to the process mailbox
# when other devices broadcast.
state = %{state | working: true}
scan(state, true)
{:noreply, state}
end
# Scan AdvertisingReport packets.
@impl GenServer
def handle_info(
{:HCI_EVENT_PACKET, %AdvertisingReport{devices: devices}},
state
) do
{:noreply, Enum.reduce(devices, state, &scan_device/2)}
end
# Ignore other HCI Events.
@impl GenServer
def handle_info({:HCI_EVENT_PACKET, _val}, state) do
# Logger.debug("#{__MODULE__} ignore HCI Event #{inspect(val)}")
{:noreply, state}
end
def handle_info(:scan_disable, state) do
scan(state, false)
{:noreply, state}
end
@impl GenServer
def handle_call(:devices, _from, state) do
{:reply, {:ok, state.devices}, state}
end
@impl GenServer
def handle_call(:clear_devices, _from, state) do
{:reply, :ok, %{state | devices: %{}}}
end
@impl GenServer
def handle_call({:ignore_cids, cids}, _from, state) do
cond do
cids == nil ->
{:reply, {:ok, state.ignore_cids}, state}
Enumerable.impl_for(cids) != nil ->
{:reply, {:ok, cids}, %{state | ignore_cids: cids}}
true ->
{:reply, {:error, :not_enumerable}, state}
end
end
def handle_call(:scan_enable, _from, state) do
{:reply, scan(state, true), state}
end
defp scan(%{working: false}, _enable) do
{:error, :not_working}
end
defp scan(%{ctx: ctx = %BlueHeron.Context{}}, enable) do
BlueHeron.hci_command(ctx, %SetScanEnable{le_scan_enable: enable})
status = if(enable, do: "enabled", else: "disabled")
Logger.info("#{__MODULE__} #{status} scanning")
end
defp scan_device(device, state) do
case device do
%Device{address: addr, data: data} ->
Enum.reduce(data, state, fn e, acc ->
cond do
is_local_name?(e) -> store_local_name(acc, addr, e)
is_mfg_data?(e) -> store_mfg_data(acc, addr, e)
true -> acc
end
end)
_ ->
state
end
end
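# In BLE advertising data the Complete Local Name AD structure carries the
# type byte 0x09, which surfaces here as a leading tab character.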
defp is_local_name?(val) do
is_binary(val) && String.starts_with?(val, "\t") && String.valid?(val)
end
defp is_mfg_data?(val) do
is_tuple(val) && elem(val, 0) == "Manufacturer Specific Data"
end
defp store_local_name(state, addr, "\t" <> name) do
device = Map.get(state.devices, addr, %{})
device = Map.merge(device, %{name: name, time: DateTime.utc_now()})
%{state | devices: Map.put(state.devices, addr, device)}
end
defp store_mfg_data(state, addr, dt) do
{_, mfg_data} = dt
<<cid::little-16, sdata::binary>> = mfg_data
if cid in state.ignore_cids do
state
else
device = Map.get(state.devices, addr, %{})
device = Map.merge(device, %{cid => sdata, time: DateTime.utc_now()})
%{state | devices: Map.put(state.devices, addr, device)}
end
end
end
defmodule BleAdMfgData do
@moduledoc """
Decode AdvertisingReport Manufacturer Specific Data.
https://www.bluetooth.com/specifications/assigned-numbers/company-identifiers
"""
@doc """
Print device data collected by `BlueHeronScan`.
## Examples
iex> {:ok, pid} = BlueHeronScan.start_link(:uart, %{device: "ttyS0"})
{:ok, #PID<0.2012.0>}
iex> {:ok, devices} = BlueHeronScan.devices(pid)
...
iex> BleAdMfgData.print(devices)
[
["26.9˚C 62.1% RH 100%🔋", "Govee_H5074_F092"],
["27.2˚C 57.5% RH 92%🔋", "GVH5102_EED5"]
]
iex>
"""
def print(devices) do
Enum.reduce(devices, [], fn {_, dmap}, list ->
Enum.reduce(dmap, list, fn {k, v}, acc ->
case print_device(k, v) do
nil -> acc
s -> [[s, Map.get(dmap, :name, "")] | acc]
end
end)
end)
end
# https://github.com/Home-Is-Where-You-Hang-Your-Hack/sensor.goveetemp_bt_hci
# custom_components/govee_ble_hci/govee_advertisement.py
# GVH5102
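# Payload layout: 2 unused bytes, a 24-bit big-endian value that packs
# temperature and humidity, then a battery percentage byte.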
defp print_device(0x0001, <<_::16, temhum::24, bat::8>>) do
tem = Float.round(temhum / 10000, 1)
hum = rem(temhum, 1000) / 10
"#{tem}˚C #{hum}% RH #{bat}%🔋"
end
# https://github.com/wcbonner/GoveeBTTempLogger
# goveebttemplogger.cpp
# bool Govee_Temp::ReadMSG(const uint8_t * const data)
# Govee_H5074
defp print_device(0xEC88, <<_::8, tem::little-16, hum::little-16, bat::8, _::8>>) do
tem = Float.round(tem / 100, 1)
hum = Float.round(hum / 100, 1)
"#{tem}˚C #{hum}% RH #{bat}%🔋"
end
defp print_device(_cid, _data) do
nil
end
end

# End of examples/scanner.ex
defmodule Clex.CL.ImageDesc do
@moduledoc ~S"""
This module defines a `Record` type that represents the `cl_image_desc` as specified in the Open CL specification:
```c
typedef struct _cl_image_desc {
cl_mem_object_type image_type;
size_t image_width;
size_t image_height;
size_t image_depth;
size_t image_array_size;
size_t image_row_pitch;
size_t image_slice_pitch;
cl_uint num_mip_levels;
cl_uint num_samples;
cl_mem buffer;
} cl_image_desc;
```
## Members
`:type` \
Describes the image type and must be either `:image1d`, `:image1d_buffer`, `:image1d_array`, `:image2d`, `:image2d_array`, or `:image3d`.
`:width` \
The width of the image in pixels. For a 2D image and image array, the image width must be ≤ `:image2d_max_width`. For a 3D image, the image width must be ≤ `:image3d_max_width`. For a 1D image buffer, the image width must be ≤ `:image_max_buffer_size`. For a 1D image and 1D image array, the image width must be ≤ `:image2d_max_width`.
`:height` \
The height of the image in pixels. This is only used if the image is a 2D, 3D or 2D image array. For a 2D image or image array, the image height must be ≤ `:image2d_max_height`. For a 3D image, the image height must be ≤ `:image3d_max_height`.
`:depth` \
The depth of the image in pixels. This is only used if the image is a 3D image and must be a value ≥ 1 and ≤ `:image3d_max_depth`.
`:array_size` \
The number of images in the image array. This is only used if the image is a 1D or 2D image array. The values for `:array_size`, if specified, must be a value ≥ 1 and ≤ `:image_max_array_size`. Note that reading and writing 2D image arrays from a kernel with `:array_size` = 1 may be lower performance than 2D images.
`:row_pitch` \
The scan-line pitch in bytes. This must be 0 if host_ptr is NULL and can be either 0 or ≥ `:width` * size of element in bytes if host_ptr is not NULL. If host_ptr is not NULL and `:row_pitch` = 0, `:row_pitch` is calculated as `:width` * size of element in bytes. If `:row_pitch` is not 0, it must be a multiple of the image element size in bytes.
`:slice_pitch` \
The size in bytes of each 2D slice in the 3D image or the size in bytes of each image in a 1D or 2D image array. This must be 0 if host_ptr is NULL. If host_ptr is not NULL, `:slice_pitch` can be either 0 or ≥ `:row_pitch` * `:height` for a 2D image array or 3D image and can be either 0 or ≥ `:row_pitch` for a 1D image array. If host_ptr is not NULL and `:slice_pitch` = 0, `:slice_pitch` is calculated as `:row_pitch` * `:height` for a 2D image array or 3D image and `:row_pitch` for a 1D image array. If `:slice_pitch` is not 0, it must be a multiple of the `:row_pitch`.
`:num_mip_levels`, `:num_samples` \
Must be 0.
`:buffer` \
Refers to a valid buffer memory object if `:type` is `:image1d_buffer`. Otherwise it must be NULL. For a 1D image buffer object, the image pixels are taken from the buffer object's data store. When the contents of a buffer object's data store are modified, those changes are reflected in the contents of the 1D image buffer object and vice-versa at corresponding synchronization points. The `:width` * size of element in bytes must be ≤ size of buffer object data store.
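## Example

A minimal sketch (dimensions are illustrative); any field left out keeps the
record defaults defined below:

    use Clex.CL.ImageDesc
    desc = cl_image_desc(type: :image2d, width: 640, height: 480)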
"""
require Record
defmacro __using__(_opts) do
quote do
import unquote(__MODULE__)
end
end
@type t :: record(:cl_image_desc,
type: Clex.CL.cl_mem_object_type,
width: non_neg_integer,
height: non_neg_integer,
depth: non_neg_integer,
array_size: non_neg_integer,
row_pitch: non_neg_integer,
slice_pitch: non_neg_integer,
num_mip_levels: non_neg_integer,
num_samples: non_neg_integer,
buffer: Clex.CL.cl_mem | :undefined
)
Record.defrecord(:cl_image_desc, type: nil,
width: nil, height: nil, depth: 1,
array_size: 1, row_pitch: 0, slice_pitch: 0,
num_mip_levels: 0, num_samples: 0, buffer: :undefined
)
end

# End of lib/clex/cl/image_desc.ex
defmodule Automaton.Types.TWEANN.Constructor do
alias Automaton.Types.TWEANN.Sensor
alias Automaton.Types.TWEANN.Actuator
alias Automaton.Types.TWEANN.Cortex
alias Automaton.Types.TWEANN.Neuron
@doc """
The `construct_genotype` function accepts the name of the file to which we'll
save the genotype, sensor name, actuator name, and the hidden layer density
parameters. We have to generate unique Ids for every sensor and actuator. The
sensor and actuator names are used as input to the create_sensor and
create_actuator functions, which in turn generate the actual Sensor and
Actuator representing tuples. We create unique Ids for sensors and actuators
so that when in the future a NN uses 2 or more sensors or actuators of the
same type, we will be able to differentiate between them using their ids.
After the Sensor and Actuator tuples are generated, we extract the NN’s input
and output vector lengths from the sensor and actuator used by the system. The
Input_VL is then used to specify how many weights the neurons in the input
layer will need, and the Output_VL specifies how many neurons are in the
output layer of the NN. After appending the HiddenLayerDensites to the now
known number of neurons in the last layer to generate the full LayerDensities
list, we use the create_NeuroLayers function to generate the Neuron
representing tuples. We then update the Sensor and Actuator records with
proper fanin and fanout ids from the freshly created Neuron tuples, compose
the Cortex, and write the genotype to file.
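## Example

    # A sketch: :rng and :pts are the only sensor and actuator currently
    # supported (see create_sensor/1 and create_actuator/1), and the file
    # name is illustrative. Builds a genotype with one hidden layer of
    # three neurons and writes it to "test_nn".
    construct_genotype("test_nn", :rng, :pts, [3])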
"""
def construct_genotype(sensor_name, actuator_name, hidden_layer_densities) do
construct_genotype(:neuro, sensor_name, actuator_name, hidden_layer_densities)
end
def construct_genotype(file_name, sensor_name, actuator_name, hidden_layer_densities) do
sensor = create_sensor(sensor_name)
actuator = create_actuator(actuator_name)
output_vl = actuator.vl
layer_densities = List.insert_at(hidden_layer_densities, -1, output_vl)
cx_id = {:cortex, generate_id()}
neurons = create_neuro_layers(cx_id, sensor, actuator, layer_densities)
input_layer = List.first(neurons)
output_layer = List.last(neurons)
fl_nids = Enum.map(input_layer, fn n -> n.id end)
ll_nids = Enum.map(output_layer, fn n -> n.id end)
n_ids = for n <- List.flatten(neurons), do: n.id
sensor = %Sensor{sensor | cx_id: cx_id, fanout_ids: fl_nids}
actuator = %Actuator{actuator | cx_id: cx_id, fanin_ids: ll_nids}
cortex = create_cortex(cx_id, [sensor.id], [actuator.id], n_ids)
genotype = List.flatten([cortex, sensor, actuator | neurons])
{:ok, file} = :file.open(file_name, :write)
:lists.foreach(fn x -> :io.format(file, "~p.~n", [x]) end, genotype)
:file.close(file)
end
@doc """
Every sensor and actuator uses some kind of function associated with it, a
function that either polls the environment for sensory signals (in the case of
a sensor) or acts upon the environment (in the case of an actuator). It is the
function that we need to define and program before it is used, and the name of
the function is the same as the name of the sensor or actuator itself. For
example, the create_sensor/1 has specified only the rng sensor, because that
is the only sensor function we’ve finished developing. The rng function has
its own vl specification, which will determine the number of weights that a
neuron will need to allocate if it is to accept this sensor's output vector.
The same principles apply to the create_actuator function. Both, create_sensor
and create_actuator function, given the name of the sensor or actuator, will
return a record with all the specifications of that element, each with its own
unique Id.
"""
def create_sensor(name) do
case name do
:rng ->
%Sensor{id: {:sensor, generate_id()}, name: :rng, vl: 2}
_ ->
exit("System does not yet support a sensor by the name #{name}")
end
end
def create_actuator(name) do
case name do
:pts ->
%Actuator{id: {:actuator, generate_id()}, name: :pts, vl: 1}
_ ->
exit("System does not yet support a actuator by the name #{name}")
end
end
@doc """
The function create_neuro_layers/3 prepares the initial step before starting
the recursive create_neuro_layers/7 function which will create all the Neuron
records. We first generate the place holder Input Ids "Plus" (Input_IdPs),
which are tuples composed of Ids and the vector lengths of the incoming
signals associated with them. The proper input_idps will have a weight list in
the tuple instead of the vector length. Because we are only building NNs each
with only a single Sensor and Actuator, the IdP to the first layer is composed
of the single Sensor Id with the vector length of its sensory signal, likewise
in the case of the Actuator. We then generate unique ids for the neurons in
the first layer, and drop into the recursive create_neuro_layers/7 function.
"""
def create_neuro_layers(cx_id, sensor, actuator, layer_densities) do
input_id_ps = [{sensor.id, sensor.vl}]
tot_layers = length(layer_densities)
[fl_neurons | next_lds] = layer_densities
n_ids = for id <- generate_ids(fl_neurons, []), do: {:neuron, {1, id}}
create_neuro_layers(cx_id, actuator.id, 1, tot_layers, input_id_ps, n_ids, next_lds, [])
end
def create_neuro_layers(
cx_id,
actuator_id,
layer_index,
tot_layers,
input_id_ps,
n_ids,
[next_ld | lds],
acc
) do
output_nids = for id <- generate_ids(next_ld, []), do: {:neuron, {layer_index + 1, id}}
layer_neurons = create_neuro_layer(cx_id, input_id_ps, n_ids, output_nids, [])
next_input_id_ps = for n_id <- n_ids, do: {n_id, 1}
create_neuro_layers(
cx_id,
actuator_id,
layer_index + 1,
tot_layers,
next_input_id_ps,
output_nids,
lds,
[layer_neurons | acc]
)
end
def create_neuro_layers(cx_id, actuator_id, tot_layers, tot_layers, input_id_ps, nids, [], acc) do
output_ids = [actuator_id]
layer_neurons = create_neuro_layer(cx_id, input_id_ps, nids, output_ids, [])
Enum.reverse([layer_neurons | acc])
end
@doc """
To create neurons from the same layer, all that is needed are the Ids for
those neurons, a list of Input_IdPs for every neuron so that we can create the
proper number of weights, and a list of Output_Ids. Since in our simple feed
forward neural network all neurons are fully connected to the neurons in the
next layer, the Input_IdPs and Output_Ids are the same for every neuron
belonging to the same layer.
"""
def create_neuro_layer(cx_id, input_id_ps, [id | n_ids], output_ids, acc) do
neuron = create_neuron(input_id_ps, id, cx_id, output_ids)
create_neuro_layer(cx_id, input_id_ps, n_ids, output_ids, [neuron | acc])
end
def create_neuro_layer(_cx_id, _input_id_ps, [], _output_ids, acc), do: acc
@doc """
Each neuron record is composed by the `create_neuron/3` function. The
`create_neuron/3` function creates the Input list from the tuples
[{Id,Weights}...] using the vector lengths specified in the place holder
Input_IdPs. The `create_neural_input/2` function uses `create_neural_weights/2` to
generate the random weights in the range of -0.5 to 0.5, adding the bias to
the end of the list.
"""
def create_neuron(input_id_ps, id, cx_id, output_ids) do
proper_input_id_ps = create_neural_input(input_id_ps, [])
%Neuron{
id: id,
cx_id: cx_id,
af: :tanh,
input_id_ps: proper_input_id_ps,
output_ids: output_ids
}
end
def create_neural_input([{input_id, input_vl} | input_id_ps], acc) do
weights = create_neural_weights(input_vl, [])
create_neural_input(input_id_ps, [{input_id, weights} | acc])
end
def create_neural_input([], acc) do
Enum.reverse([{:bias, :rand.uniform() - 0.5} | acc])
end
def create_neural_weights(0, acc), do: acc
def create_neural_weights(index, acc) do
w = :rand.uniform() - 0.5
create_neural_weights(index - 1, [w | acc])
end
@doc """
The `generate_id/0` function creates a unique Id using `Ksuid.generate/0`, a
K-sortable unique identifier string. The `generate_ids/2` function creates a
list of unique Ids.
"""
def generate_ids(0, acc), do: acc
def generate_ids(index, acc) do
id = generate_id()
generate_ids(index - 1, [id | acc])
end
def generate_id() do
Ksuid.generate()
end
@doc """
The `create_cortex/4` function generates the record encoded genotypical
representation of the cortex element. The Cortex element needs to know the Id
of every Neuron, Sensor, and Actuator in the NN
"""
def create_cortex(cx_id, s_ids, a_ids, n_ids) do
%Cortex{id: cx_id, sensor_ids: s_ids, actuator_ids: a_ids, n_ids: n_ids}
end
end

# End of lib/automata/automaton_types/neuroevolution/constructor.ex
defmodule Toby.App.Views.Tables do
@moduledoc """
Builds a view for displaying information about ETS tables
TODO: Show DETS & Mnesia tables
"""
alias Toby.Util.Selection
import Toby.Util.Formatting, only: [format_bytes: 1]
import Ratatouille.View
import Ratatouille.Constants, only: [attribute: 1, color: 1]
@frame_rows 7
@bold attribute(:bold)
@style_selected [
color: color(:black),
background: color(:white)
]
def render(%{data: %{tables: tables}, cursor_x: cursor_x, cursor_y: cursor}, window) do
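# Only the rows that fit in the panel are rendered: the vertical cursor
# drives both the visible slice and the highlighted row, while the
# horizontal cursor offsets the viewport for wide tables.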
tables_slice = Selection.slice(tables, window.height - @frame_rows, cursor.position)
selected = Enum.at(tables, cursor.position)
row do
column(size: 8) do
panel title: "Tables (ETS)", height: :fill do
viewport(offset_x: cursor_x.position) do
table do
table_row(attributes: [@bold]) do
table_cell(content: "Name")
table_cell(content: "Objects")
table_cell(content: "Size")
table_cell(content: "Owner PID")
table_cell(content: "Owner Name")
table_cell(content: "Table ID")
end
for tab <- tables_slice do
table_row(if(tab == selected, do: @style_selected, else: [])) do
table_cell(content: to_string(tab[:name]))
table_cell(content: to_string(tab[:size]))
table_cell(content: format_bytes(tab[:memory]))
table_cell(content: inspect(tab[:owner]))
table_cell(content: to_string(tab[:owner_name]))
table_cell(content: inspect(tab[:id]))
end
end
end
end
end
end
column(size: 4) do
render_table_details(selected)
end
end
end
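# Each `tab` map rendered here is assumed to carry the keys read by
# `render_table_details/1` below; a representative (hypothetical) entry:
#
#   %{name: :my_cache, id: reference, named_table: true, owner: pid,
#     owner_name: MyApp.Cache, heir: :none, node: :"app@host", source: :ets,
#     keypos: 1, type: :set, protection: :protected, size: 42, memory: 1024,
#     compressed: false}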
defp render_table_details(tab) do
panel title: to_string(tab[:name]), height: :fill do
label do
text(attributes: [@bold], content: "Identification & Owner")
end
table do
table_row do
table_cell(content: "Name")
table_cell(content: to_string(tab[:name]))
end
table_row do
table_cell(content: "ID")
table_cell(content: inspect(tab[:id]))
end
table_row do
table_cell(content: "Named table")
table_cell(content: to_string(tab[:named_table]))
end
table_row do
table_cell(content: "Owner")
table_cell(content: inspect(tab[:owner]))
end
table_row do
table_cell(content: "Owner name")
table_cell(content: to_string(tab[:owner_name]))
end
table_row do
table_cell(content: "Heir")
table_cell(content: to_string(tab[:heir]))
end
table_row do
table_cell(content: "Node")
table_cell(content: to_string(tab[:node]))
end
end
label do
text(attributes: [@bold], content: "Settings")
end
table do
table_row do
table_cell(content: "Source")
table_cell(content: to_string(tab[:source]))
end
table_row do
table_cell(content: "Key position")
table_cell(content: to_string(tab[:keypos]))
end
table_row do
table_cell(content: "Table type")
table_cell(content: to_string(tab[:type]))
end
table_row do
table_cell(content: "Protection mode")
table_cell(content: to_string(tab[:protection]))
end
end
label do
text(attributes: [@bold], content: "Memory Usage")
end
table do
table_row do
table_cell(content: "Number of objects")
table_cell(content: to_string(tab[:size]))
end
table_row do
table_cell(content: "Memory allocated")
table_cell(content: format_bytes(tab[:memory]))
end
table_row do
table_cell(content: "Compressed")
table_cell(content: to_string(tab[:compressed]))
end
end
end
end
end | lib/toby/app/views/tables.ex | 0.5083 | 0.419707 | tables.ex | starcoder |
defmodule Cldr.Number.Backend.Transliterate do
@moduledoc false
def define_number_module(config) do
module = inspect(__MODULE__)
backend = config.backend
config = Macro.escape(config)
quote location: :keep, bind_quoted: [module: module, backend: backend, config: config] do
defmodule Number.Transliterate do
@moduledoc false
if Cldr.Config.include_module_docs?(config.generate_docs) do
@moduledoc """
Transliteration for digits and separators.
Transliterating a string is an expensive business. First the string has to
be exploded into its component graphemes. Then for each grapheme we have
to map to the equivalent in the other `{locale, number_system}`. Then we
have to reassemble the string.
Effort is made to short circuit where possible. Transliteration is not
required for any `{locale, number_system}` that is the same as `{"en",
"latn"}` since the implementation uses this combination for the placeholders during
formatting already. When short circuiting is possible (typically the en-*
locales with "latn" number_system - the total number of short circuited
locales is 211 of the 537 in CLDR) the overall number formatting is twice as
fast as when formal transliteration is required.
### Configuring precompilation of digit transliterations
This module includes `Cldr.Number.Transliterate.transliterate_digits/3` which transliterates
digits between number systems. For example from :arabic to :latn. Since generating a
transliteration map is slow, pairs of transliterations can be configured so that the
transliteration map is created at compile time and therefore speeding up transliteration at
run time.
To configure these transliteration pairs, add them to the `use Cldr` configuration
in a backend module:
defmodule MyApp.Cldr do
use Cldr,
locale: ["en", "fr", "th"],
default_locale: "en",
precompile_transliterations: [{:latn, :thai}, {:arab, :thai}]
end
Where each tuple in the list configures one transliteration map. In this example, two maps are
configured: from `:latn` to `:thai` and from `:arab` to `:thai`.
A list of configurable number systems is returned by `Cldr.Number.System.systems_with_digits/0`.
If a transliteration is requested between two number pairs that have not been configured for
precompilation, a warning is logged.
"""
end
alias Cldr.Number.System
alias Cldr.Number.Symbol
alias Cldr.Number.Format.Compiler
alias Cldr.LanguageTag
alias Cldr.Config
@doc """
Transliterates from latin digits to another number system's digits.
Transliterates the latin digits 0..9 to their equivalents in
another number system. Also transliterates the decimal and grouping
separators as well as the plus, minus and exponent symbols. Any other character
in the string will be returned "as is".
## Arguments
* `sequence` is the string to be transliterated.
* `locale` is any known locale, defaulting to `#{inspect(backend)}.get_locale/0`.
* `number_system` is any known number system. If expressed as a `string` it
is the actual name of a known number system. If expressed as an `atom` it is
used as a key to look up a number system for the locale (the usual keys are
`:default` and `:native` but :traditional and :finance are also part of the
standard). See `#{inspect(backend)}.Number.System.number_systems_for/1` for a locale to
see what number system types are defined. The default is `:default`.
For available number systems see `Cldr.Number.System.number_systems/0`
and `#{inspect(backend)}.Number.System.number_systems_for/1`. Also see
`#{inspect(backend)}.Number.Symbol.number_symbols_for/1`.
## Examples
iex> #{inspect(__MODULE__)}.transliterate("123556")
"123556"
iex> #{inspect(__MODULE__)}.transliterate("123,556.000", "fr", :default)
"123 556,000"
iex> #{inspect(__MODULE__)}.transliterate("123556", "th", :default)
"123556"
iex> #{inspect(__MODULE__)}.transliterate("123556", "th", "thai")
"๑๒๓๕๕๖"
iex> #{inspect(__MODULE__)}.transliterate("123556", "th", :native)
"๑๒๓๕๕๖"
iex> #{inspect(__MODULE__)}.transliterate("Some number is: 123556", "th", "thai")
"Some number is: ๑๒๓๕๕๖"
"""
@spec transliterate(
String.t(),
LanguageTag.t() | Cldr.Locale.locale_name(),
String.t() | atom()
) ::
String.t() | {:error, {module(), String.t()}}
def transliterate(
sequence,
locale \\ unquote(backend).get_locale(),
number_system \\ System.default_number_system_type()
)
# No transliteration is required when the digits and separators are the same
# as the ones we use in formatting.
with {:ok, systems} <- Config.known_number_systems_like("en", :latn, config) do
for {locale, system} <- systems do
def transliterate(
sequence,
%LanguageTag{cldr_locale_name: unquote(locale)},
unquote(system)
) do
sequence
end
end
end
# We can only transliterate if the target {locale, number_system} has defined
# digits. Some systems don't have digits, just rules.
for {number_system, %{digits: _digits}} <- System.systems_with_digits() do
def transliterate(sequence, locale, unquote(number_system)) do
sequence
|> String.graphemes()
|> Enum.map(&transliterate_char(&1, locale, unquote(number_system)))
|> Elixir.List.to_string()
end
end
# String locale name needs validation
def transliterate(sequence, locale_name, number_system) when is_binary(locale_name) do
with {:ok, locale} <- Module.concat(unquote(backend), :Locale).new(locale_name) do
transliterate(sequence, locale, number_system)
end
end
# For when the system name is not known (because it's probably a system type
# like :default or :native)
def transliterate(sequence, locale_name, number_system) do
with {:ok, system_name} <-
System.system_name_from(number_system, locale_name, unquote(backend)) do
transliterate(sequence, locale_name, system_name)
end
end
def transliterate!(sequence, locale, number_system) do
case transliterate(sequence, locale, number_system) do
{:error, {exception, reason}} -> raise exception, reason
string -> string
end
end
# Functions to transliterate the symbols
for locale_name <- Cldr.Locale.Loader.known_locale_names(config),
{name, symbols} <- Config.number_symbols_for!(locale_name, config),
!is_nil(symbols) do
# Mapping for the grouping separator
defp transliterate_char(
unquote(Compiler.placeholder(:group)),
%LanguageTag{cldr_locale_name: unquote(locale_name)},
unquote(name)
) do
unquote(symbols.group)
end
# Mapping for the decimal separator
defp transliterate_char(
unquote(Compiler.placeholder(:decimal)),
%LanguageTag{cldr_locale_name: unquote(locale_name)},
unquote(name)
) do
unquote(symbols.decimal)
end
# Mapping for the exponent
defp transliterate_char(
unquote(Compiler.placeholder(:exponent)),
%LanguageTag{cldr_locale_name: unquote(locale_name)},
unquote(name)
) do
unquote(symbols.exponential)
end
# Mapping for the plus sign
defp transliterate_char(
unquote(Compiler.placeholder(:plus)),
%LanguageTag{cldr_locale_name: unquote(locale_name)},
unquote(name)
) do
unquote(symbols.plus_sign)
end
# Mapping for the minus sign
defp transliterate_char(
unquote(Compiler.placeholder(:minus)),
%LanguageTag{cldr_locale_name: unquote(locale_name)},
unquote(name)
) do
unquote(symbols.minus_sign)
end
end
# Functions to transliterate the digits
for {name, %{digits: digits}} <- System.systems_with_digits() do
graphemes = String.graphemes(digits)
for latin_digit <- 0..9 do
grapheme = :lists.nth(latin_digit + 1, graphemes)
latin_char = Integer.to_string(latin_digit)
defp transliterate_char(unquote(latin_char), _locale, unquote(name)) do
unquote(grapheme)
end
end
end
# Any unknown mapping gets returned as is
defp transliterate_char(digit, _locale, _name) do
digit
end
@doc """
Transliterates digits from one number system to another number system
* `digits` is binary representation of a number
* `from_system` and `to_system` are number system names in atom form. See
`Cldr.Number.System.systems_with_digits/0` for available number systems.
## Example
iex> #{inspect(__MODULE__)}.transliterate_digits "٠١٢٣٤٥٦٧٨٩", :arab, :latn
"0123456789"
"""
@spec transliterate_digits(binary, atom, atom) :: binary
for {from_system, to_system} <- Map.get(config, :precompile_transliterations, []) do
with {:ok, from} = System.number_system_digits(from_system),
{:ok, to} = System.number_system_digits(to_system),
map = System.generate_transliteration_map(from, to) do
def transliterate_digits(digits, unquote(from_system), unquote(to_system)) do
do_transliterate_digits(digits, unquote(Macro.escape(map)))
end
end
end
def transliterate_digits(digits, from_system, to_system) when is_binary(digits) do
Cldr.Number.Transliterate.transliterate_digits(digits, from_system, to_system)
end
defp do_transliterate_digits(digits, map) do
digits
|> String.graphemes()
|> Enum.map(&Map.get(map, &1, &1))
|> Enum.join()
end
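# Hypothetical usage, assuming {:latn, :thai} was listed under
# :precompile_transliterations in the backend config:
#
#   MyApp.Cldr.Number.Transliterate.transliterate_digits("123", :latn, :thai)
#   #=> "๑๒๓"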
end
end
end
end | lib/cldr/number/backend/transliterate.ex | 0.85817 | 0.623864 | transliterate.ex | starcoder |
defmodule Warpath.Element.Path do
@moduledoc """
This module contains functions to accumulate and transform item path tokens.
The paths are built during expression evaluation by `Warpath.query/3`.
"""
@type token ::
{:root, String.t()}
| {:property, String.t() | atom()}
| {:index_access, integer}
@type acc :: [token, ...] | []
@doc """
Accumulate a path token into a path acc.
## Example
iex> acc = [{:root, "$"}]
...> Warpath.Element.Path.accumulate({:property, "name"}, acc)
[{:property, "name"}, {:root, "$"}]
"""
@spec accumulate(token, acc) :: acc
def accumulate({tag, _} = token, acc)
when is_list(acc) and tag in [:root, :property, :index_access],
do: [token | acc]
@doc """
Transform path tokens into a jsonpath bracket-notation representation.
## Example
iex> acc = [{:property, "name"}, {:root, "$"}]
...> Warpath.Element.Path.bracketify(acc)
"$['name']"
"""
@spec bracketify(acc) :: binary
def bracketify(paths), do: make_path(paths, :bracketify)
@doc """
Transform path tokens into a jsonpath dot-notation representation.
## Example
iex> acc = [{:property, "name"}, {:root, "$"}]
...> Warpath.Element.Path.dotify(acc)
"$.name"
"""
@spec dotify(acc) :: binary
def dotify(paths), do: make_path(paths, :dotify)
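# Index tokens render identically in both notations; a short illustration:
#
#   acc = [{:index_access, 0}, {:property, "users"}, {:root, "$"}]
#   Warpath.Element.Path.bracketify(acc) #=> "$['users'][0]"
#   Warpath.Element.Path.dotify(acc)     #=> "$.users[0]"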
defp make_path([h | _] = tokens, option) when is_tuple(h) do
to_string(tokens, option)
end
defp make_path([h | _] = tokens, option) when is_list(h) do
tokens
|> Enum.map(&make_path(&1, option))
|> List.flatten()
end
defp make_path([], _), do: ""
defp to_string(tokens, opts) do
tokens
|> Enum.reverse()
|> Enum.map(&path(&1, opts))
|> Enum.join()
end
defp path({:root, root}, :bracketify), do: root
defp path({:root, root}, :dotify), do: root
defp path({:property, property}, :bracketify), do: "['#{property}']"
defp path({:property, property}, :dotify), do: ".#{property}"
defp path({:index_access, index}, _), do: "[#{index}]"
end | lib/warpath/element/path.ex | 0.859531 | 0.50653 | path.ex | starcoder |
defmodule DartSass do
@moduledoc """
DartSass is an installer and runner for [Sass](https://sass-lang.com/dart-sass).
## Profiles
You can define multiple configuration profiles. By default, there is a
profile called `:default` which you can configure its args, current
directory and environment:
config :dart_sass,
version: "1.43.4",
default: [
args: ~w(css/app.scss ../priv/static/assets/app.css),
cd: Path.expand("../assets", __DIR__)
]
## Dart Sass configuration
There are three global configurations for the `dart_sass` application:
* `:version` - the expected Sass version.
* `:sass_path` - the path to the Sass snapshot or executable. By default
it is automatically downloaded and placed inside the `_build` directory
of your current app.
* `:dart_path` - the path to the Dart VM executable. By default it is
automatically downloaded and placed inside the `_build` directory
of your current app. Note that the Dart Sass release for your
operating system may not require a separate Dart executable.
Overriding the `:sass_path` or `:dart_path` option is not recommended,
as we will automatically download and manage Dart Sass for you,
but in case you can't download it (for example, you are building
from source), you may want to set the paths to a configurable
system location. In your config files, do:
config :dart_sass,
sass_path: System.get_env("MIX_SASS_PATH"),
dart_path: System.get_env("MIX_SASS_DART_PATH")
And then you can install Dart Sass elsewhere and configure the relevant
environment variables.
"""
use Application
require Logger
@doc false
def start(_, _) do
unless Application.get_env(:dart_sass, :version) do
Logger.warn("""
dart_sass version is not configured. Please set it in your config files:
config :dart_sass, :version, "#{latest_version()}"
""")
end
configured_version = configured_version()
case bin_version() do
{:ok, ^configured_version} ->
:ok
{:ok, version} ->
Logger.warn("""
Outdated dart-sass version. Expected #{configured_version}, got #{version}. \
Please run `mix sass.install` or update the version in your config files.\
""")
:error ->
:ok
end
Supervisor.start_link([], strategy: :one_for_one)
end
@doc false
# Latest known version at the time of publishing.
def latest_version do
"1.43.4"
end
@doc """
Returns the configured Sass version.
"""
def configured_version do
Application.get_env(:dart_sass, :version, latest_version())
end
@doc """
Returns the configuration for the given profile.
Returns nil if the profile does not exist.
"""
def config_for!(profile) when is_atom(profile) do
Application.get_env(:dart_sass, profile) ||
raise ArgumentError, """
unknown dart_sass profile. Make sure the profile is defined in your config files, such as:
config :dart_sass,
#{profile}: [
args: ~w(css/app.scss ../priv/static/assets/app.css),
cd: Path.expand("../assets", __DIR__)
]
"""
end
@doc """
Checks whether or not dart-sass is installed.
"""
def installed? do
case detect_platform() do
%{cmd: sass, args: []} -> File.exists?(sass)
%{cmd: dart, args: [snapshot]} -> File.exists?(dart) and File.exists?(snapshot)
end
end
@doc """
Returns information about the current environment.
"""
def detect_platform do
case :os.type() do
{:unix, :darwin} ->
%{platform: :macos, cmd: dart_path(), args: [snapshot_path()]}
{:unix, osname} ->
%{platform: osname, cmd: sass_path(), args: []}
{:win32, _osname} ->
%{platform: :windows, cmd: dart_path(), args: [snapshot_path()]}
end
end
@doc false
def dart_path do
Application.get_env(:dart_sass, :dart_path) || build_path("dart")
end
@doc false
def snapshot_path do
Application.get_env(:dart_sass, :sass_path) || build_path("sass.snapshot")
end
@doc false
def sass_path do
Application.get_env(:dart_sass, :sass_path) || build_path("sass")
end
defp build_path(path) do
if Code.ensure_loaded?(Mix.Project) do
Path.join(Path.dirname(Mix.Project.build_path()), path)
else
"_build/#{path}"
end
end
# TODO: Remove once dart-sass exits when stdin is closed.
@doc false
def script_path() do
Path.join(:code.priv_dir(:dart_sass), "dart_sass.bash")
end
@doc """
Returns the version of the Sass executable (or snapshot).
Returns `{:ok, version_string}` on success or `:error` when the executable
is not available.
"""
def bin_version do
{path, args} = sass(["--version"])
with true <- File.exists?(path),
{result, 0} <- System.cmd(path, args) do
{:ok, String.trim(result)}
else
_ -> :error
end
end
defp sass(extra_args) do
%{cmd: cmd, args: args, platform: platform} = detect_platform()
args = args ++ extra_args
# TODO: Remove once dart-sass exits when stdin is closed.
# Link: https://github.com/sass/dart-sass/pull/1411
cond do
"--watch" in args and platform != :windows ->
{script_path(), [cmd] ++ args}
true ->
{cmd, args}
end
end
@doc """
Runs the given command with `args`.
The given args will be appended to the configured args.
The task output will be streamed directly to stdio. It
returns the status of the underlying call.
"""
def run(profile, extra_args) when is_atom(profile) and is_list(extra_args) do
config = config_for!(profile)
args = config[:args] || []
opts = [
cd: config[:cd] || File.cwd!(),
env: config[:env] || %{},
into: IO.stream(:stdio, :line),
stderr_to_stdout: true
]
{path, args} = sass(args ++ extra_args)
path
|> System.cmd(args, opts)
|> elem(1)
end
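# A hedged invocation sketch, assuming the :default profile from the
# moduledoc is configured:
#
#   DartSass.run(:default, ~w(--no-source-map))
#   #=> 0  (the exit status of the underlying sass command)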
@doc """
Installs, if not available, and then runs `sass`.
Returns the same as `run/2`.
"""
def install_and_run(profile, args) do
unless installed?() do
install()
end
run(profile, args)
end
@doc """
Installs dart-sass with `configured_version/0`.
"""
def install do
version = configured_version()
tmp_dir = Path.join(System.tmp_dir!(), "cs-dart-sass")
File.rm_rf!(tmp_dir)
File.mkdir_p!(tmp_dir)
platform = detect_platform()
name = "dart-sass-#{version}-#{target(platform)}"
url = "https://github.com/sass/dart-sass/releases/download/#{version}/#{name}"
archive = fetch_body!(url)
case unpack_archive(Path.extname(name), archive, tmp_dir) do
:ok -> :ok
other -> raise "couldn't unpack archive: #{inspect(other)}"
end
case platform do
%{platform: :linux, cmd: sass} ->
File.rm(sass)
File.cp!(Path.join([tmp_dir, "dart-sass", "sass"]), sass)
%{platform: :macos, cmd: dart, args: [snapshot]} ->
File.rm(dart)
File.cp!(Path.join([tmp_dir, "dart-sass", "src", "dart"]), dart)
File.rm(snapshot)
File.cp!(Path.join([tmp_dir, "dart-sass", "src", "sass.snapshot"]), snapshot)
%{platform: :windows, cmd: dart, args: [snapshot]} ->
File.rm(dart)
File.cp!(Path.join([tmp_dir, "dart-sass", "src", "dart.exe"]), dart)
File.rm(snapshot)
File.cp!(Path.join([tmp_dir, "dart-sass", "src", "sass.snapshot"]), snapshot)
end
end
defp unpack_archive(".zip", zip, cwd) do
with {:ok, _} <- :zip.unzip(zip, cwd: to_charlist(cwd)), do: :ok
end
defp unpack_archive(_, tar, cwd) do
:erl_tar.extract({:binary, tar}, [:compressed, cwd: to_charlist(cwd)])
end
# Available targets: https://github.com/sass/dart-sass/releases
defp target(%{platform: :windows}) do
case :erlang.system_info(:wordsize) * 8 do
32 -> "windows-ia32.zip"
64 -> "windows-x64.zip"
end
end
defp target(%{platform: platform}) do
arch_str = :erlang.system_info(:system_architecture)
[arch | _] = arch_str |> List.to_string() |> String.split("-")
# TODO: remove "arm" when we require OTP 24
arch =
if platform == :macos and arch in ["aarch64", "arm"] do
# Using Rosetta2 for M1 until sass/dart-sass runs native
# Link: https://github.com/sass/dart-sass/issues/1125
"amd64"
else
arch
end
case arch do
"amd64" -> "#{platform}-x64.tar.gz"
"x86_64" -> "#{platform}-x64.tar.gz"
_ -> raise "could not download dart_sass for architecture: #{arch_str}"
end
end
defp fetch_body!(url) do
url = String.to_charlist(url)
Logger.debug("Downloading dart-sass from #{url}")
{:ok, _} = Application.ensure_all_started(:inets)
{:ok, _} = Application.ensure_all_started(:ssl)
# https://erlef.github.io/security-wg/secure_coding_and_deployment_hardening/inets
cacertfile = CAStore.file_path() |> String.to_charlist()
http_options = [
autoredirect: false,
ssl: [
verify: :verify_peer,
cacertfile: cacertfile,
depth: 2,
customize_hostname_check: [
match_fun: :public_key.pkix_verify_hostname_match_fun(:https)
]
]
]
case :httpc.request(:get, {url, []}, http_options, []) do
{:ok, {{_, 302, _}, headers, _}} ->
{'location', download} = List.keyfind(headers, 'location', 0)
options = [body_format: :binary]
case :httpc.request(:get, {download, []}, http_options, options) do
{:ok, {{_, 200, _}, _, body}} ->
body
other ->
raise "couldn't fetch #{download}: #{inspect(other)}"
end
other ->
raise "couldn't fetch #{url}: #{inspect(other)}"
end
end
end | lib/dart_sass.ex | 0.742702 | 0.441974 | dart_sass.ex | starcoder |
defmodule Windtrap.Normalizer do
import Windtrap.Varint
@moduledoc """
Turn a WASM binary format stream into a normalized binary stream
in which instruction immediates have a fixed width. This is useful
for execution.
"""
@doc """
This is the function that takes as an input the stream of a function's
body and returns its normalized version, where values aren't
varint-encoded.
## Example
This example normalizes the binary stream `<<0xc, 3>>` which represents
`br 3`; the immediate 3 is normalized to its little-endian 32-bit
representation.
iex> Windtrap.Normalizer.normalize(<<0xc, 3>>)
{<<0xc, 3, 0, 0, 0>>, %{}}
"""
@spec normalize(binary()) :: {binary(), map()}
def normalize(input), do: normalize_helper(input, <<>>, %{}, [])
defp normalize_helper(<<>>, output, refs, _), do: {output, refs}
defp normalize_helper(<<instr, rest :: binary>>, output, refs, stack) when instr in 0x45..0xbf or instr in 0..1 or instr in 0x1a..0x1b or instr == 0x0f do
normalize_helper(rest, output <> <<instr>>, refs, stack)
end
defp normalize_helper(<<instr, rest :: binary>>, output, refs, stack) when instr in 0x20..0x24 or instr in 0x41..0x42 or instr in 0x0c..0x0d or instr == 0x10 do
width = if instr == 0x42, do: 64, else: 32
{val, r} = varint rest
normalize_helper(r, output <> <<instr, val :: integer-little-size(width)>>, refs, stack)
end
defp normalize_helper(<<instr, rest :: binary>>, output, refs, stack) when instr in 0x43..0x44 do
width = if instr == 0x43, do: 32, else: 64
{val, r} = varint rest
normalize_helper(r, output <> <<instr, val :: float-little-size(width)>>, refs, stack)
end
defp normalize_helper(<<instr, oa_rest :: binary>>, output, refs, stack) when instr in 0x28..0x3e do
{offset, a_rest} = varint oa_rest
{align, rest} = varint a_rest
normalize_helper(rest, output <> <<instr, offset :: integer-little-size(32), align :: integer-little-size(32)>>, refs, stack)
end
defp normalize_helper(<<0x11, idx_zero_rest :: binary>>, output, refs, stack) do
{idx, <<0, rest :: binary>>} = varint idx_zero_rest
normalize_helper(rest, output <> <<0x11, idx :: integer-little-size(32), 0>>, refs, stack)
end
defp normalize_helper(<<5, rest :: binary>>, output, refs, [top|stack]) do
# TODO check type == if
newtop = Map.put(top, :elseloc, byte_size(output))
normalize_helper(rest, output <> <<5>>, refs, [newtop|stack])
end
defp normalize_helper(<<0x0b, rest :: binary>>, output, refs, [top|stack]) do
next_pc = byte_size(output)+1
next_ref = Map.put(refs, top.addr, Map.put(top, :endloc, next_pc))
normalize_helper(rest, output <> <<0x0b>>, next_ref, stack)
end
defp normalize_helper(<<0x0b, rest :: binary>>, output, refs, []) do
normalize_helper(rest, output <> <<0x0b>>, refs, [])
end
defp normalize_helper(<<instr, rt, rest :: binary>>, output, refs, stack) when instr in 2..4 and (rt == 0x40 or rt in 0x7c..0x7f) do
startloc = %{type: instr, addr: byte_size(output)}
normalize_helper(rest, output <> <<instr, rt :: integer-little-size(32)>>, refs, [startloc|stack])
end
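# A hedged illustration of block handling: `block (result i32) end`
# (<<2, 0x7f, 0x0b>>) widens the result type to 32 bits and records the end
# location of the block in the refs map:
#
#   Windtrap.Normalizer.normalize(<<2, 0x7f, 0x0b>>)
#   #=> {<<2, 0x7f, 0, 0, 0, 0x0b>>, %{0 => %{type: 2, addr: 0, endloc: 6}}}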
end | lib/windtrap/normalizer.ex | 0.690872 | 0.564369 | normalizer.ex | starcoder |
defmodule Geometry.FeatureCollection do
@moduledoc """
A collection of `Geometry.Feature`s.
`FeatureCollection` implements the protocols `Enumerable` and `Collectable`.
## Examples
iex> Enum.filter(
...> FeatureCollection.new([
...> Feature.new(
...> geometry: Point.new(11, 12),
...> properties: %{"facility" => "Hotel"}
...> ),
...> Feature.new(
...> geometry: Point.new(55, 55),
...> properties: %{"facility" => "Tower"}
...> )
...> ]),
...> fn %Feature{properties: properties} ->
...> Map.get(properties, "facility") == "Hotel"
...> end
...> )
[%Feature{geometry: %Point{coordinate: [11, 12]}, properties: %{"facility" => "Hotel"}}]
iex> Enum.into(
...> [Feature.new(geometry: Point.new(5, 1), properties: %{"area" => 51})],
...> FeatureCollection.new([
...> Feature.new(geometry: Point.new(4, 2), properties: %{"area" => 42})
...> ])
...> )
%FeatureCollection{
features:
MapSet.new([
%Feature{geometry: %Point{coordinate: [4, 2]}, properties: %{"area" => 42}},
%Feature{geometry: %Point{coordinate: [5, 1]}, properties: %{"area" => 51}}
])
}
"""
alias Geometry.{Feature, FeatureCollection, GeoJson}
defstruct features: MapSet.new()
@type t :: %FeatureCollection{
features: MapSet.t(Feature.t())
}
@doc """
Creates an empty `FeatureCollection`.
## Examples
iex> FeatureCollection.new()
%FeatureCollection{}
"""
@spec new :: t()
def new, do: %FeatureCollection{}
@doc """
Creates a `FeatureCollection`.
## Examples
iex> FeatureCollection.new([
...> Feature.new(
...> geometry: Point.new(1, 2),
...> properties: %{facility: :hotel}
...> ),
...> Feature.new(
...> geometry: Point.new(3, 4),
...> properties: %{facility: :school}
...> )
...> ])
%FeatureCollection{features: MapSet.new([
%Feature{
geometry: %Point{coordinate: [1, 2]},
properties: %{facility: :hotel}},
%Feature{
geometry: %Point{coordinate: [3, 4]},
properties: %{facility: :school}}
])}
"""
@spec new([Feature.t()]) :: t()
def new(features), do: %FeatureCollection{features: MapSet.new(features)}
@doc """
Returns `true` for an empty `FeatureCollection`.
## Examples
iex> FeatureCollection.empty?(FeatureCollection.new())
true
"""
@spec empty?(t()) :: boolean()
def empty?(%FeatureCollection{features: features}), do: Enum.empty?(features)
@doc """
Returns an `:ok` tuple with the `FeatureCollection` from the given GeoJSON
term. Otherwise returns an `:error` tuple.
The `:type` option specifies which type is expected. The
possible values are `:z`, `:m`, and `:zm`.
## Examples
iex> ~s({
...> "type": "FeatureCollection",
...> "features": [
...> {
...> "type": "Feature",
...> "geometry": {"type": "Point", "coordinates": [1, 2, 3]},
...> "properties": {"facility": "Hotel"}
...> }, {
...> "type": "Feature",
...> "geometry": {"type": "Point", "coordinates": [4, 3, 2]},
...> "properties": {"facility": "School"}
...> }
...> ]
...> })
iex> |> Jason.decode!()
iex> |> FeatureCollection.from_geo_json(type: :z)
{
:ok,
%FeatureCollection{
features:
MapSet.new([
%Feature{
geometry: %PointZ{coordinate: [1, 2, 3]},
properties: %{"facility" => "Hotel"}
},
%Feature{
geometry: %PointZ{coordinate: [4, 3, 2]},
properties: %{"facility" => "School"}
}
])
}
}
"""
@spec from_geo_json(Geometry.geo_json_term(), opts) :: {:ok, t()} | Geometry.geo_json_error()
when opts: [type: :z | :m | :zm]
def from_geo_json(json, opts \\ []), do: GeoJson.to_feature_collection(json, opts)
@doc """
The same as `from_geo_json/2`, but raises a `Geometry.Error` exception if it
fails.
## Examples
iex> ~s({
...> "type": "FeatureCollection",
...> "features": [
...> {
...> "type": "Feature",
...> "geometry": {"type": "Point", "coordinates": [1, 2, 3]},
...> "properties": {"facility": "Hotel"}
...> }, {
...> "type": "Feature",
...> "geometry": {"type": "Point", "coordinates": [4, 3, 2]},
...> "properties": {"facility": "School"}
...> }
...> ]
...> })
iex> |> Jason.decode!()
iex> |> FeatureCollection.from_geo_json!(type: :m)
%FeatureCollection{
features:
MapSet.new([
%Feature{
geometry: %PointM{coordinate: [1, 2, 3]},
properties: %{"facility" => "Hotel"}
},
%Feature{
geometry: %PointM{coordinate: [4, 3, 2]},
properties: %{"facility" => "School"}
}
])
}
"""
@spec from_geo_json!(Geometry.geo_json_term(), opts) :: t()
when opts: [type: :z | :m | :zm]
def from_geo_json!(json, opts \\ []) do
case GeoJson.to_feature_collection(json, opts) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the GeoJSON term of a `FeatureCollection`.
## Examples
iex> FeatureCollection.to_geo_json(FeatureCollection.new([
...> Feature.new(
...> geometry: Point.new(1, 2),
...> properties: %{facility: :hotel}
...> )
...> ]))
%{
"type" => "FeatureCollection",
"features" => [
%{
"type" => "Feature",
"geometry" => %{"coordinates" => [1, 2], "type" => "Point"},
"properties" => %{facility: :hotel}
}
]
}
"""
@spec to_geo_json(t()) :: Geometry.geo_json_term()
def to_geo_json(%FeatureCollection{features: features}) do
%{
"type" => "FeatureCollection",
"features" => Enum.map(features, &Feature.to_geo_json/1)
}
end
@doc """
Returns the number of elements in `FeatureCollection`.
## Examples
iex> FeatureCollection.size(
...> FeatureCollection.new([
...> Feature.new(geometry: Point.new(11, 12)),
...> Feature.new(geometry:
...> LineString.new([
...> Point.new(21, 22),
...> Point.new(31, 32)
...> ])
...> )
...> ])
...> )
2
"""
@spec size(t()) :: non_neg_integer()
def size(%FeatureCollection{features: features}), do: MapSet.size(features)
@doc """
Checks if `FeatureCollection` contains `geometry`.
## Examples
iex> FeatureCollection.member?(
...> FeatureCollection.new([
...> Feature.new(geometry: Point.new(11, 12)),
...> Feature.new(geometry:
...> LineString.new([
...> Point.new(21, 22),
...> Point.new(31, 32)
...> ])
...> )
...> ]),
...> Feature.new(geometry: Point.new(11, 12))
...> )
true
iex> FeatureCollection.member?(
...> FeatureCollection.new([
...> Feature.new(geometry: Point.new(11, 12)),
...> Feature.new(geometry:
...> LineString.new([
...> Point.new(21, 22),
...> Point.new(31, 32)
...> ])
...> )
...> ]),
...> Feature.new(geometry: Point.new(1, 2))
...> )
false
"""
@spec member?(t(), Geometry.t()) :: boolean()
def member?(%FeatureCollection{features: features}, geometry),
do: MapSet.member?(features, geometry)
@doc """
Converts `FeatureCollection` to a list.
## Examples
iex> FeatureCollection.to_list(
...> FeatureCollection.new([
...> Feature.new(geometry: Point.new(11, 12))
...> ])
...> )
[%Feature{geometry: %Point{coordinate: [11, 12]}, properties: %{}}]
"""
@spec to_list(t()) :: [Geometry.t()]
def to_list(%FeatureCollection{features: features}), do: MapSet.to_list(features)
defimpl Enumerable do
# credo:disable-for-next-line Credo.Check.Readability.Specs
def count(geometry_collection) do
{:ok, FeatureCollection.size(geometry_collection)}
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def member?(geometry_collection, val) do
{:ok, FeatureCollection.member?(geometry_collection, val)}
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def slice(geometry_collection) do
size = FeatureCollection.size(geometry_collection)
{:ok, size,
&Enumerable.List.slice(FeatureCollection.to_list(geometry_collection), &1, &2, size)}
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def reduce(geometry_collection, acc, fun) do
Enumerable.List.reduce(FeatureCollection.to_list(geometry_collection), acc, fun)
end
end
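# Note: the Collectable implementation below reaches into the internal
# `:map` field of the MapSet struct to merge new entries in a single pass.
# This is an implementation detail of MapSet and could break across Elixir
# versions; a safer (if slower) alternative would be MapSet.union/2.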
defimpl Collectable do
# credo:disable-for-next-line Credo.Check.Readability.Specs
def into(%FeatureCollection{features: features}) do
fun = fn
list, {:cont, x} ->
[{x, []} | list]
list, :done ->
%FeatureCollection{
features: %{features | map: Map.merge(features.map, Map.new(list))}
}
_list, :halt ->
:ok
end
{[], fun}
end
end
end | lib/geometry/feature_collection.ex | 0.949553 | 0.637595 | feature_collection.ex | starcoder |
defmodule TicTacToe.Strategy.FourByFour do
@moduledoc false
alias TicTacToe.Strategy.Minimax
alias TicTacToe.Board
@scale 4
@center_tiles [6, 7, 10, 11]
@doc """
AI strategy for 4x4 game
"""
def strategy(board, ai_player) do
moves_left = Board.possible_moves(board) |> length()
cond do
moves_left == 16 -> List.first(@center_tiles)
moves_left == 15 -> take_center(board)
moves_left > 8 -> aggressively_block(board, ai_player)
true -> Minimax.best_move(board, ai_player)
end
end
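# Decision sketch: on an empty 16-tile board claim the first center tile; on
# the reply take another free center; while more than 8 moves remain, block
# the opponent's threats cheaply; after that the game tree is small enough
# for full minimax.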
@doc """
Takes a center tile not already taken by a player
"""
def take_center(board) do
@center_tiles |> Enum.find(fn tile -> Board.empty_at?(board, tile) end)
end
@doc """
Returns a blocking move against the opponent's most threatening or most
promising move, falling back to extending the AI's own best move.
"""
def aggressively_block(board, ai_player) do
counter_dangerous = block_dangerous_move(board, ai_player)
counter_promising = block_promising_move(board, ai_player)
additional_move = extend_existing_move(board, ai_player)
cond do
counter_dangerous != :no_move -> counter_dangerous
counter_promising != :no_move -> counter_promising
true -> additional_move
end
end
@doc """
Returns a move extending the AI's most promising move
"""
def extend_existing_move(board, ai_player) do
case find_best_existing_move(board, ai_player) do
:no_move -> :no_move
mvs -> first_matching(mvs)
end
end
def find_best_existing_move(board, ai_player) do
match_winning_states(board, ai_player)
|> Enum.filter(fn {st, _} -> unnocupied_win_state?(st, board, ai_player) end)
|> existing_move_by_length()
end
def existing_move_by_length([]), do: :no_move
def existing_move_by_length(xs), do: Enum.max_by(xs, fn {_, mv} -> length(mv) end)
def unnocupied_win_state?(st, board, ai_player) do
oponent = Board.swap_player(ai_player)
not moves_taken?(st, board, oponent)
end
@doc """
Returns a blocking move to the opponent's most threatening move.
"""
def block_dangerous_move(board, ai_player) do
case find_dangerous_oponent_move(board, ai_player) do
:no_move -> :no_move
mvs -> first_matching(mvs)
end
end
def find_dangerous_oponent_move(board, ai_player) do
oponent = Board.swap_player(ai_player)
match_winning_states(board, oponent)
|> Enum.find(fn {st, mvs} -> dangerous_move?(st, mvs, board, ai_player) end)
|> format_move_result()
end
def match_winning_states(board, player) do
moves = board |> Board.moves(player)
Board.winning_states(@scale)
|> Enum.map(fn st -> {st, Board.match_winning_moves(st, moves)} end)
end
def dangerous_move?(st, mvs, board, ai_player) do
length(mvs) == 3 and not moves_taken?(st, board, ai_player)
end
@doc """
Returns a blocking move to the opponent's potential winning move.
"""
def block_promising_move(board, ai_player) do
case find_promising_oponent_move(board, ai_player) do
:no_move -> :no_move
mvs -> first_matching(mvs)
end
end
def find_promising_oponent_move(board, ai_player) do
oponent = Board.swap_player(ai_player)
match_winning_states(board, oponent)
|> Enum.find(fn {st, mvs} -> promising_oponent_move?(st, mvs, board, ai_player) end)
|> format_move_result()
end
def promising_oponent_move?(st, mvs, board, ai_player) do
length(mvs) > 1 and not moves_taken?(st, board, ai_player)
end
def format_move_result(nil), do: :no_move
def format_move_result({st, mv}), do: %{win_state: st, move: mv}
def first_matching({st, mv}), do: first_matching_(st, mv)
def first_matching(%{win_state: st, move: mv}), do: first_matching_(st, mv)
defp first_matching_(st, mv) do
st
|> Enum.filter(fn x -> not Enum.member?(mv, x) end)
|> List.first()
end
@doc """
Checks for a player's presence in a given winning state
"""
def moves_taken?(win_state, board, ai_player) do
win_state
|> Enum.map(fn move -> board.tiles[move] end)
|> Enum.any?(fn x -> x == Board.tile_symbol(board, ai_player) end)
end
end | lib/tic_tac_toe/strategy/four_by_four.ex | 0.846101 | 0.547887 | four_by_four.ex | starcoder |
defmodule Sanity.Cache do
@doc false
defmacro __using__([]) do
quote do
import Sanity.Cache, only: [defq: 2]
Module.register_attribute(__MODULE__, :sanity_cache_update_opts, accumulate: true)
@before_compile Sanity.Cache
end
end
@doc false
defmacro __before_compile__(_env) do
quote do
def child_spec(_) do
%{
id: __MODULE__,
start: {Sanity.Cache.Poller, :start_link, [@sanity_cache_update_opts]}
}
end
def update_all(opts \\ []) do
Enum.each(@sanity_cache_update_opts, fn update_opts ->
Map.merge(Map.new(opts), Map.new(update_opts))
|> Map.to_list()
|> Sanity.Cache.update()
end)
end
end
end
@common_opts_validation [
config_key: [
type: :atom,
default: :default
],
projection: [
type: :string,
required: true
]
]
@fetch_opts_validation Keyword.merge(@common_opts_validation,
fetch_query: [
type: :string,
required: true
]
)
@list_query_validation [
type: :string,
required: true
]
@fetch_pairs_opts_validation Keyword.merge(@common_opts_validation,
list_query: @list_query_validation,
keys: [
type: {:list, :atom},
required: true
]
)
@defq_opts_validation Keyword.merge(@common_opts_validation,
list_query: @list_query_validation,
lookup: [
type: :keyword_list,
required: true
]
)
@doc """
Defines a Sanity query.
## Options
#{NimbleOptions.docs(@defq_opts_validation)}
"""
defmacro defq(name, opts) when is_atom(name) do
Enum.map(Keyword.fetch!(opts, :lookup), fn {lookup_name, lookup_opts} ->
table = :"#{name}_by_#{lookup_name}"
fetch_pairs = :"fetch_#{table}_pairs"
quote do
NimbleOptions.validate!(unquote(opts), unquote(@defq_opts_validation))
Module.put_attribute(__MODULE__, :sanity_cache_update_opts,
fetch_pairs_mfa: {__MODULE__, unquote(fetch_pairs), []},
table: unquote(table)
)
def unquote(fetch_pairs)() do
opts =
Keyword.take(unquote(opts), Keyword.keys(unquote(@fetch_pairs_opts_validation)))
|> Keyword.put(:keys, Keyword.fetch!(unquote(lookup_opts), :keys))
Sanity.Cache.fetch_pairs(opts)
end
def unquote(:"get_#{table}")(key) do
opts =
Keyword.take(unquote(opts), Keyword.keys(unquote(@fetch_opts_validation)))
|> Keyword.put(:fetch_query, Keyword.fetch!(unquote(lookup_opts), :fetch_query))
Sanity.Cache.get(unquote(table), key, opts)
end
def unquote(:"get_#{table}!")(key) do
opts =
Keyword.take(unquote(opts), Keyword.keys(unquote(@fetch_opts_validation)))
|> Keyword.put(:fetch_query, Keyword.fetch!(unquote(lookup_opts), :fetch_query))
Sanity.Cache.get!(unquote(table), key, opts)
end
end
end)
end
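# A hedged usage sketch (the :page type, queries, and keys are illustrative):
#
#   defq :page,
#     projection: "{...}",
#     list_query: ~S|*[_type == "page"]|,
#     lookup: [
#       slug: [
#         keys: [:slug, :current],
#         fetch_query: ~S|*[_type == "page" && slug.current == $key]|
#       ]
#     ]
#
# This would define get_page_by_slug/1, get_page_by_slug!/1 and
# fetch_page_by_slug_pairs/0, and register the page_by_slug table for
# update_all/1 and the poller.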
defmodule NotFoundError do
defexception [:message]
defimpl Plug.Exception do
def status(_), do: :not_found
def actions(_), do: []
end
end
require Logger
alias Sanity.Cache.CacheServer
@doc """
Gets a single document using cache. If the cache table doesn't exist then `fetch/2` will be
called. Returns `{:ok, value}` or `{:error, :not_found}`.
"""
def get(table, key, opts) when is_atom(table) do
case CacheServer.fetch(table, key) do
{:error, :no_table} ->
fetch(key, opts)
{:error, :not_found} ->
{:error, :not_found}
{:ok, result} ->
{:ok, result}
end
end
@doc """
Like `get/3` except raises if not found.
"""
def get!(table, key, opts) do
case get(table, key, opts) do
{:ok, value} -> value
{:error, :not_found} -> raise NotFoundError, "can't find document with key #{inspect(key)}"
end
end
@doc """
Fetches a single document by making a request to the Sanity CMS API. The cache is not used.
## Options
#{NimbleOptions.docs(@fetch_opts_validation)}
"""
def fetch(key, opts) do
opts = NimbleOptions.validate!(opts, @fetch_opts_validation)
config_key = Keyword.fetch!(opts, :config_key)
fetch_query = Keyword.fetch!(opts, :fetch_query)
projection = Keyword.fetch!(opts, :projection)
sanity = Application.get_env(:sanity_cache, :sanity_client, Sanity)
Enum.join([fetch_query, projection], " | ")
|> Sanity.query(%{key: key})
|> sanity.request!(Application.fetch_env!(:sanity_cache, config_key))
|> Sanity.result!()
|> Sanity.atomize_and_underscore()
|> case do
[doc] -> {:ok, doc}
[] -> {:error, :not_found}
end
end
@doc """
Fetches list of key/value pairs.
## Options
#{NimbleOptions.docs(@fetch_pairs_opts_validation)}
"""
def fetch_pairs(opts) do
opts = NimbleOptions.validate!(opts, @fetch_pairs_opts_validation)
config_key = Keyword.fetch!(opts, :config_key)
list_query = Keyword.fetch!(opts, :list_query)
keys = Keyword.fetch!(opts, :keys)
projection = Keyword.fetch!(opts, :projection)
sanity = Application.get_env(:sanity_cache, :sanity_client, Sanity)
sanity_config =
Application.fetch_env!(:sanity_cache, config_key)
|> Keyword.put_new(:http_options, receive_timeout: 45_000)
Enum.join([list_query, projection], " | ")
|> Sanity.query()
|> sanity.request!(sanity_config)
|> Sanity.result!()
|> Sanity.atomize_and_underscore()
|> Enum.map(&{get_in(&1, keys), &1})
end
@update_opts_validation [
fetch_pairs_mfa: [
type: :mfa,
required: true
],
table: [
type: :atom,
required: true
],
update_remote_nodes: [
type: :boolean,
default: false
]
]
@doc """
Updates a cache table.
## Options
#{NimbleOptions.docs(@update_opts_validation)}
"""
def update(opts) do
opts = NimbleOptions.validate!(opts, @update_opts_validation)
update_remote_nodes = Keyword.fetch!(opts, :update_remote_nodes)
{module, function_name, args} = Keyword.fetch!(opts, :fetch_pairs_mfa)
table = Keyword.fetch!(opts, :table)
pairs = apply(module, function_name, args)
if update_remote_nodes do
Enum.each(Node.list(), fn node ->
Logger.info("updating #{table} on remote node #{inspect(node)}")
CacheServer.cast_put_table({CacheServer, node}, table, pairs)
end)
end
Logger.info("updating #{table} on local node")
CacheServer.put_table(table, pairs)
end
end | lib/sanity/cache.ex | 0.626124 | 0.409516 | cache.ex | starcoder |
defmodule KafkaGenStage.Consumer do
@moduledoc """
Producer GenStage for reading from a Kafka topic, using [Klarna's Brod](https://github.com/klarna/brod).
> Note that it is a **consumer** from Kafka's perspective, but a **producer** from GenStage's.
## Messages
Events emited are in 4-tuple format
@type msg_tuple :: {offset :: non_neg_integer(), timestamp :: non_neg_integer(),
key :: binary(), value :: binary()}
## Options
When starting, several options can modify behaviour of GenStage.
* `:begin_offset` - where to start reading the topic, defaults to `:earliest`,
or provide an exact offset number (inclusive)
* `:read_end_offset` - when to stop reading; also stops the gen_stage and sends a cancel to subscribers.
Possible values:
* an exact integer of the last offset to be read (inclusive)
* `:latest` - will check the offset of the last message at the time the GenStage is initializing
* `:infinity` - does not stop reading by offset, **default**
* `:stats_handler` - a function called every second with some statistics; its type is
`(%{count: non_neg_integer(), cursor: non_neg_integer()}, topic() -> :ok)`. You can use this
function for monitoring throughput, etc.
* `:stats_handler_interval` - if you don't want the stats_handler to be called every second,
provide the desired interval (milliseconds)
* `:bulk_transformer` - optionally, a bulk of events can be statelessly transformed before being sent downstream.
The function signature must be:
([msg_tuple], is_end_of_stream) :: [msg_tuple]
* `:gen_stage_producer_options` - refers to options passed to the underlying GenStage, useful for
accumulating demand when partition dispatcher is used (`[demand: :accumulate]`).
* `:partition` - one gen_stage reads from a single partition, 0 by default
## Starting and stopping brod client
The Brod client should be either already started (provided as an atom or pid) or provided as an
initializing function.
Closing the brod client is out of scope of this gen_stage. If you want the client to be started
exclusively for this gen_stage, do it via the initializing function and manage its lifecycle on
your own, a simple example being:
fn -> :brod.start_link_client([{'localhost', 9092}] = _endpoints) end
"""
use GenStage
@partition 0
@default_interval 1000
alias KafkaGenStage.ConsumerLogic, as: Logic
alias KafkaGenStage.Utils
require Record
require Logger
import Record, only: [defrecord: 2, extract: 2]
defrecord :kafka_message, extract(:kafka_message, from_lib: "brod/include/brod.hrl")
defrecord :kafka_message_set, extract(:kafka_message_set, from_lib: "brod/include/brod.hrl")
@typedoc "Last offset (inclusive) to be emmited by GenStage."
@type end_offset :: Logic.end_offset()
@typedoc "Starting option(see @moduledoc) which defines when to stop reading."
@type read_end_offset :: :latest | end_offset()
@typedoc "Format of read messages."
@type msg_tuple :: Logic.msg_tuple()
@typedoc "Runtime stats of consumer. Count of messeges received in last second."
@type stats :: %{count: non_neg_integer(), cursor: non_neg_integer()}
@typedoc "Kafka topic identifier."
@type topic :: KafkaGenStage.topic()
@typedoc "Function consuming runtime stats -> to sent to StatsD or so."
@type stats_handler :: (stats(), topic() -> :ok)
@typedoc "Function transforming message bulks before sending them downstream."
@type bulk_transformer :: ([msg_tuple], boolean() -> [msg_tuple])
@typedoc "Brod's type for where to start reading in kafka topic."
@type begin_offset :: KafkaGenStage.begin_offset()
@typedoc "Brod client passing or initialization."
@type brod_client_init :: atom() | pid() | (() -> {:ok, atom() | pid()})
@typedoc """
All the startup options to configure the GenStage. See @moduledoc.
"""
@type option ::
{:begin_offset, begin_offset()}
| {:read_end_offset, read_end_offset()}
| {:gen_stage_producer_options, [GenStage.producer_option()]}
| {:partition, integer()}
| {:stats_handler, stats_handler}
| {:bulk_transformer, bulk_transformer}
| {:stats_handler_interval, pos_integer()}
@typedoc "List of startup options."
@type options :: [option()]
defmodule State do
@moduledoc false
defstruct [
:topic,
:partition,
:brod_client,
:brod_client_mref,
:consumer,
:consumer_ref,
:queue,
:is_end_of_stream,
:end_of_stream_offset_queue,
:reading,
:demand,
:stats,
:end_offset,
:stats_handler,
:stats_handler_interval,
:bulk_transformer
]
end
@typedoc "Just documentation purposes of internal state typespec."
@type state :: %State{
topic: topic(),
partition: non_neg_integer(),
brod_client: atom() | pid(),
brod_client_mref: reference(),
consumer: pid(),
consumer_ref: reference(),
queue: :queue.queue(),
is_end_of_stream: boolean(),
end_of_stream_offset_queue: :queue.queue(),
reading: boolean(),
demand: non_neg_integer(),
stats: stats(),
end_offset: end_offset(),
stats_handler: stats_handler(),
stats_handler_interval: pos_integer(),
bulk_transformer: bulk_transformer()
}
@doc """
Start linked Consumer GenStage of topic (with underlying brod consumer).
See option type documentation for possible options.
"""
@spec start_link(brod_client_init(), topic(), options(), GenServer.options()) ::
GenServer.on_start()
def start_link(brod_client_init, topic, options \\ [], gen_server_options \\ []) do
GenStage.start_link(__MODULE__, {brod_client_init, topic, options}, gen_server_options)
end
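# A hedged usage sketch (endpoints and topic name are illustrative):
#
#   client_init = fn -> :brod.start_link_client([{'localhost', 9092}]) end
#
#   {:ok, stage} =
#     KafkaGenStage.Consumer.start_link(client_init, "my_topic",
#       begin_offset: :earliest,
#       read_end_offset: :latest
#     )
#
#   GenStage.stream([{stage, max_demand: 500}]) |> Enum.take(10)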
@doc """
Return some running metadata such as current offset position in topic.
"""
@spec get_insight(server :: term()) ::
{:ok, %{offset_cursor: non_neg_integer(), topic: topic()}}
def get_insight(reader) do
GenStage.call(reader, :get_insight)
end
@impl true
def init({brod_client_init, topic, options}) do
# default options
partition = options[:partition] || @partition
begin_offset = options[:begin_offset] || :earliest
gen_stage_producer_options = options[:gen_stage_producer_options]
read_end_offset = options[:read_end_offset] || :infinity
stats_handler = options[:stats_handler] || (&Utils.log_stats/2)
stats_handler_interval = options[:stats_handler_interval] || @default_interval
bulk_transformer = options[:bulk_transformer] || nil
with {:ok, client} <- Utils.resolve_client(brod_client_init),
:ok <- :brod_utils.assert_client(client),
:ok <- :brod_utils.assert_topic(topic),
{:ok, latest_offset} = Utils.resolve_offset(client, topic, partition, :latest),
{:ok, earliest_offset} = Utils.resolve_offset(client, topic, partition, :earliest),
:ok <- :brod.start_consumer(client, topic, begin_offset: begin_offset) do
GenStage.async_info(self(), :subscribe_consumer)
Process.send_after(self(), :time_to_report_stats, stats_handler_interval)
state = %State{
brod_client: client,
topic: topic,
partition: partition,
brod_client_mref: Process.monitor(client),
queue: :queue.new(),
is_end_of_stream: begin_is_end_of_stream(earliest_offset, latest_offset, begin_offset),
end_of_stream_offset_queue: :queue.new(),
demand: 0,
stats: %{count: 0, cursor: 0},
reading: true,
stats_handler: stats_handler,
stats_handler_interval: stats_handler_interval,
end_offset: resolve_end_offset(latest_offset, read_end_offset),
bulk_transformer: bulk_transformer
}
case gen_stage_producer_options do
nil -> {:producer, state}
_ -> {:producer, state, gen_stage_producer_options}
end
else
err -> {:stop, err}
end
end
@impl true
def handle_demand(
new_demand,
%State{
queue: queue,
demand: pending_demand,
consumer: consumer_pid,
bulk_transformer: bulk_transformer,
is_end_of_stream: is_end_of_stream,
end_of_stream_offset_queue: eos_queue
} = state
) do
{to_send, to_ack, demand, queue, eos_queue} =
Logic.prepare_dispatch(
queue,
new_demand + pending_demand,
bulk_transformer,
is_end_of_stream,
eos_queue
)
ack(consumer_pid, to_ack)
unless state.reading,
do: GenStage.async_info(self(), :reading_end)
{:noreply, to_send,
%State{
state
| queue: queue,
demand: demand,
stats: update_stats(state.stats, to_send, to_ack),
end_of_stream_offset_queue: eos_queue
}}
end
@impl true
def handle_call(:get_insight, _from, %State{stats: stats, topic: topic} = state) do
{:reply, {:ok, %{offset_cursor: stats.cursor, topic: topic}}, [], state}
end
@impl true
def handle_info(:subscribe_consumer, %State{} = state) do
case :brod.subscribe(state.brod_client, self(), state.topic, state.partition, []) do
{:ok, consumer_pid} ->
ref = Process.monitor(consumer_pid)
{:noreply, [], %State{state | consumer: consumer_pid, consumer_ref: ref}}
{:error, reason} ->
{:stop, reason, state}
end
end
def handle_info(
{consumer_pid,
kafka_message_set(topic: topic, messages: messages, high_wm_offset: high_offset)},
%State{
consumer: consumer_pid,
topic: topic,
queue: queue,
demand: demand,
end_offset: end_offset,
bulk_transformer: bulk_transformer,
end_of_stream_offset_queue: eos_queue
} = state
) do
kafka_message(offset: last_offset) = List.last(messages)
is_end_of_stream = last_offset >= min(high_offset - 1, end_offset)
eos_queue =
if is_end_of_stream do
:queue.in(last_offset, eos_queue)
else
eos_queue
end
queue = Logic.enqueue(queue, messages |> Stream.map(&kafka_msg_record_to_tuple/1), end_offset)
{to_send, to_ack, demand, queue, eos_queue} =
Logic.prepare_dispatch(
queue,
demand,
bulk_transformer,
is_end_of_stream,
eos_queue
)
:ok = ack(consumer_pid, to_ack)
if last_offset >= end_offset,
do: GenStage.async_info(self(), :reading_end)
{:noreply, to_send,
%State{
state
| demand: demand,
queue: queue,
stats: update_stats(state.stats, to_send, to_ack),
is_end_of_stream: is_end_of_stream,
end_of_stream_offset_queue: eos_queue
}}
end
def handle_info({:DOWN, ref, :process, _object, reason}, %State{brod_client_mref: ref} = state) do
Logger.warn("Brod client #{inspect(state.brod_client)} DOWN, stopping consumer gen_stage.")
{:stop, reason, state}
end
def handle_info({:DOWN, ref, :process, _object, reason}, %State{consumer_ref: ref} = state) do
Logger.warn(
"Brod consumer of #{inspect(state.brod_client)} for #{state.config.topic} DOWN, stopping consumer gen_stage"
)
{:stop, reason, state}
end
def handle_info(
:time_to_report_stats,
%State{stats: stats, topic: topic, stats_handler: stats_handler} = state
) do
stats_handler.(stats, topic)
Process.send_after(self(), :time_to_report_stats, state.stats_handler_interval)
{:noreply, [], %State{state | stats: %{stats | count: 0}}}
end
def handle_info(:reading_end, %State{} = state) do
if :queue.is_empty(state.queue) do
{:stop, :normal, state}
else
{:noreply, [], %State{state | reading: false}}
end
end
@impl true
def terminate(_reason, %State{consumer: pid}) do
:brod_consumer.stop(pid)
end
# the topic is empty
defp begin_is_end_of_stream(0, 0, _), do: true
# reading from the last message, automatically is at the end of stream
defp begin_is_end_of_stream(_earliest, _latest, :latest), do: true
# reading from the first message, is at the end of stream only if the first message is also the last
defp begin_is_end_of_stream(earliest, latest, :earliest), do: latest == earliest
# reading from the middle
defp begin_is_end_of_stream(_earliest, latest, begin) when is_integer(begin), do: latest <= begin
defp resolve_end_offset(latest, :latest), do: latest - 1
defp resolve_end_offset(_latest, :infinity), do: :infinity
defp resolve_end_offset(_latest, read_end) when is_integer(read_end), do: read_end
defp update_stats(%{count: count, cursor: cursor} = stats, to_send, to_ack) do
%{stats | count: count + length(to_send), cursor: update_cursor(to_ack, cursor)}
end
defp update_cursor(to_ack, current) do
case to_ack do
:no_ack = _nothing_to_send ->
current
ack ->
ack
end
end
defp kafka_msg_record_to_tuple(kafka_msg) do
kafka_message(value: value, offset: offset, key: key, ts: ts) = kafka_msg
{offset, ts, key, value}
end
defp ack(_pid, :no_ack), do: :ok
defp ack(pid, offset) when is_integer(offset), do: :brod.consume_ack(pid, offset)
end | lib/kafka_gen_stage/consumer.ex | 0.921601 | 0.516291 | consumer.ex | starcoder |
defmodule AOC.Day5Bad do
@moduledoc """
Solution to Day 4 of the Advent of code 2021
https://adventofcode.com/2021/day/4
"""
@doc """
Reads the input file.
Returns the data as a list of line segments, each of the form `[[x1, y1], [x2, y2]]`.
"""
@spec get_inputs(Path.t()) :: [[[integer()]]]
def get_inputs(f \\ "lib/inputs/day5.txt") do
File.read!(f)
|> String.trim()
|> String.split("\n")
|> Enum.map(fn s ->
String.split(s, "->")
|> Enum.map(&String.trim/1)
|> Enum.map(fn s -> String.split(s, ",") |> Enum.map(&String.to_integer/1) end)
end)
end
def filter_for_straight_lines(segments \\ get_inputs()) do
segments |> Enum.filter(fn [[sx, sy], [ex, ey]] -> sx == ex || sy == ey end)
end
# both lines horizontal, only intersect if ys are same
def find_intersections([[sp1x, y], [ep1x, y]], [[sp2x, y], [ep2x, y]]) do
[s1, e1] = Enum.sort([sp1x, ep1x])
[s2, e2] = Enum.sort([sp2x, ep2x])
cond do
s2 < s1 and e1 < e2 -> [[s1, y], [e1, y]]
s1 < s2 and e2 < e1 -> [[s2, y], [e2, y]]
s1 < s2 and s2 <= e1 and e1 < e2 -> [[s2, y], [e1, y]]
s2 < s1 and s1 <= e2 and e2 < e1 -> [[s1, y], [e2, y]]
true -> nil
end
end
# both lines vertical, only intersect if xs are same
def find_intersections([[x, sp1y], [x, ep1y]], [[x, sp2y], [x, ep2y]]) do
[s1, e1] = Enum.sort([sp1y, ep1y])
[s2, e2] = Enum.sort([sp2y, ep2y])
cond do
s2 < s1 and e1 < e2 -> [[x, s1], [x, e1]]
s1 < s2 and e2 < e1 -> [[x, s2], [x, e2]]
s1 < s2 and s2 <= e1 and e1 < e2 -> [[x, s2], [x, e1]]
s2 < s1 and s1 <= e2 and e2 < e1 -> [[x, s1], [x, e2]]
true -> nil
end
end
# first line vertical, second horizontal
def find_intersections([[x1, sp1y], [x1, ep1y]], [[sp2x, y2], [ep2x, y2]]) do
[sy, ey] = Enum.sort([sp1y, ep1y])
[sx, ex] = Enum.sort([sp2x, ep2x])
if sy <= y2 and y2 <= ey and sx <= x1 and x1 <= ex, do: [[x1, y2], [x1, y2]]
end
def find_intersections(_p1, _p2), do: nil
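# Illustration: a vertical and a horizontal segment crossing at a point
# collapse to a degenerate one-point segment:
#
#   find_intersections([[2, 0], [2, 5]], [[0, 3], [4, 3]])
#   #=> [[2, 3], [2, 3]]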
def find_all_intersections(lines) do
for l1 <- lines do
for l2 <- lines, l1 != l2, do: find_intersections(l1, l2)
end
end
def find_length([[x, y], [x, y]]) do
1
end
def find_length([[x1, y], [x2, y]]) do
1 + abs(x2 - x1)
end
def find_length([[x, y1], [x, y2]]) do
1 + abs(y2 - y1)
end
def run() do
"lib/inputs/day5.txt"
|> get_inputs
|> filter_for_straight_lines
|> find_all_intersections
|> Stream.map(fn l -> Enum.filter(l, &(&1 != nil)) end)
|> Stream.filter(&(length(&1) > 0))
|> Stream.flat_map(& &1)
|> Stream.uniq()
|> Enum.reduce(0, fn x, acc -> acc + find_length(x) end)
end
end | elixir/advent_of_code/lib/2021/day5_bad.ex | 0.714827 | 0.69233 | day5_bad.ex | starcoder |
defmodule Reprise.Runner do
require Logger
@moduledoc """
Module discovery and reloading.
The main entry point is `go/2` function.
"""
@type time :: :calendar.datetime
@type path :: [String.t]
@type beam :: String.t
@spec load_path(Regex.t) :: path
def load_path(pattern \\ ~r[/_build/]) do
for d <- :code.get_path,
d = Path.expand(d),
Regex.match?(pattern, d),
do: d
end
@spec iterate_beams(path) :: [beam]
def iterate_beams(load_path) do
for d <- load_path, File.dir?(d) do
for f <- File.ls!(d), Path.extname(f)==".beam", do: Path.join(d,f)
end
|> List.flatten
end
@doc """
Returns all beam files belonging to the build of current project.
"""
@spec beams() :: [beam]
def beams(), do: load_path() |> iterate_beams()
@doc """
Returns pairs of beam files and modules which are loaded
and belong to the build of the current project.
"""
@spec beam_modules([beam]) :: [{beam, module}]
def beam_modules(beams \\ __MODULE__.beams) do
beamset = beams |> Enum.into(MapSet.new)
for {m,f} <- :code.all_loaded,
is_list(f),
f = Path.expand(f),
MapSet.member?(beamset, f),
do: {f,m}
end
@doc "Reloads a single module."
@spec reload(module) :: {:module, module} | {:error, atom}
def reload(module) do
:code.purge(module)
:code.load_file(module)
end
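# Hedged example: force-reload a single module from iex (the module name is
# illustrative):
#
#   Reprise.Runner.reload(MyApp.SomeModule)
#   #=> {:module, MyApp.SomeModule}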
@doc """
Attempts to reload all modules which belong to the current mix project
and have changed between given time frames.
If there were any and they reloaded successfully, prints a summary
via `Logger.info/1`.
Modules whose reload errored are silently ignored.
Returns `:ok` if there were any modules successfully reloaded
or `nil` when there were none.
"""
@spec go(time, time) :: :ok | nil
def go(from, to) do
modules = for {f,m} <- beam_modules() do
case File.stat(f, time: :local) do
{:ok, %File.Stat{mtime: mtime}} when mtime >= from and mtime < to ->
case reload(m) do
{:module, m} -> {:reloaded, m}
{:error, _} -> {:load_error, m}
end
{:ok, _} -> {:unmodified, m}
{:error, _} -> {:fstat_error, m}
end
end
reloaded = for {:reloaded, m} <- modules, do: m
unless reloaded == [], do:
Logger.info("Reloaded modules: #{inspect reloaded}")
end
end | lib/runner.ex | 0.86148 | 0.417954 | runner.ex | starcoder |
defmodule Sanbase.Clickhouse.Exchanges.MarketDepth do
use Ecto.Schema
@exchanges ["Binance", "Bitfinex", "Kraken", "Poloniex", "Bitrex"]
alias Sanbase.ClickhouseRepo
@table "exchange_market_depth"
schema @table do
field(:timestamp, :utc_datetime)
field(:source, :string)
field(:symbol, :string)
field(:ask, :float)
field(:asks_0_25_percent_depth, :float)
field(:asks_0_25_percent_volume, :float)
field(:asks_0_5_percent_depth, :float)
field(:asks_0_5_percent_volume, :float)
field(:asks_0_75_percent_depth, :float)
field(:asks_0_75_percent_volume, :float)
field(:asks_10_percent_depth, :float)
field(:asks_10_percent_volume, :float)
field(:asks_1_percent_depth, :float)
field(:asks_1_percent_volume, :float)
field(:asks_20_percent_depth, :float)
field(:asks_20_percent_volume, :float)
field(:asks_2_percent_depth, :float)
field(:asks_2_percent_volume, :float)
field(:asks_30_percent_depth, :float)
field(:asks_30_percent_volume, :float)
field(:asks_5_percent_depth, :float)
field(:asks_5_percent_volume, :float)
field(:bid, :float)
field(:bids_0_25_percent_depth, :float)
field(:bids_0_25_percent_volume, :float)
field(:bids_0_5_percent_depth, :float)
field(:bids_0_5_percent_volume, :float)
field(:bids_0_75_percent_depth, :float)
field(:bids_0_75_percent_volume, :float)
field(:bids_10_percent_depth, :float)
field(:bids_10_percent_volume, :float)
field(:bids_1_percent_depth, :float)
field(:bids_1_percent_volume, :float)
field(:bids_20_percent_depth, :float)
field(:bids_20_percent_volume, :float)
field(:bids_2_percent_depth, :float)
field(:bids_2_percent_volume, :float)
field(:bids_30_percent_depth, :float)
field(:bids_30_percent_volume, :float)
field(:bids_5_percent_depth, :float)
field(:bids_5_percent_volume, :float)
end
@doc false
@spec changeset(any(), any()) :: no_return()
  def changeset(_, _),
    do: raise("Should not try to change exchange market depth")
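  @doc """
  Returns the last `limit` market depth snapshots for the given `exchange`
  and `ticker_pair`, newest first.

  A sketch of typical usage (the ticker pair format here is illustrative):

      last_exchange_market_depth("Kraken", "ZEC/BTC", 10)
  """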
def last_exchange_market_depth(exchange, ticker_pair, limit) when exchange in @exchanges do
{query, args} = last_exchange_market_depth_query(exchange, ticker_pair, limit)
ClickhouseRepo.query_transform(query, args, fn
[
timestamp,
source,
symbol,
ask,
asks025_percent_depth,
asks025_percent_volume,
asks05_percent_depth,
asks05_percent_volume,
asks075_percent_depth,
asks075_percent_volume,
asks10_percent_depth,
asks10_percent_volume,
asks1_percent_depth,
asks1_percent_volume,
asks20_percent_depth,
asks20_percent_volume,
asks2_percent_depth,
asks2_percent_volume,
asks30_percent_depth,
asks30_percent_volume,
asks5_percent_depth,
asks5_percent_volume,
bid,
bids025_percent_depth,
bids025_percent_volume,
bids05_percent_depth,
bids05_percent_volume,
bids075_percent_depth,
bids075_percent_volume,
bids10_percent_depth,
bids10_percent_volume,
bids1_percent_depth,
bids1_percent_volume,
bids20_percent_depth,
bids20_percent_volume,
bids2_percent_depth,
bids2_percent_volume,
bids30_percent_depth,
bids30_percent_volume,
bids5_percent_depth,
bids5_percent_volume
] ->
%{
datetime: timestamp |> DateTime.from_unix!(),
exchange: source,
ticker_pair: symbol,
ask: ask,
asks025_percent_depth: asks025_percent_depth,
asks025_percent_volume: asks025_percent_volume,
asks05_percent_depth: asks05_percent_depth,
asks05_percent_volume: asks05_percent_volume,
asks075_percent_depth: asks075_percent_depth,
asks075_percent_volume: asks075_percent_volume,
asks10_percent_depth: asks10_percent_depth,
asks10_percent_volume: asks10_percent_volume,
asks1_percent_depth: asks1_percent_depth,
asks1_percent_volume: asks1_percent_volume,
asks20_percent_depth: asks20_percent_depth,
asks20_percent_volume: asks20_percent_volume,
asks2_percent_depth: asks2_percent_depth,
asks2_percent_volume: asks2_percent_volume,
asks30_percent_depth: asks30_percent_depth,
asks30_percent_volume: asks30_percent_volume,
asks5_percent_depth: asks5_percent_depth,
asks5_percent_volume: asks5_percent_volume,
bid: bid,
bids025_percent_depth: bids025_percent_depth,
bids025_percent_volume: bids025_percent_volume,
bids05_percent_depth: bids05_percent_depth,
bids05_percent_volume: bids05_percent_volume,
bids075_percent_depth: bids075_percent_depth,
bids075_percent_volume: bids075_percent_volume,
bids10_percent_depth: bids10_percent_depth,
bids10_percent_volume: bids10_percent_volume,
bids1_percent_depth: bids1_percent_depth,
bids1_percent_volume: bids1_percent_volume,
bids20_percent_depth: bids20_percent_depth,
bids20_percent_volume: bids20_percent_volume,
bids2_percent_depth: bids2_percent_depth,
bids2_percent_volume: bids2_percent_volume,
bids30_percent_depth: bids30_percent_depth,
bids30_percent_volume: bids30_percent_volume,
bids5_percent_depth: bids5_percent_depth,
bids5_percent_volume: bids5_percent_volume
}
end)
end
defp last_exchange_market_depth_query(exchange, ticker_pair, limit) do
query = """
SELECT
toUnixTimestamp(dt),
source,
symbol,
ask,
asks_0_25_percent_depth,
asks_0_25_percent_volume,
asks_0_5_percent_depth,
asks_0_5_percent_volume,
asks_0_75_percent_depth,
asks_0_75_percent_volume,
asks_10_percent_depth,
asks_10_percent_volume,
asks_1_percent_depth,
asks_1_percent_volume,
asks_20_percent_depth,
asks_20_percent_volume,
asks_2_percent_depth,
asks_2_percent_volume,
asks_30_percent_depth,
asks_30_percent_volume,
asks_5_percent_depth,
asks_5_percent_volume,
bid,
bids_0_25_percent_depth,
bids_0_25_percent_volume,
bids_0_5_percent_depth,
bids_0_5_percent_volume,
bids_0_75_percent_depth,
bids_0_75_percent_volume,
bids_10_percent_depth,
bids_10_percent_volume,
bids_1_percent_depth,
bids_1_percent_volume,
bids_20_percent_depth,
bids_20_percent_volume,
bids_2_percent_depth,
bids_2_percent_volume,
bids_30_percent_depth,
bids_30_percent_volume,
bids_5_percent_depth,
bids_5_percent_volume
FROM #{@table}
PREWHERE
source = ?1 AND symbol = ?2
ORDER BY dt DESC
LIMIT ?3
"""
args = [
exchange,
ticker_pair,
limit
]
{query, args}
end
end | lib/sanbase/clickhouse/exchanges/market_depth.ex | 0.716119 | 0.51818 | market_depth.ex | starcoder |
defmodule VexValidators.Uuid do
@moduledoc """
Ensure a value is a valid UUID string.
## Options
The `options` can be a keyword list with the following keys:
* `:format`: An atom or boolean that defines the validation & format of the UUID:
* `:default`: The value must a string with format `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxxx`
where `x` is a hex number.
* `:hex`: The value must be a string with the format `xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx`
where `x` is a hex number.
* `:urn`: The value must be a string with the format `urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxxx`
where `x` is a hex number.
* `:any` or `true`: The value must be a string of any of the supported formats (`:default`, `:hex` or `:urn`).
* `:not_any` or `false`: The value must **not** be a valid UUID string.
The options can also be an atom or boolean instead of the keyword list, which is the value of the `:format` option.
## Examples
Examples when using the `:any` or `true` options:
iex> VexValidators.Uuid.validate(:not_a_guid, :any)
{:error, "must be a valid UUID"}
iex> VexValidators.Uuid.validate("not_a_uuid", :any)
{:error, "must be a valid UUID"}
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", :any)
:ok
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a30", :any)
{:error, "must be a valid UUID"}
iex> VexValidators.Uuid.validate(:not_a_guid, true)
{:error, "must be a valid UUID"}
iex> VexValidators.Uuid.validate("not_a_uuid", true)
{:error, "must be a valid UUID"}
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", true)
:ok
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a30", true)
{:error, "must be a valid UUID"}
Examples when using the `:not_any` or `false` options:
iex> VexValidators.Uuid.validate(:not_a_guid, :not_any)
:ok
iex> VexValidators.Uuid.validate("not_a_uuid", :not_any)
:ok
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", :not_any)
{:error, "must not be a UUID"}
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a30", :not_any)
:ok
iex> VexValidators.Uuid.validate(:not_a_guid, false)
:ok
iex> VexValidators.Uuid.validate("not_a_uuid", false)
:ok
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", false)
{:error, "must not be a UUID"}
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a30", false)
:ok
Examples when using the `:default` option:
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", :default)
:ok
iex> VexValidators.Uuid.validate("02aa7f483ccd11e4b63e14109ff1a304", :default)
{:error, "must be a valid UUID in :default format"}
iex> VexValidators.Uuid.validate("urn:uuid:02aa7f48-3ccd-11e4-b63e-14109ff1a304", :default)
{:error, "must be a valid UUID in :default format"}
Examples when using the `:hex` option:
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", :hex)
{:error, "must be a valid UUID in :hex format"}
iex> VexValidators.Uuid.validate("02aa7f483ccd11e4b63e14109ff1a304", :hex)
:ok
iex> VexValidators.Uuid.validate("urn:uuid:02aa7f48-3ccd-11e4-b63e-14109ff1a304", :hex)
{:error, "must be a valid UUID in :hex format"}
Examples when using the `:urn` option:
iex> VexValidators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", :urn)
{:error, "must be a valid UUID in :urn format"}
iex> VexValidators.Uuid.validate("02aa7f483ccd11e4b63e14109ff1a304", :urn)
{:error, "must be a valid UUID in :urn format"}
iex> VexValidators.Uuid.validate("urn:uuid:02aa7f48-3ccd-11e4-b63e-14109ff1a304", :urn)
:ok
## Custom Error Messages
Custom error messages (in EEx format), provided as :message, can use the following values:
iex> VexValidators.Uuid.__validator__(:message_fields)
[value: "Bad value", format: "UUID format"]
For examples please see the [Vex documentation](https://github.com/CargoSense/vex#custom-eex-error-renderer-messages).
"""
use Vex.Validator
@any_formats [:default, :hex, :urn]
@all_formats @any_formats ++ [:any, :not_any]
@urn_prefix "urn:uuid:"
@doc false
@message_fields [value: "Bad value", format: "UUID format"]
def validate(value, true), do: validate(value, format: :any)
def validate(value, false), do: validate(value, format: :not_any)
def validate(value, options) when options in @all_formats, do: validate(value, format: options)
def validate(value, options) when is_list(options) do
unless_skipping(value, options) do
format = options[:format]
case do_validate(value, format) do
:ok -> :ok
{:error, reason} -> {:error, message(options, reason, value: value, format: format)}
end
end
end
defp do_validate(<<_u0::64, ?-, _u1::32, ?-, _u2::32, ?-, _u3::32, ?-, _u4::96>>, :default) do
:ok
end
defp do_validate(<<_u::256>>, :hex) do
:ok
end
defp do_validate(<<@urn_prefix, _u0::64, ?-, _u1::32, ?-, _u2::32, ?-, _u3::32, ?-, _u4::96>>, :urn) do
:ok
end
defp do_validate(_, format) when format in @any_formats do
{:error, "must be a valid UUID in #{inspect(format)} format"}
end
defp do_validate(value, :any) do
Enum.reduce_while(@any_formats, {:error, "must be a valid UUID"}, fn
format, _ ->
case do_validate(value, format) do
:ok -> {:halt, :ok}
_ -> {:cont, {:error, "must be a valid UUID"}}
end
end)
end
defp do_validate(value, :not_any) do
Enum.reduce_while(@any_formats, :ok, fn
format, _ ->
case do_validate(value, format) do
:ok -> {:halt, {:error, "must not be a UUID"}}
_ -> {:cont, :ok}
end
end)
end
defp do_validate(_, _) do
{:error, "must provide a valid UUID format in options"}
end
end | lib/vex_validators/uuid.ex | 0.871932 | 0.62798 | uuid.ex | starcoder |
defmodule Ecto.Validator do
@moduledoc """
Validates a given struct or dict given a set of predicates.
Ecto.Validator.struct(user,
name: present() when on_create?(user),
age: present(message: "must be present"),
age: greater_than(18),
also: validate_other
)
Validations are passed as the second argument in the attribute-predicate
format. Each predicate can be filtered via the `when` operator. Note `when`
here is not limited to only guard expressions.
The predicates above are going to receive the attribute being validated
and its current value as argument. For example, the `present` predicate
above is going to be called as:
present(:name, user.name)
present(:age, user.age, message: "must be present")
The validator also handles a special key `:also`, which is used to pipe
to predicates without a particular attribute. Instead, such predicates
receive the struct as argument. In this example, `validate_other` will
be invoked as:
validate_other(user)
Note all predicates must return a keyword list, with the attribute error
as key and the validation message as value.
A handful of predicates can be found at `Ecto.Validator.Predicates`.
"""
@doc """
  Validates a given dict against a set of predicates.
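  Mirroring the struct example in the module docs (`present/0` and
  `greater_than/1` come from `Ecto.Validator.Predicates`):

      Ecto.Validator.dict(params,
        name: present(),
        age: greater_than(18))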
"""
@spec dict(Macro.t, Keyword.t) :: Macro.t
defmacro dict(value, opts) when is_list(opts) do
process opts, value, fn var, attr ->
quote do: Dict.get(unquote(var), unquote(attr))
end
end
@doc """
  Validates a given dict, with binary keys, against a set of predicates.
"""
@spec bin_dict(Macro.t, Keyword.t) :: Macro.t
defmacro bin_dict(value, opts) when is_list(opts) do
process opts, value, fn var, attr ->
quote do: Dict.get(unquote(var), unquote(Atom.to_string(attr)))
end
end
@doc """
  Validates a given struct against a set of predicates.
"""
@spec struct(Macro.t, Keyword.t) :: Macro.t
defmacro struct(value, opts) when is_list(opts) do
process opts, value, fn var, attr ->
quote do: Map.get(unquote(var), unquote(attr))
end
end
defp process([], _value, _getter), do: nil
defp process(opts, value, getter) do
var = quote do: var
validations =
Enum.reduce(opts, quote(do: %{}), &process_each(&1, &2, var, getter))
quote do
unquote(var) = unquote(value)
if map_size(errors = unquote(validations)) > 0 do
errors
end
end
end
defp process_each({:also, function}, acc, var, _getter) do
quotation = fn acc, predicate ->
quote do
acc = unquote(acc)
case unquote(predicate) do
nil -> acc
err -> Map.merge(acc, err, fn _k, v1, v2 -> v1 ++ v2 end)
end
end
end
handle_ops function, acc, quotation, fn call ->
Macro.pipe(var, call, 0)
end
end
defp process_each({attr, function}, acc, var, getter) do
quotation = fn acc, predicate ->
quote do
acc = unquote(acc)
case unquote(predicate) do
nil -> acc
msg -> Map.update(acc, unquote(attr), [msg], &[msg|&1])
end
end
end
handle_ops function, acc, quotation, fn call ->
Macro.pipe(attr, Macro.pipe(getter.(var, attr), call, 0), 0)
end
end
defp handle_ops({:when, _, [left, right]}, acc, quotation, builder) do
quote do
if unquote(right) do
unquote(handle_and(left, acc, quotation, builder))
else
unquote(acc)
end
end
end
defp handle_ops(other, acc, quotation, builder) do
handle_and(other, acc, quotation, builder)
end
defp handle_and({:and, _, [left, right]}, acc, quotation, builder) do
handle_and(left, quotation.(acc, builder.(right)), quotation, builder)
end
defp handle_and(other, acc, quotation, builder) do
quotation.(acc, builder.(other))
end
end | lib/ecto/validator.ex | 0.914463 | 0.634642 | validator.ex | starcoder |
defmodule Grouper do
@moduledoc """
Isolates groups of process subtrees together for configuration and name
registration purposes.
* Do you struggle with Elixir's global namespace for process names?
* Do all of your tests run synchronously because of this collisions?
* Do you mutate your application environment during tests?
If the above problems sounds familiar, `Grouper` might be for you.
## Usage
Simply start your `GenServer`s like this:
GenServer.start_link(mod, arg, name: {:via, Grouper.Registry})
And access configuration like this:
Grouper.Config.get(:key)
In tests, scripts and IEX; initialize a group with:
{:ok, _} = Grouper.start_link()
Or run a single function in its own group like this:
Grouper.exec!(&MyApp.my_task/0)
During normal application runtime, each application gets its own namespace
for processes and has isolated config.
During tests, however, each test can get its own group with isolated naming
and configuration. This makes it trivial to run all of your tests
asynchronously, eliminates the need to use global config for things like
mocking, and prevents config mutations in different tests from interfering
with each other.
Scripts and IEX can similarly benefit from group isolation with a single
command, thereby rounding out behavior to be identical in all common
execution environments.
## Migration
For convenience, the OTP config environment is loaded into the config for
you, simplifying migration from older, global configuration. This can be
suppressed if desired (see `Grouper.Config.suppress_otp_env/1` for
details).
"""
defdelegate start_link(opts \\ []), to: Grouper.Group
defdelegate stop(opts \\ []), to: Grouper.Group
@doc """
execute a function in its own group, returning an `:ok` or `:error` result
Options are passed through to `Grouper.start_link/1` and `Grouper.stop/1`.
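  For example:

      {:ok, 42} = Grouper.exec(fn -> 42 end)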
"""
@spec exec(fun(), keyword()) :: {:ok, any()} | {:error, any()}
def exec(fun, opts \\ []) do
case Grouper.Group.start_link(opts) do
{:ok, group_pid} ->
group_pid
{:error, _} = err ->
throw(err)
end
try do
{:ok, fun.()}
after
:ok = Grouper.Group.stop(opts)
end
catch
{:error, _} = err ->
err
end
@doc """
execute a function in its own group, raising an exception on error
Options are passed through to `Grouper.start_link/1` and `Grouper.stop/1`.
"""
@spec exec!(fun(), keyword()) :: any() | no_return()
def exec!(fun, opts \\ []) do
case exec(fun, opts) do
{:ok, result} ->
result
{:error, :no_group} ->
raise Grouper.NoGroupError
{:error, reason} ->
raise RuntimeError, reason: reason
end
end
@doc """
execute a function in its own group, returning an `:ok` or `:error` result
Options are passed through to `Grouper.start_link/1` and `Grouper.stop/1`.
"""
@spec exec(atom(), atom(), [any()], keyword()) :: {:ok, any()} | {:error, any()}
def exec(m, f, a, opts \\ []) when is_atom(m) and is_atom(f) and is_list(a) do
exec(fn -> apply(m, f, a) end, opts)
end
@doc """
execute a function in its own group, raising an exception on error
Options are passed through to `Grouper.start_link/1` and `Grouper.stop/1`.
"""
@spec exec!(atom(), atom(), [any()], keyword()) :: any()
def exec!(m, f, a, opts \\ []) when is_atom(m) and is_atom(f) and is_list(a) do
exec!(fn -> apply(m, f, a) end, opts)
end
end | lib/grouper.ex | 0.874332 | 0.468183 | grouper.ex | starcoder |
defmodule Charts.DataProvider do
@moduledoc """
`Charts.DataProvider` is a callback module for use in
client applications. Implement the callbacks defined here
in your application to provide the data that drives a chart's
underlying `Charts.dataset`.
Let's say we have a list of data we want to render in an
`Charts.ColumnChart`. We will need to populate the `Charts.ColumnChart`'s
dataset with a list of data, where each data element is an
`Charts.BaseDatum`.
Given data that looks like this:
```elixir
[
%MyApp.Vehicle{id: "supersonic plane ABC", current_velocity: 12000},
%MyApp.Vehicle{id: "scooter XYZ", current_velocity: 17},
%MyApp.Vehicle{id: "super car 123", current_velocity: 220}
]
```
We will need to transform this data in a way that an `Charts.ColumnChart` can
understand. We can manage this by defining our own module that implements
the `Charts.DataProvider` behaviour. Here is an example of how that might
look.
```elixir
defmodule MyApp.Vehicles.DataProvider do
@behaviour Charts.DataProvider
alias Charts.BaseDatum
# The `get/0` will be used to bootstrap a dataset from scratch.
@impl true
def get do
MyApp.Persistence.list_vehicles()
|> Enum.map(&vehicle_to_datum/1)
end
# `update/2` can be used to react to real-time changes in your dataset
@impl true
def update_chart(chart, updates) do
%Charts.BaseChart{
chart |
dataset: %Charts.ColumnChart.Dataset{
chart.dataset |
data: Enum.map(updates, &vehicle_to_datum/1)
}
}
end
defp vehicle_to_datum(%MyApp.Vehicle{} = vehicle) do
%BaseDatum{
name: vehicle.id,
values: [vehicle.current_velocity],
fill_color: MyAppWeb.ChartStyles.colors().blue_gradient
}
end
end
```
For further examples, take a look at the `Demo` application examples
in `Charts`'s Github Repo, [here](https://github.com/spawnfest/livechart/tree/master/demo).
Relevant examples can be found in `Demo.SystemData.MemoryChart` and `Demo.Examples.Cincy`
[here](https://github.com/spawnfest/livechart/tree/master/demo/lib/demo/examples).
  You can also look at `DemoWeb.PageLive` where charts are created and data is piped
  into them in real time using PubSub and Erlang `:timer.send_interval/3` to update the
charts on the fly.
"""
@doc """
Returns data that can be used in a `Charts.chart()` dataset.
"""
@callback get() :: term()
@doc """
Updates a `Charts.chart`'s dataset.
"""
@callback update_chart(Charts.chart(), term()) :: Charts.chart()
end | charts/lib/charts/data_provider.ex | 0.877909 | 0.865452 | data_provider.ex | starcoder |
defmodule Resourceful.Collection.Filter do
@moduledoc """
Provides a common interface for filtering collections. See `call/2` for use
and examples.
This module is intended to dispatch arguments to the appropriate `Filter`
module for the underlying data source.
  Filtering is not meant to replace, or come anywhere near feature parity
  with, the more robust querying options provided by various databases. Since
  the focus is on edge-facing APIs, generally web-based APIs, filtering is
  meant to be much simpler and more predictable. For instance, wildcard and
  regular expression filtering are intentionally omitted by default.
"""
alias Resourceful.{Collection, Error}
alias Resourceful.Collection.Delegate
@type t() :: {Collection.queryable(), String.t(), any()}
@type coercible() :: t() | {String.t(), any()} | list()
@shorthand %{
"eq" => %{func: :equal},
"ex" => %{func: :exclude, only: [:string, :list]},
"gt" => %{func: :greater_than},
"gte" => %{func: :greater_than_or_equal},
"in" => %{func: :include, only: [:string, :list]},
"lt" => %{func: :less_than},
"lte" => %{func: :less_than_or_equal},
"not" => %{func: :not_equal},
"sw" => %{func: :starts_with, only: [:string]}
}
@default_op "eq"
@doc """
Returns a data source that is filtered in accordance with `filters`.
  If `data_source` is not an actual list of resources (e.g. an Ecto Queryable),
  underlying modules should not return a list of resources, but rather a
filtered version of `data_source`.
## Args
* `data_source`: See `Resourceful.Collection` module overview.
* `filters`: A list of filters. See `cast/1` for a list of valid
filters.
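
  ## Examples

  A sketch of typical usage with string filters (`albums` is a hypothetical
  data source):

      call(albums, [{"artist", "Duran Duran"}])
      call(albums, [{"tracks gt", 10}, {"artist sw", "Duran"}])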
"""
def call(data_source, []), do: data_source
def call(data_source, filters) when is_list(filters),
do: Enum.reduce(filters, data_source, &apply_filter!(&2, &1))
def call(data_source, filters), do: call(data_source, [filters])
@doc """
  Converts an argument into an appropriate filter parameter. A cast filter is
  a tuple containing the field name, the operator that maps to the function
  called on the delegated module, and the value that will be used for
  comparison.

  Filter parameters can be provided as a tuple, list, or string and will be
  cast to the appropriate format. `cast/1` returns an error tuple for invalid
  filters, while `cast!/1` raises instead. Please see `valid_operator?/2` if
  you want to ensure client-provided data is valid first.
## Examples
cast({:age, "gte", 18})
cast(["age", "gte", 18])
cast(["age gte", 18])
"""
def cast({field, op, val}) when is_binary(op), do: {:ok, {field, op, val}}
def cast({field_and_op, val}) when is_binary(field_and_op) do
field_and_op
|> cast_field_and_op()
|> Enum.concat([val])
|> cast()
end
def cast({field, val}) when is_list(field) do
cast({field, @default_op, val})
end
def cast([field_and_op, val]), do: cast({field_and_op, val})
def cast(filter) when is_list(filter) and length(filter) == 3 do
filter
|> List.to_tuple()
|> cast()
end
def cast(filter), do: Error.with_context(:invalid_filter, %{filter: filter})
def cast!(filter) do
case cast(filter) do
{:ok, filter} ->
filter
{:error, {_, %{filter: filter}}} ->
raise ArgumentError, message: "Cannot cast filter: #{inspect(filter)}"
end
end
@doc """
"""
@spec cast_as_list?(String.t()) :: boolean()
def cast_as_list?(op) when op in ["ex", "in"], do: true
def cast_as_list?(_), do: false
@doc """
Checks whether or not an operator is valid.
## Args
* `op`: Intended operator. Valid operators are keys in `@shorthand`.
"""
def valid_operator?(op) when is_binary(op), do: Map.has_key?(@shorthand, op)
def valid_operator?(op) when is_atom(op), do: valid_operator?(Atom.to_string(op))
@doc """
Checks whether or not an operator is valid in conjunction with an intended
value. This can be used to validate the data from a client query.
## Args
* `op`: See `valid_operator?/1`.
* `val`: The value to be used with the operator. Certain operators only work
on a subset of value types. For instance `sw` is only valid with strings.
"""
def valid_operator?(op, val), do: valid_operator_with_type?(operator(op), val)
defp apply_filter!(data_source, filter) do
{field, op, val} = Delegate.cast_filter(data_source, cast!(filter))
data_source
|> Delegate.filters()
|> apply(operator_func!(op), [data_source, field, val])
end
defp cast_field_and_op(field_and_op) when is_binary(field_and_op) do
field_and_op
|> String.split(" ", parts: 2)
|> cast_field_and_op()
end
defp cast_field_and_op([field | []]), do: [field, @default_op]
defp cast_field_and_op(field_and_op), do: field_and_op
defp operator(op) when is_binary(op), do: Map.get(@shorthand, op)
defp operator(op) when is_atom(op), do: operator(Atom.to_string(op))
defp operator_func!(op) when is_binary(op), do: Map.fetch!(@shorthand, op).func
defp valid_operator_with_type?(nil, _), do: false
defp valid_operator_with_type?(%{only: only}, val) when is_binary(val) do
Enum.member?(only, :string)
end
defp valid_operator_with_type?(%{only: only}, val) when is_list(val) do
Enum.member?(only, :list)
end
defp valid_operator_with_type?(%{only: _}, _), do: false
defp valid_operator_with_type?(%{}, _), do: true
end | lib/resourceful/collection/filter.ex | 0.911731 | 0.606003 | filter.ex | starcoder |
defmodule Croma.TypeUtil do
@moduledoc """
Utilities to work with internal representation of types.
"""
alias Kernel.Typespec
@primitive_types [
:pid,
:port,
:reference,
:atom,
:binary,
:bitstring,
:boolean,
:byte,
:char,
:integer,
:pos_integer,
:neg_integer,
:non_neg_integer,
:float,
:number,
:list,
:tuple,
:map,
:fun,
]
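  # Resolves a (possibly user-defined) type down to one of the primitive
  # types above, e.g. resolve_primitive(String, :t, __ENV__) #=> {:ok, :binary}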
@spec resolve_primitive(module, atom, Macro.Env.t) :: {:ok, atom} | :error
def resolve_primitive(module, name, env) do
case Typespec.beam_types(module) do
nil -> # No beam file is available
try do
[:type, :typep, :opaque]
|> Enum.flat_map(&Module.get_attribute(module, &1))
|> Enum.find(&match?({_, {:::, _, [{^name, _, _}, _ast]}, _}, &1))
|> case do
nil -> :error
{_, type_ast, _} -> destructure_type_expr(module, type_ast, env)
end
rescue
_ -> :error
end
types ->
case Enum.find(types, &match?({_, {^name, _, _}}, &1)) do
nil -> :error
{_, type_expr} -> destructure_type_expr(module, Typespec.type_to_ast(type_expr), env)
end
end
end
defp destructure_type_expr(module, {:::, _, [_lhs, type_ast]}, env) do
case type_ast do
{_, _} -> {:ok, :tuple} # 2-tuple is special in elixir AST
{:{} , _, _} -> {:ok, :tuple}
{:%{}, _, _} -> {:ok, :map}
{:% , _, _} -> {:ok, :map} # struct
{t , _, _} -> destructure_type_expr2(module, t, env)
[{:->, _, _}] -> {:ok, :fun}
l when is_list(l) -> {:ok, :list}
_other -> :error
end
end
defp destructure_type_expr2(module, t, env) do
case t do
t when t in @primitive_types -> {:ok, t}
a when is_atom(a) -> resolve_primitive(module, a, env)
{:., _, [mod, n]} -> resolve_primitive(Macro.expand(mod, env), n, env)
end
end
def list_to_type_union([v ]), do: v
def list_to_type_union([h | t]), do: {:|, [], [h, list_to_type_union(t)]}
end | lib/croma/type_util.ex | 0.667906 | 0.449755 | type_util.ex | starcoder |
defmodule Guss do
@moduledoc """
Guss generates Signed URLs for Google Cloud Storage.
Signed URLs provide a mechanism for query-string authentication for storage objects.
For more information, see the Storage Docs for [Signed URLs](https://cloud.google.com/storage/docs/access-control/signed-urls).
"""
alias __MODULE__
alias Guss.Resource
@base_attrs [:account, :base_url, :content_type, :content_md5, :expires, :http_verb]
@doc """
Returns a new `Guss.Resource` for a `GET` request.
"""
@spec get(binary(), binary(), keyword()) :: Guss.Resource.t()
def get(bucket, objectname, opts \\ []) do
new(:get, bucket, objectname, opts)
end
@doc """
Returns a new `Guss.Resource` for a `POST` request.
"""
  @spec post(binary(), binary(), keyword()) :: Guss.Resource.t()
  def post(bucket, objectname, opts \\ []) do
new(:post, bucket, objectname, opts)
end
@doc """
Returns a new `Guss.Resource` for a `PUT` request.
"""
@spec put(binary(), binary(), keyword()) :: Guss.Resource.t()
def put(bucket, objectname, opts \\ []) do
new(:put, bucket, objectname, opts)
end
@doc """
Returns a new `Guss.Resource` for a `DELETE` request.
"""
@spec delete(binary(), binary(), keyword()) :: Guss.Resource.t()
def delete(bucket, objectname, opts \\ []) do
new(:delete, bucket, objectname, opts)
end
@doc """
Returns a new `Guss.Resource`.
"""
@spec new(binary(), binary(), keyword()) :: Guss.Resource.t()
def new(bucket, objectname, opts \\ []) do
{attrs, extensions} = Keyword.split(opts, @base_attrs)
%Resource{bucket: bucket, objectname: objectname}
|> struct!(Keyword.put(attrs, :extensions, extensions))
end
@doc """
Returns a new `Guss.Resource`.
"""
@spec new(atom(), binary(), binary(), keyword()) :: Guss.Resource.t()
def new(verb, bucket, objectname, opts) when is_atom(verb) do
new(bucket, objectname, Keyword.put(opts, :http_verb, verb))
end
@doc """
Converts a `Guss.Resource` into a Signed URL.
"""
@spec sign(resource :: Guss.Resource.t(), opts :: keyword()) ::
{:error, {atom(), any()}} | {:ok, binary()}
def sign(resource, opts \\ [])
def sign(%Resource{expires: nil} = resource, opts) do
sign(%{resource | expires: expires_in(3600)}, opts)
end
def sign(%Resource{} = resource, opts) do
config_mod = Keyword.get(opts, :config_module, Goth.Config)
with {:ok, {access_id, private_key}} <- Guss.Config.for_resource(config_mod, resource) do
resource = %{resource | account: access_id}
Guss.StorageV2Signer.sign(resource, private_key)
end
end
@doc """
Returns an expiration value for a future timestamp, with optional granularity.
By default, `expires_in/1` expects a value in `:seconds`.
To specify a different granularity, pass the value as a tuple,
for instance: `{1, :hour}` or `{7, :days}`
Valid granularities are `:seconds, :hours, and :days`, as well as
their singular variants.
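
  ## Examples

      # all illustrative; each returns a unix timestamp in the future
      Guss.expires_in(3600)
      Guss.expires_in({1, :hour})
      Guss.expires_in({7, :days})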
"""
def expires_in({n, granularity}) when is_integer(n) and n > 0 do
expires_in(to_seconds(n, granularity))
end
def expires_in(seconds) when is_integer(seconds) do
DateTime.utc_now() |> DateTime.to_unix() |> Kernel.+(seconds)
end
defp to_seconds(n, i) when i in [:second, :seconds], do: n
defp to_seconds(n, i) when i in [:hour, :hours], do: n * 3600
defp to_seconds(n, i) when i in [:day, :days], do: n * 3600 * 24
end | lib/guss.ex | 0.881583 | 0.539954 | guss.ex | starcoder |
defmodule Raft do
@moduledoc """
  Raft provides users with an API for building consistent (as defined by CAP),
  distributed state machines. It does this using the Raft leader election and
  consensus protocol as described in the [original paper](https://raft.github.io/raft.pdf).
## Example
Lets create a distributed key value store. The first thing that we'll need is
a state machine:
```
defmodule KVStore do
use Raft.StateMachine
@initial_state %{}
def set(name, key, value) do
Raft.write(name, {:set, key, value})
end
def get(name, key) do
Raft.read(name, {:get, key})
end
def init(_name) do
{:ok, @initial_state}
end
def handle_write({:set, key, value}, state) do
{{:ok, key, value}, put_in(state, [key], value)}
end
def handle_read({:get, key}, state) do
case get_in(state, [key]) do
nil ->
{{:error, :key_not_found}, state}
value ->
{{:ok, value}, state}
end
end
end
```
Now we can start our peers:
```
{:ok, _pid} = Raft.start_peer(KVStore, name: :s1)
{:ok, _pid} = Raft.start_peer(KVStore, name: :s2)
{:ok, _pid} = Raft.start_peer(KVStore, name: :s3)
```
Each node must be given a unique name within the cluster. At this point our
nodes are started but they're all followers and don't know anything about each
other. We need to set the configuration so that they can communicate:
```
Raft.set_configuration(:s1, [:s1, :s2, :s3])
```
Once this runs the peers will start an election and elect a leader. You can
check the current leader like so:
```
leader = Raft.leader(:s1)
```
Once we have the leader we can read and write to our state machine:
```
{:error, :key_not_found} = KVStore.get(leader, :foo)
  {:ok, :foo, :bar} = KVStore.set(leader, :foo, :bar)
  {:ok, :bar} = KVStore.get(leader, :foo)
```
  We can now shut down our leader and ensure that a new leader has been elected
and our state is replicated across all nodes:
```
Raft.stop(leader)
# wait for election...
new_leader = Raft.leader(:s2)
  {:ok, :bar} = KVStore.get(new_leader, :foo)
```
We now have a consistent, replicated key-value store.
### Failures and re-elections
Networks disconnects and other failures will happen. If this happens the peers
might elect a new leader. If this occurs you will see messages like this:
```
{:error, :election_in_progress} = KVStore.get(leader, :foo)
{:error, {:redirect, new_leader}} = KVStore.get(leader, :foo)
```
## State Machine Message Safety
The commands sent to each state machine are opaque to the raft protocol. There
is *no validation* done to ensure that the messages conform to what the user
state machine expects. Also these logs are persisted. What this means is that
if a message is sent that causes the user state machine to crash it will
crash the raft process until a code change is made to the state machine. There
is no mechanism for removing an entry from the log. Great care must be taken
to ensure that messages don't "poison" the log and state machine.
## Log Storage
The log and metadata store is persisted to disk using rocksdb. This allows us
to use a well known and well supported db engine that also does compaction.
The log store is built as an adapter so its possible to construct other adapters
for persistence.
## Protocol Overview
Raft is a complex protocol and all of the details won't be covered here.
This is an attempt to cover the high level topics so that users can make more
informed technical decisions.
Key Terms:
* Cluster - A group of peers. These peers must be explicitly set.
* Peer - A server participating in the cluster. Peers route messages,
participate in leader election and store logs.
* Log - The log is an ordered sequence of entries. Each entry is replicated on
each peer and we consider the log to be consistent if all peers in the
cluster agree on the entries and their order. Each log contains a binary blob
which is opaque to the raft protocol but has meaning in the users state machine.
This log is persisted to the local file system.
  * Quorum - A majority of peers. In Raft this is (n/2)+1. Using a quorum allows
some number of peers to be unavailable during leader election or
replication.
* Leader - At any time there will only be 1 leader in a cluster. All reads and
writes and configuration changes must pass through the leader in order to
provide consistency. Its the leaders responsibility to replicate logs to all
other members of the cluster.
* Committed - A log entry is "committed" if the leader has replicated it to
a majority of peers. Only committed entries are applied to the users state
machine.
Each peer can be in 1 of 3 states: follower, leader, or candidate. When a
peer is started it starts in a follower state. If a follower does not receive
messages within a random timeout it transitions to a candidate and starts a
new election.
During an election the candidate requests votes from all of the other peers.
If the candidate receives enough votes to have a quorum then the candidate
transitions to the leader state and informs all of the other peers that they
are the new leader.
The leader accepts all reads and writes for the cluster. If a write occurs
then the leader creates a new log entry and replicates that entry to the other
peers. If a peer's log is missing any entries then the leader will bring the
peer up to date by replicating the missing entries. Once the new log entry
has been replicated to a majority of peers, the leader "commits" the new entry
and applies it to the users state machine. In order to provide consistent
reads a leader must ensure that they still maintain a quorum. Before executing
a read the leader will send a message to each follower and ensure that they
are still the leader. This provides consistent views of the data but it also
can have performance implications for read heavy operations.
Each time the followers receive a message they reset their "election timeout".
This process continues until a follower times out and starts a new election,
starting the cycle again.
"""
alias Raft.{
Log,
Server,
Config,
Configuration
}
require Logger
@type peer :: atom() | {atom(), atom()}
@type opts :: [
{:name, peer()},
{:config, Config.t},
]
@doc """
Starts a new peer with a given Config.t.
"""
@spec start_peer(module(), opts()) :: {:ok, pid()} | {:error, term()}
def start_peer(mod, opts) do
name = Keyword.get(opts, :name)
config = Keyword.get(opts, :config) || %Raft.Config{}
do_start(name, mod, config)
end
@doc """
Gracefully stops the node.
"""
def stop_peer(name) do
Raft.Server.Supervisor.stop_peer(name)
end
@doc """
  Used to apply a new change to the application FSM. This is done in a
  consistent manner. This operation blocks until the log has been replicated
  to a majority of servers.
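
  For example (assuming `leader` was obtained via `Raft.leader/1` and the
  state machine understands the command):

      {:ok, result} = Raft.write(leader, {:set, :foo, :bar})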
"""
@spec write(peer(), term(), any()) :: {:ok, term()} | {:error, :timeout} | {:error, :not_leader}
def write(leader, cmd, timeout \\ 3_000) do
Raft.Server.write(leader, {UUID.uuid4(), cmd}, timeout)
end
@doc """
Reads state that has been applied to the state machine.
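
  For example:

      {:ok, value} = Raft.read(leader, {:get, :foo})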
"""
@spec read(peer(), term(), any()) :: {:ok, term()} | {:error, :timeout} | {:error, :not_leader}
def read(leader, cmd, timeout \\ 3_000) do
Raft.Server.read(leader, {UUID.uuid4(), cmd}, timeout)
end
@doc """
Returns the leader according to the given peer.
"""
@spec leader(peer()) :: peer() | :none
def leader(name) do
Raft.Server.current_leader(name)
end
@doc """
Returns the current status for a peer. This is used for debugging and
testing purposes only.
"""
@spec status(peer()) :: {:ok, %{}} | {:error, :no_node}
def status(name) do
{:ok, Raft.Server.status(name)}
catch
:exit, {:noproc, _} ->
{:error, :no_node}
end
@doc """
Sets peers configuration. The new configuration will be merged with any
existing configuration.
"""
@spec set_configuration(peer(), [peer()]) :: {:ok, Configuration.t}
| {:error, term()}
def set_configuration(peer, configuration) do
id = UUID.uuid4()
configuration = Enum.map(configuration, &set_node/1)
Server.set_configuration(peer, {id, configuration})
end
@doc """
Gets an entry from the log. This should only be used for testing purposes.
"""
@spec get_entry(peer(), non_neg_integer()) :: {:ok, Log.Entry.t} | {:error, term()}
def get_entry(to, index) do
Log.get_entry(to, index)
catch
:exit, {:noproc, _} ->
{:error, :no_node}
end
def test_node(name) do
Raft.start_peer(Raft.StateMachine.Stack, name: {name, node()})
end
@doc """
  Creates a test cluster for running on a single node. Should only be used
  for development and testing.
"""
@spec test_cluster() :: {peer(), peer(), peer()}
def test_cluster() do
path =
File.cwd!
|> Path.join("test_data")
File.rm_rf!(path)
File.mkdir(path)
{:ok, _s1} = Raft.start_peer(Raft.StateMachine.Echo, name: :s1, config: %Config{data_dir: path})
{:ok, _s2} = Raft.start_peer(Raft.StateMachine.Echo, name: :s2, config: %Config{data_dir: path})
{:ok, _s3} = Raft.start_peer(Raft.StateMachine.Echo, name: :s3, config: %Config{data_dir: path})
nodes = [:s1, :s2, :s3]
{:ok, _configuration} = Raft.set_configuration(:s1, nodes)
{:s1, :s2, :s3}
end
defp do_start(nil, _, _), do: raise ArgumentError, "Must include a `:name` argument"
defp do_start(name, mod, config) do
Raft.Server.Supervisor.start_peer({name, node()}, %{config | state_machine: mod})
end
defp set_node(server) when is_atom(server), do: {server, node()}
defp set_node(peer), do: peer
end | lib/raft.ex | 0.913276 | 0.960249 | raft.ex | starcoder |
defmodule UUIDTools.UUID do
@moduledoc """
UUIDTools.UUID is a module that handles the generation of UUIDs for [Elixir](http://elixir-lang.org/).
It follows the [RFC 4122](http://www.ietf.org/rfc/rfc4122.txt).
"""
use Bitwise, only_operators: true
@compile {:inline, e: 1}
  # Offset between 15 Oct 1582 and 1 Jan 1970, in 100-nanosecond intervals.
  @beginning_of_time 122_192_928_000_000_000
@micro_to_nanoseconds_factor 10
# Variant, corresponds to variant 1 0 of RFC 4122.
@variant10 2
# UUID v1 identifier.
@uuid_v1 1
# UUID v3 identifier.
@uuid_v3 3
# UUID v4 identifier.
@uuid_v4 4
# UUID v5 identifier.
@uuid_v5 5
# UUID URN prefix.
@urn_prefix "urn:uuid:"
@doc """
Convert binary UUID data to a string.
Will raise an ArgumentError if the given binary is not valid UUID data, or
the format argument is not one of: `:default`, `:hex`, `:urn`, or `:raw`.
## Examples
```elixir
iex> UUIDTools.UUID.binary_to_string!(<<135, 13, 248, 232, 49, 7, 68, 135,
...> 131, 22, 129, 224, 137, 184, 194, 207>>)
"870df8e8-3107-4487-8316-81e089b8c2cf"
iex> UUIDTools.UUID.binary_to_string!(<<142, 161, 81, 61, 248, 161, 77, 234, 155,
...> 234, 107, 143, 75, 91, 110, 115>>, :hex)
"8ea1513df8a14dea9bea6b8f4b5b6e73"
iex> UUIDTools.UUID.binary_to_string!(<<239, 27, 26, 40, 238, 52, 17, 227, 136,
...> 19, 20, 16, 159, 241, 163, 4>>, :urn)
"urn:uuid:ef1b1a28-ee34-11e3-8813-14109ff1a304"
iex> UUIDTools.UUID.binary_to_string!(<<39, 73, 196, 181, 29, 90, 74, 96, 157,
...> 47, 171, 144, 84, 164, 155, 52>>, :raw)
<<39, 73, 196, 181, 29, 90, 74, 96, 157, 47, 171, 144, 84, 164, 155, 52>>
```
"""
def binary_to_string!(uuid, format \\ :default)
def binary_to_string!(<<uuid::binary>>, format) do
uuid_to_string(<<uuid::binary>>, format)
end
def binary_to_string!(_, _) do
raise ArgumentError, message: "Invalid argument; Expected: <<uuid::128>>"
end
@doc """
Convert a UUID string to its binary data equivalent.
Will raise an ArgumentError if the given string is not a UUID representation
in a format like:
* `"870df8e8-3107-4487-8316-81e089b8c2cf"`
* `"8ea1513df8a14dea9bea6b8f4b5b6e73"`
* `"urn:uuid:ef1b1a28-ee34-11e3-8813-14109ff1a304"`
## Examples
```elixir
iex> UUIDTools.UUID.string_to_binary!("870df8e8-3107-4487-8316-81e089b8c2cf")
<<135, 13, 248, 232, 49, 7, 68, 135, 131, 22, 129, 224, 137, 184, 194, 207>>
iex> UUIDTools.UUID.string_to_binary!("8ea1513df8a14dea9bea6b8f4b5b6e73")
<<142, 161, 81, 61, 248, 161, 77, 234, 155, 234, 107, 143, 75, 91, 110, 115>>
iex> UUIDTools.UUID.string_to_binary!("urn:uuid:ef1b1a28-ee34-11e3-8813-14109ff1a304")
<<239, 27, 26, 40, 238, 52, 17, 227, 136, 19, 20, 16, 159, 241, 163, 4>>
iex> UUIDTools.UUID.string_to_binary!(<<39, 73, 196, 181, 29, 90, 74, 96, 157, 47,
...> 171, 144, 84, 164, 155, 52>>)
<<39, 73, 196, 181, 29, 90, 74, 96, 157, 47, 171, 144, 84, 164, 155, 52>>
```
"""
def string_to_binary!(<<uuid::binary>>) do
{_type, <<uuid::128>>} = uuid_string_to_hex_pair(uuid)
<<uuid::128>>
end
@doc """
Generate a new UUID v1. This version uses a combination of one or more of:
unix epoch, random bytes, pid hash, and hardware address.
## Examples
```elixir
iex> UUIDTools.UUID.uuid1()
"cdfdaf44-ee35-11e3-846b-14109ff1a304"
iex> UUIDTools.UUID.uuid1(:default)
"cdfdaf44-ee35-11e3-846b-14109ff1a304"
iex> UUIDTools.UUID.uuid1(:hex)
"cdfdaf44ee3511e3846b14109ff1a304"
iex> UUIDTools.UUID.uuid1(:urn)
"urn:uuid:cdfdaf44-ee35-11e3-846b-14109ff1a304"
iex> UUIDTools.UUID.uuid1(:raw)
<<205, 253, 175, 68, 238, 53, 17, 227, 132, 107, 20, 16, 159, 241, 163, 4>>
iex> UUIDTools.UUID.uuid1(:slug)
"zf2vRO41EeOEaxQQn_GjBA"
```
"""
def uuid1(format \\ :default) do
uuid1(uuid1_clockseq(), uuid1_node(), format)
end
@doc """
Generate a new UUID v1, with an existing clock sequence and node address. This
version uses a combination of one or more of: unix epoch, random bytes,
pid hash, and hardware address.
## Examples
```elixir
iex> UUIDTools.uuid1()
"e93880b4-c4b7-11e9-8925-f2189835db58"
iex> UUIDTools.uuid1(:default)
"f580ace8-c4b7-11e9-a704-f2189835db58"
iex> UUIDTools.uuid1(:hex)
"05bcb75ac4b811e99c8af2189835db58"
iex> UUIDTools.uuid1(:urn)
"urn:uuid:10ac6930-c4b8-11e9-93d7-f2189835db58"
iex> UUIDTools.uuid1(:raw)
<<31, 88, 207, 250, 196, 184, 17, 233, 187, 65, 242, 24, 152, 53, 219, 88>>
iex> UUIDTools.uuid1(:slug)
"V7E_wsS4EemLq_IYmDXbWA"
```
"""
def uuid1(clock_seq, node, format \\ :default)
def uuid1(<<clock_seq::14>>, <<node::48>>, format) do
<<time_hi::12, time_mid::16, time_low::32>> = uuid1_time()
<<clock_seq_hi::6, clock_seq_low::8>> = <<clock_seq::14>>
<<time_low::32, time_mid::16, @uuid_v1::4, time_hi::12, @variant10::2, clock_seq_hi::6,
clock_seq_low::8, node::48>>
|> uuid_to_string(format)
end
@doc """
Generate a new UUID v3. This version uses an MD5 hash of fixed value (chosen
based on a namespace atom - see Appendix C of
[RFC 4122](http://www.ietf.org/rfc/rfc4122.txt) and a name value. Can also be
given an existing UUID String instead of a namespace atom.
## Examples
```elixir
iex> UUIDTools.uuid3(:md5, "google.com", :raw)
<<154, 116, 200, 62, 44, 9, 53, 19, 167, 75, 145, 214, 121, 190, 130, 184>>
iex> UUIDTools.uuid3("8808f33a-3e11-3708-919e-15fba88908db", "google.com")
"9556d661-520d-3843-8745-4e0601b06ca0"
iex> UUIDTools.uuid3(:md5, "google.com", :slug)
"mnTIPiwJNROnS5HWeb6CuA"
```
"""
def uuid3(namespace_or_uuid, name, format \\ :default)
def uuid3(:md5, <<name::binary>>, format) do
namebased_uuid(:md5, <<0x6BA7B8149DAD11D180B400C04FD430C8::128, name::binary>>)
|> uuid_to_string(format)
end
def uuid3(nil, <<name::binary>>, format) do
namebased_uuid(:md5, <<0::128, name::binary>>)
|> uuid_to_string(format)
end
def uuid3(<<uuid::binary>>, <<name::binary>>, format) do
{_type, <<uuid::128>>} = uuid_string_to_hex_pair(uuid)
namebased_uuid(:md5, <<uuid::128, name::binary>>)
|> uuid_to_string(format)
end
@doc """
Generate a new UUID v4. This version uses pseudo-random bytes generated by
the `crypto` module.
## Examples
```elixir
iex> UUIDTools.uuid4()
"e453a6bf-1acc-41a0-a768-8e486ff0bc74"
iex> UUIDTools.uuid4(:default)
"06e04ae3-5db2-4b20-bb31-fc1ffdc4ac87"
      iex> UUIDTools.uuid4(:hex)
      "98c73069ce8549ac82ef4a131d7f05b4"
iex> UUIDTools.uuid4(:urn)
"urn:uuid:18f2ea3b-6508-4f42-b65d-a0eaa6892bf4"
iex> UUIDTools.uuid4(:raw)
<<193, 103, 191, 223, 115, 251, 75, 179, 165, 138, 217, 109, 228, 155, 201, 163>>
iex> UUIDTools.uuid4(:slug)
"p78u0Qi7RpqX6D3A8FD5BQ"
```
"""
def uuid4(), do: uuid4(:default)
# For backwards compatibility.
def uuid4(:strong), do: uuid4(:default)
# For backwards compatibility.
def uuid4(:weak), do: uuid4(:default)
def uuid4(format) do
<<u0::48, _::4, u1::12, _::2, u2::62>> = :crypto.strong_rand_bytes(16)
<<u0::48, @uuid_v4::4, u1::12, @variant10::2, u2::62>>
|> uuid_to_string(format)
end
@doc """
Generate a new UUID v5. This version uses an SHA1 hash of fixed value (chosen
based on a namespace atom - see Appendix C of
[RFC 4122](http://www.ietf.org/rfc/rfc4122.txt) and a name value. Can also be
given an existing UUID String instead of a namespace atom.
## Examples
```elixir
iex> UUIDTools.uuid5(:sha1, "google.com")
"64ee70a4-8cc1-5d25-abf2-dea6c79a09c8"
iex> UUIDTools.uuid5("d26d4db3-2a94-5185-a091-5b7b61148c87", "google.com")
"4699746d-3d64-5122-a1f3-187f40fb63ac"
iex> UUIDTools.uuid5("d26d4db3-2a94-5185-a091-5b7b61148c87", "google.com", :slug)
"Rpl0bT1kUSKh8xh_QPtjrA"
```
"""
def uuid5(namespace_or_uuid, name, format \\ :default)
def uuid5(:sha1, <<name::binary>>, format) do
namebased_uuid(:sha1, <<0::128, name::binary>>)
|> uuid_to_string(format)
end
def uuid5(<<uuid::binary>>, <<name::binary>>, format) do
{_type, <<uuid::128>>} = uuid_string_to_hex_pair(uuid)
namebased_uuid(:sha1, <<uuid::128, name::binary>>)
|> uuid_to_string(format)
end
defp uuid_to_string(<<_::128>> = u, :default) do
uuid_to_string_default(u)
end
defp uuid_to_string(<<_::128>> = u, :hex) do
IO.iodata_to_binary(for <<part::4 <- u>>, do: e(part))
end
defp uuid_to_string(<<_::128>> = u, :urn) do
@urn_prefix <> uuid_to_string(u, :default)
end
defp uuid_to_string(<<_::128>> = u, :raw) do
u
end
# TODO pass options so that the padding may be included
defp uuid_to_string(<<_::128>> = u, :slug) do
Base.url_encode64(u, padding: false)
end
defp uuid_to_string(_u, format) when format in [:default, :hex, :urn, :slug] do
raise ArgumentError, message: "Invalid binary data; Expected: <<uuid::128>>"
end
defp uuid_to_string(_u, format) do
raise ArgumentError, message: "Invalid format #{format}; Expected: :default|:hex|:urn|:slug"
end
defp uuid_to_string_default(
         <<a1::4, a2::4, a3::4, a4::4, a5::4, a6::4, a7::4, a8::4, b1::4, b2::4, b3::4, b4::4,
           c1::4, c2::4, c3::4, c4::4, d1::4, d2::4, d3::4, d4::4, e1::4, e2::4, e3::4, e4::4,
           e5::4, e6::4, e7::4, e8::4, e9::4, e10::4, e11::4, e12::4>>
) do
<<e(a1), e(a2), e(a3), e(a4), e(a5), e(a6), e(a7), e(a8), ?-, e(b1), e(b2), e(b3), e(b4), ?-,
e(c1), e(c2), e(c3), e(c4), ?-, e(d1), e(d2), e(d3), e(d4), ?-, e(e1), e(e2), e(e3), e(e4),
e(e5), e(e6), e(e7), e(e8), e(e9), e(e10), e(e11), e(e12)>>
end
defp e(0), do: ?0
defp e(1), do: ?1
defp e(2), do: ?2
defp e(3), do: ?3
defp e(4), do: ?4
defp e(5), do: ?5
defp e(6), do: ?6
defp e(7), do: ?7
defp e(8), do: ?8
defp e(9), do: ?9
defp e(10), do: ?a
defp e(11), do: ?b
defp e(12), do: ?c
defp e(13), do: ?d
defp e(14), do: ?e
defp e(15), do: ?f
# Extract the type (:default etc) and pure byte value from a UUID String.
defp uuid_string_to_hex_pair(<<_::128>> = uuid) do
{:raw, uuid}
end
defp uuid_string_to_hex_pair(<<uuid_in::binary>>) do
uuid = String.downcase(uuid_in)
{type, hex_str} =
case uuid do
<<u0::64, ?-, u1::32, ?-, u2::32, ?-, u3::32, ?-, u4::96>> ->
{:default, <<u0::64, u1::32, u2::32, u3::32, u4::96>>}
<<u::256>> ->
{:hex, <<u::256>>}
        <<@urn_prefix, u0::64, ?-, u1::32, ?-, u2::32, ?-, u3::32, ?-, u4::96>> ->
{:urn, <<u0::64, u1::32, u2::32, u3::32, u4::96>>}
_ ->
case uuid_in do
_ when byte_size(uuid_in) == 22 ->
case Base.url_decode64(uuid_in <> "==") do
{:ok, decoded} -> {:slug, Base.encode16(decoded)}
_ -> raise ArgumentError, message: "Invalid argument; Not a valid UUID: #{uuid}"
end
_ ->
raise ArgumentError, message: "Invalid argument; Not a valid UUID: #{uuid}"
end
end
try do
{type, hex_str_to_binary(hex_str)}
catch
_, _ ->
raise ArgumentError, message: "Invalid argument; Not a valid UUID: #{uuid}"
end
end
# Get unix epoch as a 60-bit timestamp.
defp uuid1_time() do
{mega_sec, sec, micro_sec} = :os.timestamp()
epoch = mega_sec * 1_000_000_000_000 + sec * 1_000_000 + micro_sec
    timestamp = @beginning_of_time + @micro_to_nanoseconds_factor * epoch
<<timestamp::60>>
end
# Generate random clock sequence.
defp uuid1_clockseq() do
<<rnd::14, _::2>> = :crypto.strong_rand_bytes(2)
<<rnd::14>>
end
# Get local IEEE 802 (MAC) address, or a random node id if it can't be found.
defp uuid1_node() do
with nil <- :persistent_term.get({__MODULE__, :mac_address}, nil) do
{:ok, ifs0} = :inet.getifaddrs()
mac_address = uuid1_node(ifs0)
:persistent_term.put({__MODULE__, :mac_address}, mac_address)
mac_address
end
end
defp uuid1_node([{_if_name, if_config} | rest]) do
case :lists.keyfind(:hwaddr, 1, if_config) do
false ->
uuid1_node(rest)
{:hwaddr, hw_addr} ->
if length(hw_addr) != 6 or Enum.all?(hw_addr, fn n -> n == 0 end) do
uuid1_node(rest)
else
:erlang.list_to_binary(hw_addr)
end
end
end
defp uuid1_node(_) do
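    # No MAC address found: fall back to random bytes with the multicast bit
    # set, as RFC 4122 requires for randomly generated node ids.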
<<rnd_hi::7, _::1, rnd_low::40>> = :crypto.strong_rand_bytes(6)
    <<rnd_hi::7, 1::1, rnd_low::40>>
end
# Generate a hash of the given data.
defp namebased_uuid(:md5, data) do
compose_namebased_uuid(@uuid_v3, :crypto.hash(:md5, data))
end
defp namebased_uuid(:sha1, data) do
<<sha1::128, _::32>> = :crypto.hash(:sha, data)
compose_namebased_uuid(@uuid_v5, <<sha1::128>>)
end
# Format the given hash as a UUID.
defp compose_namebased_uuid(version, hash) do
<<time_low::32, time_mid::16, _::4, time_hi::12, _::2, clock_seq_hi::6, clock_seq_low::8,
node::48>> = hash
<<time_low::32, time_mid::16, version::4, time_hi::12, @variant10::2, clock_seq_hi::6,
clock_seq_low::8, node::48>>
end
defp hex_str_to_binary(
<<a1, a2, a3, a4, a5, a6, a7, a8, b1, b2, b3, b4, c1, c2, c3, c4, d1, d2, d3, d4, e1, e2,
e3, e4, e5, e6, e7, e8, e9, e10, e11, e12>>
) do
<<d(a1)::4, d(a2)::4, d(a3)::4, d(a4)::4, d(a5)::4, d(a6)::4, d(a7)::4, d(a8)::4, d(b1)::4,
d(b2)::4, d(b3)::4, d(b4)::4, d(c1)::4, d(c2)::4, d(c3)::4, d(c4)::4, d(d1)::4, d(d2)::4,
d(d3)::4, d(d4)::4, d(e1)::4, d(e2)::4, d(e3)::4, d(e4)::4, d(e5)::4, d(e6)::4, d(e7)::4,
d(e8)::4, d(e9)::4, d(e10)::4, d(e11)::4, d(e12)::4>>
end
defp d(?0), do: 0
defp d(?1), do: 1
defp d(?2), do: 2
defp d(?3), do: 3
defp d(?4), do: 4
defp d(?5), do: 5
defp d(?6), do: 6
defp d(?7), do: 7
defp d(?8), do: 8
defp d(?9), do: 9
defp d(?A), do: 10
defp d(?B), do: 11
defp d(?C), do: 12
defp d(?D), do: 13
defp d(?E), do: 14
defp d(?F), do: 15
defp d(?a), do: 10
defp d(?b), do: 11
defp d(?c), do: 12
defp d(?d), do: 13
defp d(?e), do: 14
defp d(?f), do: 15
end | lib/uuid_tools/uuid.ex | 0.886629 | 0.63023 | uuid.ex | starcoder |
defmodule Joq.Job do
@moduledoc """
Internal representation of a job instance.
See [below](#t:t/0) for the format of Job structs.
"""
import Joq.Timing
alias Joq.Retry
defstruct [:id, :worker, :args, :retry, :delay_until]
@typedoc """
The type of a Job struct.
Job structs contain the following fields:
* `:id` - an identifier string that is unique for each job
* `:worker` - the worker module (see `Joq.Worker` for more info)
* `:args` - the arguments the worker function will be called with
* `:retry` - an optional retry configuration (see `Joq.Retry` for more info)
* `:delay_until` - an optional timestamp when the job should be run. This is
an Erlang monotonic timestamp (see `Joq.Timing`)
"""
@type t :: %__MODULE__{
id: String.t,
worker: atom,
args: term,
retry: Retry.t | nil,
delay_until: integer | nil
}
@doc """
Create a job. Used internally in `Joq.enqueue/3`.
Valid options are `retry` and `delay_for`. See `Joq.Retry` for retry configs.
`delay_for` is the amount of milliseconds to wait before executing the job.
## Examples
# Create a job with an id of "foo" that will be run as
# MyWorker.perform(param: 1) and will not be retried on errors
Job.make("foo", MyWorker, [param: 1], retry: :no_retry)
# Create a job that will be run as MyWorker.perform(:param) in 3 seconds
Job.make("foo", MyWorker, :param, delay_for: 3_000)
"""
@spec make(String.t, atom, term, keyword) :: t
def make(id, worker, args, options \\ []) do
# Throw errors for invalid configs
Retry.make_config(options[:retry])
delay_until = options[:delay_for] && now() + options[:delay_for]
%__MODULE__{id: id, worker: worker, args: args, retry: options[:retry],
delay_until: delay_until}
end
@doc """
Returns true for jobs that have the same worker and arguments.
These jobs are considered duplicates and will be ignored if
`duplicates: :drop` is set (see `Joq.Worker` for more info)
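
  ## Example

  `MyWorker` below is a placeholder module:

      a = Job.make("id1", MyWorker, [param: 1])
      b = Job.make("id2", MyWorker, [param: 1])
      Job.is_equal(a, b) #=> true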
"""
@spec is_equal(t, t) :: boolean
def is_equal(a, b) do
a.worker == b.worker and a.args == b.args
end
end | lib/joq/job.ex | 0.891721 | 0.63077 | job.ex | starcoder |
defmodule AdventOfCode.Y2020.Day18v2 do
@supported_ops ["+", "-", "*", "/"]
def run() do
AdventOfCode.Helpers.Data.read_from_file("2020/day18.txt")
|> Enum.map(&calculate/1)
|> Enum.sum()
end
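  # With the precedence table below ("+" at 2, "*" at 3), expressions are
  # evaluated with standard operator precedence, e.g.
  # calculate("1 + 2 * 3") #=> 7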
def calculate(input) do
input
|> parse()
|> rpn()
end
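# Worked examples (sketch, traced by hand with the precedences below):
# calculate("1 + 2 * 3") #=> 7
# calculate("(1 + 2) * 3") #=> 9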
def parse(input) do
input
|> String.replace(" ", "")
|> String.graphemes()
|> read()
end
def precedence(operator) do
case operator do
# Set "+"/"-" to 3 (equal to "*" and "/") for AoC part 1, or to 4 (higher) for part 2
"+" -> 2
"-" -> 2
"*" -> 3
"/" -> 3
end
end
def rpn(l), do: rpn(l, [])
def rpn([], [result]), do: result
def rpn([operator | rest], [a, b | stack]) when operator in @supported_ops do
res =
case operator do
"+" -> a + b
"-" -> b - a
"*" -> a * b
"/" -> b / a
end
rpn(rest, [res | stack])
end
def rpn([n | rest], stack), do: rpn(rest, [n | stack])
# Transform infix notation to reverse polish notation using Dijkstra's
# shunting-yard algorithm: https://en.wikipedia.org/wiki/Shunting-yard_algorithm
def read(input), do: read(input, %{out: [], ops: []})
def read([], %{out: out, ops: ops}) do
ops |> Enum.reverse() |> Enum.concat(out) |> Enum.reverse()
end
def read([token | rest] = input, %{ops: ops, out: out} = output) do
case token do
"(" ->
read(rest, %{output | ops: [token | ops]})
")" ->
read(rest, pop_until("(", output))
op when op in @supported_ops ->
read(rest, push_op(op, output))
_ ->
{nr, remaining} = read_number(input)
read(remaining, %{output | out: [nr | out]})
end
end
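# Example (sketch): with the default precedences,
# read(String.graphemes("1+2*3")) #=> [1, 2, 3, "*", "+"]
# since "*" binds tighter than "+".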
def pop_until(token, %{ops: [operator | rest]} = output) when operator == token do
%{output | ops: rest}
end
def pop_until(token, %{out: out, ops: [h | rest]}),
do: pop_until(token, %{out: [h | out], ops: rest})
def push_op(token, %{ops: []} = output) do
%{output | ops: [token]}
end
def push_op(operator, %{out: out, ops: [s_operator | rest] = ops}) do
cond do
s_operator == "(" ->
%{out: out, ops: [operator, s_operator | rest]}
precedence(s_operator) >= precedence(operator) ->
push_op(operator, %{out: [s_operator | out], ops: rest})
true ->
%{out: out, ops: [operator | ops]}
end
end
# Parse number
@numbers String.graphemes("0123456789")
def read_number(input), do: read_number(input, [])
def read_number([h | rest], res) when h in @numbers, do: read_number(rest, [h | res])
def read_number(remaining, res) do
nr = res |> Enum.reverse() |> Enum.join() |> String.to_integer()
{nr, remaining}
end
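# Example (sketch): read_number(String.graphemes("123+4")) #=> {123, ["+", "4"]}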
end
defmodule Type.Function do
@moduledoc """
Represents a function type.
There are two fields for the struct defined by this module.
- `params` a list of types for the function arguments. Note that the arity
of the function is the length of this list. May also be the atom `:any`
which corresponds to "a function of any arity".
- `return` the type of the returned value.
### Examples:
- `(... -> integer())` would be represented as `%Type.Function{params: :any, return: %Type{name: :integer}}`
- `(integer() -> integer())` would be represented as `%Type.Function{params: [%Type{name: :integer}], return: %Type{name: :integer}}`
### Shortcut Form
The `Type` module lets you specify a function using "shortcut form" via the `Type.function/1` macro:
```
iex> import Type, only: :macros
iex> function((atom() -> pos_integer()))
%Type.Function{params: [%Type{name: :atom}], return: %Type{name: :pos_integer}}
```
### Inference
By default, Mavis will not attempt to perform inference on function types.
```elixir
iex> inspect Type.of(&(&1 + 1))
"(any() -> any())"
```
If you would like to perform inference on the function to obtain
more details on the acceptable function types, set the inference
environment variable. For example, if you're using the `:mavis_inference` hex package, do:
```
Application.put_env(:mavis, :inference, Type.Inference)
```
The default module for this is `Type.NoInference`
### Key functions:
#### comparison
Functions are ordered first by the type order on their return type,
followed by type order on their parameters.
```elixir
iex> import Type, only: :macros
iex> Type.compare(function(( -> atom())), function(( -> integer())))
:gt
iex> Type.compare(function((integer() -> integer())),
...> function((atom() -> integer())))
:lt
```
#### intersection
Functions with distinct parameter types are nonoverlapping, even if their parameter
types overlap. If they have the same parameters, then their return values are intersected.
```elixir
iex> import Type, only: :macros
iex> Type.intersection(function(( -> 1..10)), function(( -> integer())))
%Type.Function{params: [], return: 1..10}
iex> Type.intersection(function((integer() -> integer())),
...> function((1..10 -> integer())))
%Type{name: :none}
```
functions with `:any` parameters intersected with a function with specified parameters
will adopt the parameters of the intersected function.
```elixir
iex> import Type, only: :macros
iex> Type.intersection(function((... -> pos_integer())),
...> function((1..10 -> pos_integer())))
%Type.Function{params: [1..10], return: %Type{name: :pos_integer}}
```
#### union
Functions are generally not merged in union operations, but if their parameters are
identical then their return types will be merged.
```elixir
iex> import Type, only: :macros
iex> Type.union(function(( -> 1..10)), function(( -> 11..20)))
%Type.Function{params: [], return: 1..20}
```
#### subtype?
A function type is the subtype of another if it has the same parameters and its return
value type is the subtype of the other's
```elixir
iex> import Type, only: :macros
iex> Type.subtype?(function((integer() -> 1..10)),
...> function((integer() -> integer())))
true
```
#### usable_as
The `usable_as` relationship for functions may not necessarily be obvious. An
easy way to think about it, is: if I passed a function with this type to a
function that demanded the other type how confident would I be that it would
not crash.
A function is `usable_as` another function if all of its parameters are
supertypes of the targeted function; and if its return type is subtypes of the
return type of the targeted function.
```elixir
iex> import Type, only: :macros
iex> Type.usable_as(function((pos_integer() -> 1..10)), function((1..10 -> pos_integer())))
:ok
iex> Type.usable_as(function((1..10 -> 1..10)), function((pos_integer() -> pos_integer())))
{:maybe, [%Type.Message{type: %Type.Function{params: [1..10], return: 1..10},
target: %Type.Function{params: [%Type{name: :pos_integer}], return: %Type{name: :pos_integer}}}]}
iex> Type.usable_as(function(( -> atom())), function(( -> pos_integer())))
{:error, %Type.Message{type: %Type.Function{params: [], return: %Type{name: :atom}},
target: %Type.Function{params: [], return: %Type{name: :pos_integer}}}}
```
"""
@enforce_keys [:return]
defstruct @enforce_keys ++ [params: :any]
@type t :: %__MODULE__{
params: [Type.t] | :any | pos_integer,
return: Type.t
}
@type return :: {:ok, Type.t} | {:maybe, Type.t, [Type.Message.t]} | {:error, Type.Message.t}
@spec apply_types(t | Type.Union.t(t), [Type.t], keyword) :: return
@doc """
applies types to a function definition.
Raises with `Type.FunctionError` if one of the following is true:
- an :any function is attempted to be applied
- a `top-arity` function is attempted to be applied
- a non-function (or union of functions) is attempted to be applied
Returns
- `{:ok, return_type}` when the function call is successful.
- `{:maybe, return_type, [messages]}` when one or more of the parameters
is overspecified.
- `{:error, message}` when any of the parameters is disjoint
Examples:
```
iex> import Type, only: :macros
iex> func = function ((pos_integer() -> float()))
iex> Type.Function.apply_types(func, [pos_integer()])
{:ok, %Type{name: :float}}
iex> Type.Function.apply_types(func, [non_neg_integer()])
{:maybe, %Type{name: :float}, [
%Type.Message{
type: %Type.Union{of: [%Type{name: :pos_integer}, 0]},
target: %Type{name: :pos_integer},
meta: [message: "non_neg_integer() is overbroad for argument 1 (pos_integer()) of function (pos_integer() -> float())"]
}]}
iex> Type.Function.apply_types(func, [float()])
{:error,
%Type.Message{
type: %Type{name: :float},
target: %Type{name: :pos_integer},
meta: [message: "float() is disjoint to argument 1 (pos_integer()) of function (pos_integer() -> float())"]
}}
iex> var_func = function((i -> i when i: integer()))
iex> Type.Function.apply_types(var_func, [1..10])
{:ok, 1..10}
```
"""
def apply_types(fun, vars, meta \\ [])
def apply_types(fun = %__MODULE__{params: plst}, vlst, meta) when
length(plst) == length(vlst) do
var_match = match_vars(plst, vlst)
vlst
|> Enum.zip(plst)
|> Enum.with_index(1)
|> Enum.map(fn {{v, p}, idx} ->
arg = argument(fun, idx - 1)
Type.usable_as(v, p, meta)
|> add_message(v, idx, arg, fun)
end)
|> Enum.reduce({:ok, fun.return}, &apply_reduce/2)
|> substitute_vars(var_match)
end
def apply_types(fun = %__MODULE__{params: arity}, params, _) when length(params) == arity do
{:ok, fun.return}
end
def apply_types(union = %Type.Union{of: funs}, vars, meta) do
# double check that everything is okay.
segregated_vars = funs
|> Enum.reduce(Enum.map(vars, &[&1]), fn
%__MODULE__{params: p}, acc when length(p) == length(vars) ->
p
|> Enum.zip(acc)
|> Enum.map(fn {a, b} -> [a | b] end)
type, _ ->
varp = length(vars)
raise Type.FunctionError, "type #{inspect type} in union #{inspect union} is not a function with #{varp} parameter#{p varp}"
end)
|> Enum.map(&Enum.reverse/1)
# partition each variable based on how it intersects the corresponding
# parameter of each function in the union
evaluated_type = segregated_vars
|> Enum.map(fn [var | segments] ->
Type.partition(var, segments)
end)
|> transpose
|> Enum.zip(funs)
|> Enum.map(fn {part, fun} -> apply_types(fun, part) end)
|> Enum.flat_map(fn
# throw away most of this information. We figure out whether it's okay by checking
# out the preimage map.
{:ok, type} -> [type]
{:error, _} -> []
# this should be unreachable because by definition everything should be proper
# subtypes.
{:maybe, _, _} -> raise "unreachable"
end)
|> Type.union()
import Type, only: :macros
if evaluated_type == none() do
# find the type that doesn't match. Sorry, just going to do the worst
# possible thing here.
vars
|> Enum.with_index(1)
|> Enum.map(fn {var, idx} ->
arg = argument(union, idx - 1)
var
|> Type.usable_as(arg, meta)
|> add_message(var, idx, arg, union)
end)
|> Enum.reduce(&Type.ternary_and/2)
else
segregated_vars
|> Enum.with_index(1)
|> Enum.map(fn {[var | segments], idx} ->
if Type.covered?(var, segments) do
:ok
else
arg = argument(union, idx - 1)
add_message({:maybe, [Type.Message.make(var, arg, meta)]},
var, idx, arg, union)
end
end)
|> Enum.reduce({:ok, evaluated_type}, &apply_reduce/2)
end
end
## error raising
def apply_types(%__MODULE__{params: :any}, _, _) do
raise Type.FunctionError, "cannot apply a function with ... parameters"
end
def apply_types(fun = %__MODULE__{params: params}, vars, _) do
funp = if is_integer(params), do: params, else: length(fun.params)
varp = length(vars)
raise Type.FunctionError, "mismatched arity; #{inspect fun} expects #{funp} parameter#{p funp}, got #{varp} parameter#{p varp} #{inspect vars}"
end
def apply_types(any, _, _) do
raise Type.FunctionError, "cannot apply a function to the type #{inspect any}"
end
@spec apply_reduce(Type.ternary, return) :: return
defp apply_reduce(:ok, {:ok, term}), do: {:ok, term}
defp apply_reduce({:maybe, msgs}, {:ok, term}), do: {:maybe, term, msgs}
defp apply_reduce({:error, msg}, {:ok, _}), do: {:error, msg}
defp apply_reduce(:ok, {:maybe, term, msgs}), do: {:maybe, term, msgs}
defp apply_reduce({:maybe, msgs1}, {:maybe, term, msgs2}), do: {:maybe, term, msgs1 ++ msgs2}
defp apply_reduce({:error, msg}, {:maybe, _, _}), do: {:error, msg}
defp apply_reduce(_, {:error, msg}), do: {:error, msg}
defp match_vars(plist, vlist) do
plist
|> Enum.zip(vlist)
|> Enum.filter(&match?({%Type.Function.Var{}, _}, &1))
|> Enum.into(%{})
end
defp substitute_vars({:ok, result}, match) do
{:ok, Type.Function.Var.resolve(result, match)}
end
defp substitute_vars({:maybe, result, msg}, match) do
{:maybe, Type.Function.Var.resolve(result, match), msg}
end
defp substitute_vars(error, _), do: error
# pluralization
defp p(1), do: ""
defp p(_), do: "s"
defp add_message(:ok, _, _, _, _), do: :ok
defp add_message({:maybe, [message]}, var, idx, arg, fun) do
{:maybe, [%{message | meta: message.meta ++
[message: "#{inspect var} is overbroad for argument #{idx} (#{inspect arg}) of function #{inspect fun}"]}]}
end
defp add_message({:error, message}, var, idx, arg, fun) do
{:error, %{message | meta: message.meta ++
[message: "#{inspect var} is disjoint to argument #{idx} (#{inspect arg}) of function #{inspect fun}"]}}
end
# NB: This is zero-indexed.
defp argument(%__MODULE__{params: params}, index) do
Enum.at(params, index)
end
defp argument(%Type.Union{of: funs}, index) do
funs
|> Enum.map(&argument(&1, index))
|> Enum.into(%Type.Union{})
end
@spec transpose([[Type.t]]) :: [[Type.t]]
defp transpose(lst) do
lst
|> Enum.reduce(List.duplicate([], length(hd(lst))),
fn vec, acc ->
vec
|> Enum.zip(acc)
|> Enum.map(fn {a, b} -> [a | b] end)
end)
|> Enum.map(&Enum.reverse/1)
end
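# Example (sketch): transpose([[1, 2], [3, 4], [5, 6]]) #=> [[1, 3, 5], [2, 4, 6]]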
defimpl Type.Properties do
import Type, only: :macros
use Type.Helpers
group_compare do
def group_compare(%{params: :any, return: r1}, %{params: :any, return: r2}) do
Type.compare(r1, r2)
end
def group_compare(%{params: :any}, _), do: :gt
def group_compare(_, %{params: :any}), do: :lt
def group_compare(%{params: p1}, %{params: p2})
when length(p1) < length(p2), do: :gt
def group_compare(%{params: p1}, %{params: p2})
when length(p1) > length(p2), do: :lt
def group_compare(f1, f2) do
[f1.return | f1.params]
|> Enum.zip([f2.return | f2.params])
|> Enum.each(fn {t1, t2} ->
compare = Type.compare(t1, t2)
unless compare == :eq do
throw compare
end
end)
:eq
catch
compare when compare in [:gt, :lt] -> compare
end
end
alias Type.{Function, Message}
usable_as do
def usable_as(challenge = %{params: cparam}, target = %Function{params: tparam}, meta)
when cparam == :any or tparam == :any do
case Type.usable_as(challenge.return, target.return, meta) do
:ok -> :ok
# TODO: add meta-information here.
{:maybe, _} -> {:maybe, [Message.make(challenge, target, meta)]}
{:error, _} -> {:error, Message.make(challenge, target, meta)}
end
end
def usable_as(challenge = %{params: cparam}, target = %Function{params: tparam}, meta)
when length(cparam) == length(tparam) do
[challenge.return | tparam] # note that the target parameters and the challenge
|> Enum.zip([target.return | cparam]) # parameters are swapped here. this is important!
|> Enum.map(fn {c, t} -> Type.usable_as(c, t, meta) end)
|> Enum.reduce(&Type.ternary_and/2)
|> case do
:ok -> :ok
# TODO: add meta-information here.
{:maybe, _} -> {:maybe, [Message.make(challenge, target, meta)]}
{:error, _} -> {:error, Message.make(challenge, target, meta)}
end
end
end
intersection do
def intersection(%{params: :any, return: ret}, target = %Function{}) do
new_ret = Type.intersection(ret, target.return)
if new_ret == none() do
none()
else
%Function{params: target.params, return: new_ret}
end
end
def intersection(a, b = %Function{params: :any}) do
intersection(b, a)
end
def intersection(%{params: p, return: lr}, %Function{params: p, return: rr}) do
return = Type.intersection(lr, rr)
if return == none() do
none()
else
%Function{params: p, return: return}
end
end
end
subtype do
def subtype?(challenge, target = %Function{params: :any}) do
Type.subtype?(challenge.return, target.return)
end
def subtype?(challenge = %{params: p_c}, target = %Function{params: p_t})
when p_c == p_t do
Type.subtype?(challenge.return, target.return)
end
end
def normalize(function = %{params: i}) when is_integer(i) do
%{function | params: List.duplicate(any(), i)}
end
def normalize(function), do: super(function)
end
defimpl Inspect do
import Inspect.Algebra
def inspect(%{params: :any, return: %Type{module: nil, name: :any}}, _), do: "function()"
def inspect(%{params: :any, return: return}, opts) do
concat(basic_inspect(:any, return, opts) ++ [")"])
end
def inspect(%{params: arity, return: return}, opts) when is_integer(arity) do
concat(basic_inspect(arity, return, opts) ++ [")"])
end
def inspect(%{params: params, return: return}, opts) do
# check if any of the params or the returns have *when* statements
# TODO: nested variables
[return | params]
|> Enum.filter(&match?(%Type.Function.Var{}, &1))
|> case do
[] -> basic_inspect(params, return, opts)
free_vars ->
when_list = free_vars
|> Enum.uniq
|> Enum.map(&Inspect.inspect(&1, %{opts | custom_options: [show_constraints: true]}))
|> Enum.intersperse(", ")
basic_inspect(params, return, opts) ++ [" when " | when_list]
end
|> Kernel.++([")"])
|> concat
end
defp basic_inspect(params, return, opts) do
["(", render_params(params, opts), " -> ", to_doc(return, opts)]
end
defp render_params(:any, _), do: "..."
defp render_params(arity, _) when is_integer(arity) do
"_"
|> List.duplicate(arity)
|> Enum.intersperse(", ")
|> concat
end
defp render_params(lst, opts) do
lst
|> Enum.map(&to_doc(&1, opts))
|> Enum.intersperse(", ")
|> concat
end
end
end
defmodule Type.FunctionError do
defexception [:message]
end
defmodule Tablespoon.Transport.PMPPMultiplex do
@moduledoc """
Transport which serves to multiplex several senders over a single transport.
Since we don't know who responses are for directly, we treat them as a FIFO queue. The first response is for the first sender, &c.
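## Example
A connection sketch; `inner_transport`, the address value, and the id
function are hypothetical placeholders:
t = PMPPMultiplex.new(transport: inner_transport, address: 0x13, id_mfa: {MyApp.IDs, :next_id, []})
{:ok, t} = PMPPMultiplex.connect(t)
{:ok, t} = PMPPMultiplex.send(t, request_iodata)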
"""
@behaviour Tablespoon.Transport
@enforce_keys [:transport, :address, :id_mfa]
defstruct [:transport, :address, :id_mfa, :from, timeout: 60_000, max_in_flight: :infinity]
@impl Tablespoon.Transport
def new(opts) when is_list(opts) do
struct!(__MODULE__, opts)
end
@impl Tablespoon.Transport
def connect(%__MODULE__{} = t) do
case DynamicSupervisor.start_child(dynamic_supervisor(), child_spec(t)) do
{:ok, pid} ->
monitor_and_set_from(t, pid)
{:error, {:already_started, pid}} ->
monitor_and_set_from(t, pid)
{:error, {:bad_return_value, e}} ->
e
e ->
e
end
end
@impl Tablespoon.Transport
def close(%__MODULE__{from: from} = t) when from != nil do
with {_pid, ref} <- from do
Process.demonitor(ref, [:flush])
end
_ = __MODULE__.Child.close(from)
%{t | from: nil}
end
def close(%__MODULE__{from: nil} = t) do
t
end
@impl Tablespoon.Transport
def send(%__MODULE__{from: from} = t, iodata) when from != nil do
with :ok <- __MODULE__.Child.send(from, iodata) do
{:ok, t}
end
catch
:exit, _ ->
{:error, :not_started}
end
def send(%__MODULE__{}, _) do
{:error, :not_connected}
end
@impl Tablespoon.Transport
def stream(%__MODULE__{from: {_pid, ref}} = t, {ref, response}) do
handle_response(t, response)
end
def stream(%__MODULE__{from: {pid, ref}} = t, {:DOWN, ref, :process, pid, _}) do
# parent process died, so treat that as a close
handle_response(t, :closed)
end
def stream(%__MODULE__{}, _) do
:unknown
end
defp monitor_and_set_from(t, pid) do
ref = Process.monitor(pid)
{:ok, %{t | from: {pid, ref}}}
end
defp handle_response(t, :closed = response) do
with {_pid, ref} <- t.from do
Process.demonitor(ref, [:flush])
end
t = %{t | from: nil}
{:ok, t, [response]}
end
defp handle_response(t, response) do
{:ok, t, [response]}
end
defp child_spec(t) do
{__MODULE__.Child, {t, child_name(t)}}
end
defp child_name(%{transport: transport, address: address}) do
{:via, Registry, {__MODULE__.Registry, {transport, address}}}
end
def registry, do: __MODULE__.Registry
def dynamic_supervisor, do: __MODULE__.DynamicSupervisor
end
defmodule Chronik.Aggregate.Multi do
@moduledoc """
`Chronik.Aggregate.Multi` can be used to generate a single command
that affects multiple entities.
As can be seen in the tests, a multiple-entity command can be defined
as:
## Example
```
alias Chronik.Aggregate.Multi
def handle_command({:update_name_and_max, name, max}, %Counter{id: id} = state) do
state
|> Multi.new(__MODULE__)
|> Multi.delegate(&(&1.name), &rename(&1, id, name))
|> Multi.delegate(&(&1.max), &update_max(&1, id, max))
|> Multi.run()
end
```
This command affects both the `:name` entity and the `:max` entity
in a transaction-like manner. The `update_max/3` receives the
updated aggregate state.
"""
alias Chronik.Aggregate
@type monad_state :: {Aggregate.state(), [Chronik.domain_event()], module()}
# API
@doc "Create a new state for a multi-entity command."
@spec new(state :: Aggregate.state(), module :: module()) :: monad_state()
def new(state, module), do: {state, [], module}
@doc """
Applies `validator_fun` on a given entity.
The state of the entity is obtained using the `lens_fun` function.
"""
@spec delegate(ms :: monad_state(), lens :: fun(), validator_fun :: fun()) :: monad_state()
def delegate({state, events, module}, lens_fun, validator_fun)
when is_function(lens_fun) and is_function(validator_fun) do
new_events =
state
|> lens_fun.()
|> validator_fun.()
|> List.wrap()
{apply_events(new_events, state, module), events ++ new_events, module}
end
@doc "Applies the `val_fun` function on the aggregate state."
@spec validate(ms :: monad_state(), validator_fun :: fun()) :: monad_state()
def validate({state, events, module}, validator_fun) do
new_events =
state
|> validator_fun.()
|> List.wrap()
{apply_events(new_events, state, module), events ++ new_events, module}
end
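# Example (sketch; `check_limits/1` is a hypothetical validator that
# receives the aggregate state and returns zero or more domain events):
#
#   state
#   |> Multi.new(__MODULE__)
#   |> Multi.validate(&check_limits/1)
#   |> Multi.run()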
@doc """
Run a concatenation of entities updates and return the domain events
generated.
"""
@spec run(ms :: monad_state()) :: [Chronik.domain_event()]
def run({_state, events, _module}), do: events
# Internal functions
defp apply_events(events, state, module) when is_atom(module) do
Enum.reduce(events, state, &module.handle_event/2)
end
end
defmodule Phoenix.LiveView do
@moduledoc ~S'''
LiveView provides rich, real-time user experiences with
server-rendered HTML.
LiveView programming model is declarative: instead of
saying "once event X happens, change Y on the page",
events in LiveView are regular messages which may cause
changes to its state. Once the state changes, LiveView will
re-render the relevant parts of its HTML template and push it
to the browser, which updates itself in the most efficient
manner. This means developers write LiveView templates as
any other server-rendered HTML and LiveView does the hard
work of tracking changes and sending the relevant diffs to
the browser.
At the end of the day, a LiveView is nothing more than a
process that receives events as messages and updates its
state. The state itself is nothing more than functional
and immutable Elixir data structures. The events are either
internal application messages (usually emitted by `Phoenix.PubSub`)
or sent by the client/browser.
LiveView provides many features that make it excellent
to build rich, real-time user experiences:
* By building on top of Elixir processes and
`Phoenix.Channels`, LiveView scales well vertically
(from small to large instances) and horizontally
(by adding more instances);
* LiveView is first rendered statically as part of
regular HTTP requests, which provides quick times
for "First Meaningful Paint" and also help search
and indexing engines;
* LiveView performs diff tracking. If the LiveView
state changes, it won't re-render the whole template,
but only the parts affected by the changed state.
This reduces latency and the amount of data sent over
the wire;
* LiveView tracks static and dynamic contents. Any
server-rendered HTML is made of static parts (i.e.
that never change) and dynamic ones. On the first
render, LiveView sends the static contents and in
future updates only the modified dynamic contents
are resent;
* (Coming soon) LiveView uses the Erlang Term Format
to send messages to the client. This binary-based
format is quite efficient on the server and uses
less data over the wire;
* (Coming soon) LiveView includes a latency simulator,
which allows you to simulate how your application
behaves on increased latency and guides you to provide
meaningful feedback to users while they wait for events
to be processed;
Furthermore, by keeping a persistent connection between client
and server, LiveView applications can react faster to user events
as there is less work to be done and less data to be sent compared
to stateless requests that have to authenticate, decode, load,
and encode data on every request. The flipside is that LiveView
uses more memory on the server compared to stateless requests.
## Use cases
There are many use cases where LiveView is an excellent
fit right now:
* Handling of user interaction and inputs, buttons, and
forms - such as input validation, dynamic forms,
autocomplete, etc;
* Events and updates pushed by server - such as
notifications, dashboards, etc;
* Page and data navigation - such as navigating between
pages, pagination, etc can be built with LiveView
but currently you will lose the back/forward button,
and the ability to link to pages as you navigate.
Support for `pushState` is on the roadmap;
There are other cases that have limited support but
will become first-class as we further develop LiveView:
* Transitions and loading states - the LiveView
programming model provides a good foundation for
transitions and loading states since any UI change
done after a user action is undone once the server
sends the update for said action. For example, it is
relatively straight-forward to click a button that
changes itself in a way that is automatically undone
when the update arrives. This is especially important
as user feedback when latency is involved. A complete
feature set for modelling those states is coming in
future versions;
* Optimistic UIs - once we add transitions and loading
states, many of the building blocks necessary for
building optimistic UIs will be part of LiveView, but
since optimistic UIs are about doing work on the client
while the server is unavailable, complete support for
Optimistic UIs cannot be achieved without also writing
JavaScript for the cases the server is not available.
See "JS Interop and client controlled DOM" on how to
integrate JS hooks;
There are also use cases which are a bad fit for LiveView:
* Animations - animations, menus, and general events
that do not need the server in the first place are a
bad fit for LiveView, as they can be achieved purely
with CSS and/or CSS transitions;
## Life-cycle
A LiveView begins as a regular HTTP request and HTML response,
and then upgrades to a stateful view on client connect,
guaranteeing a regular HTML page even if JavaScript is disabled.
Any time a stateful view changes or updates its socket assigns, it is
automatically re-rendered and the updates are pushed to the client.
You begin by rendering a LiveView from your router, controller, or
view. When a view is first rendered, the `mount/3` callback is invoked
with the current params, the current session and the LiveView socket.
As in a regular request, `params` contains public data that can be
modified by the user. The `session` always contains private data set
by the application itself. The `mount/3` callback wires up socket
assigns necessary for rendering the view. After mounting, `render/1`
is invoked and the HTML is sent as a regular HTML response to the
client.
After rendering the static page, LiveView connects from the client
where stateful views are spawned to push rendered updates to the
browser, and receive client events via phx bindings. Just like
the first rendering, `mount/3` is invoked with params, session,
and socket state, where mount assigns values for rendering. However
in the connected client case, a LiveView process is spawned on
the server, pushes the result of `render/1` to the client and
continues on for the duration of the connection. If at any point
during the stateful life-cycle a crash is encountered, or the client
connection drops, the client gracefully reconnects to the server,
calling `mount/3` once again.
## Example
First, a LiveView requires two callbacks: `mount/3` and `render/1`:
defmodule AppWeb.ThermostatLive do
use Phoenix.LiveView
def render(assigns) do
~L"""
Current temperature: <%= @temperature %>
"""
end
def mount(_params, %{"current_user_id" => user_id}, socket) do
temperature = Thermostat.get_user_reading(user_id)
{:ok, assign(socket, :temperature, temperature)}
end
end
The `render/1` callback receives the `socket.assigns` and is responsible
for returning rendered content. You can use `Phoenix.LiveView.sigil_L/2`
to inline LiveView templates. If you want to use `Phoenix.HTML` helpers,
remember to `use Phoenix.HTML` at the top of your `LiveView`.
A separate `.leex` HTML template can also be rendered within
your `render/1` callback by delegating to an existing `Phoenix.View`
module in your application. For example:
defmodule AppWeb.ThermostatLive do
use Phoenix.LiveView
def render(assigns) do
Phoenix.View.render(AppWeb.PageView, "page.html", assigns)
end
end
With a LiveView defined, you first define the `socket` path in your endpoint,
and point it to `Phoenix.LiveView.Socket`:
defmodule AppWeb.Endpoint do
use Phoenix.Endpoint
socket "/live", Phoenix.LiveView.Socket,
websocket: [connect_info: [session: @session_options]]
...
end
Where `@session_options` are the options given to `plug Plug.Session` extracted
to a module attribute.
And configure its signing salt in the endpoint:
config :my_app, AppWeb.Endpoint,
...,
live_view: [signing_salt: ...]
You can generate a secure, random signing salt with the `mix phx.gen.secret 32` task.
Next, decide where you want to use your LiveView.
You can serve the LiveView directly from your router (recommended):
defmodule AppWeb.Router do
use Phoenix.Router
import Phoenix.LiveView.Router
scope "/", AppWeb do
live "/thermostat", ThermostatLive
end
end
You can also `live_render` from any template:
<h1>Temperature Control</h1>
<%= live_render(@conn, AppWeb.ThermostatLive) %>
Or you can `live_render` your view from any controller:
defmodule AppWeb.ThermostatController do
...
import Phoenix.LiveView.Controller
def show(conn, %{"id" => id}) do
live_render(conn, AppWeb.ThermostatLive)
end
end
When a LiveView is rendered, all of the data currently stored in the
connection session (see `Plug.Conn.get_session/1`) will be given to
the LiveView.
It is also possible to pass extra session information besides the one
currently in the connection session to the LiveView, by passing a
session parameter:
# In the router
live "/thermostat", ThermostatLive, session: %{"extra_token" => "foo"}
# In a view
<%= live_render(@conn, AppWeb.ThermostatLive, session: %{"extra_token" => "foo"}) %>
Notice the `:session` uses string keys as a reminder that session data
is serialized and sent to the client. So you should always keep the data
in the session to a minimum. I.e. instead of storing a User struct, you
should store the "user_id" and load the User when the LiveView mounts.
Once the LiveView is rendered, a regular HTML response is sent. Next, your
client code connects to the server:
import {Socket} from "phoenix"
import LiveSocket from "phoenix_live_view"
let csrfToken = document.querySelector("meta[name='csrf-token']").getAttribute("content");
let liveSocket = new LiveSocket("/live", {params: {_csrf_token: csrfToken}});
liveSocket.connect()
*Note*: Comprehensive JavaScript client usage is covered in a later section.
After the client connects, `mount/3` will be invoked inside a spawned
LiveView process. At this point, you can use `connected?/1` to
conditionally perform stateful work, such as subscribing to pubsub topics,
sending messages, etc. For example, you can periodically update a LiveView
with a timer:
defmodule DemoWeb.ThermostatLive do
use Phoenix.LiveView
...
def mount(_params, %{"current_user_id" => user_id}, socket) do
if connected?(socket), do: :timer.send_interval(30000, self(), :update)
case Thermostat.get_user_reading(user_id) do
{:ok, temperature} ->
{:ok, assign(socket, temperature: temperature, user_id: user_id)}
{:error, reason} ->
{:error, reason}
end
end
def handle_info(:update, socket) do
{:ok, temperature} = Thermostat.get_reading(socket.assigns.user_id)
{:noreply, assign(socket, :temperature, temperature)}
end
end
We used `connected?(socket)` on mount to send our view a message every 30s if
the socket is in a connected state. We receive `:update` in a
`handle_info` just like a GenServer, and update our socket assigns. Whenever
a socket's assigns change, `render/1` is automatically invoked, and the
updates are sent to the client.
## Assigns and LiveEEx Templates
All of the data in a LiveView is stored in the socket as assigns.
The `assign/2` and `assign/3` functions help store those values.
Those values can be accessed in the LiveView as `socket.assigns.name`
but they are most commonly accessed inside LiveView templates as
`@name`.
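For example, a minimal sketch (the assign names here are arbitrary):
def mount(_params, _session, socket) do
{:ok, assign(socket, name: "Sandy", temperature: 72)}
end
and in the template:
<%= @name %>: <%= @temperature %>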
`Phoenix.LiveView`'s built-in templates provided by the `.leex`
extension or `~L` sigil, stands for Live EEx. They are similar
to regular `.eex` templates except they are designed to minimize
the amount of data sent over the wire by splitting static from
dynamic parts and also tracking changes.
When you first render a `.leex` template, it will send all of the
static and dynamic parts of the template to the client. After that,
any change you do on the server will now send only the dynamic parts,
and only if those parts have changed.
The tracking of changes is done via assigns. Imagine this template:
<div id="user_<%= @user.id %>">
<%= @user.name %>
</div>
If the `@user` assign changes, then LiveView will re-render only
the `@user.id` and `@user.name` and send them to the browser.
The change tracking also works when rendering other templates, as
long as they are also `.leex` templates and as long as all assigns
are passed to the child/inner template:
<%= render "child_template.html", assigns %>
The assign tracking feature also implies that you MUST avoid performing
direct operations in the template. For example, if you perform a database
query in your template:
<%= for user <- Repo.all(User) do %>
<%= user.name %>
<% end %>
Then Phoenix will never re-render the section above, even if the number of
users in the database changes. Instead, you need to store the users as
assigns in your LiveView before it renders the template:
assign(socket, :users, Repo.all(User))
Generally speaking, **data loading should never happen inside the template**,
regardless if you are using LiveView or not. The difference is that LiveView
enforces those as best practices.
### Change tracking pitfalls
Although change tracking can considerably reduce the amount of data sent
over the wire, there are some pitfalls users should be aware of.
First of all, change tracking can only track assigns directly. So for example,
if you do something such as:
<%= @post.the_whole_content %>
If any other field besides `the_whole_content` in `@post` changes for any
reason, `the_whole_content` will be sent downstream. Although this is not
generally a problem, if you have large fields that you don't want to resend
or if you have one field in particular that changes all the time while others
do not, you may want to track them as their own assign.
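For example, a sketch of extracting the field into its own assign (`@post` and its fields are placeholders):
assign(socket, post: post, post_content: post.the_whole_content)
Rendering `<%= @post_content %>` then keeps changes to other `@post` fields from resending the large content.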
Another limitation of change tracking is that it does not work across regular
function calls. For example, imagine the following template that renders a `div`:
<%= content_tag :div, id: "user_#{@id}" do %>
<%= @name %>
<%= @description %>
<% end %>
LiveView knows nothing about `content_tag`, which means the whole `div` will be
sent whenever any of the assigns change. This can be easily fixed by writing the
HTML directly:
<div id="user_<%= @id %>">
<%= @name %>
<%= @description %>
</div>
Note though this concern does not apply to Elixir's constructs, such as `if`,
`case`, `for`, and friends. LiveView always knows how to optimize across those.
## Bindings
Phoenix supports DOM element bindings for client-server interaction. For
example, to react to a click on a button, you would render the element:
<button phx-click="inc_temperature">+</button>
Then on the server, all LiveView bindings are handled with the `handle_event`
callback, for example:
def handle_event("inc_temperature", _value, socket) do
{:ok, new_temp} = Thermostat.inc_temperature(socket.assigns.id)
{:noreply, assign(socket, :temperature, new_temp)}
end
| Binding | Attributes |
|------------------------|------------|
| [Params](#module-click-events) | `phx-value-*` |
| [Click Events](#module-click-events) | `phx-click`, `phx-target` |
| [Focus/Blur Events](#module-focus-and-blur-events) | `phx-blur`, `phx-focus`, `phx-target` |
| [Form Events](#module-form-events) | `phx-change`, `phx-submit`, `phx-target`, `data-phx-error-for`, `phx-disable-with` |
| [Key Events](#module-key-events) | `phx-keydown`, `phx-keyup`, `phx-target` |
| [Rate Limiting](#module-rate-limiting-events-with-debounce-and-throttle) | `phx-debounce`, `phx-throttle` |
| [DOM Patching](#module-dom-patching-and-temporary-assigns) | `phx-update` |
| [JS Interop](#module-js-interop-and-client-controlled-dom) | `phx-hook` |
### Click Events
The `phx-click` binding is used to send click events to the server.
When any client event, such as a `phx-click` click is pushed, the value
sent to the server will be chosen with the following priority:
* Any number of optional `phx-value-` prefixed attributes, such as:
<div phx-click="inc" phx-value-myvar1="val1" phx-value-myvar2="val2">
will send the following map of params to the server:
def handle_event("inc", %{"myvar1" => "val1", "myvar2" => "val2"}, socket) do
If the `phx-value-` prefix is used, the server payload will also contain a `"value"`
if the element's value attribute exists.
* When receiving a map on the server, the payload will also contain metadata of the
client event, containing all literal keys of the event object, such as a click event's
`clientX`, a keydown event's `keyCode`, etc.
### Focus and Blur Events
Focus and blur events may be bound to DOM elements that emit
such events, using the `phx-blur`, and `phx-focus` bindings, for example:
<input name="email" phx-focus="myfocus" phx-blur="myblur"/>
To detect when the page itself has received focus or blur,
`phx-window-focus` and `phx-window-blur` may be specified. These window
level events may also be necessary if the element in consideration
(most often a `div` with no tabindex) cannot receive focus. Like other
bindings, `phx-value-*` can be provided on the bound element, and those
values will be sent as part of the payload. For example:
<div class="container"
phx-window-focus="page-active"
phx-window-blur="page-inactive"
phx-value-page="123">
...
</div>
The following window level bindings are supported:
* `phx-window-focus`
* `phx-window-blur`
* `phx-window-keydown`
* `phx-window-keyup`
### Form Events
To handle form changes and submissions, use the `phx-change` and `phx-submit`
events. In general, it is preferred to handle input changes at the form level,
where all form fields are passed to the LiveView's callback given any
single input change. For example, to handle real-time form validation and
saving, your template would use both `phx_change` and `phx_submit` bindings:
<%= f = form_for @changeset, "#", [phx_change: :validate, phx_submit: :save] %>
<%= label f, :username %>
<%= text_input f, :username %>
<%= error_tag f, :username %>
<%= label f, :email %>
<%= text_input f, :email %>
<%= error_tag f, :email %>
<%= submit "Save" %>
</form>
Next, your LiveView picks up the events in `handle_event` callbacks:
def render(assigns) ...
def mount(_params, _session, socket) do
{:ok, assign(socket, %{changeset: Accounts.change_user(%User{})})}
end
def handle_event("validate", %{"user" => params}, socket) do
changeset =
%User{}
|> Accounts.change_user(params)
|> Map.put(:action, :insert)
{:noreply, assign(socket, changeset: changeset)}
end
def handle_event("save", %{"user" => user_params}, socket) do
case Accounts.create_user(user_params) do
{:ok, user} ->
{:stop,
socket
|> put_flash(:info, "user created")
|> redirect(to: Routes.user_path(AppWeb.Endpoint, AppWeb.User.ShowView, user))}
{:error, %Ecto.Changeset{} = changeset} ->
{:noreply, assign(socket, changeset: changeset)}
end
end
The validate callback simply updates the changeset based on all form input
values, then assigns the new changeset to the socket. If the changeset
changes, such as generating new errors, `render/1` is invoked and
the form is re-rendered.
Likewise for `phx-submit` bindings, the same callback is invoked and
persistence is attempted. On success, a `:stop` tuple is returned and the
socket is annotated for redirect with `Phoenix.LiveView.redirect/2` to
the new user page, otherwise the socket assigns are updated with the errored
changeset to be re-rendered for the client.
*Note*: For proper form error tag updates, the error tag must specify which
input it belongs to. This is accomplished with the `data-phx-error-for` attribute.
For example, your `AppWeb.ErrorHelpers` may use this function:
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error),
class: "help-block",
data: [phx_error_for: input_id(form, field)]
)
end)
end
### Number inputs
Number inputs are a special case in LiveView forms. On programmatic updates,
some browsers will clear invalid inputs. So LiveView will not send change events
from the client when an input is invalid, instead allowing the browser's native
validation UI to drive user interaction. Once the input becomes valid, change and
submit events will be sent as normal.
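For example, a sketch using the `Phoenix.HTML.Form` helpers (the field name is arbitrary):
<%= number_input f, :quantity, min: 1, max: 10 %>
<%= error_tag f, :quantity %>
While the browser reports the value as invalid, no change event reaches the server.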
### Password inputs
Password inputs are also special cased in `Phoenix.HTML`. For security reasons,
password field values are not reused when rendering a password input tag. This
requires explicitly setting the `:value` in your markup, for example:
<%= password_input f, :password, value: input_value(f, :password) %>
<%= password_input f, :password_confirmation, value: input_value(f, :password_confirmation) %>
<%= error_tag f, :password %>
<%= error_tag f, :password_confirmation %>
### Key Events
The `onkeydown` and `onkeyup` events are supported via
the `phx-keydown` and `phx-keyup` bindings. When
pushed, the value sent to the server will contain all the client event
object's metadata. For example, pressing the Escape key looks like this:
%{
"altKey" => false, "charCode" => 0, "code" => "Escape",
"ctrlKey" => false, "key" => "Escape", "keyCode" => 27,
"location" => 0, "metaKey" => false, "repeat" => false,
"shiftKey" => false, "which" => 27
}
By default, the bound element will be the event listener, but a
window-level binding may be provided via `phx-window-keydown`,
for example:
def render(assigns) do
~L"""
<div id="thermostat" phx-window-keyup="update_temp">
Current temperature: <%= @temperature %>
</div>
"""
end
def handle_event("update_temp", %{"code" => "ArrowUp"}, socket) do
{:ok, new_temp} = Thermostat.inc_temperature(socket.assigns.id)
{:noreply, assign(socket, :temperature, new_temp)}
end
def handle_event("update_temp", %{"code" => "ArrowDown"}, socket) do
{:ok, new_temp} = Thermostat.dec_temperature(socket.assigns.id)
{:noreply, assign(socket, :temperature, new_temp)}
end
def handle_event("update_temp", _key, socket) do
{:noreply, socket}
end
## Compartmentalizing markup and events with `render`, `live_render`, and `live_component`
We can render another template directly from a LiveView template by simply
calling `render`:
render "child_template", assigns
render SomeOtherView, "child_template", assigns
If the other template has the `.leex` extension, LiveView change tracking
will also work across templates.
When rendering a child template, any of the events bound in the child
template will be sent to the parent LiveView. In other words, similar to
regular Phoenix templates, a regular `render` call does not start another
LiveView. This means `render` is useful to sharing markup between views.
One option to address this problem is to render a child LiveView inside a
parent LiveView by calling `live_render/3` instead of `render/3` from the
LiveView template. This child LiveView runs in a completely separate process
than the parent, with its own `mount` and `handle_event` callbacks. If a
child LiveView crashes, it won't affect the parent. If the parent crashes,
all children are terminated.
When rendering a child LiveView, the `:id` option is required to uniquely
identify the child. A child LiveView will only ever be rendered and mounted
a single time, provided its ID remains unchanged. Updates to a child session
will be merged on the client, but not passed back up until either a crash and
re-mount or a connection drop and recovery. To force a child to re-mount with
new session data, a new ID must be provided.
Given a LiveView runs on its own process, it is an excellent tool for creating
completely isolated UI elements, but it is a slightly expensive abstraction if
all you want is to compartmentalize markup and events. For example, if you are
showing a table with all users in the system, and you want to compartmentalize
this logic, using a separate `LiveView`, each with its own process, would likely
be too expensive. For these cases, LiveView provides `Phoenix.LiveComponent`,
which are rendered using `live_component/3`:
<%= live_component(@socket, UserComponent, id: user.id, user: user) %>
Components have their own `mount` and `handle_event` callbacks, as well as their
own state with change tracking support. Components are also lightweight as they
"run" in the same process as the parent `LiveView`. However, this means an error
in a component would cause the whole view to fail to render. See
`Phoenix.LiveComponent` for a complete rundown on components.
To sum it up:
* `render` - compartmentalizes markup
* `live_component` - compartmentalizes state, markup, and events
* `live_render` - compartmentalizes state, markup, events, and error isolation
## Rate limiting events with Debounce and Throttle
All events can be rate-limited on the client by using the
`phx-debounce` and `phx-throttle` bindings, with the following behavior:
* `phx-debounce` - Accepts either a string integer timeout value, or `"blur"`.
When an int is provided, delays emitting the event by provided milliseconds.
When `"blur"` is provided, delays emitting an input's change event until the
field is blurred by the user.
* `phx-throttle` - Accepts an integer timeout value to throttle the event in milliseconds.
Unlike debounce, throttle will immediately emit the event, then rate limit the
event at one event per provided timeout.
For example, to avoid validating an email until the field is blurred, while validating
the username at most every 2 seconds after a user changes the field:
<form phx-change="validate" phx-submit="save">
<input type="text" name="user[email]" phx-debounce="blur"/>
<input type="text" name="user[username]" phx-debounce="2000"/>
</form>
And to rate limit a button click to once every second:
<button phx-click="search" phx-throttle="1000">Search</button>
Likewise, you may throttle held-down keydown:
<div phx-window-keydown="keydown" phx-throttle="500">
...
</div>
Unless held-down keys are required, a better approach is generally to use
`phx-keyup` bindings which only trigger on key up, thereby being self-limiting.
However, `phx-keydown` is useful for games and other use cases where a constant
press on a key is desired. In such cases, throttle should always be used.
### Debounce and Throttle special behavior
The following specialized behavior is performed for forms and keydown bindings:
* When a `phx-submit`, or a `phx-change` for a different
input is triggered, any current debounce or throttle timers are reset for
existing inputs.
* A `phx-keydown` binding is only throttled for key repeats. Unique keypresses
back-to-back will dispatch the pressed key events.
## DOM patching and temporary assigns
A container can be marked with `phx-update`, allowing the DOM patch
operations to avoid updating or removing portions of the LiveView, or to append
or prepend the updates rather than replacing the existing contents. This
is useful for client-side interop with existing libraries that do their
own DOM operations. The following `phx-update` values are supported:
* `replace` - the default operation. Replaces the element with the contents
* `ignore` - ignores updates to the DOM regardless of new content changes
* `append` - append the new DOM contents instead of replacing
* `prepend` - prepend the new DOM contents instead of replacing
When using `phx-update`, a unique DOM ID must always be set in the
container. If using "append" or "prepend", a DOM ID must also be set
for each child. When appending or prepending elements containing an
ID already present in the container, LiveView will replace the existing
element with the new content instead of appending or prepending a new
element.
The "ignore" behaviour is frequently used when you need to integrate
with another JS library. The "append" and "prepend" feature is often
used with "Temporary assigns" to work with large amounts of data. Let's
learn more.
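For example, a sketch of a container handed off to a hypothetical JS charting library (the id is arbitrary):
<div id="sales-chart" phx-update="ignore">
<!-- DOM managed entirely by the charting library -->
</div>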
### Temporary assigns
By default, all LiveView assigns are stateful, which enables change
tracking and stateful interactions. In some cases, it's useful to mark
assigns as temporary, meaning they will be reset to a default value after
each update, allowing otherwise large, but infrequently updated values
to be discarded after the client has been patched.
Imagine you want to implement a chat application with LiveView. You
could render each message like this:
<%= for message <- @messages do %>
<p><span><%= message.username %>:</span> <%= message.text %></p>
<% end %>
Every time there is a new message, you would append it to the `@messages`
assign and re-render all messages.
As you may suspect, keeping the whole chat conversation in memory
and resending it on every update would be too expensive, even with
LiveView smart change tracking. By using temporary assigns and phx-update,
we don't need to keep any message in memory and send messages to be
appended to the UI only when there are new messages.
To do so, the first step is to mark which assigns are temporary and
what values they should be reset to on mount:
def mount(_params, _session, socket) do
socket = assign(socket, :messages, load_last_20_messages())
{:ok, socket, temporary_assigns: [messages: []]}
end
On mount we also load the initial amount of messages we want to
send. After the initial render, the initial batch of messages will
be reset back to an empty list.
Now, whenever there are one or more new messages, we will assign
only the new messages to `@messages`:
socket = assign(socket, :messages, new_messages)
In the template, we want to wrap all of the messages in a container
and tag this content with phx-update. Remember you must also add an ID
to the container as well as to each child:
<div id="chat-messages" phx-update="append">
<%= for message <- @messages do %>
<p id="<%= message.id %>">
<span><%= message.username %>:</span> <%= message.text %>
</p>
<% end %>
</div>
And now, once the client receives new messages, it knows it shouldn't
replace the old content, but rather append to it.
## Live navigation
LiveView provides functionality to allow page navigation using the
[browser's pushState API](https://developer.mozilla.org/en-US/docs/Web/API/History_API).
With live navigation, the page is updated without a full page reload.
You can trigger live navigation in two ways:
* From the client - this is done by replacing `Phoenix.HTML.link/3`
by `Phoenix.LiveView.Helpers.live_patch/3` or
`Phoenix.LiveView.Helpers.live_redirect/3`
* From the server - this is done by replacing `redirect/2` calls
by `push_patch/2` or `push_redirect/2`.
For example, in a template you may write:
<%= live_patch "next", to: Routes.live_path(@socket, MyLive, @page + 1) %>
or in a LiveView:
{:noreply, push_redirect(socket, to: Routes.live_path(socket, MyLive, page + 1))}
The "patch" operations must be used when you want to navigate to the
current LiveView, simply updating the URL and the current parameters,
without mounting a new LiveView. When patch is used, the `c:handle_params/3`
callback is invoked. See the next section for more information.
The "redirect" operations must be used when you want to dismount the
current LiveView and mount a new one. In those cases, the existing root
LiveView is shutdown, and an Ajax request is made to request the necessary
information about the new LiveView, without performing a full static render
(which reduces latency and improves performance). Once information is
retrieved, the new LiveView is mounted. While redirecting, a `phx-disconnected`
class is added to the root LiveView, which can be used to indicate to the
user a new page is being loaded.
`live_patch/3`, `live_redirect/3`, `push_redirect/2`, and `push_patch/2`
only work for LiveViews defined at the router with the `live/3` macro.
### `handle_params/3`
The `c:handle_params/3` callback is invoked after `c:mount/3`. It receives the
request parameters as the first argument, the URL as the second, and the socket as the third.
The parameters given to `c:handle_params/3` are the same as the one given to
`c:mount/3`. So how do you decide which callback to use to load data? Generally
speaking, data should always be loaded on `c:mount/3`. Only the params that
can be changed via `live_patch/3` or `push_patch/2` must be loaded on
`c:handle_params/3`. As any other `handle_*` callback, changes to the state
inside `c:handle_params/3` will trigger a server render.
For example, imagine you have a `UserTable` LiveView to show all users in
the system and you define it in the router as:
live "/users", UserTable
Now to add live sorting, you could do:
<%= live_patch "Sort by name", to: Routes.live_path(@socket, UserTable, %{sort_by: "name"}) %>
When clicked, since we are navigating to the current LiveView, `c:handle_params/3`
will be invoked. Remember you should never trust received params, so you must use
the callback to validate the user input and change the state accordingly:
def handle_params(params, _uri, socket) do
case params["sort_by"] do
sort_by when sort_by in ~w(name company) ->
{:noreply, socket |> assign(:sort_by, sort_by) |> recompute_users()}
_ ->
{:noreply, socket}
end
end
### Replace page address
LiveView also allows the current browser URL to be replaced. This is useful when you
want certain events to change the URL but without polluting the browser's history.
This can be done by passing the `replace: true` option to any of the navigation helpers.
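For example (the route and module names are placeholders):
<%= live_patch "sort by name", to: Routes.live_path(@socket, UserTable, %{sort_by: "name"}), replace: true %>
or, from the server:
{:noreply, push_patch(socket, to: Routes.live_path(socket, UserTable, %{sort_by: "name"}), replace: true)}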
## Live Layouts
Your LiveView will be rendered within the layout specified in your Plug pipeline,
such as the default app layout. Assigns defined during `mount` of the root LiveView
are accessible in the layout, but the app layout is never updated after the initial
render. For a live layout, you must specify an additional layout to use with your
LiveView. For example, your regular `app.html` template may display a `@new_message_count`
notification, like this:
<!DOCTYPE html>
<html lang="en">
<head>
<title><%= @page_title %></title>
</head>
<body>
<div>
<nav>
...
Messages (<%= @new_message_count %>)
</nav>
<%= render @view_module, @view_template, assigns %>
</div>
</body>
</html>
To allow the `@new_message_count` to be updated by your LiveView, you can
move the dynamic content inside a sub-layout, such as `app_web/templates/layout/live.html.leex`.
First, you would update your `app.html` layout to keep only the barebones HTML
structure:
<!DOCTYPE html>
<html lang="en">
<head>
<title>...</title>
<script>...</script>
</head>
<body>
<%= render @view_module, @view_template, assigns %>
</body>
</html>
Next, define a new `live.html.leex` layout with the dynamic content,
followed by a render of the inner `@live_view_module`:
<nav>
...
Messages (<%= @new_message_count %>)
</nav>
<%= @live_view_module.render(assigns) %>
Finally, update your LiveView to pass the `:layout` option to `use Phoenix.LiveView`:
use Phoenix.LiveView, layout: {AppWeb.LayoutView, "live.html"}
Or alternatively, you can provide the `:layout` dynamically as an option in mount:
def mount(_params, _session, socket) do
socket = assign(socket, new_message_count: 0)
{:ok, socket, layout: {AppWeb.LayoutView, "live.html"}}
end
def handle_info({:new_messages, count}, socket) do
{:noreply, assign(socket, new_message_count: count)}
end
*Note*: The layout will be wrapped by the LiveView's `:container` tag.
### Updating the HTML document title
Because the main layout from the Plug pipeline is rendered outside of LiveView,
the contents cannot be dynamically changed. The one exception is the `<title>`
of the HTML document. Phoenix LiveView special cases the `@page_title` assign
to allow dynamically updating the title of the page, which is useful when
using live navigation, or annotating the browser tab with a notification.
For example, to update the user's notification count in the browser's title bar,
first set the `page_title` assign on mount:
def mount(_params, _session, socket) do
socket = assign(socket, page_title: "Latest Posts")
{:ok, socket}
end
Then access `@page_title` in the app layout:
<title><%= @page_title %></title>
Now, although the app layout is not updated by LiveView, by simply assigning
to `page_title`, LiveView knows you want the title to be updated:
def handle_info({:new_messages, count}, socket) do
{:noreply, assign(socket, page_title: "Latest Posts (#{count} new)")}
end
*Note*: If you find yourself needing to dynamically patch other parts of the
base layout, such as injecting new scripts or styles into the `<head>` during
live navigation, *then a regular, non-live, page navigation should be used
instead*. Assigning the `@page_title` updates the `document.title` directly,
and therefore cannot be used to update any other part of the base layout.
## Disconnecting all instances of a given live user
It is possible to identify all LiveView sockets by setting a "live_socket_id"
in the session. For example, when signing in a user, you could do:
conn
|> put_session(:current_user_id, user.id)
|> put_session(:live_socket_id, "users_socket:#{user.id}")
Now all LiveView sockets will be identified and listening to the given
`live_socket_id`. You can disconnect all live users identified by said
ID by broadcasting on the topic:
MyApp.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
It is the same mechanism provided by `Phoenix.Socket`, so you can use the
same approach to disconnect live users and regular channels.
## JavaScript Client Specific
As seen earlier, you start by instantiating a single LiveSocket instance to
enable LiveView client/server interaction, for example:
import {Socket} from "phoenix"
import LiveSocket from "phoenix_live_view"
let liveSocket = new LiveSocket("/live", Socket)
liveSocket.connect()
All options are passed directly to the `Phoenix.Socket` constructor,
except for the following LiveView specific options:
* `bindingPrefix` - the prefix to use for phoenix bindings. Defaults to `"phx-"`
* `params` - the `connect_params` to pass to the view's mount callback. May be
a literal object or closure returning an object. When a closure is provided,
the function receives the view's phx-view name.
* `hooks` – a reference to a user-defined hooks namespace, containing client
callbacks for server/client interop. See the interop section below for details.
### Forms and input handling
The JavaScript client is always the source of truth for current
input values. For any given input with focus, LiveView will never
overwrite the input's current value, even if it deviates from
the server's rendered updates. This works well for updates where
major side effects are not expected, such as form validation errors,
or additive UX around the user's input values as they fill out a form.
For these use cases, the `phx-change` input does not concern itself
with disabling input editing while an event to the server is in flight.
When a `phx-change` event is sent to the server, a `"_target"` param
will be in the root payload containing the keyspace of the input name
which triggered the change event. For example, if the following input
triggered a change event:
<input name="user[username]"/>
The server's `handle_event/3` would receive a payload:
%{"_target" => ["user", "username"], "user" => %{"name" => "Name"}}
The `phx-submit` event is used for form submissions where major side-effects
typically happen, such as rendering new containers, calling an external
service, or redirecting to a new page. For these use-cases, the form inputs
are set to `readonly` on submit, and any submit button is disabled until
the client gets an acknowledgment that the server has processed the
`phx-submit` event. Following an acknowledgment, any updates are patched
to the DOM as normal, and the last input with focus is restored if the
user has not otherwise focused on a new input during submission.
To handle latent form submissions, any HTML tag can be annotated with
`phx-disable-with`, which swaps the element's `innerText` with the provided
value during form submission. For example, the following code would change
the "Save" button to "Saving...", and restore it to "Save" on acknowledgment:
<button type="submit" phx-disable-with="Saving...">Save</button>
### Form Recovery following crashes or disconnects
By default, all forms marked with `phx-change` will recover input values
automatically after the user has reconnected or the LiveView has remounted
after a crash. This is achieved by the client triggering the same `phx-change`
to the server as soon as the mount has been completed. For most use cases,
this is all you need and form recovery will happen without extra work. In some cases,
where forms are built step-by-step in a stateful fashion, extra recovery
handling may be required on the server outside of your existing `phx-change` callback code. To enable
specialized recovery, provide a `phx-auto-recover` binding on the form to
specify a different event to trigger for recovery, which will receive the form params
as usual. For example, imagine a LiveView wizard form where the form is stateful and
built based on what step the user is on and by prior selections:
<form phx-change="validate_wizard_step" phx-auto-recover="recover_wizard">
On the server, the `"validate_wizard_step"` event is only concerned with the current client
form data, but the server maintains the entire state of the wizard. To recover in this
scenario, you can specify a recovery event, such as `"recover_wizard"` above, which
would wire up to the following server callbacks in your LiveView:
def handle_event("validate_wizard_step", params, socket) do
# regular validations for current step
{:noreply, socket}
end
def handle_event("recover_wizard", params, socket) do
# rebuild state based on client input data up to the current step
{:noreply, socket}
end
To forgo automatic form recovery, set `phx-auto-recover="ignore"`.
### Loading state and errors
By default, the following classes are applied to the LiveView's parent
container:
- `"phx-connected"` - applied when the view has connected to the server
- `"phx-disconnected"` - applied when the view is not connected to the server
- `"phx-error"` - applied when an error occurs on the server. Note, this
class will be applied in conjunction with `"phx-disconnected"` if connection
to the server is lost.
When a form bound with `phx-submit` is submitted, the `"phx-loading"` class
is applied to the form, which is removed on update.
### JS Interop and client controlled DOM
To handle custom client-side javascript when an element is added, updated,
or removed by the server, a hook object may be provided with the following
life-cycle callbacks:
* `mounted` - the element has been added to the DOM and its server
LiveView has finished mounting
* `beforeUpdate` - the element is about to be updated in the DOM.
*Note*: any call here must be synchronous as the operation cannot
be deferred or cancelled.
* `updated` - the element has been updated in the DOM by the server
* `beforeDestroy` - the element is about to be removed from the DOM.
*Note*: any call here must be synchronous as the operation cannot
be deferred or cancelled.
* `destroyed` - the element has been removed from the page, either
by a parent update, or the parent being removed entirely
* `disconnected` - the element's parent LiveView has disconnected from the server
* `reconnected` - the element's parent LiveView has reconnected to the server
In addition to the life-cycle callbacks, the hook object has the following attributes in scope:
* `el` - attribute referencing the bound DOM node,
* `viewName` - attribute matching the dom node's phx-view value
* `pushEvent(event, payload)` - method to push an event from the client to the LiveView server
* `pushEventTo(selector, event, payload)` - method to push targeted events from the client
to LiveViews and LiveComponents.
For example, a controlled input for phone-number formatting would annotate its
markup:
<input type="text" name="user[phone_number]" id="user-phone-number" phx-hook="PhoneNumber" />
Then a hook callback object can be defined and passed to the socket:
let Hooks = {}
Hooks.PhoneNumber = {
mounted() {
this.el.addEventListener("input", e => {
let match = this.el.value.replace(/\D/g, "").match(/^(\d{3})(\d{3})(\d{4})$/)
if(match) {
this.el.value = `${match[1]}-${match[2]}-${match[3]}`
}
})
}
}
let liveSocket = new LiveSocket("/live", Socket, {hooks: Hooks})
...
*Note*: when using `phx-hook`, a unique DOM ID must always be set.
## Endpoint configuration
LiveView accepts the following configuration in your endpoint under
the `:live_view` key:
* `:signing_salt` (required) - the salt used to sign data sent
to the client
* `:hibernate_after` (optional) - the amount of time in milliseconds
of inactivity allowed in the LiveView before it hibernates (i.e. it
compresses its own memory and state). Defaults to 15000ms (15 seconds)
"""
alias Phoenix.LiveView.Socket
@doc """
The LiveView entry-point.
For each LiveView in the root of a template, `c:mount/3` is invoked twice:
once to do the initial page load and another to establish the live socket.
It expects three parameters:
* `params` - a map of string keys which contain public information that
can be set by the user. It contains the query params as well as any
router path parameter. `params` is only available for LiveViews mounted
at the router, otherwise it is the atom `:not_mounted_at_router`
* `session` - the connection session
* `socket` - the LiveView socket
It must return either `{:ok, socket}` or `{:ok, socket, options}`, where
`options` is one of:
* `:temporary_assigns` - a keyword list of assigns that are temporary
and must be reset to their value after every render (see the example below)
* `:layout` - the optional layout to be used by the LiveView
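For example, a minimal sketch returning temporary assigns (the `:messages`
assign is illustrative):

def mount(_params, _session, socket) do
  {:ok, assign(socket, :messages, []), temporary_assigns: [messages: []]}
end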
"""
@callback mount(
Socket.unsigned_params() | :not_mounted_at_router,
session :: map,
socket :: Socket.t()
) ::
{:ok, Socket.t()} | {:ok, Socket.t(), keyword()}
@callback render(assigns :: Socket.assigns()) :: Phoenix.LiveView.Rendered.t()
@callback terminate(reason, socket :: Socket.t()) :: term
when reason: :normal | :shutdown | {:shutdown, :left | :closed | term}
@callback handle_params(Socket.unsigned_params(), uri :: String.t(), socket :: Socket.t()) ::
{:noreply, Socket.t()} | {:stop, Socket.t()}
@callback handle_event(event :: binary, Socket.unsigned_params(), socket :: Socket.t()) ::
{:noreply, Socket.t()} | {:stop, Socket.t()}
@callback handle_call(msg :: term, {pid, reference}, socket :: Socket.t()) ::
{:noreply, Socket.t()} | {:reply, term, Socket.t()} | {:stop, Socket.t()}
@callback handle_info(msg :: term, socket :: Socket.t()) ::
{:noreply, Socket.t()} | {:stop, Socket.t()}
@optional_callbacks mount: 3,
terminate: 2,
handle_params: 3,
handle_event: 3,
handle_call: 3,
handle_info: 2
@doc """
Uses LiveView in the current module to mark it as a LiveView.
use Phoenix.LiveView,
namespace: MyAppWeb,
container: {:tr, class: "colorized"},
layout: {MyAppWeb.LayoutView, "live.html"}
## Options
* `:namespace` - configures the namespace the `LiveView` is in
* `:container` - configures the container the `LiveView` will be wrapped in
* `:layout` - configures the layout the `LiveView` will be rendered in
"""
defmacro __using__(opts) do
quote do
opts = unquote(opts)
import Phoenix.LiveView
import Phoenix.LiveView.Helpers
@behaviour Phoenix.LiveView
@before_compile Phoenix.LiveView
@doc false
@__live__ Phoenix.LiveView.__live__(__MODULE__, opts)
def __live__, do: @__live__
end
end
# TODO: Remove once the deprecation period is over
@doc false
defmacro __before_compile__(env) do
if Module.defines?(env.module, {:mount, 3}) or not Module.defines?(env.module, {:mount, 2}) do
:ok
else
IO.warn(
"mount(session, socket) is deprecated, please define mount(params, session, socket) instead",
Macro.Env.stacktrace(env)
)
quote do
def mount(_params, session, socket), do: mount(session, socket)
end
end
end
@doc false
def __live__(module, opts) do
container = opts[:container] || {:div, []}
namespace = opts[:namespace] || module |> Module.split() |> Enum.take(1) |> Module.concat()
name = module |> Atom.to_string() |> String.replace_prefix("#{namespace}.", "")
layout =
case opts[:layout] do
{mod, template} when is_atom(mod) and is_binary(template) ->
{mod, template}
nil ->
nil
other ->
raise ArgumentError,
":layout expects a tuple of the form {MyLayoutView, \"my_template.html\"}, " <>
"got: #{inspect(other)}"
end
%{container: container, name: name, kind: :view, module: module, layout: layout}
end
@doc """
Returns true if the socket is connected.
Useful for checking the connectivity status when mounting the view.
For example, on initial page render, the view is mounted statically,
rendered, and the HTML is sent to the client. Once the client
connects to the server, a LiveView is then spawned and mounted
statefully within a process. Use `connected?/1` to conditionally
perform stateful work, such as subscribing to pubsub topics,
sending messages, etc.
## Examples
defmodule DemoWeb.ClockLive do
use Phoenix.LiveView
...
def mount(_params, _session, socket) do
if connected?(socket), do: :timer.send_interval(1000, self(), :tick)
{:ok, assign(socket, date: :calendar.local_time())}
end
def handle_info(:tick, socket) do
{:noreply, assign(socket, date: :calendar.local_time())}
end
end
"""
def connected?(%Socket{connected?: connected?}), do: connected?
@doc """
Assigns a value into the socket only if it does not exist.
Useful for lazily assigning values and referencing parent assigns.
## Referencing parent assigns
When a LiveView is mounted in a disconnected state, the Plug.Conn assigns
will be available for reference via `assign_new/3`, allowing assigns to
be shared for the initial HTTP request. On connected mount, the `assign_new/3`
would be invoked, and the LiveView would use its session to rebuild the
originally shared assign. Likewise, nested LiveView children have access
to their parent's assigns on mount using `assign_new`, which allows
assigns to be shared down the nested LiveView tree.
## Examples
# controller
conn
|> assign(:current_user, user)
|> LiveView.Controller.live_render(MyLive, session: %{"user_id" => user.id})
# LiveView mount
def mount(_params, %{"user_id" => user_id}, socket) do
{:ok, assign_new(socket, :current_user, fn -> Accounts.get_user!(user_id) end)}
end
"""
def assign_new(%Socket{} = socket, key, func) when is_function(func, 0) do
case socket do
%{assigns: %{^key => _}} ->
socket
%{private: %{assigned_new: {assigns, keys}} = private} ->
# It is important to store the keys even if they are not in assigns
# because maybe the controller doesn't have it but the view does.
private = put_in(private.assigned_new, {assigns, [key | keys]})
assign_each(%{socket | private: private}, key, Map.get_lazy(assigns, key, func))
%{} ->
assign_each(socket, key, func.())
end
end
@doc """
Adds key-value pairs to socket assigns.
A single key-value pair may be passed, or a keyword list
of assigns may be provided to be merged into the existing
socket assigns.
## Examples
iex> assign(socket, :name, "Elixir")
iex> assign(socket, name: "Elixir", logo: "💧")
"""
def assign(%Socket{} = socket, key, value) do
assign(socket, [{key, value}])
end
@doc """
See `assign/3`.
"""
def assign(%Socket{} = socket, attrs) when is_map(attrs) or is_list(attrs) do
Enum.reduce(attrs, socket, fn {key, val}, acc ->
case Map.fetch(acc.assigns, key) do
{:ok, ^val} -> acc
{:ok, _old_val} -> assign_each(acc, key, val)
:error -> assign_each(acc, key, val)
end
end)
end
defp assign_each(%Socket{assigns: assigns, changed: changed} = acc, key, val) do
new_changed = Map.put(changed, key, true)
new_assigns = Map.put(assigns, key, val)
%Socket{acc | assigns: new_assigns, changed: new_changed}
end
@doc """
Updates an existing key in the socket assigns.
The update function receives the current key's value and
returns the updated value. Raises if the key does not exist.
## Examples
iex> update(socket, :count, fn count -> count + 1 end)
iex> update(socket, :count, &(&1 + 1))
"""
def update(%Socket{assigns: assigns} = socket, key, func) do
case Map.fetch(assigns, key) do
{:ok, val} -> assign(socket, [{key, func.(val)}])
:error -> raise KeyError, key: key, term: assigns
end
end
@doc """
Adds a flash message to the socket to be displayed on redirect.
*Note*: the `Phoenix.LiveView.Flash` plug must be plugged in
your browser's pipeline for flash to be supported, for example:
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug Phoenix.LiveView.Flash
...
end
## Examples
iex> put_flash(socket, :info, "It worked!")
iex> put_flash(socket, :error, "You can't access that page")
"""
defdelegate put_flash(socket, kind, msg), to: Phoenix.LiveView.Utils
@doc """
Annotates the socket for redirect to a destination path.
*Note*: LiveView redirects rely on instructing the client
to perform a `window.location` update on the provided
redirect location. The whole page will be reloaded and
all state will be discarded.
## Options
* `:to` - the path to redirect to. It must always be a local path
* `:external` - an external path to redirect to
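## Examples

{:noreply, redirect(socket, to: "/")}
{:noreply, redirect(socket, external: "https://example.com")}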
"""
def redirect(%Socket{} = socket, opts) do
assert_root_live_view!(socket, "redirect/2")
url =
cond do
to = opts[:to] -> validate_local_url!(to, "redirect/2")
external = opts[:external] -> external
true -> raise ArgumentError, "expected :to or :external option in redirect/2"
end
put_redirect(socket, {:redirect, %{to: url}})
end
@doc """
Annotates the socket for navigation within the current LiveView.
When navigating to the current LiveView, `c:handle_params/3` is
immediately invoked to handle the change of params and URL state.
Then the new state is pushed to the client, without reloading the
whole page. For live redirects to another LiveView, use
`push_redirect/2`.
## Options
* `:to` - the required path to link to. It must always be a local path
* `:replace` - the flag to replace the current history or push a new state.
Defaults to `false`.
## Examples
{:noreply, push_patch(socket, to: "/")}
{:noreply, push_patch(socket, to: "/", replace: true)}
"""
def push_patch(%Socket{} = socket, opts) do
%{to: to} = opts = push_opts!(socket, opts, "push_patch/2")
case Phoenix.LiveView.Utils.live_link_info!(socket.router, socket.view, to) do
{:internal, params, _parsed_uri} ->
put_redirect(socket, {:live, params, opts})
:external ->
raise ArgumentError,
"cannot push_patch/2 to #{inspect(to)} because the given path " <>
"does not point to the current view #{inspect(socket.view)}"
end
end
@doc """
Annotates the socket for navigation to another LiveView.
The current LiveView will be shut down and a new one will be mounted
in its place, without reloading the whole page. This can
also be used to remount the same LiveView, in case you want to start
fresh. If you want to navigate to the same LiveView without remounting
it, use `push_patch/2` instead.
## Options
* `:to` - the required path to link to. It must always be a local path
* `:replace` - the flag to replace the current history or push a new state.
Defaults to `false`.
## Examples
{:noreply, push_redirect(socket, to: "/")}
{:noreply, push_redirect(socket, to: "/", replace: true)}
"""
def push_redirect(%Socket{} = socket, opts) do
opts = push_opts!(socket, opts, "push_redirect/2")
put_redirect(socket, {:live, :redirect, opts})
end
defp push_opts!(socket, opts, context) do
assert_root_live_view!(socket, context)
to = Keyword.fetch!(opts, :to)
validate_local_url!(to, context)
kind = if opts[:replace], do: :replace, else: :push
%{to: to, kind: kind}
end
defp put_redirect(%Socket{redirected: nil} = socket, command) do
%Socket{socket | redirected: command}
end
defp put_redirect(%Socket{redirected: to} = _socket, _command) do
raise ArgumentError, "socket already prepared to redirect with #{inspect(to)}"
end
@invalid_local_url_chars ["\\"]
defp validate_local_url!("//" <> _ = to, where) do
raise_invalid_local_url!(to, where)
end
defp validate_local_url!("/" <> _ = to, where) do
if String.contains?(to, @invalid_local_url_chars) do
raise ArgumentError, "unsafe characters detected for #{where} in URL #{inspect(to)}"
else
to
end
end
defp validate_local_url!(to, where) do
raise_invalid_local_url!(to, where)
end
defp raise_invalid_local_url!(to, where) do
raise ArgumentError, "the :to option in #{where} expects a path but was #{inspect(to)}"
end
@doc """
Accesses the connect params sent by the client for use on connected mount.
Connect params are only sent when the client connects to the server and
only remain available during mount. `nil` is returned when called in a
disconnected state and a `RuntimeError` is raised if called after mount.
## Examples
def mount(_params, _session, socket) do
{:ok, assign(socket, width: get_connect_params(socket)["width"] || @width)}
end
"""
def get_connect_params(%Socket{private: private} = socket) do
cond do
connect_params = private[:connect_params] ->
if connected?(socket), do: connect_params, else: nil
child?(socket) ->
raise RuntimeError, """
attempted to read connect_params from a nested child LiveView #{inspect(socket.view)}.
Only the root LiveView has access to connect params.
"""
true ->
raise RuntimeError, """
attempted to read connect_params outside of #{inspect(socket.view)}.mount/3.
connect_params only exist while mounting. If you require access to this information
after mount, store the state in socket assigns.
"""
end
end
@doc """
Asynchronously updates a component with new assigns.
Requires a stateful component with a matching `:id` to send
the update to. Following the optional `preload/1` callback being invoked,
the updated values are merged with the component's assigns and `update/2`
is called for the updated component(s).
While a component may always be updated from the parent by updating some
parent assigns which will re-render the child, thus invoking `update/2` on
the child component, `send_update/2` is useful for updating a component
that entirely manages its own state, as well as messaging between components.
## Examples
def handle_event("cancel-order", _, socket) do
...
send_update(Cart, id: "cart", status: "cancelled")
{:noreply, socket}
end
"""
def send_update(module, assigns) do
assigns = Enum.into(assigns, %{})
id =
assigns[:id] ||
raise ArgumentError, "missing required :id in send_update. Got: #{inspect(assigns)}"
Phoenix.LiveView.Channel.send_update(module, id, assigns)
end
defp child?(%Socket{parent_pid: pid}), do: is_pid(pid)
defp assert_root_live_view!(%{parent_pid: nil}, _context),
do: :ok
defp assert_root_live_view!(_, context),
do: raise(ArgumentError, "cannot invoke #{context} from a child LiveView")
end
defprotocol Collectable do
@moduledoc """
A protocol to traverse data structures.
The `Enum.into/2` function uses this protocol to insert an
enumerable into a collection:
iex> Enum.into([a: 1, b: 2], %{})
%{a: 1, b: 2}
## Why Collectable?
The `Enumerable` protocol is useful to take values out of a collection.
In order to support a wide range of values, the functions provided by
the `Enumerable` protocol do not keep shape. For example, passing a
dictionary to `Enum.map/2` always returns a list.
This design is intentional. `Enumerable` was designed to support infinite
collections, resources and other structures with fixed shape. For example,
it doesn't make sense to insert values into a range, as it has a fixed
shape where just the range limits are stored.
The `Collectable` module was designed to fill the gap left by the
`Enumerable` protocol. `into/1` can be seen as the opposite of
`Enumerable.reduce/3`. If `Enumerable` is about taking values out,
`Collectable.into/1` is about collecting those values into a structure.
"""
@type command :: {:cont, term} | :done | :halt
@doc """
Returns a function that collects values alongside
the initial accumulation value.
The returned function receives a collectable and injects a given
value into it for every `{:cont, term}` instruction.
`:done` is passed when no further values will be injected, useful
for closing resources and normalizing values. A collectable must
be returned on `:done`.
If injection is suddenly interrupted, `:halt` is passed and it can
return any value, as it won't be used.
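For example, a list collector can be driven by hand (a sketch of the
protocol contract, matching the `List` implementation below):

{initial, collector} = Collectable.into([])
acc = collector.(initial, {:cont, 1})
acc = collector.(acc, {:cont, 2})
collector.(acc, :done)
#=> [1, 2]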
"""
@spec into(t) :: {term, (term, command -> t | term)}
def into(collectable)
end
defimpl Collectable, for: List do
def into(original) do
{[], fn
list, {:cont, x} -> [x|list]
list, :done -> original ++ :lists.reverse(list)
_, :halt -> :ok
end}
end
end
defimpl Collectable, for: BitString do
def into(original) do
{original, fn
acc, {:cont, x} when is_bitstring(x) -> [acc|x]
acc, :done -> IO.iodata_to_binary(acc)
_, :halt -> :ok
end}
end
end
defimpl Collectable, for: Map do
def into(original) do
{original, fn
map, {:cont, {k, v}} -> :maps.put(k, v, map)
map, :done -> map
_, :halt -> :ok
end}
end
end
defmodule VintageNet.NameResolver do
@moduledoc """
This module manages the contents of "/etc/resolv.conf".
This file is used by the C standard library and by Erlang for resolving
domain names. Since both C programs and Erlang can do resolution, debugging
problems in this area can be confusing due to varying behavior based on who's
resolving at the time. See the `/etc/erl_inetrc` file on the target to review
Erlang's configuration.
This module assumes exclusive ownership of "/etc/resolv.conf", so if any
other code in the system tries to modify the file, their changes will be lost
on the next update.
It is expected that each network interface provides a configuration. This
module tracks configurations per network interface so that the file reflects
which resolvers are currently available. Resolver order isn't handled.
"""
use GenServer
alias VintageNet.IP
alias VintageNet.Resolver.ResolvConf
require Logger
defmodule State do
@moduledoc false
defstruct [:path, :entries, :additional_name_servers]
end
@doc """
Start the resolv.conf manager.
Accepted args:
* `resolvconf` - path to the resolvconf file
* `additional_name_servers` - list of additional servers
"""
@spec start_link(keyword) :: GenServer.on_start()
def start_link(args) do
relevant_args = Keyword.take(args, [:resolvconf, :additional_name_servers])
GenServer.start_link(__MODULE__, relevant_args, name: __MODULE__)
end
@doc """
Stop the resolv.conf manager.
"""
@spec stop() :: :ok
def stop() do
GenServer.stop(__MODULE__)
end
@doc """
Set the search domain and name server list for the specified interface.
This replaces any entries in the `/etc/resolv.conf` for this interface.
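For example (the interface, domain, and server values are illustrative):

VintageNet.NameResolver.setup("eth0", "example.com", ["1.1.1.1"])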
"""
@spec setup(String.t(), String.t() | nil, [VintageNet.any_ip_address()]) :: :ok
def setup(ifname, domain, name_servers) do
GenServer.call(__MODULE__, {:setup, ifname, domain, name_servers})
end
@doc """
Clear all entries in "/etc/resolv.conf" that are associated with
the specified interface.
"""
@spec clear(String.t()) :: :ok
def clear(ifname) do
GenServer.call(__MODULE__, {:clear, ifname})
end
@doc """
Completely clear out "/etc/resolv.conf".
"""
@spec clear_all() :: :ok
def clear_all() do
GenServer.call(__MODULE__, :clear_all)
end
## GenServer
@impl GenServer
def init(args) do
resolvconf_path = Keyword.get(args, :resolvconf)
additional_name_servers =
Keyword.get(args, :additional_name_servers, [])
|> Enum.reduce([], &ip_to_tuple_safe/2)
|> Enum.reverse()
state = %State{
path: resolvconf_path,
entries: %{},
additional_name_servers: additional_name_servers
}
write_resolvconf(state)
{:ok, state}
end
@impl GenServer
def handle_call({:setup, ifname, domain, name_servers}, _from, state) do
servers = Enum.map(name_servers, &IP.ip_to_tuple!/1)
ifentry = %{domain: domain, name_servers: servers}
state = %{state | entries: Map.put(state.entries, ifname, ifentry)}
write_resolvconf(state)
{:reply, :ok, state}
end
@impl GenServer
def handle_call({:clear, ifname}, _from, state) do
state = %{state | entries: Map.delete(state.entries, ifname)}
write_resolvconf(state)
{:reply, :ok, state}
end
@impl GenServer
def handle_call(:clear_all, _from, state) do
state = %{state | entries: %{}}
write_resolvconf(state)
{:reply, :ok, state}
end
defp write_resolvconf(%State{
path: path,
entries: entries,
additional_name_servers: additional_name_servers
}) do
File.write!(path, ResolvConf.to_config(entries, additional_name_servers))
end
@spec ip_to_tuple_safe(VintageNet.any_ip_address(), [:inet.ip_address()]) :: [
:inet.ip_address()
]
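# Parse one IP address, accumulating successes (in reverse order) and
# logging anything that fails to parse.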
defp ip_to_tuple_safe(ip, acc) do
case IP.ip_to_tuple(ip) do
{:error, reason} ->
Logger.error("Failed to parse IP address: #{inspect(ip)} (#{reason})")
acc
{:ok, ip} ->
[ip | acc]
end
end
end
defmodule String do
@moduledoc %B"""
A String in Elixir is a UTF-8 encoded binary.
## String and binary operations
The functions in this module act according to the
Unicode Standard, version 6.2.0. For example,
`titlecase`, `downcase`, `strip` are provided by this
module.
Besides this module, Elixir provides more low-level
operations that work directly with binaries. Some
of those can be found in the `Kernel` module, as:
* `binary_part/2` and `binary_part/3` - retrieves part of the binary
* `bit_size/1` and `byte_size/1` - size related functions
* `is_bitstring/1` and `is_binary/1` - type checking function
* Plus a bunch of conversion functions, like `binary_to_atom/2`,
`binary_to_integer/2`, `binary_to_term/1` and their opposite
like `integer_to_binary/2`
Finally, [the `:binary` module](http://erlang.org/doc/man/binary.html)
provides a couple of other functions that work at the byte level.
## Codepoints and graphemes
As per the Unicode Standard, a codepoint is a Unicode
character, which may be represented by one or more bytes.
For example, the character "é" is represented with two
bytes:
iex> string = "é"
...> byte_size(string)
2
Furthermore, this module also presents the concept of
graphemes, which are multiple characters that may be
"perceived as a single character" by readers. For example,
the same "é" character written above could be represented
by the letter "e" followed by the accent ́:
iex> string = "\x{0065}\x{0301}"
...> byte_size(string)
3
Although the example above is made of two characters, it is
perceived by users as one.
Graphemes can also be two characters that are interpreted
as one by some languages. For example, some languages may
consider "ch" as a grapheme. However, since this information
depends on the locale, it is not taken into account by this
module.
In general, the functions in this module rely on the Unicode
Standard, but do not contain any of the locale-specific
behaviour.
## Integer codepoints
Although codepoints could be represented as integers, this
module represents all codepoints as strings. For example:
iex> String.codepoints("josé")
["j", "o", "s", "é"]
There are a couple of ways to retrieve a character integer
codepoint. One may use the `?` special macro:
iex> ?j
106
iex> ?é
233
Or also via pattern matching:
iex> << eacute :: utf8 >> = "é"
...> eacute
233
As we have seen above, codepoints can be inserted into
a string by their hexadecimal code:
"jos\x{0065}\x{0301}" #=>
"josé"
## Self-synchronization
The UTF-8 encoding is self-synchronizing. This means that
if malformed data (i.e., data that is not possible according
to the definition of the encoding) is encountered, only one
codepoint needs to be rejected.
This module relies on this behaviour to ignore such invalid
characters. For example, `String.length` is going to return
a correct result even if an invalid codepoint is fed into it.
In other words, this module expects invalid data to be detected
when retrieving data from the external source. For example, a
driver that reads strings from a database will be the one
responsible to check the validity of the encoding.
"""
@type t :: binary
@type codepoint :: t
@type grapheme :: t
@doc """
Checks if a string is printable considering it is encoded
as UTF-8. Returns true if so, false otherwise.
## Examples
iex> String.printable?("abc")
true
"""
@spec printable?(t) :: boolean
def printable?(<< h :: utf8, t :: binary >>)
when h in ?\040..?\176
when h in 0xA0..0xD7FF
when h in 0xE000..0xFFFD
when h in 0x10000..0x10FFFF do
printable?(t)
end
def printable?(<<?\n, t :: binary>>), do: printable?(t)
def printable?(<<?\r, t :: binary>>), do: printable?(t)
def printable?(<<?\t, t :: binary>>), do: printable?(t)
def printable?(<<?\v, t :: binary>>), do: printable?(t)
def printable?(<<?\b, t :: binary>>), do: printable?(t)
def printable?(<<?\f, t :: binary>>), do: printable?(t)
def printable?(<<?\e, t :: binary>>), do: printable?(t)
def printable?(<<?\a, t :: binary>>), do: printable?(t)
def printable?(<<>>), do: true
def printable?(_), do: false
@doc """
Splits a string into substrings at each Unicode whitespace
occurrence, with leading and trailing whitespace ignored.
## Examples
iex> String.split("foo bar")
["foo", "bar"]
iex> String.split("foo" <> <<194,133>> <> "bar")
["foo", "bar"]
iex> String.split(" foo bar ")
["foo", "bar"]
"""
@spec split(t) :: [t]
defdelegate split(binary), to: String.Unicode
@doc """
Divides a string into substrings based on a pattern,
returning a list of these substrings. The pattern can
be a string, a list of strings or a regular expression.
The string is split into as many parts as possible by
default, unless the `global` option is set to false.
## Examples
iex> String.split("a,b,c", ",")
["a", "b", "c"]
iex> String.split("a,b,c", ",", global: false)
["a", "b,c"]
iex> String.split("1,2 3,4", [" ", ","])
["1", "2", "3", "4"]
iex> String.split("a,b,c", %r{,})
["a", "b", "c"]
iex> String.split("a,b,c", %r{,}, global: false)
["a", "b,c"]
iex> String.split("a,b", %r{\\.})
["a,b"]
"""
@spec split(t, t | [t] | Regex.t) :: [t]
@spec split(t, t | [t] | Regex.t, Keyword.t) :: [t]
def split(binary, pattern, options // [])
def split(binary, pattern, options) when is_regex(pattern) do
Regex.split(pattern, binary, global: options[:global])
end
def split(binary, pattern, options) do
opts = if options[:global] != false, do: [:global], else: []
:binary.split(binary, pattern, opts)
end
@doc """
Convert all characters on the given string to upcase.
## Examples
iex> String.upcase("abcd")
"ABCD"
iex> String.upcase("ab 123 xpto")
"AB 123 XPTO"
iex> String.upcase("josé")
"JOSÉ"
"""
@spec upcase(t) :: t
defdelegate upcase(binary), to: String.Unicode
@doc """
Convert all characters on the given string to downcase.
## Examples
iex> String.downcase("ABCD")
"abcd"
iex> String.downcase("AB 123 XPTO")
"ab 123 xpto"
iex> String.downcase("JOSÉ")
"josé"
"""
@spec downcase(t) :: t
defdelegate downcase(binary), to: String.Unicode
@doc """
Converts the first character in the given string to
titlecase and the remaining to downcase.
This relies on the titlecase information provided
by the Unicode Standard. Note this function makes
no attempt in capitalizing all words in the string
(usually known as titlecase).
## Examples
iex> String.capitalize("abcd")
"Abcd"
iex> String.capitalize("fin")
"Fin"
iex> String.capitalize("josé")
"José"
"""
@spec capitalize(t) :: t
def capitalize(string) when is_binary(string) do
{ char, rest } = String.Unicode.titlecase_once(string)
char <> downcase(rest)
end
@doc """
Returns a string where trailing Unicode whitespace
has been removed.
## Examples
iex> String.rstrip(" abc ")
" abc"
"""
@spec rstrip(t) :: t
defdelegate rstrip(binary), to: String.Unicode
@doc """
Returns a string where trailing occurrences of `char` have been removed.
## Examples
iex> String.rstrip(" abc _", ?_)
" abc "
"""
@spec rstrip(t, char) :: t
def rstrip("", _char), do: ""
# Do a quick check before we traverse the whole
# binary. :binary.last is a fast operation (it
# does not traverse the whole binary).
def rstrip(string, char) do
if :binary.last(string) == char do
do_rstrip(string, "", char)
else
string
end
end
defp do_rstrip(<<char, string :: binary>>, buffer, char) do
do_rstrip(string, <<char, buffer :: binary>>, char)
end
defp do_rstrip(<<char, string :: binary>>, buffer, another_char) do
<<buffer :: binary, char, do_rstrip(string, "", another_char) :: binary>>
end
defp do_rstrip(<<>>, _, _) do
<<>>
end
@doc """
Returns a string where leading Unicode whitespace
has been removed.
## Examples
iex> String.lstrip(" abc ")
"abc "
"""
defdelegate lstrip(binary), to: String.Unicode
@doc """
Returns a string where leading occurrences of `char` have been removed.
## Examples
iex> String.lstrip("_ abc _", ?_)
" abc _"
"""
@spec lstrip(t, char) :: t
def lstrip(<<char, rest :: binary>>, char) do
<<lstrip(rest, char) :: binary>>
end
def lstrip(other, _char) do
other
end
@doc """
Returns a string where leading/trailing Unicode whitespace
has been removed.
## Examples
iex> String.strip(" abc ")
"abc"
"""
@spec strip(t) :: t
def strip(string) do
rstrip(lstrip(string))
end
@doc """
Returns a string where leading/trailing occurrences of `char` have been
removed.
## Examples
iex> String.strip("a abc a", ?a)
" abc "
"""
@spec strip(t, char) :: t
def strip(string, char) do
rstrip(lstrip(string, char), char)
end
@doc """
Returns a new binary based on `subject` by replacing the parts
matching `pattern` for `replacement`. By default, it replaces
all entries, except if the `global` option is set to false.
If the replaced part must be used in `replacement`, then the
position or the positions where it is to be inserted must be
specified by using the option `insert_replaced`.
## Examples
iex> String.replace("a,b,c", ",", "-")
"a-b-c"
iex> String.replace("a,b,c", ",", "-", global: false)
"a-b,c"
iex> String.replace("a,b,c", "b", "[]", insert_replaced: 1)
"a,[b],c"
iex> String.replace("a,b,c", ",", "[]", insert_replaced: 2)
"a[],b[],c"
iex> String.replace("a,b,c", ",", "[]", insert_replaced: [1,1])
"a[,,]b[,,]c"
"""
@spec replace(t, t, t) :: t
@spec replace(t, t, t, Keyword.t) :: t
def replace(subject, pattern, replacement, options // []) do
opts = translate_replace_options(options)
:binary.replace(subject, pattern, replacement, opts)
end
defp translate_replace_options(options) do
opts = if options[:global] != false, do: [:global], else: []
if insert = options[:insert_replaced] do
opts = [{:insert_replaced,insert}|opts]
end
opts
end
@doc """
Returns a binary `subject` duplicated `n` times.
## Examples
iex> String.duplicate("abc", 1)
"abc"
iex> String.duplicate("abc", 2)
"abcabc"
"""
@spec duplicate(t, pos_integer) :: t
def duplicate(subject, n) when is_integer(n) and n > 0 do
:binary.copy(subject, n)
end
@doc """
Returns all codepoints in the string.
## Examples
iex> String.codepoints("josé")
["j", "o", "s", "é"]
iex> String.codepoints("оптими зации")
["о","п","т","и","м","и"," ","з","а","ц","и","и"]
iex> String.codepoints("ἅἪῼ")
["ἅ","Ἢ","ῼ"]
"""
@spec codepoints(t) :: [codepoint]
defdelegate codepoints(string), to: String.Unicode
@doc """
Returns the next codepoint in a String.
The result is a tuple with the codepoint and the
remainder of the string, or `:no_codepoint` in case
the string has reached its end.
As the other functions in the String module, this
function does not check for the validity of the codepoint.
That said, if an invalid codepoint is found, it will
be returned by this function.
## Examples
iex> String.next_codepoint("josé")
{ "j", "osé" }
"""
@spec next_codepoint(t) :: {codepoint, t} | :no_codepoint
defdelegate next_codepoint(string), to: String.Unicode
@doc %B"""
Checks whether `str` contains only valid characters.
## Examples
iex> String.valid?("a")
true
iex> String.valid?("ø")
true
iex> String.valid?(<<0xffff :: 16>>)
false
iex> String.valid?("asd" <> <<0xffff :: 16>>)
false
"""
@spec valid?(t) :: boolean
noncharacters = Enum.to_list(?\x{FDD0}..?\x{FDEF}) ++
[ ?\x{0FFFE}, ?\x{0FFFF}, ?\x{1FFFE}, ?\x{1FFFF}, ?\x{2FFFE}, ?\x{2FFFF},
?\x{3FFFE}, ?\x{3FFFF}, ?\x{4FFFE}, ?\x{4FFFF}, ?\x{5FFFE}, ?\x{5FFFF},
?\x{6FFFE}, ?\x{6FFFF}, ?\x{7FFFE}, ?\x{7FFFF}, ?\x{8FFFE}, ?\x{8FFFF},
?\x{9FFFE}, ?\x{9FFFF}, ?\x{10FFFE}, ?\x{10FFFF} ]
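# Define a `valid?/1` clause for each noncharacter codepoint at compile time
# (`lc ... inlist ... do` is the old-style list comprehension).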
lc noncharacter inlist noncharacters do
def valid?(<< unquote(noncharacter) :: utf8, _ :: binary >>), do: false
end
def valid?(<<_ :: utf8, t :: binary>>), do: valid?(t)
def valid?(<<>>), do: true
def valid?(_), do: false
@doc %B"""
Checks whether `str` is a valid character.
All characters are codepoints, but some codepoints
are not valid characters. They may be reserved, private,
or other.
More info at: http://en.wikipedia.org/wiki/Mapping_of_Unicode_characters#Noncharacters
## Examples
iex> String.valid_character?("a")
true
iex> String.valid_character?("ø")
true
iex> String.valid_character?("\x{ffff}")
false
"""
@spec valid_character?(t) :: boolean
def valid_character?(<<_ :: utf8>> = codepoint), do: valid?(codepoint)
def valid_character?(_), do: false
@doc %B"""
Checks whether `str` is a valid codepoint.
Note that the empty string is considered invalid, as are
strings containing multiple codepoints.
## Examples
iex> String.valid_codepoint?("a")
true
iex> String.valid_codepoint?("ø")
true
iex> String.valid_codepoint?(<<0xffff :: 16>>)
false
iex> String.valid_codepoint?("asdf")
false
"""
@spec valid_codepoint?(codepoint) :: boolean
def valid_codepoint?(<<_ :: utf8>>), do: true
def valid_codepoint?(_), do: false
@doc """
Returns unicode graphemes in the string.
## Examples
iex> String.graphemes("Ā̀stute")
["Ā̀","s","t","u","t","e"]
"""
@spec graphemes(t) :: [grapheme]
defdelegate graphemes(string), to: String.Unicode
@doc """
Returns the next grapheme in a String.
The result is a tuple with the grapheme and the
remainder of the string, or `:no_grapheme` in case
the string has reached its end.
## Examples
iex> String.next_grapheme("josé")
{ "j", "osé" }
"""
@spec next_grapheme(t) :: { grapheme, t } | :no_grapheme
defdelegate next_grapheme(string), to: String.Unicode
@doc """
Returns the first grapheme from an utf8 string,
nil if the string is empty.
## Examples
iex> String.first("elixir")
"e"
iex> String.first("եոգլի")
"ե"
"""
@spec first(t) :: grapheme | nil
def first(string) do
case next_grapheme(string) do
{ char, _ } -> char
:no_grapheme -> nil
end
end
@doc """
Returns the last grapheme from an utf8 string,
nil if the string is empty.
## Examples
iex> String.last("elixir")
"r"
iex> String.last("եոգլի")
"ի"
"""
@spec last(t) :: grapheme | nil
def last(string) do
do_last(next_grapheme(string), nil)
end
defp do_last({char, rest}, _) do
do_last(next_grapheme(rest), char)
end
defp do_last(:no_grapheme, last_char), do: last_char
@doc """
Returns the number of unicode graphemes in an utf8 string.
## Examples
iex> String.length("elixir")
6
iex> String.length("եոգլի")
5
"""
@spec length(t) :: non_neg_integer
def length(string) do
do_length(next_grapheme(string))
end
defp do_length({_, rest}) do
1 + do_length(next_grapheme(rest))
end
defp do_length(:no_grapheme), do: 0
@doc """
Returns the grapheme at the given `position` in the utf8 `string`.
If `position` is greater than the `string` length, it returns `nil`.
## Examples
iex> String.at("elixir", 0)
"e"
iex> String.at("elixir", 1)
"l"
iex> String.at("elixir", 10)
nil
iex> String.at("elixir", -1)
"r"
iex> String.at("elixir", -10)
nil
"""
@spec at(t, integer) :: grapheme | nil
def at(string, position) when position >= 0 do
do_at(next_grapheme(string), position, 0)
end
def at(string, position) when position < 0 do
real_pos = do_length(next_grapheme(string)) - abs(position)
case real_pos >= 0 do
true -> do_at(next_grapheme(string), real_pos, 0)
false -> nil
end
end
defp do_at({_ , rest}, desired_pos, current_pos) when desired_pos > current_pos do
do_at(next_grapheme(rest), desired_pos, current_pos + 1)
end
defp do_at({char, _}, desired_pos, current_pos) when desired_pos == current_pos do
char
end
defp do_at(:no_grapheme, _, _), do: nil
@doc """
Returns a substring starting at the offset given by the first argument,
with length given by the second.
If the offset is greater than the string length, it returns nil.
## Examples
iex> String.slice("elixir", 1, 3)
"lix"
iex> String.slice("elixir", 1, 10)
"lixir"
iex> String.slice("elixir", 10, 3)
nil
iex> String.slice("elixir", -4, 4)
"ixir"
iex> String.slice("elixir", -10, 3)
nil
iex> String.slice("a", 0, 1500)
"a"
iex> String.slice("a", 1, 1500)
""
iex> String.slice("a", 2, 1500)
nil
"""
@spec slice(t, integer, integer) :: t | nil
def slice(string, start, len) when start >= 0 do
do_slice(next_grapheme(string), start, start + len - 1, 0, "")
end
def slice(string, start, len) when start < 0 do
real_start_pos = do_length(next_grapheme(string)) - abs(start)
case real_start_pos >= 0 do
true -> do_slice(next_grapheme(string), real_start_pos, real_start_pos + len - 1, 0, "")
false -> nil
end
end
defp do_slice(_, start_pos, last_pos, _, _) when start_pos > last_pos do
nil
end
defp do_slice({_, rest}, start_pos, last_pos, current_pos, acc) when current_pos < start_pos do
do_slice(next_grapheme(rest), start_pos, last_pos, current_pos + 1, acc)
end
defp do_slice({char, rest}, start_pos, last_pos, current_pos, acc) when current_pos >= start_pos and current_pos < last_pos do
do_slice(next_grapheme(rest), start_pos, last_pos, current_pos + 1, acc <> char)
end
defp do_slice({char, _}, start_pos, last_pos, current_pos, acc) when current_pos >= start_pos and current_pos == last_pos do
acc <> char
end
defp do_slice(:no_grapheme, start_pos, _, current_pos, acc) when start_pos == current_pos do
acc
end
defp do_slice(:no_grapheme, _, _, _, acc) do
case acc do
"" -> nil
_ -> acc
end
end
@doc """
Converts a string to an integer. If successful, returns a
tuple of the form {integer, remainder of string}. If unsuccessful,
returns :error.
## Examples
iex> String.to_integer("34")
{34,""}
iex> String.to_integer("34.5")
{34,".5"}
iex> String.to_integer("three")
:error
"""
@spec to_integer(t) :: {integer, t} | :error
def to_integer(string) do
{result, remainder} = :string.to_integer(binary_to_list(string))
case result do
:error -> :error
_ -> {result, list_to_binary(remainder)}
end
end
@doc """
Converts a string to a float. If successful, returns a
tuple of the form {float, remainder of string}. If unsuccessful,
returns :error. If the string contains only an integer, the
integer is converted and returned as a float.
## Examples
iex> String.to_float("34")
{34.0,""}
iex> String.to_float("34.25")
{34.25,""}
iex> String.to_float("56.5xyz")
{56.5,"xyz"}
iex> String.to_float("pi")
:error
"""
@spec to_float(t) :: {float, t} | :error
def to_float(string) do
charlist = binary_to_list(string)
{result, remainder} = :string.to_float(charlist)
case result do
:error ->
{int_result, int_remainder} = :string.to_integer(charlist)
case int_result do
:error -> :error
_ -> {float(int_result), list_to_binary(int_remainder)}
end
_ -> {result, list_to_binary(remainder)}
end
end
end
defmodule Google.Protobuf.FileDescriptorSet do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
file: []
]
@type t :: %__MODULE__{
file: [Google.Protobuf.FileDescriptorProto.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.repeated_unpacked_field(:struct, data.file, <<10>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.struct_field(Google.Protobuf.FileDescriptorProto, :file, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
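# A field key varint packs the tag in the high bits and the wire type in
# the low three bits, so unpack both halves below.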
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:file, v}, acc -> Map.update(acc, :file, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :file, Elixir.Enum.reverse(struct.file))
struct
end
end
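# Example round trip for the module above (a sketch; the empty file list is
# illustrative):
#
#     set = Google.Protobuf.FileDescriptorSet.new(file: [])
#     bin = Google.Protobuf.FileDescriptorSet.encode!(set)
#     {:ok, ^set} = Google.Protobuf.FileDescriptorSet.decode(bin)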
defmodule Google.Protobuf.FileDescriptorProto do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name: nil,
package: nil,
dependency: [],
message_type: [],
enum_type: [],
service: [],
extension: [],
options: nil,
source_code_info: nil,
public_dependency: [],
weak_dependency: [],
syntax: nil
]
@type t :: %__MODULE__{
name: String.t,
package: String.t,
dependency: [String.t],
message_type: [Google.Protobuf.DescriptorProto.t],
enum_type: [Google.Protobuf.EnumDescriptorProto.t],
service: [Google.Protobuf.ServiceDescriptorProto.t],
extension: [Google.Protobuf.FieldDescriptorProto.t],
options: Google.Protobuf.FileOptions.t,
source_code_info: Google.Protobuf.SourceCodeInfo.t,
public_dependency: [integer],
weak_dependency: [integer],
syntax: String.t
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.name, <<10>>),
Encoder.field(:string, data.package, <<18>>),
Encoder.repeated_unpacked_field(:string, data.dependency, <<26>>),
Encoder.repeated_unpacked_field(:struct, data.message_type, <<34>>),
Encoder.repeated_unpacked_field(:struct, data.enum_type, <<42>>),
Encoder.repeated_unpacked_field(:struct, data.service, <<50>>),
Encoder.repeated_unpacked_field(:struct, data.extension, <<58>>),
Encoder.field(:struct, data.options, <<66>>),
Encoder.field(:struct, data.source_code_info, <<74>>),
Encoder.repeated_unpacked_field(:int32, data.public_dependency, <<80>>),
Encoder.repeated_unpacked_field(:int32, data.weak_dependency, <<88>>),
Encoder.field(:string, data.syntax, <<98>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :name, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.field(:string, :package, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.field(:string, :dependency, acc, data)
end
def decode(acc, <<34, data::binary>>) do
Decoder.struct_field(Google.Protobuf.DescriptorProto, :message_type, acc, data)
end
def decode(acc, <<42, data::binary>>) do
Decoder.struct_field(Google.Protobuf.EnumDescriptorProto, :enum_type, acc, data)
end
def decode(acc, <<50, data::binary>>) do
Decoder.struct_field(Google.Protobuf.ServiceDescriptorProto, :service, acc, data)
end
def decode(acc, <<58, data::binary>>) do
Decoder.struct_field(Google.Protobuf.FieldDescriptorProto, :extension, acc, data)
end
def decode(acc, <<66, data::binary>>) do
Decoder.struct_field(Google.Protobuf.FileOptions, :options, acc, data)
end
def decode(acc, <<74, data::binary>>) do
Decoder.struct_field(Google.Protobuf.SourceCodeInfo, :source_code_info, acc, data)
end
def decode(acc, <<80, data::binary>>) do
Decoder.repeated_unpacked_field(:int32, :public_dependency, acc, data)
end
def decode(acc, <<88, data::binary>>) do
Decoder.repeated_unpacked_field(:int32, :weak_dependency, acc, data)
end
def decode(acc, <<98, data::binary>>) do
Decoder.field(:string, :syntax, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,4,5,6,7,8,9,10,11,12] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:weak_dependency, v}, acc -> Map.update(acc, :weak_dependency, [v], fn e -> [v | e] end)
{:public_dependency, v}, acc -> Map.update(acc, :public_dependency, [v], fn e -> [v | e] end)
{:extension, v}, acc -> Map.update(acc, :extension, [v], fn e -> [v | e] end)
{:service, v}, acc -> Map.update(acc, :service, [v], fn e -> [v | e] end)
{:enum_type, v}, acc -> Map.update(acc, :enum_type, [v], fn e -> [v | e] end)
{:message_type, v}, acc -> Map.update(acc, :message_type, [v], fn e -> [v | e] end)
{:dependency, v}, acc -> Map.update(acc, :dependency, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :weak_dependency, Elixir.Enum.reverse(struct.weak_dependency))
struct = Map.put(struct, :public_dependency, Elixir.Enum.reverse(struct.public_dependency))
struct = Map.put(struct, :extension, Elixir.Enum.reverse(struct.extension))
struct = Map.put(struct, :service, Elixir.Enum.reverse(struct.service))
struct = Map.put(struct, :enum_type, Elixir.Enum.reverse(struct.enum_type))
struct = Map.put(struct, :message_type, Elixir.Enum.reverse(struct.message_type))
struct = Map.put(struct, :dependency, Elixir.Enum.reverse(struct.dependency))
struct
end
end
defmodule Google.Protobuf.DescriptorProto do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name: nil,
field: [],
nested_type: [],
enum_type: [],
extension_range: [],
extension: [],
options: nil,
oneof_decl: [],
reserved_range: [],
reserved_name: []
]
@type t :: %__MODULE__{
name: String.t,
field: [Google.Protobuf.FieldDescriptorProto.t],
nested_type: [Google.Protobuf.DescriptorProto.t],
enum_type: [Google.Protobuf.EnumDescriptorProto.t],
extension_range: [Google.Protobuf.DescriptorProto.ExtensionRange.t],
extension: [Google.Protobuf.FieldDescriptorProto.t],
options: Google.Protobuf.MessageOptions.t,
oneof_decl: [Google.Protobuf.OneofDescriptorProto.t],
reserved_range: [Google.Protobuf.DescriptorProto.ReservedRange.t],
reserved_name: [String.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.name, <<10>>),
Encoder.repeated_unpacked_field(:struct, data.field, <<18>>),
Encoder.repeated_unpacked_field(:struct, data.nested_type, <<26>>),
Encoder.repeated_unpacked_field(:struct, data.enum_type, <<34>>),
Encoder.repeated_unpacked_field(:struct, data.extension_range, <<42>>),
Encoder.repeated_unpacked_field(:struct, data.extension, <<50>>),
Encoder.field(:struct, data.options, <<58>>),
Encoder.repeated_unpacked_field(:struct, data.oneof_decl, <<66>>),
Encoder.repeated_unpacked_field(:struct, data.reserved_range, <<74>>),
Encoder.repeated_unpacked_field(:string, data.reserved_name, <<82>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :name, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.struct_field(Google.Protobuf.FieldDescriptorProto, :field, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.struct_field(Google.Protobuf.DescriptorProto, :nested_type, acc, data)
end
def decode(acc, <<34, data::binary>>) do
Decoder.struct_field(Google.Protobuf.EnumDescriptorProto, :enum_type, acc, data)
end
def decode(acc, <<42, data::binary>>) do
Decoder.struct_field(Google.Protobuf.DescriptorProto.ExtensionRange, :extension_range, acc, data)
end
def decode(acc, <<50, data::binary>>) do
Decoder.struct_field(Google.Protobuf.FieldDescriptorProto, :extension, acc, data)
end
def decode(acc, <<58, data::binary>>) do
Decoder.struct_field(Google.Protobuf.MessageOptions, :options, acc, data)
end
def decode(acc, <<66, data::binary>>) do
Decoder.struct_field(Google.Protobuf.OneofDescriptorProto, :oneof_decl, acc, data)
end
def decode(acc, <<74, data::binary>>) do
Decoder.struct_field(Google.Protobuf.DescriptorProto.ReservedRange, :reserved_range, acc, data)
end
def decode(acc, <<82, data::binary>>) do
Decoder.field(:string, :reserved_name, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,4,5,6,7,8,9,10] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:reserved_name, v}, acc -> Map.update(acc, :reserved_name, [v], fn e -> [v | e] end)
{:reserved_range, v}, acc -> Map.update(acc, :reserved_range, [v], fn e -> [v | e] end)
{:oneof_decl, v}, acc -> Map.update(acc, :oneof_decl, [v], fn e -> [v | e] end)
{:extension, v}, acc -> Map.update(acc, :extension, [v], fn e -> [v | e] end)
{:extension_range, v}, acc -> Map.update(acc, :extension_range, [v], fn e -> [v | e] end)
{:enum_type, v}, acc -> Map.update(acc, :enum_type, [v], fn e -> [v | e] end)
{:nested_type, v}, acc -> Map.update(acc, :nested_type, [v], fn e -> [v | e] end)
{:field, v}, acc -> Map.update(acc, :field, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :reserved_name, Elixir.Enum.reverse(struct.reserved_name))
struct = Map.put(struct, :reserved_range, Elixir.Enum.reverse(struct.reserved_range))
struct = Map.put(struct, :oneof_decl, Elixir.Enum.reverse(struct.oneof_decl))
struct = Map.put(struct, :extension, Elixir.Enum.reverse(struct.extension))
struct = Map.put(struct, :extension_range, Elixir.Enum.reverse(struct.extension_range))
struct = Map.put(struct, :enum_type, Elixir.Enum.reverse(struct.enum_type))
struct = Map.put(struct, :nested_type, Elixir.Enum.reverse(struct.nested_type))
struct = Map.put(struct, :field, Elixir.Enum.reverse(struct.field))
struct
end
end
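# A minimal round-trip sketch for the generated API above (hypothetical helper
# module, not part of the generator output): build a descriptor, encode it to
# binary, and decode it back.
defmodule Pbuf.Examples.DescriptorRoundTrip do
  @moduledoc false

  def run do
    proto =
      Google.Protobuf.DescriptorProto.new(
        name: "Point",
        reserved_name: ["legacy_field"]
      )

    binary = Google.Protobuf.DescriptorProto.encode!(proto)
    {:ok, decoded} = Google.Protobuf.DescriptorProto.decode(binary)
    decoded.name == "Point" and decoded.reserved_name == ["legacy_field"]
  end
end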
defmodule Google.Protobuf.DescriptorProto.ExtensionRange do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
start: nil,
end: nil,
options: nil
]
@type t :: %__MODULE__{
start: integer,
end: integer,
options: Google.Protobuf.ExtensionRangeOptions.t
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:int32, data.start, <<8>>),
      # `end` is a reserved word in Elixir and cannot be read with dot syntax
      Encoder.field(:int32, Map.get(data, :end), <<16>>),
      Encoder.field(:struct, data.options, <<26>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:int32, :start, acc, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.field(:int32, :end, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.struct_field(Google.Protobuf.ExtensionRangeOptions, :options, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.DescriptorProto.ReservedRange do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
start: nil,
end: nil
]
@type t :: %__MODULE__{
start: integer,
end: integer
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:int32, data.start, <<8>>),
      Encoder.field(:int32, Map.get(data, :end), <<16>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:int32, :start, acc, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.field(:int32, :end, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
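# Note on the `end` field above: `end` is a reserved word in Elixir, so the
# encoder reads it with Map.get/2 instead of dot access. It remains an ordinary
# key when building the struct, e.g. (hypothetical values):
#
#   range = Google.Protobuf.DescriptorProto.ReservedRange.new(start: 1, end: 5)
#   Google.Protobuf.DescriptorProto.ReservedRange.encode!(range)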
defmodule Google.Protobuf.ExtensionRangeOptions do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
uninterpreted_option: []
]
@type t :: %__MODULE__{
uninterpreted_option: [Google.Protobuf.UninterpretedOption.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
      Encoder.repeated_unpacked_field(:struct, data.uninterpreted_option, <<186, 62>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<186, 62, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption, :uninterpreted_option, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [999] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:uninterpreted_option, v}, acc -> Map.update(acc, :uninterpreted_option, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :uninterpreted_option, Elixir.Enum.reverse(struct.uninterpreted_option))
struct
end
end
defmodule Google.Protobuf.FieldDescriptorProto do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name: nil,
extendee: nil,
number: nil,
    # Label and Type define no zero enum value, so default to nil here; nil is
    # assumed to be skipped by the encoder like any other unset optional field
    label: nil,
    type: nil,
type_name: nil,
default_value: nil,
options: nil,
oneof_index: nil,
json_name: nil
]
@type t :: %__MODULE__{
name: String.t,
extendee: String.t,
number: integer,
label: Google.Protobuf.FieldDescriptorProto.Label.t,
type: Google.Protobuf.FieldDescriptorProto.Type.t,
type_name: String.t,
default_value: String.t,
options: Google.Protobuf.FieldOptions.t,
oneof_index: integer,
json_name: String.t
}
defmodule Label do
@moduledoc false
@type t :: :LABEL_OPTIONAL | 1 | :LABEL_REQUIRED | 2 | :LABEL_REPEATED | 3
@spec to_int(t | non_neg_integer) :: integer
def to_int(:LABEL_OPTIONAL), do: 1
def to_int(1), do: 1
def to_int(:LABEL_REPEATED), do: 3
def to_int(3), do: 3
def to_int(:LABEL_REQUIRED), do: 2
def to_int(2), do: 2
def to_int(invalid) do
raise Pbuf.Encoder.Error,
type: __MODULE__,
value: invalid,
tag: nil,
message: "#{inspect(invalid)} is not a valid enum value for #{__MODULE__}"
end
@spec from_int(integer) :: t
def from_int(1), do: :LABEL_OPTIONAL
def from_int(3), do: :LABEL_REPEATED
def from_int(2), do: :LABEL_REQUIRED
def from_int(_unknown), do: :invalid
end
defmodule Type do
@moduledoc false
@type t :: :TYPE_DOUBLE | 1 | :TYPE_FLOAT | 2 | :TYPE_INT64 | 3 | :TYPE_UINT64 | 4 | :TYPE_INT32 | 5 | :TYPE_FIXED64 | 6 | :TYPE_FIXED32 | 7 | :TYPE_BOOL | 8 | :TYPE_STRING | 9 | :TYPE_GROUP | 10 | :TYPE_MESSAGE | 11 | :TYPE_BYTES | 12 | :TYPE_UINT32 | 13 | :TYPE_ENUM | 14 | :TYPE_SFIXED32 | 15 | :TYPE_SFIXED64 | 16 | :TYPE_SINT32 | 17 | :TYPE_SINT64 | 18
@spec to_int(t | non_neg_integer) :: integer
def to_int(:TYPE_BOOL), do: 8
def to_int(8), do: 8
def to_int(:TYPE_BYTES), do: 12
def to_int(12), do: 12
def to_int(:TYPE_DOUBLE), do: 1
def to_int(1), do: 1
def to_int(:TYPE_ENUM), do: 14
def to_int(14), do: 14
def to_int(:TYPE_FIXED32), do: 7
def to_int(7), do: 7
def to_int(:TYPE_FIXED64), do: 6
def to_int(6), do: 6
def to_int(:TYPE_FLOAT), do: 2
def to_int(2), do: 2
def to_int(:TYPE_GROUP), do: 10
def to_int(10), do: 10
def to_int(:TYPE_INT32), do: 5
def to_int(5), do: 5
def to_int(:TYPE_INT64), do: 3
def to_int(3), do: 3
def to_int(:TYPE_MESSAGE), do: 11
def to_int(11), do: 11
def to_int(:TYPE_SFIXED32), do: 15
def to_int(15), do: 15
def to_int(:TYPE_SFIXED64), do: 16
def to_int(16), do: 16
def to_int(:TYPE_SINT32), do: 17
def to_int(17), do: 17
def to_int(:TYPE_SINT64), do: 18
def to_int(18), do: 18
def to_int(:TYPE_STRING), do: 9
def to_int(9), do: 9
def to_int(:TYPE_UINT32), do: 13
def to_int(13), do: 13
def to_int(:TYPE_UINT64), do: 4
def to_int(4), do: 4
def to_int(invalid) do
raise Pbuf.Encoder.Error,
type: __MODULE__,
value: invalid,
tag: nil,
message: "#{inspect(invalid)} is not a valid enum value for #{__MODULE__}"
end
@spec from_int(integer) :: t
def from_int(8), do: :TYPE_BOOL
def from_int(12), do: :TYPE_BYTES
def from_int(1), do: :TYPE_DOUBLE
def from_int(14), do: :TYPE_ENUM
def from_int(7), do: :TYPE_FIXED32
def from_int(6), do: :TYPE_FIXED64
def from_int(2), do: :TYPE_FLOAT
def from_int(10), do: :TYPE_GROUP
def from_int(5), do: :TYPE_INT32
def from_int(3), do: :TYPE_INT64
def from_int(11), do: :TYPE_MESSAGE
def from_int(15), do: :TYPE_SFIXED32
def from_int(16), do: :TYPE_SFIXED64
def from_int(17), do: :TYPE_SINT32
def from_int(18), do: :TYPE_SINT64
def from_int(9), do: :TYPE_STRING
def from_int(13), do: :TYPE_UINT32
def from_int(4), do: :TYPE_UINT64
def from_int(_unknown), do: :invalid
end
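  # Enum mapping sketch: to_int/1 accepts either the atom or its wire value and
  # raises on anything else, while from_int/1 maps unknown wire values to
  # :invalid rather than raising. For example:
  #
  #   Label.to_int(:LABEL_REPEATED) #=> 3
  #   Type.from_int(9)              #=> :TYPE_STRING
  #   Type.from_int(99)             #=> :invalid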
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.name, <<10>>),
Encoder.field(:string, data.extendee, <<18>>),
Encoder.field(:int32, data.number, <<24>>),
Encoder.enum_field(Google.Protobuf.FieldDescriptorProto.Label, data.label, <<32>>),
Encoder.enum_field(Google.Protobuf.FieldDescriptorProto.Type, data.type, <<40>>),
Encoder.field(:string, data.type_name, <<50>>),
Encoder.field(:string, data.default_value, <<58>>),
Encoder.field(:struct, data.options, <<66>>),
Encoder.field(:int32, data.oneof_index, <<72>>),
      Encoder.field(:string, data.json_name, <<82>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :name, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.field(:string, :extendee, acc, data)
end
def decode(acc, <<24, data::binary>>) do
Decoder.field(:int32, :number, acc, data)
end
def decode(acc, <<32, data::binary>>) do
Decoder.enum_field(Google.Protobuf.FieldDescriptorProto.Label, :label, acc, data)
end
def decode(acc, <<40, data::binary>>) do
Decoder.enum_field(Google.Protobuf.FieldDescriptorProto.Type, :type, acc, data)
end
def decode(acc, <<50, data::binary>>) do
Decoder.field(:string, :type_name, acc, data)
end
def decode(acc, <<58, data::binary>>) do
Decoder.field(:string, :default_value, acc, data)
end
def decode(acc, <<66, data::binary>>) do
Decoder.struct_field(Google.Protobuf.FieldOptions, :options, acc, data)
end
def decode(acc, <<72, data::binary>>) do
Decoder.field(:int32, :oneof_index, acc, data)
end
def decode(acc, <<82, data::binary>>) do
Decoder.field(:string, :json_name, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,4,5,6,7,8,9,10] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
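# Construction sketch for the message above (hypothetical helper module):
# enum-typed members accept either the atom or the integer form listed in
# Label/Type; Encoder.enum_field/3 is assumed to normalize via to_int/1 at
# encode time.
defmodule Pbuf.Examples.SampleField do
  @moduledoc false

  def sample do
    Google.Protobuf.FieldDescriptorProto.new(
      name: "id",
      number: 1,
      label: :LABEL_OPTIONAL,
      type: :TYPE_INT64
    )
  end
end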
defmodule Google.Protobuf.OneofDescriptorProto do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name: nil,
options: nil
]
@type t :: %__MODULE__{
name: String.t,
options: Google.Protobuf.OneofOptions.t
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.name, <<10>>),
      Encoder.field(:struct, data.options, <<18>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :name, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.struct_field(Google.Protobuf.OneofOptions, :options, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.EnumDescriptorProto do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name: nil,
value: [],
options: nil,
reserved_range: [],
reserved_name: []
]
@type t :: %__MODULE__{
name: String.t,
value: [Google.Protobuf.EnumValueDescriptorProto.t],
options: Google.Protobuf.EnumOptions.t,
reserved_range: [Google.Protobuf.EnumDescriptorProto.EnumReservedRange.t],
reserved_name: [String.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.name, <<10>>),
Encoder.repeated_unpacked_field(:struct, data.value, <<18>>),
Encoder.field(:struct, data.options, <<26>>),
Encoder.repeated_unpacked_field(:struct, data.reserved_range, <<34>>),
      Encoder.repeated_unpacked_field(:string, data.reserved_name, <<42>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :name, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.struct_field(Google.Protobuf.EnumValueDescriptorProto, :value, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.struct_field(Google.Protobuf.EnumOptions, :options, acc, data)
end
def decode(acc, <<34, data::binary>>) do
Decoder.struct_field(Google.Protobuf.EnumDescriptorProto.EnumReservedRange, :reserved_range, acc, data)
end
def decode(acc, <<42, data::binary>>) do
Decoder.field(:string, :reserved_name, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,4,5] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:reserved_name, v}, acc -> Map.update(acc, :reserved_name, [v], fn e -> [v | e] end)
{:reserved_range, v}, acc -> Map.update(acc, :reserved_range, [v], fn e -> [v | e] end)
{:value, v}, acc -> Map.update(acc, :value, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :reserved_name, Elixir.Enum.reverse(struct.reserved_name))
struct = Map.put(struct, :reserved_range, Elixir.Enum.reverse(struct.reserved_range))
struct = Map.put(struct, :value, Elixir.Enum.reverse(struct.value))
struct
end
end
defmodule Google.Protobuf.EnumDescriptorProto.EnumReservedRange do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
start: nil,
end: nil
]
@type t :: %__MODULE__{
start: integer,
end: integer
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:int32, data.start, <<8>>),
      Encoder.field(:int32, Map.get(data, :end), <<16>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:int32, :start, acc, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.field(:int32, :end, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.EnumValueDescriptorProto do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name: nil,
number: nil,
options: nil
]
@type t :: %__MODULE__{
name: String.t,
number: integer,
options: Google.Protobuf.EnumValueOptions.t
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.name, <<10>>),
Encoder.field(:int32, data.number, <<16>>),
      Encoder.field(:struct, data.options, <<26>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :name, acc, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.field(:int32, :number, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.struct_field(Google.Protobuf.EnumValueOptions, :options, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.ServiceDescriptorProto do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name: nil,
method: [],
options: nil
]
@type t :: %__MODULE__{
name: String.t,
method: [Google.Protobuf.MethodDescriptorProto.t],
options: Google.Protobuf.ServiceOptions.t
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.name, <<10>>),
Encoder.repeated_unpacked_field(:struct, data.method, <<18>>),
      Encoder.field(:struct, data.options, <<26>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :name, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.struct_field(Google.Protobuf.MethodDescriptorProto, :method, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.struct_field(Google.Protobuf.ServiceOptions, :options, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:method, v}, acc -> Map.update(acc, :method, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :method, Elixir.Enum.reverse(struct.method))
struct
end
end
defmodule Google.Protobuf.MethodDescriptorProto do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name: nil,
input_type: nil,
output_type: nil,
options: nil,
client_streaming: nil,
server_streaming: nil
]
@type t :: %__MODULE__{
name: String.t,
input_type: String.t,
output_type: String.t,
options: Google.Protobuf.MethodOptions.t,
client_streaming: boolean,
server_streaming: boolean
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.name, <<10>>),
Encoder.field(:string, data.input_type, <<18>>),
Encoder.field(:string, data.output_type, <<26>>),
Encoder.field(:struct, data.options, <<34>>),
Encoder.field(:bool, data.client_streaming, <<40>>),
      Encoder.field(:bool, data.server_streaming, <<48>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :name, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.field(:string, :input_type, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.field(:string, :output_type, acc, data)
end
def decode(acc, <<34, data::binary>>) do
Decoder.struct_field(Google.Protobuf.MethodOptions, :options, acc, data)
end
def decode(acc, <<40, data::binary>>) do
Decoder.field(:bool, :client_streaming, acc, data)
end
def decode(acc, <<48, data::binary>>) do
Decoder.field(:bool, :server_streaming, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,4,5,6] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.FileOptions do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
java_package: nil,
java_outer_classname: nil,
    optimize_for: :SPEED, # descriptor.proto declares [default = SPEED]
java_multiple_files: nil,
go_package: nil,
cc_generic_services: nil,
java_generic_services: nil,
py_generic_services: nil,
java_generate_equals_and_hash: nil,
deprecated: nil,
java_string_check_utf8: nil,
cc_enable_arenas: nil,
objc_class_prefix: nil,
csharp_namespace: nil,
swift_prefix: nil,
php_class_prefix: nil,
php_namespace: nil,
php_generic_services: nil,
php_metadata_namespace: nil,
ruby_package: nil,
uninterpreted_option: []
]
@type t :: %__MODULE__{
java_package: String.t,
java_outer_classname: String.t,
optimize_for: Google.Protobuf.FileOptions.OptimizeMode.t,
java_multiple_files: boolean,
go_package: String.t,
cc_generic_services: boolean,
java_generic_services: boolean,
py_generic_services: boolean,
java_generate_equals_and_hash: boolean,
deprecated: boolean,
java_string_check_utf8: boolean,
cc_enable_arenas: boolean,
objc_class_prefix: String.t,
csharp_namespace: String.t,
swift_prefix: String.t,
php_class_prefix: String.t,
php_namespace: String.t,
php_generic_services: boolean,
php_metadata_namespace: String.t,
ruby_package: String.t,
uninterpreted_option: [Google.Protobuf.UninterpretedOption.t]
}
defmodule OptimizeMode do
@moduledoc false
@type t :: :SPEED | 1 | :CODE_SIZE | 2 | :LITE_RUNTIME | 3
@spec to_int(t | non_neg_integer) :: integer
def to_int(:CODE_SIZE), do: 2
def to_int(2), do: 2
def to_int(:LITE_RUNTIME), do: 3
def to_int(3), do: 3
def to_int(:SPEED), do: 1
def to_int(1), do: 1
def to_int(invalid) do
raise Pbuf.Encoder.Error,
type: __MODULE__,
value: invalid,
tag: nil,
message: "#{inspect(invalid)} is not a valid enum value for #{__MODULE__}"
end
@spec from_int(integer) :: t
def from_int(2), do: :CODE_SIZE
def from_int(3), do: :LITE_RUNTIME
def from_int(1), do: :SPEED
def from_int(_unknown), do: :invalid
end
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.java_package, <<10>>),
Encoder.field(:string, data.java_outer_classname, <<66>>),
Encoder.enum_field(Google.Protobuf.FileOptions.OptimizeMode, data.optimize_for, <<72>>),
Encoder.field(:bool, data.java_multiple_files, <<80>>),
Encoder.field(:string, data.go_package, <<90>>),
Encoder.field(:bool, data.cc_generic_services, <<128, 1>>),
Encoder.field(:bool, data.java_generic_services, <<136, 1>>),
Encoder.field(:bool, data.py_generic_services, <<144, 1>>),
Encoder.field(:bool, data.java_generate_equals_and_hash, <<160, 1>>),
Encoder.field(:bool, data.deprecated, <<184, 1>>),
Encoder.field(:bool, data.java_string_check_utf8, <<216, 1>>),
Encoder.field(:bool, data.cc_enable_arenas, <<248, 1>>),
Encoder.field(:string, data.objc_class_prefix, <<162, 2>>),
Encoder.field(:string, data.csharp_namespace, <<170, 2>>),
Encoder.field(:string, data.swift_prefix, <<186, 2>>),
Encoder.field(:string, data.php_class_prefix, <<194, 2>>),
Encoder.field(:string, data.php_namespace, <<202, 2>>),
Encoder.field(:bool, data.php_generic_services, <<208, 2>>),
Encoder.field(:string, data.php_metadata_namespace, <<226, 2>>),
Encoder.field(:string, data.ruby_package, <<234, 2>>),
      Encoder.repeated_unpacked_field(:struct, data.uninterpreted_option, <<186, 62>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :java_package, acc, data)
end
def decode(acc, <<66, data::binary>>) do
Decoder.field(:string, :java_outer_classname, acc, data)
end
def decode(acc, <<72, data::binary>>) do
Decoder.enum_field(Google.Protobuf.FileOptions.OptimizeMode, :optimize_for, acc, data)
end
def decode(acc, <<80, data::binary>>) do
Decoder.field(:bool, :java_multiple_files, acc, data)
end
def decode(acc, <<90, data::binary>>) do
Decoder.field(:string, :go_package, acc, data)
end
def decode(acc, <<128, 1, data::binary>>) do
Decoder.field(:bool, :cc_generic_services, acc, data)
end
def decode(acc, <<136, 1, data::binary>>) do
Decoder.field(:bool, :java_generic_services, acc, data)
end
def decode(acc, <<144, 1, data::binary>>) do
Decoder.field(:bool, :py_generic_services, acc, data)
end
def decode(acc, <<160, 1, data::binary>>) do
Decoder.field(:bool, :java_generate_equals_and_hash, acc, data)
end
def decode(acc, <<184, 1, data::binary>>) do
Decoder.field(:bool, :deprecated, acc, data)
end
def decode(acc, <<216, 1, data::binary>>) do
Decoder.field(:bool, :java_string_check_utf8, acc, data)
end
def decode(acc, <<248, 1, data::binary>>) do
Decoder.field(:bool, :cc_enable_arenas, acc, data)
end
def decode(acc, <<162, 2, data::binary>>) do
Decoder.field(:string, :objc_class_prefix, acc, data)
end
def decode(acc, <<170, 2, data::binary>>) do
Decoder.field(:string, :csharp_namespace, acc, data)
end
def decode(acc, <<186, 2, data::binary>>) do
Decoder.field(:string, :swift_prefix, acc, data)
end
def decode(acc, <<194, 2, data::binary>>) do
Decoder.field(:string, :php_class_prefix, acc, data)
end
def decode(acc, <<202, 2, data::binary>>) do
Decoder.field(:string, :php_namespace, acc, data)
end
def decode(acc, <<208, 2, data::binary>>) do
Decoder.field(:bool, :php_generic_services, acc, data)
end
def decode(acc, <<226, 2, data::binary>>) do
Decoder.field(:string, :php_metadata_namespace, acc, data)
end
def decode(acc, <<234, 2, data::binary>>) do
Decoder.field(:string, :ruby_package, acc, data)
end
def decode(acc, <<186, 62, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption, :uninterpreted_option, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,8,9,10,11,16,17,18,20,23,27,31,36,37,39,40,41,42,44,45,999] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:uninterpreted_option, v}, acc -> Map.update(acc, :uninterpreted_option, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :uninterpreted_option, Elixir.Enum.reverse(struct.uninterpreted_option))
struct
end
end
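# Key-byte arithmetic for the multi-byte prefixes above: a field key is the
# varint of tag * 8 + wire_type. Field 16 (`cc_generic_services`, varint wire
# type 0) gives 16 * 8 = 128, which encodes as the two-byte varint <<128, 1>>;
# field 999 (`uninterpreted_option`, length-delimited wire type 2) gives
# 999 * 8 + 2 = 7994, which encodes as <<186, 62>>.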
defmodule Google.Protobuf.MessageOptions do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
message_set_wire_format: nil,
no_standard_descriptor_accessor: nil,
deprecated: nil,
map_entry: nil,
uninterpreted_option: []
]
@type t :: %__MODULE__{
message_set_wire_format: boolean,
no_standard_descriptor_accessor: boolean,
deprecated: boolean,
map_entry: boolean,
uninterpreted_option: [Google.Protobuf.UninterpretedOption.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:bool, data.message_set_wire_format, <<8>>),
Encoder.field(:bool, data.no_standard_descriptor_accessor, <<16>>),
Encoder.field(:bool, data.deprecated, <<24>>),
Encoder.field(:bool, data.map_entry, <<56>>),
      Encoder.repeated_unpacked_field(:struct, data.uninterpreted_option, <<186, 62>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:bool, :message_set_wire_format, acc, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.field(:bool, :no_standard_descriptor_accessor, acc, data)
end
def decode(acc, <<24, data::binary>>) do
Decoder.field(:bool, :deprecated, acc, data)
end
def decode(acc, <<56, data::binary>>) do
Decoder.field(:bool, :map_entry, acc, data)
end
def decode(acc, <<186, 62, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption, :uninterpreted_option, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,7,999] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:uninterpreted_option, v}, acc -> Map.update(acc, :uninterpreted_option, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :uninterpreted_option, Elixir.Enum.reverse(struct.uninterpreted_option))
struct
end
end
defmodule Google.Protobuf.FieldOptions do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
ctype: :STRING,
packed: nil,
deprecated: nil,
lazy: nil,
jstype: :JS_NORMAL,
weak: nil,
uninterpreted_option: []
]
@type t :: %__MODULE__{
ctype: Google.Protobuf.FieldOptions.CType.t,
packed: boolean,
deprecated: boolean,
lazy: boolean,
jstype: Google.Protobuf.FieldOptions.JSType.t,
weak: boolean,
uninterpreted_option: [Google.Protobuf.UninterpretedOption.t]
}
defmodule CType do
@moduledoc false
@type t :: :STRING | 0 | :CORD | 1 | :STRING_PIECE | 2
@spec to_int(t | non_neg_integer) :: integer
def to_int(:CORD), do: 1
def to_int(1), do: 1
def to_int(:STRING), do: 0
def to_int(0), do: 0
def to_int(:STRING_PIECE), do: 2
def to_int(2), do: 2
def to_int(invalid) do
raise Pbuf.Encoder.Error,
type: __MODULE__,
value: invalid,
tag: nil,
message: "#{inspect(invalid)} is not a valid enum value for #{__MODULE__}"
end
@spec from_int(integer) :: t
def from_int(1), do: :CORD
def from_int(0), do: :STRING
def from_int(2), do: :STRING_PIECE
def from_int(_unknown), do: :invalid
end
defmodule JSType do
@moduledoc false
@type t :: :JS_NORMAL | 0 | :JS_STRING | 1 | :JS_NUMBER | 2
@spec to_int(t | non_neg_integer) :: integer
def to_int(:JS_NORMAL), do: 0
def to_int(0), do: 0
def to_int(:JS_NUMBER), do: 2
def to_int(2), do: 2
def to_int(:JS_STRING), do: 1
def to_int(1), do: 1
def to_int(invalid) do
raise Pbuf.Encoder.Error,
type: __MODULE__,
value: invalid,
tag: nil,
message: "#{inspect(invalid)} is not a valid enum value for #{__MODULE__}"
end
@spec from_int(integer) :: t
def from_int(0), do: :JS_NORMAL
def from_int(2), do: :JS_NUMBER
def from_int(1), do: :JS_STRING
def from_int(_unknown), do: :invalid
end
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.enum_field(Google.Protobuf.FieldOptions.CType, data.ctype, <<8>>),
Encoder.field(:bool, data.packed, <<16>>),
Encoder.field(:bool, data.deprecated, <<24>>),
Encoder.field(:bool, data.lazy, <<40>>),
Encoder.enum_field(Google.Protobuf.FieldOptions.JSType, data.jstype, <<48>>),
Encoder.field(:bool, data.weak, <<80>>),
      Encoder.repeated_unpacked_field(:struct, data.uninterpreted_option, <<186, 62>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.enum_field(Google.Protobuf.FieldOptions.CType, :ctype, acc, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.field(:bool, :packed, acc, data)
end
def decode(acc, <<24, data::binary>>) do
Decoder.field(:bool, :deprecated, acc, data)
end
def decode(acc, <<40, data::binary>>) do
Decoder.field(:bool, :lazy, acc, data)
end
def decode(acc, <<48, data::binary>>) do
Decoder.enum_field(Google.Protobuf.FieldOptions.JSType, :jstype, acc, data)
end
def decode(acc, <<80, data::binary>>) do
Decoder.field(:bool, :weak, acc, data)
end
def decode(acc, <<186, 62, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption, :uninterpreted_option, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,5,6,10,999] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:uninterpreted_option, v}, acc -> Map.update(acc, :uninterpreted_option, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :uninterpreted_option, Elixir.Enum.reverse(struct.uninterpreted_option))
struct
end
end
defmodule Google.Protobuf.OneofOptions do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
uninterpreted_option: []
]
@type t :: %__MODULE__{
uninterpreted_option: [Google.Protobuf.UninterpretedOption.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
      Encoder.repeated_unpacked_field(:struct, data.uninterpreted_option, <<186, 62>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<186, 62, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption, :uninterpreted_option, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [999] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:uninterpreted_option, v}, acc -> Map.update(acc, :uninterpreted_option, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :uninterpreted_option, Elixir.Enum.reverse(struct.uninterpreted_option))
struct
end
end
defmodule Google.Protobuf.EnumOptions do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
allow_alias: nil,
deprecated: nil,
uninterpreted_option: []
]
@type t :: %__MODULE__{
allow_alias: boolean,
deprecated: boolean,
uninterpreted_option: [Google.Protobuf.UninterpretedOption.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:bool, data.allow_alias, <<16>>),
Encoder.field(:bool, data.deprecated, <<24>>),
      Encoder.repeated_unpacked_field(:struct, data.uninterpreted_option, <<186, 62>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.field(:bool, :allow_alias, acc, data)
end
def decode(acc, <<24, data::binary>>) do
Decoder.field(:bool, :deprecated, acc, data)
end
def decode(acc, <<186, 62, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption, :uninterpreted_option, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [2,3,999] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:uninterpreted_option, v}, acc -> Map.update(acc, :uninterpreted_option, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :uninterpreted_option, Elixir.Enum.reverse(struct.uninterpreted_option))
struct
end
end
defmodule Google.Protobuf.EnumValueOptions do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
deprecated: nil,
uninterpreted_option: []
]
@type t :: %__MODULE__{
deprecated: boolean,
uninterpreted_option: [Google.Protobuf.UninterpretedOption.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:bool, data.deprecated, <<8>>),
      Encoder.repeated_unpacked_field(:struct, data.uninterpreted_option, <<186, 62>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:bool, :deprecated, acc, data)
end
def decode(acc, <<186, 62, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption, :uninterpreted_option, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,999] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:uninterpreted_option, v}, acc -> Map.update(acc, :uninterpreted_option, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :uninterpreted_option, Elixir.Enum.reverse(struct.uninterpreted_option))
struct
end
end
defmodule Google.Protobuf.ServiceOptions do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
deprecated: nil,
uninterpreted_option: []
]
@type t :: %__MODULE__{
deprecated: boolean,
uninterpreted_option: [Google.Protobuf.UninterpretedOption.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:bool, data.deprecated, <<136, 2>>),
      Encoder.repeated_unpacked_field(:struct, data.uninterpreted_option, <<186, 62>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<136, 2, data::binary>>) do
Decoder.field(:bool, :deprecated, acc, data)
end
def decode(acc, <<186, 62, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption, :uninterpreted_option, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [33,999] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:uninterpreted_option, v}, acc -> Map.update(acc, :uninterpreted_option, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :uninterpreted_option, Elixir.Enum.reverse(struct.uninterpreted_option))
struct
end
end
defmodule Google.Protobuf.MethodOptions do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
deprecated: nil,
idempotency_level: :IDEMPOTENCY_UNKNOWN,
uninterpreted_option: []
]
@type t :: %__MODULE__{
deprecated: boolean,
idempotency_level: Google.Protobuf.MethodOptions.IdempotencyLevel.t,
uninterpreted_option: [Google.Protobuf.UninterpretedOption.t]
}
defmodule IdempotencyLevel do
@moduledoc false
@type t :: :IDEMPOTENCY_UNKNOWN | 0 | :NO_SIDE_EFFECTS | 1 | :IDEMPOTENT | 2
@spec to_int(t | non_neg_integer) :: integer
def to_int(:IDEMPOTENCY_UNKNOWN), do: 0
def to_int(0), do: 0
def to_int(:IDEMPOTENT), do: 2
def to_int(2), do: 2
def to_int(:NO_SIDE_EFFECTS), do: 1
def to_int(1), do: 1
def to_int(invalid) do
raise Pbuf.Encoder.Error,
type: __MODULE__,
value: invalid,
tag: nil,
message: "#{inspect(invalid)} is not a valid enum value for #{__MODULE__}"
end
@spec from_int(integer) :: t
def from_int(0), do: :IDEMPOTENCY_UNKNOWN
def from_int(2), do: :IDEMPOTENT
def from_int(1), do: :NO_SIDE_EFFECTS
def from_int(_unknown), do: :invalid
end
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:bool, data.deprecated, <<136, 2>>),
Encoder.enum_field(Google.Protobuf.MethodOptions.IdempotencyLevel, data.idempotency_level, <<144, 2>>),
      Encoder.repeated_unpacked_field(:struct, data.uninterpreted_option, <<186, 62>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<136, 2, data::binary>>) do
Decoder.field(:bool, :deprecated, acc, data)
end
def decode(acc, <<144, 2, data::binary>>) do
Decoder.enum_field(Google.Protobuf.MethodOptions.IdempotencyLevel, :idempotency_level, acc, data)
end
def decode(acc, <<186, 62, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption, :uninterpreted_option, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [33,34,999] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:uninterpreted_option, v}, acc -> Map.update(acc, :uninterpreted_option, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :uninterpreted_option, Elixir.Enum.reverse(struct.uninterpreted_option))
struct
end
end
defmodule Google.Protobuf.UninterpretedOption do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name: [],
identifier_value: nil,
positive_int_value: nil,
negative_int_value: nil,
double_value: nil,
string_value: nil,
aggregate_value: nil
]
@type t :: %__MODULE__{
name: [Google.Protobuf.UninterpretedOption.NamePart.t],
identifier_value: String.t,
positive_int_value: non_neg_integer,
negative_int_value: integer,
double_value: number,
string_value: binary,
aggregate_value: String.t
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.repeated_unpacked_field(:struct, data.name, <<18>>),
Encoder.field(:string, data.identifier_value, <<26>>),
Encoder.field(:uint64, data.positive_int_value, <<32>>),
Encoder.field(:int64, data.negative_int_value, <<40>>),
Encoder.field(:double, data.double_value, <<49>>),
Encoder.field(:bytes, data.string_value, <<58>>),
      Encoder.field(:string, data.aggregate_value, <<66>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.struct_field(Google.Protobuf.UninterpretedOption.NamePart, :name, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.field(:string, :identifier_value, acc, data)
end
def decode(acc, <<32, data::binary>>) do
Decoder.field(:uint64, :positive_int_value, acc, data)
end
def decode(acc, <<40, data::binary>>) do
Decoder.field(:int64, :negative_int_value, acc, data)
end
def decode(acc, <<49, data::binary>>) do
Decoder.field(:double, :double_value, acc, data)
end
def decode(acc, <<58, data::binary>>) do
Decoder.field(:bytes, :string_value, acc, data)
end
def decode(acc, <<66, data::binary>>) do
Decoder.field(:string, :aggregate_value, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [2,3,4,5,6,7,8] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:name, v}, acc -> Map.update(acc, :name, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :name, Elixir.Enum.reverse(struct.name))
struct
end
end
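# The <<49>> prefix above is the only 64-bit key in this file: `double_value`
# is field 6 with wire type 1, so its key byte is 6 * 8 + 1 = 49 and its
# payload is eight little-endian bytes rather than a varint or a
# length-delimited value.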
defmodule Google.Protobuf.UninterpretedOption.NamePart do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
name_part: nil,
is_extension: nil
]
@type t :: %__MODULE__{
name_part: String.t,
is_extension: boolean
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.name_part, <<10>>),
      Encoder.field(:bool, data.is_extension, <<16>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :name_part, acc, data)
end
def decode(acc, <<16, data::binary>>) do
Decoder.field(:bool, :is_extension, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.SourceCodeInfo do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
location: []
]
@type t :: %__MODULE__{
location: [Google.Protobuf.SourceCodeInfo.Location.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
      Encoder.repeated_unpacked_field(:struct, data.location, <<10>>)
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.struct_field(Google.Protobuf.SourceCodeInfo.Location, :location, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:location, v}, acc -> Map.update(acc, :location, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :location, Elixir.Enum.reverse(struct.location))
struct
end
end
defmodule Google.Protobuf.SourceCodeInfo.Location do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
path: [],
span: [],
leading_comments: nil,
trailing_comments: nil,
leading_detached_comments: []
]
@type t :: %__MODULE__{
path: [integer],
span: [integer],
leading_comments: String.t,
trailing_comments: String.t,
leading_detached_comments: [String.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.repeated_field(:int32, data.path, <<10>>),
Encoder.repeated_field(:int32, data.span, <<18>>),
Encoder.field(:string, data.leading_comments, <<26>>),
Encoder.field(:string, data.trailing_comments, <<34>>),
      Encoder.repeated_unpacked_field(:string, data.leading_detached_comments, <<50>>)
]
end
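  # `path` and `span` use Encoder.repeated_field (packed encoding: one
  # length-delimited key followed by concatenated varints), unlike the repeated
  # strings and messages elsewhere in this file, which use
  # repeated_unpacked_field (one key per element) because length-delimited
  # values cannot be packed.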
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.repeated_field(:int32, :path, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.repeated_field(:int32, :span, acc, data)
end
def decode(acc, <<26, data::binary>>) do
Decoder.field(:string, :leading_comments, acc, data)
end
def decode(acc, <<34, data::binary>>) do
Decoder.field(:string, :trailing_comments, acc, data)
end
def decode(acc, <<50, data::binary>>) do
Decoder.field(:string, :leading_detached_comments, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,4,6] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:leading_detached_comments, v}, acc -> Map.update(acc, :leading_detached_comments, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :leading_detached_comments, Elixir.Enum.reverse(struct.leading_detached_comments))
struct
end
end
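# Sketch of the field-key math behind the fallback decode/2 clauses above: a
# protobuf key byte is (field_number <<< 3) ||| wire_type, so field 3 with
# wire type 2 (length-delimited) encodes as <<26>>, the prefix matched for
# :leading_comments.
#
#     import Bitwise
#     key = bor(bsl(3, 3), 2)  #=> 26
#     bsr(key, 3)              #=> 3 (the field tag)
#     band(key, 7)             #=> 2 (the wire type)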
defmodule Google.Protobuf.GeneratedCodeInfo do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
annotation: []
]
@type t :: %__MODULE__{
annotation: [Google.Protobuf.GeneratedCodeInfo.Annotation.t]
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.repeated_unpacked_field(:struct, data.annotation, <<10>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.struct_field(Google.Protobuf.GeneratedCodeInfo.Annotation, :annotation, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{:annotation, v}, acc -> Map.update(acc, :annotation, [v], fn e -> [v | e] end)
{k, v}, acc -> Map.put(acc, k, v)
end)
struct = Map.put(struct, :annotation, Elixir.Enum.reverse(struct.annotation))
struct
end
end
defmodule Google.Protobuf.GeneratedCodeInfo.Annotation do
@moduledoc false
alias Pbuf.Decoder
@derive {Jason.Encoder, []}
defstruct [
path: [],
source_file: nil,
begin: nil,
end: nil
]
@type t :: %__MODULE__{
path: [integer],
source_file: String.t,
begin: integer,
end: integer
}
@spec new(Enum.t) :: t
def new(data \\ []), do: struct(__MODULE__, data)
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.repeated_field(:int32, data.path, <<10>>),
Encoder.field(:string, data.source_file, <<18>>),
Encoder.field(:int32, data.begin, <<24>>),
Encoder.field(:int32, data.end, <<32>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.repeated_field(:int32, :path, acc, data)
end
def decode(acc, <<18, data::binary>>) do
Decoder.field(:string, :source_file, acc, data)
end
def decode(acc, <<24, data::binary>>) do
Decoder.field(:int32, :begin, acc, data)
end
def decode(acc, <<32, data::binary>>) do
Decoder.field(:int32, :end, acc, data)
end
import Bitwise, only: [bsr: 2, band: 2]
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1,2,3,4] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end | test/schemas/proto/google/protobuf/descriptor.pb.ex | 0.75392 | 0.520801 | descriptor.pb.ex | starcoder |
defmodule Day12 do
@moduledoc """
Assembunny code interpreter
"""
defmodule State do
defstruct pc: 0, a: 0, b: 0, c: 1, d: 0
end
def evaluate_file(path) do
path
|> File.read!
|> evaluate
end
@doc """
Evaluate the given instruction set
"""
def evaluate(str) do
str
|> parse_commands
|> run(%State{})
end
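  # A minimal usage sketch (the program below is illustrative, not puzzle
  # input; note the interpreter's initial state has c: 1):
  #
  #     Day12.evaluate("cpy 2 a\ndec a\njnz a -1")
  #     #=> %Day12.State{pc: 3, a: 0, b: 0, c: 1, d: 0}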
defp run(instructions, state = %State{pc: pc}) do
instruction = Enum.at(instructions, pc)
# IO.puts "Eval: #{inspect instruction}"
new_state = eval(instruction, state)
# IO.inspect new_state
    if new_state.pc >= length(instructions) do
      new_state
    else
      run(instructions, new_state)
    end
end
defp eval({:cpy, value, target}, state) when is_atom(value) do
state
|> Map.put(target, Map.get(state, value))
|> update_pc(1)
end
defp eval({:cpy, value, target}, state) when is_integer(value) do
state
|> Map.put(target, value)
|> update_pc(1)
end
defp eval({:inc, target}, state) do
state
|> Map.update!(target, &(&1 + 1))
|> update_pc(1)
end
defp eval({:dec, target}, state) do
state
|> Map.update!(target, &(&1 - 1))
|> update_pc(1)
end
defp eval({:jnz, test, offset}, state) when is_atom(test) do
val = Map.get(state, test)
if 0 == val do
update_pc(state, 1)
else
update_pc(state, offset)
end
end
defp eval({:jnz, test, offset}, state) when is_integer(test) do
if 0 == test do
update_pc(state, 1)
else
update_pc(state, offset)
end
end
defp update_pc(state, offset) do
Map.update!(state, :pc, &(&1 + offset))
end
defp parse_commands(str) do
str
|> String.trim
|> String.split("\n")
|> Enum.map(&String.trim/1)
|> Enum.map(&convert_command/1)
end
defp convert_command(<<"inc ", rest :: binary>>) do
{:inc, string_to_register(rest)}
end
defp convert_command(<<"dec ", rest :: binary>>) do
{:dec, string_to_register(rest)}
end
defp convert_command(<<"cpy ", rest :: binary>>) do
[value, register] = String.split(rest)
{:cpy, convert_value(value), string_to_register(register)}
end
defp convert_command(<<"jnz ", rest :: binary>>) do
[test, offset] = String.split(rest)
{:jnz, convert_value(test), convert_value(offset)}
end
defp convert_value(str) do
list = String.to_charlist(str)
if Enum.at(list, 0) >= ?a do
string_to_register(str)
else
String.to_integer(str)
end
end
defp string_to_register(str) do
String.to_atom(str)
end
end | day12/lib/day12.ex | 0.60288 | 0.511107 | day12.ex | starcoder |
defmodule Advent.Sixteen.TinyLCD do
alias Advent.Agents.Screen
def init(width, height) do
Screen.init(width, height)
end
def update(%{type: :rect, x: x, y: y}) do
for i <- 0..(x - 1) do
for j <- 0..(y - 1) do
Screen.set i, j, "█"
end
end
end
def update(%{type: :rotate, orientation: :row, y: y, magnitude: magnitude }) do
row_rotated = Screen.curr
|> Matrix.row(y)
|> Advent.Helpers.List.rotate_right(magnitude)
|> Vector.from_list
for col <- 0..Vector.length(row_rotated) - 1 do
Screen.set(col, y, row_rotated[col])
end
end
def update(%{type: :rotate, orientation: :col, x: x, magnitude: magnitude }) do
col_rotated = Screen.curr
|> Matrix.column(x)
|> Advent.Helpers.List.rotate_right(magnitude)
|> Vector.from_list
for row <- 0..Vector.length(col_rotated) - 1 do
Screen.set(x, row, col_rotated[row])
end
end
def tostring do
Screen.curr
|> Matrix.to_list
|> Enum.map(&(Enum.join(&1)))
|> Enum.join("\n")
end
def prettyprint do
IO.puts tostring
end
def lit do
tostring
|> String.graphemes
|> Enum.count(&(&1 == "█"))
end
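  # Usage sketch (assumes the Advent.Agents.Screen agent is started; numbers
  # are illustrative):
  #
  #     TinyLCD.init(7, 3)
  #     TinyLCD.update(%{type: :rect, x: 3, y: 2})
  #     TinyLCD.update(%{type: :rotate, orientation: :col, x: 1, magnitude: 1})
  #     TinyLCD.lit()  #=> 6, since rotation moves pixels but never changes the count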
end
defmodule Advent.Sixteen.Eight do
alias Advent.Sixteen.TinyLCD
@input "./input/2016/8"
def parse_input(line) do
    line = String.trim(line)
cond do
match = Regex.run(~R/^rect (\d+)x(\d+)/, line) ->
[_, x, y] = match
%{type: :rect, x: String.to_integer(x), y: String.to_integer(y)}
match = Regex.run(~R/^rotate row y=(\d+) by (\d+)/, line) ->
[_, y, magnitude] = match
%{type: :rotate, orientation: :row, y: String.to_integer(y), magnitude: String.to_integer(magnitude)}
match = Regex.run(~R/^rotate column x=(\d+) by (\d+)/, line) ->
[_, x, magnitude] = match
%{type: :rotate, orientation: :col, x: String.to_integer(x), magnitude: String.to_integer(magnitude)}
true ->
%{type: :wtf}
end
end
def bitmap_step(step) do
TinyLCD.prettyprint
:timer.sleep(50)
TinyLCD.update(step)
end
def a do
TinyLCD.init(50, 6)
File.stream!(@input)
|> Enum.map(&parse_input/1)
|> Enum.each(&(bitmap_step(&1)))
TinyLCD.lit
end
def b do
end
end | lib/2016/8.ex | 0.659624 | 0.527256 | 8.ex | starcoder |
defmodule AtomTweaksWeb.PrimerHelpers do
@moduledoc """
View helper functions for generating elements that work with
[GitHub's Primer](https://primer.github.io/) CSS framework.
All functions can be used either within a template or composed together in code. Each function
should always emit `t:Phoenix.HTML.safe/0` objects or throw an exception.
"""
use Phoenix.HTML
require AtomTweaksWeb.Gettext
import AtomTweaksWeb.Gettext, only: [gettext: 1]
alias AtomTweaks.Accounts.User
@typedoc """
The application name as an atom.
"""
@type app_name :: atom
defmodule MissingConfigurationError do
@moduledoc """
Exception raised when there is an element of required application configuration missing.
"""
defexception [:missing_keys]
def exception(key) when is_atom(key), do: exception([key])
def exception(keys) when is_list(keys) do
%__MODULE__{missing_keys: keys}
end
def message(%{missing_keys: missing_keys}) do
"application configuration missing: #{inspect(missing_keys)}"
end
end
@doc """
Renders the `avatar` element for the `user`.
**See:** [Avatar element documentation](https://github.com/primer/primer/tree/master/modules/primer-avatars#basic-example)
## Options
* `:size` -- value in pixels to use for both the width and height of the avatar image
"""
  @spec avatar(User.t(), Keyword.t()) :: Phoenix.HTML.safe()
def avatar(user, options \\ [])
def avatar(user, options) do
size = options[:size] || 35
class = append_class("avatar", options[:class])
tag_options =
options
|> Keyword.drop([:size])
|> Keyword.put(:alt, user.name)
|> Keyword.put(:class, class)
|> Keyword.put(:src, append_query(user.avatar_url, s: size))
|> Keyword.put(:width, size)
|> Keyword.put(:height, size)
tag(:img, tag_options)
end
@doc """
Renders the GitHub-style `<> with ♥ by [author link]` footer item.
Retrieves the author's name and URL from the application configuration for the default application
for the current module. See `code_with_heart/2` for more information.
"""
@spec code_with_heart() :: Phoenix.HTML.safe()
def code_with_heart do
code_with_heart(Application.get_application(__MODULE__))
end
@doc """
Renders the GitHub-style `<> with ♥ by [author link]` footer item.
Retrieves the author's name and URL from the application configuration before passing to
`code_with_heart/3`. This information can be added to the application configuration by adding the
following to your `config.exs`:
```
config :app_name,
code_with_heart: [
name: "<NAME>",
url: "https://example.com"
]
```
Raises a `AtomTweaksWeb.PrimerHelpers.MissingConfigurationError` if any of the required
application configuration information is not specified and this function is called.
If passed two strings instead of an atom and a keyword list, this function will assume that you
mean to call `code_with_heart/3` with no options and do so for you.
"""
@spec code_with_heart(atom, Keyword.t()) :: Phoenix.HTML.safe()
def code_with_heart(app_name, options \\ [])
def code_with_heart(app_name, options)
def code_with_heart(app_name, options) when is_atom(app_name) and is_list(options) do
config = Application.get_env(app_name, :code_with_heart)
name = config[:name]
url = config[:url]
unless name && url, do: raise(MissingConfigurationError, :code_with_heart)
code_with_heart(name, url, options)
end
def code_with_heart(name, url) when is_binary(name) and is_binary(url),
do: code_with_heart(name, url, [])
@doc """
Renders the GitHub-style `<> with ♥ by [author link]` footer item.
The text in this element is intentionally left untranslated because the form of the element is
intended to be recognizable in its specific format.
## Options
All options are passed to the underlying HTML `a` element.
## Examples
```
Phoenix.HTML.safe_to_string(AtomTweaksWeb.PrimerHelpers.code_with_heart("Author's Name", "https://example.com"))
#=> "<svg .../> with <svg .../> by <a href=\"https://example.com\">Author's Name</a>"
```
"""
@spec code_with_heart(String.t(), String.t(), Keyword.t()) :: Phoenix.HTML.safe()
def code_with_heart(name, url, options) do
link_options = Keyword.merge([to: url, class: "link-gray-dark"], options)
html_escape([
PhoenixOcticons.octicon(:code),
" with ",
PhoenixOcticons.octicon(:heart),
" by ",
link(name, link_options)
])
end
@doc """
Renders a `Counter` element.
**See:** [Counter element documentation](https://primer.style/css/components/labels#Counters)
"""
@spec counter(non_neg_integer()) :: Phoenix.HTML.safe()
def counter(count) do
content_tag(:span, Integer.to_string(count), class: "Counter")
end
@doc """
Renders a link to the project on GitHub.
Retrieves the project name or URL from the application configuration for the default application.
"""
@spec github_link(Keyword.t()) :: Phoenix.HTML.safe()
def github_link(options \\ [])
def github_link(options), do: github_link(options, [])
@doc """
Renders a link to the project on GitHub.
If the first parameter is an atom, it retrieves the project name or URL from the application
configuration. Otherwise, the project can be either the GitHub `owner/project` identifier or the
full GitHub URL.
This configuration information can be added to the application configuration by adding the
following to your `config.exs`:
```
config :app_name,
github_link: "owner/name"
```
If the configuration information is missing and the first parameter is an atom, a
`AtomTweaksWeb.PrimerHelpers.MissingConfigurationError` is raised.
## Options
All options are passed to the underlying HTML `a` element.
"""
@spec github_link(app_name | String.t(), Keyword.t()) :: Phoenix.HTML.safe()
def github_link(app_name_or_project, options)
def github_link(options, _no_options) when is_list(options) do
github_link(Application.get_application(__MODULE__), options)
end
def github_link(app_name, options) when is_atom(app_name) do
url = Application.get_env(app_name, :github_link)
unless url, do: raise(MissingConfigurationError, :github_link)
github_link(url, options)
end
def github_link(project, options) when is_binary(project) do
# Prepend the `https://github.com/` if only the name with owner is specified
url = if project =~ ~r{^[^/]+/[^/]+$}, do: "https://github.com/#{project}", else: project
link_options =
Keyword.merge(
[
to: url,
"aria-label": gettext("View this project on GitHub"),
class: "link-gray-dark tooltipped tooltipped-n"
],
options
)
link(link_options) do
PhoenixOcticons.octicon("mark-github")
end
end
@doc """
Renders a link that visually appears as a button.
## Options
* `:to` - the URL to link to
"""
@spec link_button(String.t(), Keyword.t()) :: Phoenix.HTML.safe()
def link_button(text, options \\ []) do
options = Keyword.merge(options, type: "button")
link(text, options)
end
@doc """
Renders a menu element.
**See:** [Menu element documentation](https://github.com/primer/primer/tree/master/modules/primer-navigation#menu)
## Example
Slime template:
```
= menu do
= menu_item("Foo", "/path/to/foo", selected: true)
= menu_item("Bar", "/path/to/bar")
```
generates:
```html
<nav class="menu">
<a class="menu-item selected" href="/path/to/foo">Foo</a>
<a class="menu-item" href="/path/to/bar">Bar</a>
</nav>
```
"""
@spec menu(Keyword.t()) :: Phoenix.HTML.safe()
def menu(block)
def menu(do: block) do
content_tag(:nav, block, class: "menu")
end
@doc """
Renders a menu item element.
## Options
* `:octicon` - Renders an [Octicon](https://octicons.github.com) with the menu item
* `:selected` - If `true`, renders the menu item as selected
All other options are passed through to the underlying HTML `a` element.
"""
@spec menu_item(String.t(), String.t(), Keyword.t()) :: Phoenix.HTML.safe()
def menu_item(text, link, options \\ []) do
selected = options[:selected]
class =
"menu-item"
|> append_class(options[:class])
|> append_class(if selected, do: "selected", else: nil)
tag_options =
options
|> Keyword.drop([:octicon, :selected])
|> Keyword.put(:href, link)
|> Keyword.put(:class, class)
content =
if options[:octicon] do
[
PhoenixOcticons.octicon(options[:octicon], width: 16),
text
]
else
text
end
content_tag(:a, content, tag_options)
end
@doc """
Renders an `UnderlineNav` element.
The `underline_nav_item/3` function is used to generate the nav items within the nav element.
**See:** [UnderlineNav element documentation](https://github.com/primer/primer/tree/master/modules/primer-navigation#underline-nav)
## Options
All options are passed through to the underlying HTML `nav` element.
## Example
Slime template:
```
= underline_nav do
= underline_nav_item "Foo", "/path/to/foo", selected: true
= underline_nav_item "Bar", "/path/to/bar"
```
generates:
```html
<nav class="UnderlineNav">
<div class="UnderlineNav-body">
<a class="UnderlineNav-item selected" href="/path/to/foo">Foo</a>
<a class="UnderlineNav-item" href="/path/to/bar">Bar</a>
</div>
</div>
```
"""
@spec underline_nav(Keyword.t(), Keyword.t()) :: Phoenix.HTML.safe()
def underline_nav(options \\ [], block)
def underline_nav(options, do: block) do
class = append_class("UnderlineNav", options[:class])
content_tag(:nav, class: class) do
content_tag(:div, block, class: "UnderlineNav-body")
end
end
@doc """
Renders an `UnderlineNav-item` element.
## Options
* `:counter` - When supplied with an integer value, renders a `Counter` element
* `:selected` - When `true`, renders this item as selected
All other options are passed through to the underlying HTML `a` element.
"""
@spec underline_nav_item(String.t(), String.t(), Keyword.t()) :: Phoenix.HTML.safe()
def underline_nav_item(text, link, options \\ []) do
count = options[:counter]
selected = options[:selected]
class =
"UnderlineNav-item"
|> append_class(options[:class])
|> append_class(if selected, do: "selected", else: nil)
tag_options =
options
|> Keyword.drop([:counter, :selected])
|> Keyword.put(:class, class)
tag_options = if selected, do: tag_options, else: Keyword.put(tag_options, :href, link)
content = if count, do: [text, counter(count)], else: text
content_tag(:a, content, tag_options)
end
defp append_class(base, nil), do: base
defp append_class(base, ""), do: base
defp append_class(base, class) when is_binary(class), do: base <> " " <> class
defp append_query(avatar_url, options) do
map = Enum.into(options, %{})
uri = URI.parse(avatar_url)
    new_query =
      (uri.query || "")
      |> URI.decode_query(map)
      |> URI.encode_query()
uri
|> Map.put(:query, new_query)
|> to_string()
end
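  # Sketch of what append_query/2 produces (URL is illustrative; the order of
  # encoded pairs may vary):
  #
  #     append_query("https://example.com/u/1.png?d=retro", s: 35)
  #     #=> "https://example.com/u/1.png?s=35&d=retro"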
end | lib/atom_tweaks_web/helpers/primer_helpers.ex | 0.891559 | 0.813609 | primer_helpers.ex | starcoder |
defmodule VintageNetMobile.Modem.QuectelEC25 do
@behaviour VintageNetMobile.Modem
@moduledoc """
Quectel EC25 support
The Quectel EC25 is a series of LTE Cat 4 modules. Here's an example
configuration:
```elixir
VintageNet.configure(
"ppp0",
%{
type: VintageNetMobile,
vintage_net_mobile: %{
modem: VintageNetMobile.Modem.QuectelEC25,
service_providers: [%{apn: "wireless.twilio.com"}]
}
}
)
```
Options:
* `:modem` - `VintageNetMobile.Modem.QuectelEC25`
* `:service_providers` - A list of service provider information (only `:apn`
providers are supported)
* `:at_tty` - A tty for sending AT commands on. This defaults to `"ttyUSB2"`
which works unless other USB serial devices cause Linux to set it to
something different.
  * `:ppp_tty` - A tty for the PPP connection. This defaults to `"ttyUSB3"`
which works unless other USB serial devices cause Linux to set it to
something different.
If multiple service providers are configured, this implementation only
attempts to connect to the first one.
Example of supported properties:
```elixir
iex> VintageNet.get_by_prefix(["interface", "ppp0"])
[
{["interface", "ppp0", "addresses"],
[
%{
address: {10, 64, 64, 64},
family: :inet,
netmask: {255, 255, 255, 255},
prefix_length: 32,
scope: :universe
}
]},
{["interface", "ppp0", "connection"], :internet},
{["interface", "ppp0", "lower_up"], true},
{["interface", "ppp0", "mobile", "access_technology"], "FDD LTE"},
{["interface", "ppp0", "mobile", "band"], "LTE BAND 4"},
{["interface", "ppp0", "mobile", "channel"], 2300},
{["interface", "ppp0", "mobile", "cid"], 11303407},
{["interface", "ppp0", "mobile", "lac"], 10234},
{["interface", "ppp0", "mobile", "mcc"], 360},
{["interface", "ppp0", "mobile", "mnc"], 200},
{["interface", "ppp0", "mobile", "network"], "Twilio"},
{["interface", "ppp0", "mobile", "signal_asu"], 21},
{["interface", "ppp0", "mobile", "signal_4bars"], 4},
{["interface", "ppp0", "mobile", "signal_dbm"], -71},
{["interface", "ppp0", "present"], true},
{["interface", "ppp0", "state"], :configured},
{["interface", "ppp0", "type"], VintageNetMobile}
]
```
## Required Linux kernel options
* CONFIG_USB_SERIAL=m
* CONFIG_USB_SERIAL_WWAN=m
* CONFIG_USB_SERIAL_OPTION=m
* CONFIG_USB_WDM=m
* CONFIG_USB_NET_QMI_WWAN=m
"""
alias VintageNet.Interface.RawConfig
alias VintageNetMobile.{CellMonitor, Chatscript, ExChat, PPPDConfig, SignalMonitor}
alias VintageNetMobile.Modem.Utils
@impl true
def normalize(config) do
VintageNetMobile.Modem.QuectelBG96.check_linux_version()
config
|> Utils.require_a_service_provider()
end
@impl true
def add_raw_config(raw_config, %{vintage_net_mobile: mobile} = _config, opts) do
ifname = raw_config.ifname
files = [{Chatscript.path(ifname, opts), Chatscript.default(mobile)}]
at_tty = Map.get(mobile, :at_tty, "ttyUSB2")
ppp_tty = Map.get(mobile, :ppp_tty, "ttyUSB3")
child_specs = [
{ExChat, [tty: at_tty, speed: 9600]},
{SignalMonitor, [ifname: ifname, tty: at_tty]},
{CellMonitor, [ifname: ifname, tty: at_tty]}
]
%RawConfig{
raw_config
| files: files,
child_specs: child_specs
}
|> PPPDConfig.add_child_spec(ppp_tty, 9600, opts)
end
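  # Sketch of a configuration that overrides the default ttys (device names
  # are illustrative):
  #
  #     VintageNet.configure("ppp0", %{
  #       type: VintageNetMobile,
  #       vintage_net_mobile: %{
  #         modem: VintageNetMobile.Modem.QuectelEC25,
  #         service_providers: [%{apn: "example.apn"}],
  #         at_tty: "ttyUSB5",
  #         ppp_tty: "ttyUSB6"
  #       }
  #     })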
end | lib/vintage_net_mobile/modem/quectel_EC25.ex | 0.8549 | 0.702032 | quectel_EC25.ex | starcoder |
defmodule Mux.Deadline do
@moduledoc """
Deadline context for Mux.Context.
This module provides deadlines over (multiple) Mux dispatch contexts and
passes the deadline downstream.
"""
@behaviour Mux.Context
@wire_key "com.twitter.finagle.Deadline"
@time_unit :nanosecond
@enforce_keys [:start, :finish, :time_offset]
defstruct [:start, :finish, :time_offset]
@type t :: %__MODULE__{start: integer,
finish: integer,
time_offset: integer}
@doc """
Create a deadline struct.
This function does not use Mux.Context.
"""
@spec new(non_neg_integer) :: t
def new(timeout) do
start = System.monotonic_time(@time_unit)
time_offset = System.time_offset(@time_unit)
%__MODULE__{start: start, finish: start + timeout, time_offset: time_offset}
end
@doc """
Merge two deadlines and return a new deadline.
This function is pure and does not use Mux.Context.
"""
@spec merge(t, t) :: t
def merge(deadline1, deadline2) do
%Mux.Deadline{start: start1, finish: finish1} = deadline1
%Mux.Deadline{start: start2, finish: finish2} = deadline2
# most recent start/time_offset (accuracy) and earliest finish (strict)
finish = min(finish1, finish2)
if start1 > start2 do
%Mux.Deadline{deadline1 | finish: finish}
else
%Mux.Deadline{deadline2 | finish: finish}
end
end
@doc """
Bind a deadline to the scope of an anonymouns function and run the function.
The first argument is either a deadline struct or a non-infinity timeout
(creating a new deadline). If a deadline already exists in the current context
the deadlines are merged.
## Examples
Mux.Deadline.bind(1000, fn ->
GenServer.call(MyServer, :request, Mux.Deadline.timeout())
end)
"""
@spec bind(t | non_neg_integer, (() -> result)) :: result when result: var
def bind(%Mux.Deadline{} = deadline, fun) do
case Mux.Context.fetch(Mux.Deadline) do
{:ok, old} ->
Mux.Context.bind(Mux.Deadline, merge(old, deadline), fun)
:error ->
Mux.Context.bind(Mux.Deadline, deadline, fun)
end
end
def bind(timeout, fun) do
timeout
|> new()
|> bind(fun)
end
@doc """
Start a timer that sends a message when the deadline expires.
If a deadline is not supplied, the deadline from the current context is used.
## Examples
Mux.Deadline.bind(1000, fn -> Mux.Deadline.start_timer(self(), :BOOM) end)
"""
@spec start_timer(pid | atom, any) :: reference()
@spec start_timer(t, pid | atom, any) :: reference()
def start_timer(deadline \\ Mux.Context.fetch!(Mux.Deadline), dest, msg)
def start_timer(%Mux.Deadline{finish: finish}, dest, msg) do
abs = System.convert_time_unit(finish, @time_unit, :millisecond)
:erlang.start_timer(abs, dest, msg, [abs: true])
end
@doc """
Return a timeout for when the deadline expires.
If a deadline is not supplied, the deadline from the current context is used.
## Examples
Mux.Deadline.bind(1000, fn ->
GenServer.call(MyServer, :request, Mux.Deadline.timeout())
end)
"""
@spec timeout() :: non_neg_integer
@spec timeout(t) :: non_neg_integer
def timeout(deadline \\ Mux.Context.fetch!(Mux.Deadline))
def timeout(%Mux.Deadline{finish: finish}) do
now = System.monotonic_time(@time_unit)
finish
|> Kernel.-(now)
|> System.convert_time_unit(@time_unit, :millisecond)
|> max(0)
end
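  # Sketch of nesting: an inner bind merges with the outer deadline, so the
  # stricter (earlier) finish always wins (timeout values illustrative):
  #
  #     Mux.Deadline.bind(1_000, fn ->
  #       Mux.Deadline.bind(5_000, fn ->
  #         # the merged deadline keeps the earlier finish of the two
  #         Mux.Deadline.timeout()
  #       end)
  #     end)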
@doc false
@impl Mux.Context
@spec put_wire(Mux.Context.wire, t) :: Mux.Context.wire
def put_wire(wire, deadline) do
%Mux.Deadline{start: start, finish: finish, time_offset: offset} = deadline
sys_start = start + offset
sys_finish = finish + offset
data = <<sys_start::64, sys_finish::64>>
Map.put(wire, @wire_key, data)
end
@doc false
@impl Mux.Context
@spec fetch_wire(Mux.Context.wire) :: {:ok, t} | :error
def fetch_wire(%{@wire_key => data}) do
case data do
<<sys_start::64, sys_finish::64>> ->
offset = System.time_offset(:nanosecond)
start = sys_start - offset
finish = sys_finish - offset
{:ok, %Mux.Deadline{start: start, finish: finish, time_offset: offset}}
_ ->
raise "expected 16 bytes, got: #{inspect data}"
end
end
def fetch_wire(_),
do: :error
end | lib/mux/deadline.ex | 0.759538 | 0.46308 | deadline.ex | starcoder |
defmodule Day02 do
@moduledoc """
--- Day 2: Inventory Management System ---
You stop falling through time, catch your breath, and check the screen on the device. "Destination reached. Current Year: 1518. Current Location: North Pole Utility Closet 83N10." You made it! Now, to find those anomalies.
Outside the utility closet, you hear footsteps and a voice. "...I'm not sure either. But now that so many people have chimneys, maybe he could sneak in that way?" Another voice responds, "Actually, we've been working on a new kind of suit that would let him fit through tight spaces like that. But, I heard that a few days ago, they lost the prototype fabric, the design plans, everything! Nobody on the team can even seem to remember important details of the project!"
"Wouldn't they have had enough fabric to fill several boxes in the warehouse? They'd be stored together, so the box IDs should be similar. Too bad it would take forever to search the warehouse for two similar box IDs..." They walk too far away to hear any more.
Late at night, you sneak to the warehouse - who knows what kinds of paradoxes you could cause if you were discovered - and use your fancy wrist device to quickly scan every box and produce a list of the likely candidates (your puzzle input).
To make sure you didn't miss any, you scan the likely candidate boxes again, counting the number that have an ID containing exactly two of any letter and then separately counting those with exactly three of any letter. You can multiply those two counts together to get a rudimentary checksum and compare it to what your device predicts.
For example, if you see the following box IDs:
abcdef contains no letters that appear exactly two or three times.
bababc contains two a and three b, so it counts for both.
abbcde contains two b, but no letter appears exactly three times.
abcccd contains three c, but no letter appears exactly two times.
aabcdd contains two a and two d, but it only counts once.
abcdee contains two e.
ababab contains three a and three b, but it only counts once.
Of these box IDs, four of them contain a letter which appears exactly twice, and three of them contain a letter which appears exactly three times. Multiplying these together produces a checksum of 4 * 3 = 12.
What is the checksum for your list of box IDs?
"""
@doc """
input.txt contents mapped to one charlist per line
"""
def input() do
"input.txt"
|> File.read!()
|> String.split()
|> Enum.map(&String.to_charlist/1)
end
@doc """
What is the checksum for your list of box IDs?
"""
def part1() do
input() |> part1()
end
def part1(list), do: part1(list, 0, 0)
defp part1([], twos, threes) do
twos * threes
end
defp part1([head | tail], twos, threes) do
{two, three} = process_line(head)
part1(tail, two, three, twos, threes)
end
  defp part1(tail, false, false, twos, threes), do: part1(tail, twos, threes)
  defp part1(tail, false, true, twos, threes), do: part1(tail, twos, threes + 1)
  defp part1(tail, true, false, twos, threes), do: part1(tail, twos + 1, threes)
  defp part1(tail, true, true, twos, threes), do: part1(tail, twos + 1, threes + 1)
@doc """
determines if the line has two or three occurances of a single character.
Returns {twos, threes} both booleans.
"""
def process_line(line) do
{twos, threes, _} =
Enum.reduce(line, {MapSet.new, MapSet.new, %{}}, fn c, {twos, threes, char_counts} ->
update_counts(twos, threes, char_counts, c, Map.get(char_counts, c))
end)
{MapSet.size(twos) > 0, MapSet.size(threes) > 0}
end
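  # Sketch using IDs from the puzzle description:
  #
  #     Day02.process_line('bababc')  #=> {true, true}
  #     Day02.process_line('abcdef')  #=> {false, false}
  #     Day02.part1(['bababc', 'abbcde', 'abcccd'])  #=> 4 (2 twos * 2 threes)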
# first time seeing c
defp update_counts(twos, threes, char_counts, c, nil) do
{twos, threes, Map.put(char_counts, c, 1)}
end
# second time seeing c
defp update_counts(twos, threes, char_counts, c, 1) do
{MapSet.put(twos, c), threes, Map.put(char_counts, c, 2)}
end
# third time seeing c
defp update_counts(twos, threes, char_counts, c, 2) do
{MapSet.delete(twos, c), MapSet.put(threes, c), Map.put(char_counts, c, 3)}
end
# fourth or subsequent time seeing c
defp update_counts(twos, threes, char_counts, c, v) do
{twos, MapSet.delete(threes, c), Map.put(char_counts, c, v+1)}
end
end | day02/lib/day02.ex | 0.597256 | 0.648974 | day02.ex | starcoder |
defmodule ExWareki.Era do
@moduledoc """
Era module provides conversion functions between Japanese-formatted date (wareki) and A.D. formatted date (seireki).
"""
alias ExWareki.Data
alias ExWareki.Structs.Wareki
alias ExWareki.Structs.Seireki
@doc """
search_wareki_by_name/1 finds an era by name
## Examples
iex> ExWareki.Era.search_wareki_by_name("平成")
%{name: "平成", yomi: "へいせい", begin_year: 1989, begin_month: 1, begin_day: 8, end_year: 2019, end_month: 4, end_day: 30}
"""
def search_wareki_by_name(query) do
Data.data()
|> Enum.filter(fn d -> d.name == query end)
|> List.first
end
@doc """
wareki2seireki/1 converts wareki struct into seireki struct
## Examples
iex> ExWareki.Era.wareki2seireki(%ExWareki.Structs.Wareki{name: "平成", yomi: "へいせい", year: 1, month: 2, day: 13})
{:ok, %ExWareki.Structs.Seireki{year: 1989, month: 2, day: 13}}
"""
def wareki2seireki(%Wareki{} = wareki) do
case search_wareki_by_name(wareki.name) do
nil -> {:error, "cannot find wareki: #{wareki.name}"}
data ->
{:ok, %Seireki{
year: data.begin_year + wareki.year - 1,
month: wareki.month,
day: wareki.day,
}}
end
end
@doc """
seireki2wareki/1 converts seireki struct into wareki struct
## Examples
iex> ExWareki.Era.seireki2wareki(%ExWareki.Structs.Seireki{year: 1989, month: 2, day: 13})
{:ok, %ExWareki.Structs.Wareki{name: "平成", yomi: "へいせい", year: 1, month: 2, day: 13}}
iex> ExWareki.Era.seireki2wareki(%ExWareki.Structs.Seireki{year: 2019, month: 9, day: 13})
{:ok, %ExWareki.Structs.Wareki{name: "令和", yomi: "れいわ", year: 1, month: 9, day: 13}}
iex> ExWareki.Era.seireki2wareki(%ExWareki.Structs.Seireki{year: 0, month: 1, day: 1})
{:error, %ExWareki.Structs.Wareki{name: "【元号不明】", yomi: "", year: 0, month: 1, day: 1}}
"""
def seireki2wareki(%Seireki{} = seireki) do
Data.data()
|> Enum.filter(fn d -> compare_date(seireki.year, seireki.month, seireki.day, d.begin_year, d.begin_month, d.begin_day) >= 0 end)
|> List.last
|> compose_wareki(seireki)
end
defp compose_wareki(nil, seireki) do
{:error, %Wareki{
name: "【元号不明】",
yomi: "",
year: seireki.year,
month: seireki.month,
day: seireki.day,
}}
end
defp compose_wareki(era, seireki) do
{:ok, %Wareki{
name: era.name,
yomi: era.yomi,
year: seireki.year - era.begin_year + 1,
month: seireki.month,
day: seireki.day,
}}
end
@doc """
compare_date/6 returns
1 if date1 > date2,
-1 if date1 < date2,
0 if date1 = date2
## Examples
iex> ExWareki.Era.compare_date(2019, 12, 31, 2017, 1, 4)
1
iex> ExWareki.Era.compare_date(2019, 1, 5, 2019, 1, 4)
1
iex> ExWareki.Era.compare_date(2019, 1, 5, 2019, 1, 5)
0
iex> ExWareki.Era.compare_date(2018, 1, 5, 2019, 1, 5)
-1
"""
  def compare_date(year1, month1, day1, year2, month2, day2) do
cond do
year1 > year2 -> 1
year1 < year2 -> -1
month1 > month2 -> 1
month1 < month2 -> -1
day1 > day2 -> 1
day1 < day2 -> -1
true -> 0
end
end
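  # Round-trip sketch (era data comes from ExWareki.Data):
  #
  #     {:ok, w} = ExWareki.Era.seireki2wareki(%ExWareki.Structs.Seireki{year: 2019, month: 9, day: 13})
  #     ExWareki.Era.wareki2seireki(w)
  #     #=> {:ok, %ExWareki.Structs.Seireki{year: 2019, month: 9, day: 13}}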
end | lib/ex_wareki/era.ex | 0.589953 | 0.661713 | era.ex | starcoder |
defmodule Parser do
@spec lines(String.t()) :: [String.t()]
def lines(contents) do
contents
|> String.downcase()
|> String.split("\n", trim: true)
end
@spec data(String.t()) :: %{String.t() => %Signal{}}
def data(contents) do
lines(contents)
|> Enum.reduce(%{signals: %{}, line: 0}, fn x, acc ->
parsed = parseEachLine(x, acc.line)
%{signals: Map.merge(acc.signals, parsed), line: acc.line + 1}
end)
|> Map.get(:signals)
end
defp parseEachLine(line, number) do
[left | [right | _]] = String.split(line, "->", trim: true)
key = String.trim(right)
gate =
cond do
String.contains?(left, "and") -> :and
String.contains?(left, "or") -> :or
String.contains?(left, "lshift") -> :lshift
String.contains?(left, "rshift") -> :rshift
String.contains?(left, "not") -> :not
true -> :value
end
    # treat :value as a special case and do not create an extra node when the
    # operand is a literal integer or a plain wire name.
    if gate == :value do
      first = String.trim(left)
      value =
        case Integer.parse(first) do
          {num, _} -> num
          :error -> first
        end
      %{key => %Signal{gate: :value, left: value}}
    else
      operands = String.split(left, Atom.to_string(gate), trim: true)
      parsedOperands = parseOperands(operands, number)
      names = Map.merge(%Signal{gate: gate}, parsedOperands.names)
      Map.merge(%{key => names}, parsedOperands.values)
    end
end
defp parseOperand(operand, fieldName, line) do
first = operand |> String.trim()
case Integer.parse(first) do
{num, _} ->
%{
names: %{fieldName => Atom.to_string(fieldName) <> Integer.to_string(line)},
values: %{
(Atom.to_string(fieldName) <> Integer.to_string(line)) => %Signal{
left: num,
gate: :value
}
}
}
:error ->
%{
names: %{fieldName => first},
values: %{}
}
end
end
defp parseOperands(operands, number) when length(operands) == 1 do
left = parseOperand(Enum.at(operands, 0), :left, number)
%{
names: left.names,
values: left.values
}
end
defp parseOperands(operands, number) when length(operands) == 2 do
left = parseOperand(Enum.at(operands, 0), :left, number)
right = parseOperand(Enum.at(operands, 1), :right, number)
%{
names: Map.merge(left.names, right.names),
values: Map.merge(left.values, right.values)
}
end
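  # Sketch of the node expansion for a line with one literal operand (wire
  # names illustrative; default Signal fields elided):
  #
  #     Parser.data("1 AND y -> x")
  #     #=> %{
  #     #     "x" => %Signal{gate: :and, left: "left0", right: "y"},
  #     #     "left0" => %Signal{gate: :value, left: 1}
  #     #   }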
end | 2015/day7/lib/parser.ex | 0.798972 | 0.590336 | parser.ex | starcoder |
defmodule Exeration.Validation do
alias Exeration.Operation.Argument
def check([%Argument{name: name, type: type} = argument | arguments], function_arguments) do
value = Keyword.get(function_arguments, name)
with :ok <- check_required(argument, value),
:ok <- check_type(argument, value) do
check(arguments, function_arguments)
else
:error -> {:error, name, type}
end
end
def check([], _) do
{:ok, :validation}
end
defp check_required(%Argument{required: true}, value) do
case not is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_required(%Argument{required: false}, _) do
:ok
end
defp check_type(%Argument{type: :boolean}, value) do
case is_boolean(value) or is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :integer}, value) do
case is_integer(value) or is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :float}, value) do
case is_float(value) or is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :string}, value) do
case is_binary(value) or is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :tuple}, value) do
case is_tuple(value) or is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :map}, value) do
case is_map(value) or is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :struct} = argument, value) do
case (is_map(value) and map_is_struct(value) and value.__struct__ == argument.struct) or
is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :list}, value) do
case is_list(value) or is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :atom}, value) do
case is_atom(value) or is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :function}, value) do
case is_function(value) or is_nil(value) do
true -> :ok
false -> :error
end
end
defp check_type(%Argument{type: :dont_check}, _) do
:ok
end
defp check_type(%Argument{type: custom} = argument, value) do
Application.fetch_env!(:exeration, :custom_validators)
|> Keyword.get(custom, nil)
|> case do
nil ->
raise Exeration.Validator.Error,
message: "Custom validator '#{custom}' not presented in config"
module ->
Kernel.apply(module, :check, [argument, value])
end
|> case do
:ok ->
:ok
:error ->
:error
_ ->
raise Exeration.Validator.Error,
message: "Custom validator '#{custom}' should return ':ok' or ':error'"
end
end
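  # Sketch of wiring a custom validator (module and key names illustrative):
  #
  #     # config/config.exs
  #     config :exeration, custom_validators: [email: MyApp.EmailValidator]
  #
  #     defmodule MyApp.EmailValidator do
  #       def check(%Exeration.Operation.Argument{}, value) do
  #         if is_binary(value) and value =~ "@", do: :ok, else: :error
  #       end
  #     end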
defp map_is_struct(%{__struct__: _} = item) when is_map(item), do: true
defp map_is_struct(_), do: false
end | lib/exeration/validation.ex | 0.572962 | 0.530845 | validation.ex | starcoder |
defmodule AWS.S3Control do
@moduledoc """
AWS S3 Control provides access to Amazon S3 control plane operations.
"""
@doc """
Creates an access point and associates it with the specified bucket. For
more information, see [Managing Data Access with Amazon S3 Access
Points](https://docs.aws.amazon.com/AmazonS3/latest/dev/access-points.html)
in the *Amazon Simple Storage Service Developer Guide*.
<p/> **Using this action with Amazon S3 on Outposts**
This action:
<ul> <li> Requires a virtual private cloud (VPC) configuration as S3 on
Outposts only supports VPC style access points.
</li> <li> Does not support ACL on S3 on Outposts buckets.
</li> <li> Does not support Public Access on S3 on Outposts buckets.
</li> <li> Does not support object lock for S3 on Outposts buckets.
</li> </ul> For more information, see [Using Amazon S3 on
Outposts](AmazonS3/latest/dev/S3onOutposts.html) in the *Amazon Simple
Storage Service Developer Guide *.
All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_CreateAccessPoint.html#API_control_CreateAccessPoint_Examples)
section below.
<p/> The following actions are related to `CreateAccessPoint`:
<ul> <li>
[GetAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetAccessPoint.html)
</li> <li>
[DeleteAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_DeleteAccessPoint.html)
</li> <li>
[ListAccessPoints](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_ListAccessPoints.html)
</li> </ul>
"""
def create_access_point(client, name, input, options \\ []) do
path_ = "/v20180820/accesspoint/#{URI.encode(name)}"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
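  # Usage sketch (credentials, account id, and bucket name are illustrative;
  # the client struct comes from AWS.Client in aws-elixir):
  #
  #     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-west-2")
  #     AWS.S3Control.create_access_point(client, "example-access-point", %{
  #       "AccountId" => "123456789012",
  #       "Bucket" => "example-bucket"
  #     })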
@doc """
<note> This API operation creates an Amazon S3 on Outposts bucket. To
create an S3 bucket, see [Create
Bucket](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateBucket.html)
in the *Amazon Simple Storage Service API*.
</note> Creates a new Outposts bucket. By creating the bucket, you become
the bucket owner. To create an Outposts bucket, you must have S3 on
Outposts. For more information, see [Using Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in *Amazon Simple Storage Service Developer Guide*.
Not every string is an acceptable bucket name. For information on bucket
naming restrictions, see [Working with Amazon S3
Buckets](https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html#bucketnamingrules).
S3 on Outposts buckets do not support
<ul> <li> ACLs. Instead, configure access point policies to manage access
to buckets.
</li> <li> Public access.
</li> <li> Object Lock
</li> <li> Bucket Location constraint
</li> </ul> For an example of the request syntax for Amazon S3 on Outposts
that uses the S3 on Outposts endpoint hostname prefix and outpost-id in
your API request, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_CreateBucket.html#API_control_CreateBucket_Examples)
section below.
The following actions are related to `CreateBucket` for Amazon S3 on
Outposts:
<ul> <li>
[PutObject](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html)
</li> <li>
[GetBucket](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_GetBucket.html)
</li> <li>
[DeleteBucket](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_DeleteBucket.html)
</li> <li>
[CreateAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_CreateAccessPoint.html)
</li> <li>
[PutAccessPointPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_PutAccessPointPolicy.html)
</li> </ul>
"""
def create_bucket(client, bucket, input, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}"
{headers, input} =
[
{"ACL", "x-amz-acl"},
{"GrantFullControl", "x-amz-grant-full-control"},
{"GrantRead", "x-amz-grant-read"},
{"GrantReadACP", "x-amz-grant-read-acp"},
{"GrantWrite", "x-amz-grant-write"},
{"GrantWriteACP", "x-amz-grant-write-acp"},
{"ObjectLockEnabledForBucket", "x-amz-bucket-object-lock-enabled"},
{"OutpostId", "x-amz-outpost-id"},
]
|> AWS.Request.build_params(input)
query_ = []
case request(client, :put, path_, query_, headers, input, options, nil) do
{:ok, body, response} when not is_nil(body) ->
body =
[
{"Location", "Location"},
]
|> Enum.reduce(body, fn {header_name, key}, acc ->
case List.keyfind(response.headers, header_name, 0) do
nil -> acc
{_header_name, value} -> Map.put(acc, key, value)
end
end)
{:ok, body, response}
result ->
result
end
end
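  # Usage sketch for an Outposts bucket (ids illustrative; note that OutpostId
  # is sent as the x-amz-outpost-id header via build_params/1 above):
  #
  #     AWS.S3Control.create_bucket(client, "example-outposts-bucket", %{
  #       "OutpostId" => "op-01ac5d28a6a232904"
  #     })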
@doc """
S3 Batch Operations performs large-scale Batch Operations on Amazon S3
objects. Batch Operations can run a single operation or action on lists of
Amazon S3 objects that you specify. For more information, see [S3 Batch
Operations](https://docs.aws.amazon.com/AmazonS3/latest/dev/batch-ops-basics.html)
in the *Amazon Simple Storage Service Developer Guide*.
This operation creates a S3 Batch Operations job.
<p/> Related actions include:
<ul> <li>
[DescribeJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DescribeJob.html)
</li> <li>
[ListJobs](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_ListJobs.html)
</li> <li>
[UpdateJobPriority](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_UpdateJobPriority.html)
</li> <li>
[UpdateJobStatus](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_UpdateJobStatus.html)
</li> </ul>
"""
def create_job(client, input, options \\ []) do
path_ = "/v20180820/jobs"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the specified access point.
All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_DeleteAccessPoint.html#API_control_DeleteAccessPoint_Examples)
section below.
The following actions are related to `DeleteAccessPoint`:
<ul> <li>
[CreateAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateAccessPoint.html)
</li> <li>
[GetAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetAccessPoint.html)
</li> <li>
[ListAccessPoints](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_ListAccessPoints.html)
</li> </ul>
"""
def delete_access_point(client, name, input, options \\ []) do
path_ = "/v20180820/accesspoint/#{URI.encode(name)}"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the access point policy for the specified access point.
<p/> All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_DeleteAccessPointPolicy.html#API_control_DeleteAccessPointPolicy_Examples)
section below.
The following actions are related to `DeleteAccessPointPolicy`:
<ul> <li>
[PutAccessPointPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutAccessPointPolicy.html)
</li> <li>
[GetAccessPointPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetAccessPointPolicy.html)
</li> </ul>
"""
def delete_access_point_policy(client, name, input, options \\ []) do
path_ = "/v20180820/accesspoint/#{URI.encode(name)}/policy"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
<note> This API operation deletes an Amazon S3 on Outposts bucket. To
delete an S3 bucket, see
[DeleteBucket](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucket.html)
in the *Amazon Simple Storage Service API*.
</note> Deletes the Amazon S3 on Outposts bucket. All objects (including
all object versions and delete markers) in the bucket must be deleted
before the bucket itself can be deleted. For more information, see [Using
Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in *Amazon Simple Storage Service Developer Guide*.
All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_DeleteBucket.html#API_control_DeleteBucket_Examples)
section below.
<p class="title"> **Related Resources**
<ul> <li>
[CreateBucket](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateBucket.html)
</li> <li>
[GetBucket](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_GetBucket.html)
</li> <li>
[DeleteObject](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObject.html)
</li> </ul>
"""
def delete_bucket(client, bucket, input, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
<note> This API action deletes an Amazon S3 on Outposts bucket's lifecycle
configuration. To delete an S3 bucket's lifecycle configuration, see
[DeleteBucketLifecycle](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucketLifecycle.html)
in the *Amazon Simple Storage Service API*.
</note> Deletes the lifecycle configuration from the specified Outposts
bucket. Amazon S3 on Outposts removes all the lifecycle configuration rules
in the lifecycle subresource associated with the bucket. Your objects never
expire, and Amazon S3 on Outposts no longer automatically deletes any
objects on the basis of rules contained in the deleted lifecycle
configuration. For more information, see [Using Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in *Amazon Simple Storage Service Developer Guide*.
To use this operation, you must have permission to perform the
`s3outposts:DeleteLifecycleConfiguration` action. By default, the bucket
owner has this permission and the Outposts bucket owner can grant this
permission to others.
All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_DeleteBucketLifecycleConfiguration.html#API_control_DeleteBucketLifecycleConfiguration_Examples)
section below.
For more information about object expiration, see [ Elements to Describe
Lifecycle
Actions](https://docs.aws.amazon.com/AmazonS3/latest/dev/intro-lifecycle-rules.html#intro-lifecycle-rules-actions).
Related actions include:
<ul> <li>
[PutBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutBucketLifecycleConfiguration.html)
</li> <li>
[GetBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetBucketLifecycleConfiguration.html)
</li> </ul>
"""
def delete_bucket_lifecycle_configuration(client, bucket, input, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}/lifecycleconfiguration"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
<note> This API operation deletes an Amazon S3 on Outposts bucket policy.
To delete an S3 bucket policy, see
[DeleteBucketPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucketPolicy.html)
in the *Amazon Simple Storage Service API*.
</note> This implementation of the DELETE operation uses the policy
subresource to delete the policy of a specified Amazon S3 on Outposts
bucket. If you are using an identity other than the root user of the AWS
account that owns the bucket, the calling identity must have the
`s3outposts:DeleteBucketPolicy` permissions on the specified Outposts
bucket and belong to the bucket owner's account to use this operation. For
more information, see [Using Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in *Amazon Simple Storage Service Developer Guide*.
If you don't have `DeleteBucketPolicy` permissions, Amazon S3 returns a
`403 Access Denied` error. If you have the correct permissions, but you're
not using an identity that belongs to the bucket owner's account, Amazon S3
returns a `405 Method Not Allowed` error.
<important> As a security precaution, the root user of the AWS account that
owns a bucket can always use this operation, even if the policy explicitly
denies the root user the ability to perform this action.
</important> For more information about bucket policies, see [Using Bucket
Policies and User Policies](
https://docs.aws.amazon.com/AmazonS3/latest/dev/using-iam-policies.html).
All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_DeleteBucketPolicy.html#API_control_DeleteBucketPolicy_Examples)
section below.
The following actions are related to `DeleteBucketPolicy`:
<ul> <li>
[GetBucketPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetBucketPolicy.html)
</li> <li>
[PutBucketPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_PutBucketPolicy.html)
</li> </ul>
"""
def delete_bucket_policy(client, bucket, input, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}/policy"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
<note> This API operation deletes an Amazon S3 on Outposts bucket's tags.
To delete an S3 bucket tags, see
[DeleteBucketTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucketTagging.html)
in the *Amazon Simple Storage Service API*.
</note> Deletes the tags from the Outposts bucket. For more information,
see [Using Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in *Amazon Simple Storage Service Developer Guide*.
To use this operation, you must have permission to perform the
`PutBucketTagging` action. By default, the bucket owner has this permission
and can grant this permission to others.
All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_DeleteBucketTagging.html#API_control_DeleteBucketTagging_Examples)
section below.
The following actions are related to `DeleteBucketTagging`:
<ul> <li>
[GetBucketTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetBucketTagging.html)
</li> <li>
[PutBucketTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutBucketTagging.html)
</li> </ul>
"""
def delete_bucket_tagging(client, bucket, input, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}/tagging"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Removes the entire tag set from the specified S3 Batch Operations job. To
use this operation, you must have permission to perform the
`s3:DeleteJobTagging` action. For more information, see [Controlling access
and labeling jobs using
tags](https://docs.aws.amazon.com/AmazonS3/latest/dev/batch-ops-managing-jobs.html#batch-ops-job-tags)
in the *Amazon Simple Storage Service Developer Guide*.
<p/> Related actions include:
<ul> <li>
[CreateJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateJob.html)
</li> <li>
[GetJobTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetJobTagging.html)
</li> <li>
[PutJobTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutJobTagging.html)
</li> </ul>
"""
def delete_job_tagging(client, job_id, input, options \\ []) do
path_ = "/v20180820/jobs/#{URI.encode(job_id)}/tagging"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Removes the `PublicAccessBlock` configuration for an AWS account. For more
information, see [ Using Amazon S3 block public
access](https://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-block-public-access.html).
Related actions include:
<ul> <li>
[GetPublicAccessBlock](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetPublicAccessBlock.html)
</li> <li>
[PutPublicAccessBlock](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutPublicAccessBlock.html)
</li> </ul>
"""
def delete_public_access_block(client, input, options \\ []) do
path_ = "/v20180820/configuration/publicAccessBlock"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Retrieves the configuration parameters and status for a Batch Operations
job. For more information, see [S3 Batch
Operations](https://docs.aws.amazon.com/AmazonS3/latest/dev/batch-ops-basics.html)
in the *Amazon Simple Storage Service Developer Guide*.
<p/> Related actions include:
<ul> <li>
[CreateJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateJob.html)
</li> <li>
[ListJobs](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_ListJobs.html)
</li> <li>
[UpdateJobPriority](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_UpdateJobPriority.html)
</li> <li>
[UpdateJobStatus](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_UpdateJobStatus.html)
</li> </ul>
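## Example

A minimal sketch; the job id and account id are illustrative:

    # `client` is an `%AWS.Client{}` configured for your credentials and region.
    {:ok, job, _response} = describe_job(client, "example-job-id", "123456789012")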
"""
def describe_job(client, job_id, account_id, options \\ []) do
path_ = "/v20180820/jobs/#{URI.encode(job_id)}"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns configuration information about the specified access point.
<p/> All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_GetAccessPoint.html#API_control_GetAccessPoint_Examples)
section below.
The following actions are related to `GetAccessPoint`:
<ul> <li>
[CreateAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateAccessPoint.html)
</li> <li>
[DeleteAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteAccessPoint.html)
</li> <li>
[ListAccessPoints](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_ListAccessPoints.html)
</li> </ul>
"""
def get_access_point(client, name, account_id, options \\ []) do
path_ = "/v20180820/accesspoint/#{URI.encode(name)}"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns the access point policy associated with the specified access point.
The following actions are related to `GetAccessPointPolicy`:
<ul> <li>
[PutAccessPointPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutAccessPointPolicy.html)
</li> <li>
[DeleteAccessPointPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteAccessPointPolicy.html)
</li> </ul>
"""
def get_access_point_policy(client, name, account_id, options \\ []) do
path_ = "/v20180820/accesspoint/#{URI.encode(name)}/policy"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Indicates whether the specified access point currently has a policy that
allows public access. For more information about public access through
access points, see [Managing Data Access with Amazon S3 Access
Points](https://docs.aws.amazon.com/AmazonS3/latest/dev/access-points.html)
in the *Amazon Simple Storage Service Developer Guide*.
"""
def get_access_point_policy_status(client, name, account_id, options \\ []) do
path_ = "/v20180820/accesspoint/#{URI.encode(name)}/policyStatus"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets an Amazon S3 on Outposts bucket. For more information, see [ Using
Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in the *Amazon Simple Storage Service Developer Guide*.
The following actions are related to `GetBucket` for Amazon S3 on Outposts:
<ul> <li>
[PutObject](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html)
</li> <li>
[CreateBucket](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_CreateBucket.html)
</li> <li>
[DeleteBucket](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_DeleteBucket.html)
</li> </ul>
"""
def get_bucket(client, bucket, account_id, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
<note> This API operation gets an Amazon S3 on Outposts bucket's lifecycle
configuration. To get an S3 bucket's lifecycle configuration, see
[GetBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetBucketLifecycleConfiguration.html)
in the *Amazon Simple Storage Service API*.
</note> Returns the lifecycle configuration information set on the Outposts
bucket. For more information, see [Using Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
and for information about lifecycle configuration, see [ Object Lifecycle
Management](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html)
in the *Amazon Simple Storage Service Developer Guide*.
To use this operation, you must have permission to perform the
`s3outposts:GetLifecycleConfiguration` action. The Outposts bucket owner
has this permission, by default. The bucket owner can grant this permission
to others. For more information about permissions, see [Permissions Related
to Bucket Subresource
Operations](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-with-s3-actions.html#using-with-s3-actions-related-to-bucket-subresources)
and [Managing Access Permissions to Your Amazon S3
Resources](https://docs.aws.amazon.com/AmazonS3/latest/dev/s3-access-control.html).
All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_GetBucketLifecycleConfiguration.html#API_control_GetBucketLifecycleConfiguration_Examples)
section below.
`GetBucketLifecycleConfiguration` has the following special error:
<ul> <li> Error code: `NoSuchLifecycleConfiguration`
<ul> <li> Description: The lifecycle configuration does not exist.
</li> <li> HTTP Status Code: 404 Not Found
</li> <li> SOAP Fault Code Prefix: Client
</li> </ul> </li> </ul> The following actions are related to
`GetBucketLifecycleConfiguration`:
<ul> <li>
[PutBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutBucketLifecycleConfiguration.html)
</li> <li>
[DeleteBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteBucketLifecycleConfiguration.html)
</li> </ul>
"""
def get_bucket_lifecycle_configuration(client, bucket, account_id, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}/lifecycleconfiguration"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
<note> This API action gets a bucket policy for an Amazon S3 on Outposts
bucket. To get a policy for an S3 bucket, see
[GetBucketPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetBucketPolicy.html)
in the *Amazon Simple Storage Service API*.
</note> Returns the policy of a specified Outposts bucket. For more
information, see [Using Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in the *Amazon Simple Storage Service Developer Guide*.
If you are using an identity other than the root user of the AWS account
that owns the bucket, the calling identity must have the `GetBucketPolicy`
permissions on the specified bucket and belong to the bucket owner's
account in order to use this operation.
If you don't have `s3outposts:GetBucketPolicy` permissions, Amazon S3
returns a `403 Access Denied` error. If you have the correct permissions,
but you're not using an identity that belongs to the bucket owner's
account, Amazon S3 returns a `405 Method Not Allowed` error.
<important> As a security precaution, the root user of the AWS account that
owns a bucket can always use this operation, even if the policy explicitly
denies the root user the ability to perform this action.
</important> For more information about bucket policies, see [Using Bucket
Policies and User
Policies](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-iam-policies.html).
All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_GetBucketPolicy.html#API_control_GetBucketPolicy_Examples)
section below.
The following actions are related to `GetBucketPolicy`:
<ul> <li>
[GetObject](https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html)
</li> <li>
[PutBucketPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutBucketPolicy.html)
</li> <li>
[DeleteBucketPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteBucketPolicy.html)
</li> </ul>
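## Example

A minimal sketch; the bucket name and account id are illustrative:

    # `client` is an `%AWS.Client{}` configured for your credentials and region.
    {:ok, policy, _response} =
      get_bucket_policy(client, "example-outposts-bucket", "123456789012")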
"""
def get_bucket_policy(client, bucket, account_id, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}/policy"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
<note> This API operation gets an Amazon S3 on Outposts bucket's tags. To
get an S3 bucket's tags, see
[GetBucketTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetBucketTagging.html)
in the *Amazon Simple Storage Service API*.
</note> Returns the tag set associated with the Outposts bucket. For more
information, see [Using Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in the *Amazon Simple Storage Service Developer Guide*.
To use this operation, you must have permission to perform the
`GetBucketTagging` action. By default, the bucket owner has this permission
and can grant this permission to others.
`GetBucketTagging` has the following special error:
<ul> <li> Error code: `NoSuchTagSetError`
<ul> <li> Description: There is no tag set associated with the bucket.
</li> </ul> </li> </ul> All Amazon S3 on Outposts REST API requests for
this action require an additional parameter of outpost-id to be passed with
the request and an S3 on Outposts endpoint hostname prefix instead of
s3-control. For an example of the request syntax for Amazon S3 on Outposts
that uses the S3 on Outposts endpoint hostname prefix and the outpost-id
derived using the access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_GetBucketTagging.html#API_control_GetBucketTagging_Examples)
section below.
The following actions are related to `GetBucketTagging`:
<ul> <li>
[PutBucketTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutBucketTagging.html)
</li> <li>
[DeleteBucketTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteBucketTagging.html)
</li> </ul>
"""
def get_bucket_tagging(client, bucket, account_id, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}/tagging"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns the tags on an S3 Batch Operations job. To use this operation, you
must have permission to perform the `s3:GetJobTagging` action. For more
information, see [Controlling access and labeling jobs using
tags](https://docs.aws.amazon.com/AmazonS3/latest/dev/batch-ops-managing-jobs.html#batch-ops-job-tags)
in the *Amazon Simple Storage Service Developer Guide*.
<p/> Related actions include:
<ul> <li>
[CreateJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateJob.html)
</li> <li>
[PutJobTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutJobTagging.html)
</li> <li>
[DeleteJobTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteJobTagging.html)
</li> </ul>
"""
def get_job_tagging(client, job_id, account_id, options \\ []) do
path_ = "/v20180820/jobs/#{URI.encode(job_id)}/tagging"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Retrieves the `PublicAccessBlock` configuration for an AWS account. For
more information, see [ Using Amazon S3 block public
access](https://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-block-public-access.html).
Related actions include:
<ul> <li>
[DeletePublicAccessBlock](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeletePublicAccessBlock.html)
</li> <li>
[PutPublicAccessBlock](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_PutPublicAccessBlock.html)
</li> </ul>
"""
def get_public_access_block(client, account_id, options \\ []) do
path_ = "/v20180820/configuration/publicAccessBlock"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of the access points currently associated with the specified
bucket. You can retrieve up to 1,000 access points per call. If the
specified bucket has more than 1,000 access points (or the number specified
in `maxResults`, whichever is less), the response will include a
continuation token that you can use to list the additional access points.
<p/> All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_GetAccessPoint.html#API_control_GetAccessPoint_Examples)
section below.
The following actions are related to `ListAccessPoints`:
<ul> <li>
[CreateAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateAccessPoint.html)
</li> <li>
[DeleteAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteAccessPoint.html)
</li> <li>
[GetAccessPoint](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetAccessPoint.html)
</li> </ul>
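## Example

A minimal sketch; all argument values are illustrative. Note that the
optional arguments (`bucket`, `max_results`, `next_token`) precede the
required `account_id`, so they are passed positionally here:

    # `client` is an `%AWS.Client{}` configured for your credentials and region.
    {:ok, result, _response} =
      list_access_points(client, "example-bucket", 10, nil, "123456789012")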
"""
def list_access_points(client, bucket \\ nil, max_results \\ nil, next_token \\ nil, account_id, options \\ []) do
path_ = "/v20180820/accesspoint"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(bucket) do
[{"bucket", bucket} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists current S3 Batch Operations jobs and jobs that have ended within the
last 30 days for the AWS account making the request. For more information,
see [S3 Batch
Operations](https://docs.aws.amazon.com/AmazonS3/latest/dev/batch-ops-basics.html)
in the *Amazon Simple Storage Service Developer Guide*.
Related actions include:
<p/> <ul> <li>
[CreateJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateJob.html)
</li> <li>
[DescribeJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DescribeJob.html)
</li> <li>
[UpdateJobPriority](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_UpdateJobPriority.html)
</li> <li>
[UpdateJobStatus](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_UpdateJobStatus.html)
</li> </ul>
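## Example

A minimal sketch; all argument values are illustrative:

    # `client` is an `%AWS.Client{}` configured for your credentials and region.
    # Passing nil leaves the `jobStatuses` and `nextToken` filters unset.
    {:ok, result, _response} = list_jobs(client, nil, 10, nil, "123456789012")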
"""
def list_jobs(client, job_statuses \\ nil, max_results \\ nil, next_token \\ nil, account_id, options \\ []) do
path_ = "/v20180820/jobs"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(job_statuses) do
[{"jobStatuses", job_statuses} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of all Outposts buckets in an Outpost that are owned by the
authenticated sender of the request. For more information, see [Using
Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in the *Amazon Simple Storage Service Developer Guide*.
For an example of the request syntax for Amazon S3 on Outposts that uses
the S3 on Outposts endpoint hostname prefix and outpost-id in your API
request, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_ListRegionalBuckets.html#API_control_ListRegionalBuckets_Examples)
section below.
"""
def list_regional_buckets(client, max_results \\ nil, next_token \\ nil, account_id, outpost_id \\ nil, options \\ []) do
path_ = "/v20180820/bucket"
headers = []
headers = if !is_nil(account_id) do
[{"x-amz-account-id", account_id} | headers]
else
headers
end
headers = if !is_nil(outpost_id) do
[{"x-amz-outpost-id", outpost_id} | headers]
else
headers
end
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Associates an access policy with the specified access point. Each access
point can have only one policy, so a request made to this API replaces any
existing policy associated with the specified access point.
<p/> All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_PutAccessPointPolicy.html#API_control_PutAccessPointPolicy_Examples)
section below.
The following actions are related to `PutAccessPointPolicy`:
<ul> <li>
[GetAccessPointPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetAccessPointPolicy.html)
</li> <li>
[DeleteAccessPointPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteAccessPointPolicy.html)
</li> </ul>
"""
def put_access_point_policy(client, name, input, options \\ []) do
path_ = "/v20180820/accesspoint/#{URI.encode(name)}/policy"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
<note> This API action puts a lifecycle configuration to an Amazon S3 on
Outposts bucket. To put a lifecycle configuration to an S3 bucket, see
[PutBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutBucketLifecycleConfiguration.html)
in the *Amazon Simple Storage Service API*.
</note> Creates a new lifecycle configuration for the Outposts bucket or
replaces an existing lifecycle configuration. Outposts buckets can only
support a lifecycle that deletes objects after a certain period of time.
For more information, see [Managing Lifecycle Permissions for Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html).
<p/> All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_PutBucketLifecycleConfiguration.html#API_control_PutBucketLifecycleConfiguration_Examples)
section below.
The following actions are related to `PutBucketLifecycleConfiguration`:
<ul> <li>
[GetBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetBucketLifecycleConfiguration.html)
</li> <li>
[DeleteBucketLifecycleConfiguration](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteBucketLifecycleConfiguration.html)
</li> </ul>
"""
def put_bucket_lifecycle_configuration(client, bucket, input, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}/lifecycleconfiguration"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
<note> This API action puts a bucket policy to an Amazon S3 on Outposts
bucket. To put a policy on an S3 bucket, see
[PutBucketPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutBucketPolicy.html)
in the *Amazon Simple Storage Service API*.
</note> Applies an Amazon S3 bucket policy to an Outposts bucket. For more
information, see [Using Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in the *Amazon Simple Storage Service Developer Guide*.
If you are using an identity other than the root user of the AWS account
that owns the Outposts bucket, the calling identity must have the
`PutBucketPolicy` permissions on the specified Outposts bucket and belong
to the bucket owner's account in order to use this operation.
If you don't have `PutBucketPolicy` permissions, Amazon S3 returns a `403
Access Denied` error. If you have the correct permissions, but you're not
using an identity that belongs to the bucket owner's account, Amazon S3
returns a `405 Method Not Allowed` error.
<important> As a security precaution, the root user of the AWS account that
owns a bucket can always use this operation, even if the policy explicitly
denies the root user the ability to perform this action.
</important> For more information about bucket policies, see [Using Bucket
Policies and User
Policies](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-iam-policies.html).
All Amazon S3 on Outposts REST API requests for this action require an
additional parameter of outpost-id to be passed with the request and an S3
on Outposts endpoint hostname prefix instead of s3-control. For an example
of the request syntax for Amazon S3 on Outposts that uses the S3 on
Outposts endpoint hostname prefix and the outpost-id derived using the
access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_PutBucketPolicy.html#API_control_PutBucketPolicy_Examples)
section below.
The following actions are related to `PutBucketPolicy`:
<ul> <li>
[GetBucketPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetBucketPolicy.html)
</li> <li>
[DeleteBucketPolicy](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteBucketPolicy.html)
</li> </ul>
"""
def put_bucket_policy(client, bucket, input, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}/policy"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
{"ConfirmRemoveSelfBucketAccess", "x-amz-confirm-remove-self-bucket-access"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
<note> This API action puts tags on an Amazon S3 on Outposts bucket. To put
tags on an S3 bucket, see
[PutBucketTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutBucketTagging.html)
in the *Amazon Simple Storage Service API*.
</note> Sets the tags for an Outposts bucket. For more information, see
[Using Amazon S3 on
Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html)
in the *Amazon Simple Storage Service Developer Guide*.
Use tags to organize your AWS bill to reflect your own cost structure. To
do this, sign up to get your AWS account bill with tag key values included.
Then, to see the cost of combined resources, organize your billing
information according to resources with the same tag key values. For
example, you can tag several resources with a specific application name,
and then organize your billing information to see the total cost of that
application across several services. For more information, see [Cost
Allocation and
Tagging](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html).
<note> Within a bucket, if you add a tag that has the same key as an
existing tag, the new value overwrites the old value. For more information,
see [Using Cost Allocation in Amazon S3 Bucket
Tags](https://docs.aws.amazon.com/AmazonS3/latest/dev/CostAllocTagging.html).
</note> To use this operation, you must have permissions to perform the
`s3outposts:PutBucketTagging` action. The Outposts bucket owner has this
permission by default and can grant this permission to others. For more
information about permissions, see [ Permissions Related to Bucket
Subresource
Operations](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-with-s3-actions.html#using-with-s3-actions-related-to-bucket-subresources)
and [Managing Access Permissions to Your Amazon S3
Resources](https://docs.aws.amazon.com/AmazonS3/latest/dev/s3-access-control.html).
`PutBucketTagging` has the following special errors:
<ul> <li> Error code: `InvalidTagError`
<ul> <li> Description: The tag provided was not a valid tag. This error can
occur if the tag did not pass input validation. For information about tag
restrictions, see [ User-Defined Tag
Restrictions](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/allocation-tag-restrictions.html)
and [ AWS-Generated Cost Allocation Tag
Restrictions](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/aws-tag-restrictions.html).
</li> </ul> </li> <li> Error code: `MalformedXMLError`
<ul> <li> Description: The XML provided does not match the schema.
</li> </ul> </li> <li> Error code: `OperationAbortedError`
<ul> <li> Description: A conflicting conditional operation is currently in
progress against this resource. Try again.
</li> </ul> </li> <li> Error code: `InternalError`
<ul> <li> Description: The service was unable to apply the provided tag to
the bucket.
</li> </ul> </li> </ul> All Amazon S3 on Outposts REST API requests for
this action require an additional parameter of outpost-id to be passed with
the request and an S3 on Outposts endpoint hostname prefix instead of
s3-control. For an example of the request syntax for Amazon S3 on Outposts
that uses the S3 on Outposts endpoint hostname prefix and the outpost-id
derived using the access point ARN, see the [
Example](https://docs.aws.amazon.com/AmazonS3/latest/API/API__control_PutBucketTagging.html#API_control_PutBucketTagging_Examples)
section below.
The following actions are related to `PutBucketTagging`:
<ul> <li>
[GetBucketTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetBucketTagging.html)
</li> <li>
[DeleteBucketTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteBucketTagging.html)
</li> </ul>
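## Example

A minimal sketch; the account id, bucket name, and body shape are
illustrative assumptions (see the AWS API reference for the exact schema):

    # `client` is an `%AWS.Client{}` configured for your credentials and region.
    input = %{
      "AccountId" => "123456789012",
      "Tagging" => %{"TagSet" => [%{"Key" => "project", "Value" => "demo"}]}
    }
    {:ok, _body, _response} =
      put_bucket_tagging(client, "example-outposts-bucket", input)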
"""
def put_bucket_tagging(client, bucket, input, options \\ []) do
path_ = "/v20180820/bucket/#{URI.encode(bucket)}/tagging"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Sets the supplied tag-set on an S3 Batch Operations job.
A tag is a key-value pair. You can associate S3 Batch Operations tags with
any job by sending a PUT request against the tagging subresource that is
associated with the job. To modify the existing tag set, you can either
replace the existing tag set entirely, or make changes within the existing
tag set by retrieving the existing tag set using
[GetJobTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetJobTagging.html),
modify that tag set, and use this API action to replace the tag set with
the one you modified. For more information, see [Controlling access and
labeling jobs using
tags](https://docs.aws.amazon.com/AmazonS3/latest/dev/batch-ops-managing-jobs.html#batch-ops-job-tags)
in the *Amazon Simple Storage Service Developer Guide*.
<p/> <note> <ul> <li> If you send this request with an empty tag set,
Amazon S3 deletes the existing tag set on the Batch Operations job. If you
use this method, you are charged for a Tier 1 Request (PUT). For more
information, see [Amazon S3 pricing](http://aws.amazon.com/s3/pricing/).
</li> <li> For deleting existing tags for your Batch Operations job, a
[DeleteJobTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteJobTagging.html)
request is preferred because it achieves the same result without incurring
charges.
</li> <li> A few things to consider about using tags:
<ul> <li> Amazon S3 limits the maximum number of tags to 50 tags per job.
</li> <li> You can associate up to 50 tags with a job as long as they have
unique tag keys.
</li> <li> A tag key can be up to 128 Unicode characters in length, and tag
values can be up to 256 Unicode characters in length.
</li> <li> The key and values are case sensitive.
</li> <li> For tagging-related restrictions related to characters and
encodings, see [User-Defined Tag
Restrictions](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/allocation-tag-restrictions.html)
in the *AWS Billing and Cost Management User Guide*.
</li> </ul> </li> </ul> </note> <p/> To use this operation, you must have
permission to perform the `s3:PutJobTagging` action.
Related actions include:
<ul> <li>
[CreateJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateJob.html)
</li> <li>
[GetJobTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetJobTagging.html)
</li> <li>
[DeleteJobTagging](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeleteJobTagging.html)
</li> </ul>
"""
def put_job_tagging(client, job_id, input, options \\ []) do
path_ = "/v20180820/jobs/#{URI.encode(job_id)}/tagging"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Creates or modifies the `PublicAccessBlock` configuration for an AWS
account. For more information, see [ Using Amazon S3 block public
access](https://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-block-public-access.html).
Related actions include:
<ul> <li>
[GetPublicAccessBlock](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_GetPublicAccessBlock.html)
</li> <li>
[DeletePublicAccessBlock](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DeletePublicAccessBlock.html)
</li> </ul>
"""
def put_public_access_block(client, input, options \\ []) do
path_ = "/v20180820/configuration/publicAccessBlock"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates an existing S3 Batch Operations job's priority. For more
information, see [S3 Batch
Operations](https://docs.aws.amazon.com/AmazonS3/latest/dev/batch-ops-basics.html)
in the *Amazon Simple Storage Service Developer Guide*.
<p/> Related actions include:
<ul> <li>
[CreateJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateJob.html)
</li> <li>
[ListJobs](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_ListJobs.html)
</li> <li>
[DescribeJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DescribeJob.html)
</li> <li>
[UpdateJobStatus](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_UpdateJobStatus.html)
</li> </ul>
"""
def update_job_priority(client, job_id, input, options \\ []) do
path_ = "/v20180820/jobs/#{URI.encode(job_id)}/priority"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
{query_, input} =
[
{"Priority", "priority"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates the status for the specified job. Use this operation to confirm
that you want to run a job or to cancel an existing job. For more
information, see [S3 Batch
Operations](https://docs.aws.amazon.com/AmazonS3/latest/dev/batch-ops-basics.html)
in the *Amazon Simple Storage Service Developer Guide*.
<p/> Related actions include:
<ul> <li>
[CreateJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_CreateJob.html)
</li> <li>
[ListJobs](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_ListJobs.html)
</li> <li>
[DescribeJob](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_DescribeJob.html)
</li> <li>
[UpdateJobStatus](https://docs.aws.amazon.com/AmazonS3/latest/API/API_control_UpdateJobStatus.html)
</li> </ul>
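## Example

A minimal sketch; the job id, account id, and requested status are
illustrative:

    # `client` is an `%AWS.Client{}` configured for your credentials and region.
    input = %{"AccountId" => "123456789012", "RequestedJobStatus" => "Cancelled"}
    {:ok, _body, _response} = update_job_status(client, "example-job-id", input)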
"""
def update_job_status(client, job_id, input, options \\ []) do
path_ = "/v20180820/jobs/#{URI.encode(job_id)}/status"
{headers, input} =
[
{"AccountId", "x-amz-account-id"},
]
|> AWS.Request.build_params(input)
{query_, input} =
[
{"RequestedJobStatus", "requestedJobStatus"},
{"StatusUpdateReason", "statusUpdateReason"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "s3"}
account_id = :proplists.get_value("x-amz-account-id", headers)
host = build_host(account_id, "s3-control", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "text/xml"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_account_id, _endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_account_id, _endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(:undefined, _endpoint_prefix, _client) do
raise "missing account_id"
end
defp build_host(account_id, endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{account_id}.#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :xml) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :xml)
end
end | lib/aws/generated/s3_control.ex | 0.864067 | 0.497131 | s3_control.ex | starcoder |
defmodule StarkInfra.PixChargeback do
alias __MODULE__, as: PixChargeback
alias StarkInfra.Utils.Rest
alias StarkInfra.Utils.Check
alias StarkInfra.User.Project
alias StarkInfra.User.Organization
alias StarkInfra.Error
@moduledoc """
Groups PixChargeback-related functions
"""
@doc """
A Pix chargeback can be created when fraud is detected on a transaction or a system malfunction
results in an erroneous transaction.
It notifies another participant of your request to reverse the payment they have received.
When you initialize a PixChargeback, the entity will not be automatically
created in the Stark Infra API. The 'create' function sends the objects
to the Stark Infra API and returns the created struct.
## Parameters (required):
- `:amount` [integer]: amount in cents to be reversed. ex: 11234 (= R$ 112.34)
- `:reference_id` [string]: end_to_end_id or return_id of the transaction to be reversed. ex: "E20018183202201201450u34sDGd19lz"
- `:reason` [string]: reason why the reversal was requested. Options: "fraud", "flaw", "reversalChargeback"
## Parameters (optional):
- `:description` [string, default nil]: description for the PixChargeback.
## Attributes (return-only):
- `:id` [string]: unique id returned when the PixChargeback is created. ex: "5656565656565656"
- `:analysis` [string]: analysis that led to the result.
- `:bacen_id` [string]: central bank's unique UUID that identifies the PixChargeback.
- `:sender_bank_code` [string]: bank_code of the Pix participant that created the PixChargeback. ex: "20018183"
- `:receiver_bank_code` [string]: bank_code of the Pix participant that received the PixChargeback. ex: "20018183"
- `:rejection_reason` [string]: reason for the rejection of the Pix chargeback. Options: "noBalance", "accountClosed", "unableToReverse"
- `:reversal_reference_id` [string]: return id of the reversal transaction. ex: "D20018183202202030109X3OoBHG74wo".
- `:result` [string]: result after the analysis of the PixChargeback by the receiving party. Options: "rejected", "accepted", "partiallyAccepted"
- `:status` [string]: current PixChargeback status. Options: "created", "failed", "delivered", "closed", "canceled".
- `:created` [DateTime]: creation datetime for the PixChargeback. ex: ~U[2020-03-10 10:30:00Z]
- `:updated` [DateTime]: latest update datetime for the PixChargeback. ex: ~U[2020-03-10 10:30:00Z]
"""
@enforce_keys [
:amount,
:reference_id,
:reason
]
defstruct [
:amount,
:reference_id,
:reason,
:description,
:id,
:analysis,
:bacen_id,
:sender_bank_code,
:receiver_bank_code,
:rejection_reason,
:reversal_reference_id,
:result,
:status,
:created,
:updated
]
@type t() :: %__MODULE__{}
@doc """
Create a PixChargeback in the Stark Infra API
## Parameters (required):
- `:chargebacks` [list of PixChargeback]: list of PixChargeback structs to be created in the API.
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of PixChargeback structs with updated attributes
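## Example

A minimal sketch using the illustrative values from the attribute docs above:

    {:ok, [chargeback]} =
      StarkInfra.PixChargeback.create([
        %{
          amount: 11234,
          reference_id: "E20018183202201201450u34sDGd19lz",
          reason: "fraud"
        }
      ])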
"""
@spec create(
chargebacks: [PixChargeback.t() | map()],
user: Organization.t() | Project.t() | nil
) ::
{:ok, [PixChargeback.t()]} |
{:error, Error.t()}
def create(chargebacks, options \\ []) do
Rest.post(
resource(),
chargebacks,
options
)
end
@doc """
Same as create(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec create!(
chargebacks: [PixChargeback.t() | map()],
user: Organization.t() | Project.t() | nil
) :: any
def create!(chargebacks, options \\ []) do
Rest.post!(
resource(),
chargebacks,
options
)
end
@doc """
Retrieve the PixChargeback struct linked to your Workspace in the Stark Infra API using its id.
## Parameters (required):
- `:id` [string]: struct unique id. ex: "5656565656565656".
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixChargeback struct that corresponds to the given id.
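## Example

A minimal sketch; the id is illustrative:

    {:ok, chargeback} = StarkInfra.PixChargeback.get("5656565656565656")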
"""
@spec get(
id: binary,
user: Organization.t() | Project.t() | nil
) ::
{:ok, PixChargeback.t()} |
{:error, Error.t()}
def get(id, options \\ []) do
Rest.get_id(
resource(),
id,
options
)
end
@doc """
Same as get(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec get!(
id: binary,
user: Organization.t() | Project.t() | nil
) :: any
def get!(id, options \\ []) do
Rest.get_id!(
resource(),
id,
options
)
end
@doc """
Receive a stream of PixChargeback structs previously created in the Stark Infra API
## Options:
- `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020-03-10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020-03-10]
- `:status` [list of strings, default nil]: filter for status of retrieved objects. ex: ["created", "failed", "delivered", "closed", "canceled"]
- `:ids` [list of strings, default nil]: list of ids to filter retrieved objects. ex: ["5656565656565656", "4545454545454545"]
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- stream of PixChargeback structs with updated attributes
"""
@spec query(
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
ids: [binary],
user: Organization.t() | Project.t() | nil
) ::
{:ok, [PixChargeback.t()]} |
{:error, Error.t()}
def query(options \\ []) do
Rest.get_list(
resource(),
options
)
end
@doc """
Same as query(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec query!(
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
ids: [binary],
user: Organization.t() | Project.t() | nil
) :: any
def query!(options \\ []) do
Rest.get_list!(
resource(),
options
)
end
@doc """
Receive a list of up to 100 PixChargeback structs previously created in the Stark Infra API
## Options:
- `:cursor` [string, default nil]: cursor returned on the previous page function call.
- `:limit` [integer, default 100]: maximum number of structs to be retrieved. Max = 100. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020-03-10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020-03-10]
- `:status` [list of strings, default nil]: filter for status of retrieved objects. ex: ["created", "failed", "delivered", "closed", "canceled"]
- `:ids` [list of strings, default nil]: list of ids to filter retrieved objects. ex: ["5656565656565656", "4545454545454545"]
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of PixChargeback structs with updated attributes
- cursor to retrieve the next page of PixChargeback objects
"""
@spec page(
cursor: binary,
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
ids: [binary],
user: Organization.t() | Project.t() | nil
) ::
{:ok, {binary, [PixChargeback.t()]}} |
{:error, Error.t()}
def page(options \\ []) do
Rest.get_page(
resource(),
options
)
end
@doc """
Same as page(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec page!(
cursor: binary,
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
ids: [binary],
user: Organization.t() | Project.t() | nil
) :: any
def page!(options \\ []) do
Rest.get_page!(
resource(),
options
)
end
@doc """
Respond to a received PixChargeback.
## Parameters (required):
- `:id` [string]: PixChargeback id. ex: '5656565656565656'
- `:result` [string]: result after the analysis of the PixChargeback. Options: "rejected", "accepted", "partiallyAccepted".
## Parameters (conditionally required):
- `rejection_reason` [string, default nil]: if the PixChargeback is rejected a reason is required. Options: "noBalance", "accountClosed", "unableToReverse",
- `reversal_reference_id` [string, default nil]: return_id of the reversal transaction. ex: "D20018183202201201450u34sDGd19lz"
## Parameters (optional):
- `analysis` [string, default nil]: description of the analysis that led to the result.
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixChargeback with updated attributes
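## Example

A minimal sketch; the id and rejection reason are illustrative:

    {:ok, chargeback} =
      StarkInfra.PixChargeback.update(
        "5656565656565656",
        "rejected",
        rejection_reason: "unableToReverse"
      )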
"""
@spec update(
binary,
result: binary,
rejection_reason: binary,
reversal_reference_id: binary,
analysis: binary,
user: Project.t() | Organization.t()
) ::
{:ok, PixChargeback.t()} |
{:error, [%Error{}]}
def update(id, result, parameters \\ []) do
parameters = [result: result] ++ parameters
Rest.patch_id(
resource(),
id,
parameters
)
end
@doc """
Same as update(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec update!(
binary,
result: binary,
rejection_reason: binary,
reversal_reference_id: binary,
analysis: binary,
user: Project.t() | Organization.t()
) :: any
def update!(id, result, parameters \\ []) do
parameters = [result: result] ++ parameters
Rest.patch_id!(
resource(),
id,
parameters
)
end
@doc """
Cancel a PixChargeback entity previously created in the Stark Infra API
## Parameters (required):
- `:id` [string]: struct unique id. ex: "5656565656565656"
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- canceled PixChargeback struct
"""
@spec cancel(
id: binary,
user: Organization.t() | Project.t() | nil
) ::
{:ok, PixChargeback.t()} |
{:error, Error.t()}
def cancel(id, options \\ []) do
Rest.delete_id(
resource(),
id,
options
)
end
@doc """
Same as cancel(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec cancel!(
id: binary,
user: Organization.t() | Project.t() | nil
) :: any
def cancel!(id, options \\ []) do
Rest.delete_id!(
resource(),
id,
options
)
end
@doc false
def resource() do
{
"PixChargeback",
&resource_maker/1
}
end
@doc false
def resource_maker(json) do
%PixChargeback{
amount: json[:amount],
reference_id: json[:reference_id],
reason: json[:reason],
description: json[:description],
analysis: json[:analysis],
bacen_id: json[:bacen_id],
sender_bank_code: json[:sender_bank_code],
receiver_bank_code: json[:receiver_bank_code],
rejection_reason: json[:rejection_reason],
reversal_reference_id: json[:reversal_reference_id],
id: json[:id],
result: json[:result],
status: json[:status],
created: json[:created] |> Check.datetime(),
updated: json[:updated] |> Check.datetime(),
}
end
end | lib/pix_chargeback/pix_chargeback.ex | 0.910784 | 0.632531 | pix_chargeback.ex | starcoder |
defmodule ExPlasma.Transaction.Type.PaymentV1 do
@moduledoc false
@behaviour ExPlasma.Transaction
alias ExPlasma.Transaction
@type validation_responses() ::
ExPlasma.Output.Type.PaymentV1.validation_responses()
| {:error, {:inputs, :cannot_exceed_maximum_value}}
| {:error, {:outputs, :cannot_exceed_maximum_value}}
# The maximum input and outputs the Transaction can have.
@output_limit 4
@tx_type 1
# Currently, the plasma-contracts don't have these
# values set, so we mark them explicitly empty.
@empty_tx_data 0
@empty_metadata <<0::256>>
@doc """
Encode the given Transaction into an RLP encodeable list.
## Example
iex> txn = %ExPlasma.Transaction{
...> inputs: [%ExPlasma.Output{output_data: nil, output_id: %{blknum: 0, oindex: 0, position: 0, txindex: 0}, output_type: nil}],
...> metadata: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
...> outputs: [
...> %ExPlasma.Output{
...> output_data: %{amount: 1, output_guard: <<29, 246, 47, 41, 27, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>,
...> token: <<46, 38, 45, 41, 28, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>}, output_id: nil, output_type: 1
...> }
...> ],
...> sigs: [],
...> tx_data: <<0>>,
...> tx_type: 1
...>}
iex> ExPlasma.Transaction.Type.PaymentV1.to_rlp(txn)
[[], <<1>>, [<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>], [[<<1>>, [<<29, 246, 47, 41, 27, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>, <<46, 38, 45, 41, 28, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>, <<1>>]]], 0, <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>]
"""
@impl Transaction
@spec to_rlp(Transaction.t()) :: list()
def to_rlp(%{} = transaction) do
[
transaction.sigs,
<<@tx_type>>,
Enum.map(transaction.inputs, &ExPlasma.Output.to_rlp_id/1),
Enum.map(transaction.outputs, &ExPlasma.Output.to_rlp/1),
@empty_tx_data,
transaction.metadata || @empty_metadata
]
end
@doc """
Decodes an RLP list into a Payment V1 Transaction.
## Example
iex> rlp = [
...> [],
...> <<1>>,
...> [<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>],
...> [
...> [
...> <<1>>,
...> [<<29, 246, 47, 41, 27, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>, <<46, 38, 45, 41, 28, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>, <<1>>]
...> ]
...> ],
...> 0,
...> <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
...>]
iex> ExPlasma.Transaction.Type.PaymentV1.to_map(rlp)
%ExPlasma.Transaction{
inputs: [
%ExPlasma.Output{
output_data: nil,
output_id: %{blknum: 0, oindex: 0, position: 0, txindex: 0},
output_type: nil
}
],
metadata: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
outputs: [
%ExPlasma.Output{
output_data: %{
amount: 1,
output_guard: <<29, 246, 47, 41, 27, 46, 150, 159, 176, 132, 157, 153,
217, 206, 65, 226, 241, 55, 0, 110>>,
token: <<46, 38, 45, 41, 28, 46, 150, 159, 176, 132, 157, 153, 217, 206,
65, 226, 241, 55, 0, 110>>
},
output_id: nil,
output_type: 1
}
],
sigs: [],
tx_data: 0,
tx_type: 1
}
"""
@impl Transaction
@spec to_map(list()) :: Transaction.t()
def to_map(rlp) when is_list(rlp), do: do_to_map(rlp)
defp do_to_map([sigs, tx_type, inputs, outputs, "", metadata]),
do: do_to_map([sigs, tx_type, inputs, outputs, 0, metadata])
defp do_to_map([sigs, <<tx_type>>, inputs, outputs, tx_data, metadata]),
do: do_to_map([sigs, tx_type, inputs, outputs, tx_data, metadata])
defp do_to_map([sigs, tx_type, inputs, outputs, tx_data, metadata]) do
%ExPlasma.Transaction{
sigs: sigs,
tx_type: tx_type,
inputs: Enum.map(inputs, &ExPlasma.Output.decode_id/1),
outputs: Enum.map(outputs, &ExPlasma.Output.decode/1),
tx_data: tx_data,
metadata: metadata
}
end
@doc """
Validates the Transaction.
## Example
iex> txn = %{inputs: [%{output_data: [], output_id: %{blknum: 0, oindex: 0, position: 0, txindex: 0}, output_type: nil}], metadata: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>, outputs: [%{output_data: %{amount: <<0, 0, 0, 0, 0, 0, 0, 1>>, output_guard: <<29, 246, 47, 41, 27, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>, token: <<46, 38, 45, 41, 28, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>}, output_id: nil, output_type: 1}], sigs: [], tx_data: <<0>>, tx_type: <<1>>}
iex> {:ok, ^txn} = ExPlasma.Transaction.Type.PaymentV1.validate(txn)
"""
@impl Transaction
@spec validate(map()) :: validation_responses()
def validate(%{} = transaction) do
with {:ok, _inputs} <- do_validate_total(:inputs, transaction.inputs, 0),
{:ok, _outputs} <- do_validate_total(:outputs, transaction.outputs, 1) do
{:ok, transaction}
end
end
defp do_validate_total(field, list, _min_limit) when length(list) > @output_limit do
{:error, {field, :cannot_exceed_maximum_value}}
end
defp do_validate_total(field, list, min_limit) when length(list) < min_limit do
{:error, {field, :cannot_subceed_minimum_value}}
end
defp do_validate_total(_field, list, _min_limit), do: {:ok, list}
end | lib/ex_plasma/transaction/type/payment_v1.ex | 0.799481 | 0.512327 | payment_v1.ex | starcoder |
defmodule Snitch.Data.Model.Payment do
@moduledoc """
Payment API and utilities.
Payment is a polymorphic entity due to the many different kinds of "sources"
of a payment. Hence, Payments are not a concrete entity in Snitch, and thus
can be created or updated only by their concrete subtypes.
To fetch the (associated) concrete subtype, use the convenience utility,
`to_subtype/1`
> For a list of supported payment sources, see
`Snitch.Data.Schema.Payment.PaymentMethod`
"""
use Snitch.Data.Model
import Snitch.Tools.Helper.QueryFragment
alias Snitch.Data.Schema.Payment
alias Snitch.Data.Model.CardPayment, as: CardPaymentModel
@doc """
Updates an existing `Payment`
See `Snitch.Data.Schema.Payment.changeset/3` with the `:update` action.
"""
def update(id_or_instance, params) do
QH.update(Payment, params, id_or_instance, Repo)
end
@spec get(map | non_neg_integer) :: {:ok, Payment.t()} | {:error, atom}
def get(query_fields_or_primary_key) do
QH.get(Payment, query_fields_or_primary_key, Repo)
end
@spec get_all() :: [Payment.t()]
def get_all, do: Repo.all(Payment)
@doc """
Deletes the record for supplied `payment` struct.
"""
@spec delete(Payment.t()) :: {:ok, Payment.t()} | {:error, Ecto.Changeset.t()}
def delete(%Payment{} = payment) do
QH.delete(Payment, payment, Repo)
end
@doc """
Fetch the (associated) concrete Payment subtype.
> Note that the `:payment` association is not loaded.
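## Example

A minimal sketch; `payment_id` is illustrative:

    # A check payment ("chk") is returned as-is; a card payment ("ccd")
    # resolves to its associated CardPayment record.
    subtype = Snitch.Data.Model.Payment.to_subtype(payment_id)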
"""
@spec to_subtype(non_neg_integer | Payment.t()) :: struct | nil
def to_subtype(id_or_instance)
def to_subtype(payment_id) when is_integer(payment_id) do
  {:ok, payment} = get(%{id: payment_id})
  to_subtype(payment)
end
def to_subtype(payment) when is_nil(payment), do: nil
def to_subtype(%Payment{payment_type: "chk"} = payment), do: payment
def to_subtype(%Payment{payment_type: "ccd"} = payment) do
CardPaymentModel.from_payment(payment.id)
end
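# Note: despite the name, this returns the total payment `amount` per day
# (grouped by insertion date), not a row count.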
def get_payment_count_by_date(start_date, end_date) do
Payment
|> where([p], p.inserted_at >= ^start_date and p.inserted_at <= ^end_date)
|> group_by([p], to_char(p.inserted_at, "YYYY-MM-DD"))
|> select([p], %{
date: to_char(p.inserted_at, "YYYY-MM-DD"),
count: type(sum(p.amount), p.amount)
})
|> Repo.all()
|> Enum.sort_by(&{Map.get(&1, :date)})
end
end | apps/snitch_core/lib/core/data/model/payment/payment.ex | 0.851768 | 0.54056 | payment.ex | starcoder |
defmodule Cldr.Calendar.Duration do
@moduledoc """
Functions to create and format a difference between
two dates, times or datetimes.
The difference between two dates (or times or datetimes) is
usually defined in terms of days or seconds.
A duration is calculated as the difference in time in calendar
units: years, months, days, hours, minutes, seconds and microseconds.
This is useful to support formatting a string for users in
easy-to-understand terms. For example `11 months, 3 days and 4 minutes`
is a lot easier to understand than `28771440` seconds.
The package [ex_cldr_units](https://hex.pm/packages/ex_cldr_units) can
be optionally configured to provide localized formatting of durations.
If configured, the following providers should be configured in the
appropriate CLDR backend module. For example:
```elixir
defmodule MyApp.Cldr do
use Cldr,
locales: ["en", "ja"],
providers: [Cldr.Calendar, Cldr.Number, Cldr.Unit, Cldr.List]
end
```
"""
@struct_list [year: 0, month: 0, day: 0, hour: 0, minute: 0, second: 0, microsecond: 0]
@keys Keyword.keys(@struct_list)
defstruct @struct_list
@typedoc "Duration in calendar units"
@type t :: %__MODULE__{
year: non_neg_integer(),
month: non_neg_integer(),
day: non_neg_integer(),
hour: non_neg_integer(),
minute: non_neg_integer(),
second: non_neg_integer(),
microsecond: non_neg_integer()
}
@typedoc "A date, time, naivedatetime or datetime"
@type date_or_time_or_datetime ::
Calendar.date()
| Calendar.time()
| Calendar.datetime()
| Calendar.naive_datetime()
@typedoc "A interval as either Date.Range.t() CalendarInterval.t()"
@type interval :: Date.Range.t() | CalendarInterval.t()
@microseconds_in_second 1_000_000
@microseconds_in_day 86_400_000_000
if Code.ensure_loaded?(Cldr.Unit) do
@doc """
Returns a string formatted representation of
a duration.
Note that time units that are zero are omitted
from the output.
Formatting is delegated to `Cldr.Unit.to_string/2`.
## Arguments
* `duration` is a duration of type `t()` returned
by `Cldr.Calendar.Duration.new/2`
* `options` is a Keyword list of options
## Options
* `:except` is a list of time units to be omitted from
the formatted output. It may be useful to use
`except: [:microsecond]` for example. The default is
`[]`.
* `:locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`.
The default is `Cldr.get_locale/0`.
* `:backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend/0`.
* `:list_options` is a list of options passed to `Cldr.List.to_string/3` to
control the final list output.
Any other options are passed to `Cldr.Number.to_string/3` and
`Cldr.Unit.to_string/3` during the formatting process.
## Note
* Any duration parts that are `0` are not output.
## Example
iex> {:ok, duration} = Cldr.Calendar.Duration.new(~D[2019-01-01], ~D[2019-12-31])
iex> Cldr.Calendar.Duration.to_string(duration)
{:ok, "11 months and 30 days"}
"""
def to_string(%__MODULE__{} = duration, options \\ []) do
{except, options} = Keyword.pop(options, :except, [])
for key <- @keys, value = Map.get(duration, key), value != 0 && key not in except do
Cldr.Unit.new!(key, value)
end
|> Cldr.Unit.to_string(options)
end
else
@doc """
Returns a string formatted representation of
a duration.
Note that time units that are zero are omitted
from the output.
## Localized formatting
If localized formatting of a duration is desired,
add `{:ex_cldr_units, "~> 2.0"}` to your `mix.exs`
and ensure you have configured your providers in
your backend configuration to include: `providers:
[Cldr.Calendar, Cldr.Number, Cldr.Unit, Cldr.List]`
## Arguments
* `duration` is a duration of type `t()` returned
by `Cldr.Calendar.Duration.new/2`
* `options` is a Keyword list of options
## Options
* `:except` is a list of time units to be omitted from
the formatted output. It may be useful to use
`except: [:microsecond]` for example. The default is
`[]`.
## Example
iex> {:ok, duration} = Cldr.Calendar.Duration.new(~D[2019-01-01], ~D[2019-12-31])
iex> Cldr.Calendar.Duration.to_string(duration)
{:ok, "11 months, 30 days"}
"""
def to_string(%__MODULE__{} = duration, options \\ []) do
except = Keyword.get(options, :except, [])
formatted =
for key <- @keys, value = Map.get(duration, key), value != 0 && key not in except do
if value > 1, do: "#{value} #{key}s", else: "#{value} #{key}"
end
|> Enum.join(", ")
{:ok, formatted}
end
end
@doc """
Formats a duration as a string or raises
an exception on error.
## Arguments
* `duration` is a duration of type `t()` returned
by `Cldr.Calendar.Duration.new/2`
* `options` is a Keyword list of options
## Options
See `Cldr.Calendar.Duration.to_string/2`
## Returns
* A formatted string or
* raises an exception
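## Example
The output below assumes `ex_cldr_units` is configured, as in the
`to_string/2` examples above.
iex> duration = Cldr.Calendar.Duration.new!(~D[2019-01-01], ~D[2019-12-31])
iex> Cldr.Calendar.Duration.to_string!(duration)
"11 months and 30 days"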
"""
@spec to_string!(t(), Keyword.t()) :: String.t() | no_return
def to_string!(%__MODULE__{} = duration, options \\ []) do
case to_string(duration, options) do
{:ok, string} -> string
{:error, {exception, reason}} -> raise exception, reason
end
end
@doc """
Calculates the calendar difference between two dates
returning a `Duration` struct.
The difference calculated is in terms of years, months,
days, hours, minutes, seconds and microseconds.
## Arguments
* `from` is a date, time or datetime representing the
start of the duration.
* `to` is a date, time or datetime representing the
end of the duration
## Notes
* `from` must be before or at the same time
as `to`. In addition, both `from` and `to` must
be in the same calendar
* If `from` and `to` are `datetime`s then
they must both be in the same time zone
## Returns
* A `{:ok, duration}` tuple or a
* `{:error, {exception, reason}}` tuple
## Example
iex> Cldr.Calendar.Duration.new(~D[2019-01-01], ~D[2019-12-31])
{:ok,
%Cldr.Calendar.Duration{
year: 0,
month: 11,
day: 30,
hour: 0,
microsecond: 0,
minute: 0,
second: 0
}}
"""
@spec new(from :: date_or_time_or_datetime(), to :: date_or_time_or_datetime()) ::
{:ok, t()} | {:error, {module(), String.t()}}
def new(unquote(Cldr.Calendar.datetime()) = from, unquote(Cldr.Calendar.datetime()) = to) do
with :ok <- confirm_same_time_zone(from, to),
:ok <- confirm_date_order(from, to) do
time_diff = time_duration(from, to)
date_diff = date_duration(from, to)
apply_time_diff_to_duration(date_diff, time_diff, from)
end
end
def new(unquote(Cldr.Calendar.date()) = from, unquote(Cldr.Calendar.date()) = to) do
with {:ok, from} <- cast_date_time(from),
{:ok, to} <- cast_date_time(to) do
new(from, to)
end
end
def new(unquote(Cldr.Calendar.time()) = from, unquote(Cldr.Calendar.time()) = to) do
with {:ok, from} <- cast_date_time(from),
{:ok, to} <- cast_date_time(to) do
time_diff = time_duration(from, to)
{seconds, microseconds} = Cldr.Math.div_mod(time_diff, @microseconds_in_second)
{minutes, seconds} = Cldr.Math.div_mod(seconds, 60)
{hours, minutes} = Cldr.Math.div_mod(minutes, 60)
{:ok,
struct(__MODULE__, hour: hours, minute: minutes, second: seconds, microsecond: microseconds)}
end
end
# The calendars of `from` and `to` do not match; kept here so that all
# new/2 clauses stay grouped together.
def new(%{calendar: _calendar1} = from, %{calendar: _calendar2} = to) do
{:error,
{Cldr.IncompatibleCalendarError,
"The two dates must be in the same calendar. Found #{inspect(from)} and #{inspect(to)}"}}
end
@doc """
Calculates the calendar difference in
a `Date.Range` or `CalendarInterval`
returning a `Duration` struct.
The difference calculated is in terms of years, months,
days, hours, minutes, seconds and microseconds.
## Arguments
* `interval` is either ` Date.Range.t()` or a
`CalendarInterval.t()`
## Returns
* A `{:ok, duration}` tuple or a
* `{:error, {exception, reason}}` tuple
## Notes
* `CalendarInterval` is defined by the most wonderful
[calendar_interval](https://hex.pm/packages/calendar_interval)
library.
## Example
iex> Cldr.Calendar.Duration.new(Date.range(~D[2019-01-01], ~D[2019-12-31]))
{:ok,
%Cldr.Calendar.Duration{
year: 0,
month: 11,
day: 30,
hour: 0,
microsecond: 0,
minute: 0,
second: 0
}}
"""
@spec new(interval()) :: {:ok, t()} | {:error, {module(), String.t()}}
if Code.ensure_loaded?(CalendarInterval) do
def new(%CalendarInterval{first: first, last: last, precision: precision})
when precision in [:year, :month, :day] do
first = %{first | hour: 0, minute: 0, second: 0, microsecond: {0, 6}}
last = %{last | hour: 0, minute: 0, second: 0, microsecond: {0, 6}}
new(first, last)
end
def new(%CalendarInterval{first: first, last: last}) do
new(first, last)
end
end
def new(%Date.Range{first: first, last: last}) do
new(first, last)
end
defp apply_time_diff_to_duration(date_diff, time_diff, from) do
duration =
if time_diff < 0 do
back_one_day(date_diff, from)
|> merge(@microseconds_in_day + time_diff)
else
date_diff |> merge(time_diff)
end
{:ok, duration}
end
defp cast_date_time(unquote(Cldr.Calendar.datetime()) = datetime) do
_ = calendar
{:ok, datetime}
end
defp cast_date_time(unquote(Cldr.Calendar.naivedatetime()) = naivedatetime) do
_ = calendar
DateTime.from_naive(naivedatetime, "Etc/UTC")
end
defp cast_date_time(unquote(Cldr.Calendar.date()) = date) do
{:ok, dt} = NaiveDateTime.new(date.year, date.month, date.day, 0, 0, 0, {0, 6}, calendar)
DateTime.from_naive(dt, "Etc/UTC")
end
defp cast_date_time(unquote(Cldr.Calendar.time()) = time) do
{:ok, dt} =
NaiveDateTime.new(1, 1, 1, time.hour, time.minute, time.second, time.microsecond, Calendar.ISO)
DateTime.from_naive(dt, "Etc/UTC")
end
defp confirm_date_order(from, to) do
if DateTime.compare(from, to) in [:lt, :eq] do
:ok
else
{:error,
{
Cldr.InvalidDateOrder,
"`from` must be earlier or equal to `to`. " <>
"Found #{inspect(from)} and #{inspect(to)}"
}}
end
end
defp confirm_same_time_zone(%{time_zone: zone}, %{time_zone: zone}) do
:ok
end
defp confirm_same_time_zone(from, to) do
{:error,
{Cldr.IncompatibleTimeZone,
"`from` and `to` must be in the same time zone. " <>
"Found #{inspect(from)} and #{inspect(to)}"}}
end
@doc """
Calculates the calendar difference between two dates
returning a `Duration` struct.
The difference calculated is in terms of years, months,
days, hours, minutes, seconds and microseconds.
## Arguments
* `from` is a date, time or datetime representing the
start of the duration
* `to` is a date, time or datetime representing the
end of the duration
Note that `from` must be before or at the same time
as `to`. In addition, both `from` and `to` must
be in the same calendar.
## Returns
* A `duration` struct or
* raises an exception
## Example
iex> Cldr.Calendar.Duration.new!(~D[2019-01-01], ~D[2019-12-31])
%Cldr.Calendar.Duration{
year: 0,
month: 11,
day: 30,
hour: 0,
microsecond: 0,
minute: 0,
second: 0
}
"""
@spec new!(from :: date_or_time_or_datetime(), to :: date_or_time_or_datetime()) ::
t() | no_return()
def new!(from, to) do
case new(from, to) do
{:ok, duration} -> duration
{:error, {exception, reason}} -> raise exception, reason
end
end
@doc """
Calculates the calendar difference in
a `Date.Range` or `CalendarInterval`
returning a `Duration` struct.
The difference calculated is in terms of years, months,
days, hours, minutes, seconds and microseconds.
## Arguments
* `interval` is either ` Date.Range.t()` or a
`CalendarInterval.t()`
## Returns
* A `duration` struct or
* raises an exception
## Notes
* `CalendarInterval` is defined by the most wonderful
[calendar_interval](https://hex.pm/packages/calendar_interval)
library.
## Example
iex> Cldr.Calendar.Duration.new!(Date.range(~D[2019-01-01], ~D[2019-12-31]))
%Cldr.Calendar.Duration{
year: 0,
month: 11,
day: 30,
hour: 0,
microsecond: 0,
minute: 0,
second: 0
}
"""
@spec new!(interval()) :: t() | no_return()
def new!(interval) do
case new(interval) do
{:ok, duration} -> duration
{:error, {exception, reason}} -> raise exception, reason
end
end
defp time_duration(unquote(Cldr.Calendar.time()) = from, unquote(Cldr.Calendar.time()) = to) do
Time.diff(to, from, :microsecond)
end
# The two dates are the same so there is no duration
@doc false
def date_duration(
%{year: year, month: month, day: day, calendar: calendar},
%{year: year, month: month, day: day, calendar: calendar}
) do
%__MODULE__{}
end
# Two dates in the same calendar can be used
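# The calculation works like column subtraction with borrowing: when the
# day-of-month of `from` exceeds that of `to`, borrow one month's worth of
# days (the length of `from`'s current month); likewise borrow one year
# when the months underflow.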
def date_duration(%{calendar: calendar} = from, %{calendar: calendar} = to) do
increment =
if from.day > to.day do
calendar.days_in_month(from.year, from.month)
else
0
end
{day_diff, increment} =
if increment != 0 do
{increment + to.day - from.day, 1}
else
{to.day - from.day, 0}
end
{month_diff, increment} =
if from.month + increment > to.month do
{to.month + calendar.months_in_year(to.year) - from.month - increment, 1}
else
{to.month - from.month - increment, 0}
end
year_diff = to.year - from.year - increment
%__MODULE__{year: year_diff, month: month_diff, day: day_diff}
end
# When we have a negative time duration then
# we need to apply a one day adjustment to
# the date difference
defp back_one_day(date_diff, from) do
back_one_day(date_diff, :day, from)
end
defp back_one_day(%{day: 0} = date_diff, :day, from) do
months_in_year = Cldr.Calendar.months_in_year(from)
previous_month = Cldr.Math.amod(from.month - 1, months_in_year)
days_in_month = from.calendar.days_in_month(from.year, previous_month)
%{date_diff | day: days_in_month}
|> back_one_day(:month, from)
end
defp back_one_day(%{day: day} = date_diff, :day, _from) do
%{date_diff | day: day - 1}
end
defp back_one_day(%{month: 0} = date_diff, :month, from) do
months_in_year = Cldr.Calendar.months_in_year(from)
%{date_diff | month: months_in_year}
|> back_one_day(:year, from)
end
defp back_one_day(%{month: month} = date_diff, :month, _from) do
%{date_diff | month: month - 1}
end
defp back_one_day(%{year: year} = date_diff, :year, _from) do
%{date_diff | year: year - 1}
end
defp merge(duration, microseconds) do
{seconds, microseconds} = Cldr.Math.div_mod(microseconds, @microseconds_in_second)
{hours, minutes, seconds} = :calendar.seconds_to_time(seconds)
duration
|> Map.put(:hour, hours)
|> Map.put(:minute, minutes)
|> Map.put(:second, seconds)
|> Map.put(:microsecond, microseconds)
end
end | lib/cldr/calendar/duration.ex | 0.951953 | 0.917303 | duration.ex | starcoder |
defmodule RMap.ActiveSupport do
@moduledoc """
Summarizes all of the Hash functions in Rails' ActiveSupport.
If a function with the same name already exists in Elixir, it is not implemented here.
All of these functions are defined when you `use RMap.ActiveSupport`.
"""
@spec __using__(any) :: list
defmacro __using__(_opts) do
RUtils.define_all_functions!(__MODULE__)
end
import RMap.Ruby
# https://www.rubydoc.info/gems/activesupport/Hash
# [:as_json, :assert_valid_keys, :compact_blank, :compact_blank!, :deep_dup, :deep_merge, :deep_merge!, :deep_stringify_keys, :deep_stringify_keys!, :deep_symbolize_keys, :deep_symbolize_keys!, :deep_transform_keys, :deep_transform_keys!, :deep_transform_values, :deep_transform_values!, :except, :except!, :extract!, :extractable_options?, :reverse_merge, :reverse_merge!, :slice!, :stringify_keys, :stringify_keys!, :symbolize_keys, :symbolize_keys!, :to_query, :to_xml, :with_indifferent_access]
# |> RUtils.required_functions([List, RMap.Ruby, REnum])
# × as_json
# ✔ assert_valid_keys
# × deep_dup
# × deep_merge
# ✔ deep_stringify_keys
# ✔ deep_symbolize_keys
# ✔ deep_transform_keys
# ✔ deep_transform_values
# × extractable_options?
# × reverse_merge
# ✔ stringify_keys
# ✔ symbolize_keys
# to_query
# to_xml
# with_indifferent_access TODO: Low priority
@doc """
Validates all keys in a map match given keys, raising ArgumentError on a mismatch.
## Examples
iex> RMap.assert_valid_keys(%{name: "Rob", years: "28"}, [:name, :age])
** (ArgumentError) Unknown key: years. Valid keys are: name, age
iex> RMap.assert_valid_keys(%{name: "Rob", age: "28"}, ["age"])
** (ArgumentError) Unknown key: age. Valid keys are: age
iex> RMap.assert_valid_keys(%{name: "Rob", age: "28"}, [:name, :age])
:ok
"""
@spec assert_valid_keys(map(), list()) :: :ok
def assert_valid_keys(map, keys) do
valid_keys_str = Enum.join(keys, ", ")
each_key(map, fn key ->
if(key not in keys) do
raise ArgumentError, "Unknown key: #{key}. Valid keys are: #{valid_keys_str}"
end
end)
end
@doc """
Returns a map with all keys converted to strings.
## Examples
iex> RMap.stringify_keys(%{name: "Rob", years: "28", nested: %{ a: 1 }})
%{"name" => "Rob", "nested" => %{a: 1}, "years" => "28"}
"""
@spec stringify_keys(map()) :: map()
def stringify_keys(map) do
transform_keys(map, &to_string(&1))
end
@doc """
Returns a map with all keys converted to strings.
This includes the keys from the root map and from all nested maps and arrays.
## Examples
iex> RMap.deep_stringify_keys(%{name: "Rob", years: "28", nested: %{ a: 1 }})
%{"name" => "Rob", "nested" => %{"a" => 1}, "years" => "28"}
iex> RMap.deep_stringify_keys(%{a: %{b: %{c: 1}, d: [%{a: 1, b: %{c: 2}}]}})
%{"a" => %{"b" => %{"c" => 1}, "d" => [%{"a" => 1, "b" => %{"c" => 2}}]}}
"""
@spec deep_stringify_keys(map()) :: map()
def deep_stringify_keys(map) do
deep_transform_keys(map, &to_string(&1))
end
@doc """
Returns a map with all keys converted to atoms.
## Examples
iex> RMap.symbolize_keys(%{"name" => "Rob", "years" => "28", "nested" => %{ "a" => 1 }})
%{name: "Rob", nested: %{"a" => 1}, years: "28"}
"""
@spec symbolize_keys(map()) :: map()
def symbolize_keys(map) do
transform_keys(map, &String.to_atom(&1))
end
@doc """
Returns a map with all keys converted to atoms.
This includes the keys from the root map and from all nested maps and arrays.
## Examples
iex> RMap.deep_symbolize_keys(%{"name" => "Rob", "years" => "28", "nested" => %{ "a" => 1 }})
%{name: "Rob", nested: %{a: 1}, years: "28"}
iex> RMap.deep_symbolize_keys(%{"a" => %{"b" => %{"c" => 1}, "d" => [%{"a" => 1, "b" => %{"c" => 2}}]}})
%{a: %{b: %{c: 1}, d: [%{a: 1, b: %{c: 2}}]}}
"""
@spec deep_symbolize_keys(map()) :: map()
def deep_symbolize_keys(map) do
deep_transform_keys(map, &String.to_atom(&1))
end
@doc """
Returns a map with all keys converted by the function.
This includes the keys from the root map and from all nested maps and arrays.
## Examples
iex> RMap.deep_transform_keys(%{a: %{b: %{c: 1}}}, &to_string(&1))
%{"a" => %{"b" => %{"c" => 1}}}
iex> RMap.deep_transform_keys(%{a: %{b: %{c: 1}, d: [%{a: 1, b: %{c: 2}}]}}, &inspect(&1))
%{":a" => %{":b" => %{":c" => 1}, ":d" => [%{":a" => 1, ":b" => %{":c" => 2}}]}}
"""
@spec deep_transform_keys(map(), function()) :: map()
def deep_transform_keys(map, func) do
map
|> Enum.map(fn {k, v} ->
cond do
is_map(v) -> {func.(k), deep_transform_keys(v, func)}
is_list(v) -> {func.(k), Enum.map(v, fn el -> deep_transform_keys(el, func) end)}
true -> {func.(k), v}
end
end)
|> Map.new()
end
@doc """
Returns a map with all values converted by the function.
This includes the values from the root map and from all nested maps and arrays.
## Examples
iex> RMap.deep_transform_values(%{a: %{b: %{c: 1}}, d: 2}, &inspect(&1))
%{a: %{b: %{c: "1"}}, d: "2"}
iex> RMap.deep_transform_values(%{a: %{b: %{c: 1}, d: [%{a: 1, b: %{c: 2}}]}}, &inspect(&1))
%{a: %{b: %{c: "1"}, d: [%{a: "1", b: %{c: "2"}}]}}
"""
@spec deep_transform_values(map(), function()) :: map()
def deep_transform_values(map, func) do
Enum.map(map, fn {k, v} ->
cond do
is_map(v) -> {k, deep_transform_values(v, func)}
is_list(v) -> {k, Enum.map(v, fn el -> deep_transform_values(el, func) end)}
true -> {k, func.(v)}
end
end)
|> Map.new()
end
defdelegate atomize_keys(map), to: __MODULE__, as: :symbolize_keys
defdelegate deep_atomize_keys(map), to: __MODULE__, as: :deep_symbolize_keys
end | lib/r_map/active_support.ex | 0.691185 | 0.596257 | active_support.ex | starcoder |
defmodule Delugex.Projection do
@moduledoc """
Project events upon an entity to convert it to an up-to-date value.
## use Delugex.Projection
Will provide a default `apply` implementation that will catch any event and
just return the entity as is. It will also log a warn, reporting that the
event is unhandled.
The developer is expected to `use` this module and provide its own
implementations of `apply` (using guard-clauses).
### Examples
```
defmodule UserProjection do
use Delugex.Projection
def apply(%User{} = user, %EmailChanged{email: email}) do
Map.put(user, :email, email)
end
end
```
In this case, when called with `apply(%User{}, %NotHandledEvent{})` the
developer will simply get back `%User{}`, however if called with:
```
apply(%User{email: "<EMAIL>"}, %EmailChanged{email: "<EMAIL>"})
```
The returned value is `%User{email: "<EMAIL>"}`
"""
alias Delugex.Logger
@callback apply(entity :: any(), event :: any()) :: any()
@callback apply_all(
entity :: any(),
events :: Enumerable.t()
) :: any()
defmacro __using__(_) do
quote location: :keep do
@behaviour unquote(__MODULE__)
@before_compile Delugex.Projection.Unhandled
@impl unquote(__MODULE__)
def apply_all(entity, events) when is_list(events) do
unquote(__MODULE__).apply_all(__MODULE__, entity, events)
end
end
end
@doc """
Takes an entity, a list of events, and a module that can project those
events onto the entity; applies all of the events in order and returns
the newly updated entity.
It also logs debug info whenever an event is applied.
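## Example
An illustrative call, reusing the `UserProjection` module from the
moduledoc:
    Delugex.Projection.apply_all(UserProjection, %User{}, [
      %EmailChanged{email: "<EMAIL>"}
    ])
    # => %User{email: "<EMAIL>"}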
"""
@spec apply_all(
projection :: module,
current_entity :: any(),
events :: Enumerable.t()
) :: any()
def apply_all(projection, current_entity, events \\ []) do
Enum.reduce(events, current_entity, fn event, entity ->
Logger.debug(fn ->
"Applying #{event.__struct__}"
end)
projection.apply(entity, event)
end)
end
end | lib/delugex/projection.ex | 0.822973 | 0.776665 | projection.ex | starcoder |
defprotocol Transformable do
@moduledoc """
Transform arbitrary maps and keyword lists into structs.
Transformable is a wrapper around `struct/2`. Out of the box, it supports
easily converting Maps and Keyword Lists into structs. Like with `struct/2`,
only the keys in the struct will be pulled out of your initial data structure.
Maps passed in can have either string or atom keys, but Transformable doesn't
use the (unsafe) `String.to_atom/1`. Default values on the struct will be
respected, or can be overriden.
Transformable is defined as a Protocol with implementations for Map and List.
You can write your own implementation and use `transform/2` to specify custom
outputs.
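For example, a minimal sketch of a custom implementation for `MapSet`
(assuming the set holds `{key, value}` tuples) could look like:
    defimpl Transformable, for: MapSet do
      def transform(map_set, opts) do
        # Build a map from the {key, value} tuples, then delegate to the
        # existing Map implementation.
        map_set
        |> Map.new()
        |> Transformable.transform(opts)
      end
    end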
"""
@doc """
Takes a Map or Keyword List and a target, and transforms the former into the latter.
The target can be a struct or a module that exposes a struct. The target can
also be a set of options to configure the transformation logic. Valid options
are:
- `as` - The transform target (struct or module)
- `default` - Overrides the default value defined in the target struct with the
value provided
## Examples
# Our target
defmodule Tester do
defstruct [:id, name: ""]
end
# The target is just a module alias
iex> Transformable.transform(%{id: 1}, Tester)
%Tester{id: 1, name: ""}
# The target is a struct
iex> Transformable.transform(%{id: 1}, %Tester{})
%Tester{id: 1, name: ""}
# With options, here we are overriding the struct defaults
iex> Transformable.transform(%{id: 1}, as: Tester, default: false)
%Tester{id: 1, name: false}
# The data to transform can also have string keys
iex> Transformable.transform(%{"id" => 1}, Tester)
%Tester{id: 1, name: ""}
# OR the data to transform can be a keyword list
iex> Transformable.transform([id: 1], Tester)
%Tester{id: 1, name: ""}
"""
@spec transform(map() | keyword(), keyword() | module() | struct()) :: struct()
def transform(entity, opts)
end
defimpl Transformable, for: Map do
alias Transformable.Utils
def transform(map, opts), do: Utils.transform(map, opts)
end
defimpl Transformable, for: List do
alias Transformable.Utils
def transform(kwl, opts) do
kwl
|> Map.new()
|> Utils.transform(opts)
end
end | lib/transformable.ex | 0.920083 | 0.7641 | transformable.ex | starcoder |
defmodule Routemaster.Fetcher.Caching do
@moduledoc """
Response caching middleware.
For each HTTP request (the `Fetcher` only supports GET requests)
it looks up a response in the cache using the URL as key.
If a cached response is found, it is immediately returned. If
nothing is found, it executes the HTTP request and then it
caches successful (200..299) responses before returning them
to the caller. Unsuccessful responses and redirects (e.g. 302,
404 or 500) are never cached and always returned to the caller.
If writing to the cache fails for some reason, an error is logged
but the request chain is not halted and the response is normally
returned.
The entire cache layer (lookups and writes) can be bypassed by
passing the `cache: false` option to `Routemaster.Fetcher.get/2`.
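For example (an illustrative call; the path argument is hypothetical):
    Routemaster.Fetcher.get("/widgets/1", cache: false)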
## Caveats
At the moment the caching strategy is very simple and does not
implement the features of the official Ruby client. For example,
it doesn't separately cache different representations of the same
entities (i.e. API version and language headers) and as a consequence
the `Fetcher` doesn't _yet_ support that kind of granularity.
Also, the entire `Tesla.Env` structure is compressed and cached as
a Redis string, while it might be more efficient (space wise) to
use a Redis Hash for different fields and rebuild the struct later.
Compressing the data in Elixir is also something to be benchmarked,
since it really boils down to CPU time vs Network IO time wile
exchanging payloads with the cache.
"""
alias Routemaster.Cache
alias Routemaster.Utils
require Logger
def call(env, next, _options) do
cache_enabled = env.opts[:cache]
env = %{env | opts: Keyword.delete(env.opts, :cache)}
if cache_enabled do
lookup_or_fetch(env, next)
else
http_request(env, next)
end
end
defp lookup_or_fetch(env, next) do
key = cache_key(env)
case Cache.read(key) do
{:ok, data} ->
Logger.debug fn ->
Utils.debug_message("Fetcher.Caching", "cache hit for #{key}", :blue)
end
data
{:miss, _} ->
http_request_and_cache(env, next, key)
end
end
defp http_request_and_cache(env, next, key) do
response = http_request(env, next)
cache_successful_response(key, response)
response
end
defp http_request(env, next) do
Tesla.run(env, next)
end
defp cache_key(env) do
env.url
end
defp cache_successful_response(key, %{status: s} = response) when s in 200..299 do
cache_response(key, response)
end
defp cache_successful_response(_, _), do: nil
defp cache_response(key, data) do
case Cache.write(key, data) do
{:ok, _} ->
nil
{:error, _} = error ->
Logger.error "Routemaster.Fetcher: can't write HTTP response to cache. Error: #{inspect error}, data: #{inspect data}"
nil
end
end
end | lib/routemaster/fetcher/caching.ex | 0.802826 | 0.400603 | caching.ex | starcoder |
defmodule Lapin.Producer do
@moduledoc """
Extensible behaviour to define producer configuration.
Lapin provides a number of submodules which implement the patterns found in
the [RabbitMQ Tutorials](http://www.rabbitmq.com/getstarted.html).
```
defmodule ExampleApp.SomePattern do
use Lapin.Producer
[... callbacks implementation ...]
end
```
"""
require Logger
alias AMQP.{Basic, Channel, Confirm, Connection}
alias Lapin.{Exchange, Message}
@doc """
Request publisher confirms (RabbitMQ only)
"""
@callback confirm(producer :: t()) :: boolean()
@doc """
Declare exchange
"""
@callback exchange(producer :: t()) :: Exchange.t()
@doc """
Request message persistence when publishing
"""
@callback persistent(producer :: t()) :: boolean()
@doc """
Request message mandatory routing when publishing
"""
@callback mandatory(producer :: t()) :: boolean()
defmacro __using__([]) do
quote do
alias Lapin.Producer
@behaviour Producer
def confirm(%Producer{config: config}), do: Keyword.get(config, :confirm, false)
def exchange(%Producer{config: config}), do: Keyword.fetch!(config, :exchange)
def mandatory(%Producer{config: config}), do: Keyword.get(config, :mandatory, false)
def persistent(%Producer{config: config}), do: Keyword.get(config, :persistent, false)
defoverridable Producer
end
end
@typedoc """
Producer configuration
The following keys are supported:
- pattern: producer pattern (module using the `Lapin.Producer` behaviour)
If using the `Lapin.Producer.Config` default implementation, the following keys are also supported:
- exchange: exchange used for publish (`String.t`, *required*)
- confirm: expect RabbitMQ publish confirms (`boolean()`, *default: false*)
- mandatory: messages published as mandatory by default (`boolean()`, *default: false*)
- persistent: messages published as persistent by default (`boolean()`, *default: false*)
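Example (illustrative values):
    [
      exchange: "orders",
      confirm: true,
      mandatory: false,
      persistent: true
    ]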
"""
@type config :: Keyword.t()
@typedoc "Lapin Producer"
@type t :: %__MODULE__{
channel: Channel.t(),
pattern: atom,
config: config,
exchange: String.t()
}
defstruct channel: nil,
pattern: nil,
config: nil,
exchange: nil
@doc """
Creates a producer from configuration
"""
@spec create(Connection.t(), config) :: t
def create(connection, config) do
pattern = Keyword.get(config, :pattern, Lapin.Producer.Config)
producer = %__MODULE__{config: config, pattern: pattern}
with {:ok, channel} <- Channel.open(connection),
producer <- %{producer | channel: channel},
exchange <- pattern.exchange(producer),
:ok <- set_confirm(producer, pattern.confirm(producer)) do
%{producer | exchange: exchange}
else
{:error, error} ->
Logger.error("Error creating producer from config #{config}: #{inspect(error)}")
producer
end
end
@doc """
Find consumer by consumer_tag
"""
@spec get([t], String.t()) :: {:ok, t} | {:error, :not_found}
def get(producers, exchange) do
case Enum.find(producers, &(&1.exchange == exchange)) do
nil -> {:error, :not_found}
producer -> {:ok, producer}
end
end
@doc """
Publish message
"""
@spec publish(t, Exchange.name(), Exchange.routing_key(), Message.payload(), Keyword.t()) ::
:ok | {:error, term}
def publish(%{channel: channel}, exchange, routing_key, payload, options) do
Basic.publish(channel, exchange, routing_key, payload, options)
end
@doc """
Wait for publish confirmation
"""
@spec confirm(t) :: boolean()
def confirm(%{channel: channel}) do
case Confirm.wait_for_confirms(channel) do
true -> true
_ -> false
end
end
defp set_confirm(_producer, false = _confirm), do: :ok
defp set_confirm(%{channel: channel}, true = _confirm) do
with :ok <- Confirm.select(channel),
:ok <- Basic.return(channel, self()) do
:ok
else
error ->
error
end
end
end | lib/lapin/producer.ex | 0.874104 | 0.728821 | producer.ex | starcoder |
defmodule ExPng.Image.Encoding do
@moduledoc """
Utility module containing functions necessary to encode an `ExPng.Image`
back into a PNG file.
"""
use ExPng.Constants
import ExPng.Utilities, only: [reduce_to_binary: 1]
alias ExPng.Chunks.{End, Header, ImageData, Palette, Transparency}
alias ExPng.{Color, Image, Image.Adam7, RawData}
@spec to_raw_data(Image.t(), ExPng.maybe(keyword)) :: {:ok, RawData.t()}
def to_raw_data(%Image{} = image, encoding_options \\ []) do
header = build_header(image, encoding_options)
filter_type = Keyword.get(encoding_options, :filter, @filter_up)
interlaced = Keyword.get(encoding_options, :interlace, false)
palette = Image.unique_pixels(image)
to_raw_data(image, header, palette, filter_type, interlaced)
end
defp to_raw_data(image, header, palette, filter_type, true) do
image_data =
image
|> Adam7.decompose_into_sub_images()
|> Enum.map(fn sub_image ->
ImageData.from_pixels(sub_image, header, filter_type, palette)
end)
|> Enum.map(fn %ImageData{data: data} -> data end)
|> Enum.reject(&is_nil/1)
|> reduce_to_binary()
raw_data = %RawData{
header_chunk: header,
data_chunk: %ImageData{data: image_data},
end_chunk: %End{}
}
raw_data =
case header.color_mode do
@indexed ->
transparency = Transparency.build_from_pixel_palette(palette)
%{
raw_data
| palette_chunk: %Palette{palette: palette},
transparency_chunk: transparency
}
_ ->
raw_data
end
{:ok, raw_data}
end
defp to_raw_data(image, header, palette, filter_type, false) do
image_data_chunk = ImageData.from_pixels(image, header, filter_type, palette)
raw_data = %RawData{
header_chunk: header,
data_chunk: image_data_chunk,
end_chunk: %End{}
}
raw_data =
case header.color_mode do
@indexed ->
transparency = Transparency.build_from_pixel_palette(palette)
%{
raw_data
| palette_chunk: %Palette{palette: palette},
transparency_chunk: transparency
}
_ ->
raw_data
end
{:ok, raw_data}
end
defp build_header(%Image{} = image, encoding_options) do
interlace =
case Keyword.get(encoding_options, :interlace, false) do
true -> 1
false -> 0
end
{bit_depth, color_mode} = bit_depth_and_color_mode(image)
%Header{
width: image.width,
height: image.height,
bit_depth: bit_depth,
color_mode: color_mode,
compression: 0,
filter: 0,
interlace: interlace
}
end
# 1. black and white
# 2. grayscale (alpha)
# 3. indexed
# 4. truecolor (alpha)
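# The clauses below pick the smallest representation that can encode every
# pixel: 1-bit grayscale for pure black/white images, 8-bit grayscale
# (with alpha when any pixel is translucent) when all pixels are gray, an
# indexed palette when there are at most 256 unique colors, and truecolor
# (with alpha) otherwise.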
defp bit_depth_and_color_mode(%Image{} = image) do
pixels = Image.unique_pixels(image)
case black_and_white?(pixels) do
true ->
{1, @grayscale}
false ->
case {indexable?(pixels), opaque?(pixels), grayscale?(pixels)} do
{_, true, true} -> {8, @grayscale}
{_, false, true} -> {8, @grayscale_alpha}
{true, _, _} -> {indexed_bit_depth(pixels), @indexed}
{_, true, false} -> {8, @truecolor}
{_, false, false} -> {8, @truecolor_alpha}
end
end
end
defp indexed_bit_depth(pixels) do
case length(pixels) do
i when i <= 2 -> 1
i when i <= 4 -> 2
i when i <= 16 -> 4
i when i <= 256 -> 8
end
end
defp indexable?(pixels) do
length(pixels) <= 256
end
defp opaque?(pixels) do
pixels
|> Enum.all?(&Color.opaque?/1)
end
defp grayscale?(pixels) do
pixels
|> Enum.all?(&Color.grayscale?/1)
end
defp black_and_white?(pixels) do
pixels
|> Enum.all?(&Color.black_or_white?/1)
end
end | lib/ex_png/image/encoding.ex | 0.822046 | 0.530601 | encoding.ex | starcoder |
defmodule Hive.H3 do
@moduledoc """
Interfaces with Erlang H3 and provides abstraction
layer to work with vehicles or `%GeoPosition{}`.
## Usage
iex> Hive.H3.index(position, resolution)
"""
use Hive.Base
@type vehicle_id() :: binary()
@type h3_index() :: non_neg_integer()
@type resolution() :: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15
@doc """
Indexes the location at the specified resolution
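Example:
iex> Hive.H3.index(%GeoPosition{latitude: 52.5, longitude: 13.4}, 9)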
"""
@spec index(GeoPosition.t(), resolution()) :: h3_index()
def index(%GeoPosition{latitude: lat, longitude: lon}, resolution) do
:h3.from_geo({lat, lon}, resolution)
end
@doc """
Indexes the location for given `vehicle_id` at the specified resolution
"""
@spec index(vehicle_id(), resolution()) :: h3_index()
def index(vehicle_id, resolution) do
position = VehicleWorker.get_position(%Vehicle{id: vehicle_id})
:h3.from_geo({position.latitude, position.longitude}, resolution)
end
@doc """
Convert string representation of H3 index to
numeric index value.
Example:
iex> Hive.H3.from_string("8928308280fffff")
"""
@spec from_string(binary()) :: h3_index()
def from_string(index) do
index
|> String.to_charlist()
|> :h3.from_string()
end
@doc """
Convert a numeric H3 index to its string representation.
Example:
iex> Hive.H3.to_hex_string(617_700_169_958_293_503)
"""
@spec to_hex_string(non_neg_integer()) :: binary()
def to_hex_string(index) do
index
|> :h3.to_string()
|> List.to_string()
end
@doc """
Returns the `%GeoPosition{}` center of the cell
from string representation
"""
@spec index_to_geo(binary()) :: GeoPosition.t()
def index_to_geo(index) when is_binary(index) do
index
|> from_string()
|> index_to_geo()
end
@doc """
Returns the `%GeoPosition{}` center of the cell
from numeric index
"""
@spec index_to_geo(h3_index()) :: GeoPosition.t()
def index_to_geo(index) do
{lat, lon} = :h3.to_geo(index)
%GeoPosition{
latitude: lat,
longitude: lon
}
end
@doc """
Returns index from `%GeoPosition{}`
"""
@spec index_from_geo(GeoPosition.t(), resolution()) :: h3_index()
def index_from_geo(position, resolution) do
:h3.from_geo(
{position.latitude, position.longitude},
resolution
)
end
@doc """
Returns bounds for index
"""
@spec to_geo_boundary(binary()) :: list(GeoPosition.t())
def to_geo_boundary(index) when is_binary(index) do
index
|> from_string()
|> to_geo_boundary()
end
@doc """
Returns bounds for index
"""
@spec to_geo_boundary(h3_index()) :: list(GeoPosition.t())
def to_geo_boundary(index) do
index
|> :h3.to_geo_boundary()
|> Enum.map(fn {lat, lon} ->
%GeoPosition{latitude: lat, longitude: lon}
end)
end
@doc """
Returns the resolution of the index.
"""
@spec get_resolution(h3_index()) :: resolution()
def get_resolution(index) do
:h3.get_resolution(index)
end
@doc """
Check if given index represents a pentagonal cell
"""
@spec pentagon?(h3_index()) :: boolean()
def pentagon?(index) do
:h3.is_pentagon(index)
end
@doc """
Get all hexagons in a k-ring around
a given center and a distance.
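Example:
iex> Hive.H3.k_ring("8928308280fffff", 1)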
"""
@spec k_ring(binary(), non_neg_integer()) :: list(binary())
def k_ring(index, distance) when is_binary(index) do
index
|> from_string()
|> k_ring(distance)
end
@doc """
Get all hexagons in a k-ring around
a given center and a distance.
"""
@spec k_ring(h3_index(), non_neg_integer()) :: list(binary())
def k_ring(index, distance) do
:h3.k_ring(index, distance)
|> Enum.map(&to_hex_string/1)
end
end | lib/hive/h3.ex | 0.924594 | 0.808786 | h3.ex | starcoder |
defmodule Day9 do
def from_file(path) do
File.read!(path)
|> String.split(",")
|> Enum.map(&Integer.parse/1)
|> Enum.map(&(elem(&1, 0)))
end
def modify(memory, address, value) do
memory |> Map.put(address, value)
end
def read_instruction(value) do
{params, inst} = Integer.digits(value) |> Enum.split((value |> Integer.digits |> length) - 2)
{Enum.reverse(params), Integer.undigits(inst)}
end
def execute(%{:memory => memory, :pc => pc} = runtime) do
{modes, inst} = read_instruction(Map.get(memory, pc))
cond do
inst == 99 ->
Map.put(runtime, :done, true)
inst == 3 && runtime.inputs == [] ->
Map.put(runtime, :done, false)
true ->
case exec_inst(runtime, inst, modes) do
%{} = runtime -> execute(runtime)
:error -> runtime
end
end
end
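# Intcode parameter modes: 0 = position (dereference the value stored at
# `address`), 1 = immediate (use the value stored at `address` directly),
# 2 = relative (like position, but offset by the relative `base`).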
def value(memory, address, mode, base) do
cond do
mode == 0 -> Map.get(memory, Map.get(memory, address, 0), 0)
mode == 1 -> Map.get(memory, address, 0)
mode == 2 -> Map.get(memory, base + Map.get(memory, address, 0), 0)
end
end
def write_address(memory, address, mode, base) do
cond do
mode == 0 -> Map.get(memory, address, 0)
mode == 1 -> Map.get(memory, address, 0)
mode == 2 -> base + Map.get(memory, address, 0)
end
end
def mode(modes, param) do
case Enum.fetch(modes, param) do
{:ok, mode} -> mode
:error -> 0
end
end
def exec_inst(%{:memory => memory, :pc => pc} = runtime, 1, modes) do
x = value(memory, pc + 1, mode(modes, 0), runtime.base)
y = value(memory, pc + 2, mode(modes, 1), runtime.base)
address = write_address(memory, pc + 3, mode(modes, 2), runtime.base)
%{runtime | :memory => memory |> modify(address, x + y), :pc => pc + 4}
end
def exec_inst(%{:memory => memory, :pc => pc} = runtime, 2, modes) do
x = value(memory, pc + 1, mode(modes, 0), runtime.base)
y = value(memory, pc + 2, mode(modes, 1), runtime.base)
address = write_address(memory, pc + 3, mode(modes, 2), runtime.base)
%{runtime | :memory => memory |> modify(address, x * y), :pc => pc + 4}
end
def exec_inst(%{:memory => memory, :pc => pc, :inputs => inputs} = runtime, 3, modes) do
address = write_address(memory, pc + 1, mode(modes, 0), runtime.base)
[input | rest] = inputs
%{runtime | :memory => memory |> modify(address, input), :inputs => rest, :pc => pc + 2}
end
def exec_inst(%{:memory => memory, :pc => pc} = runtime, 4, modes) do
value = value(memory, pc + 1, mode(modes, 0), runtime.base)
%{runtime | :memory => memory, :pc => pc + 2, :output => runtime.output ++ [value]}
end
def exec_inst(%{:memory => memory, :pc => pc} = runtime, 5, modes) do
jump_if_true = value(memory, pc + 1, mode(modes, 0), runtime.base)
jump_to = value(memory, pc + 2, mode(modes, 1), runtime.base)
if jump_if_true != 0 do
%{runtime | :pc => jump_to}
else
%{runtime | :pc => pc + 3}
end
end
def exec_inst(%{:memory => memory, :pc => pc} = runtime, 6, modes) do
jump_if_false = value(memory, pc + 1, mode(modes, 0), runtime.base)
jump_to = value(memory, pc + 2, mode(modes, 1), runtime.base)
if jump_if_false == 0 do
%{runtime | :pc => jump_to}
else
%{runtime | :pc => pc + 3}
end
end
def exec_inst(%{:memory => memory, :pc => pc} = runtime, 7, modes) do
first = value(memory, pc + 1, mode(modes, 0), runtime.base)
second = value(memory, pc + 2, mode(modes, 1), runtime.base)
address = write_address(memory, pc + 3, mode(modes, 2), runtime.base)
if first < second do
%{runtime | :memory => memory |> modify(address, 1), :pc => pc + 4}
else
%{runtime | :memory => memory |> modify(address, 0), :pc => pc + 4}
end
end
def exec_inst(%{:memory => memory, :pc => pc} = runtime, 8, modes) do
first = value(memory, pc + 1, mode(modes, 0), runtime.base)
second = value(memory, pc + 2, mode(modes, 1), runtime.base)
address = write_address(memory, pc + 3, mode(modes, 2), runtime.base)
if first == second do
%{runtime | :memory => memory |> modify(address, 1), :pc => pc + 4}
else
%{runtime | :memory => memory |> modify(address, 0), :pc => pc + 4}
end
end
def exec_inst(%{:memory => memory, :pc => pc} = runtime, 9, modes) do
new_relative_base = value(memory, pc + 1, mode(modes, 0), runtime.base)
%{runtime | :pc => pc + 2, :base => runtime.base + new_relative_base}
end
def exec_inst(_runtime, inst, _modes) do
IO.puts("invalid instruction #{inst}")
:error
end
def run(program, inputs \\ []) do
execute(%{:memory => read_program(program), :pc => 0, :inputs => inputs, :output => [], :base => 0})
end
def read_program(program) do
program |> Enum.with_index |> Map.new(fn {v, k} -> {k, v} end)
end
def solution do
IO.puts("#{from_file("day9_input.txt") |> run([1]) |> Map.get(:output) |> List.first}")
IO.puts("#{from_file("day9_input.txt") |> run([2]) |> Map.get(:output) |> List.first}")
end
end | lib/day9.ex | 0.589007 | 0.48749 | day9.ex | starcoder |
defmodule Canvas.Resources.Accounts do
@moduledoc """
Provides functions to interact with the
[account endpoints](https://canvas.instructure.com/doc/api/accounts).
"""
alias Canvas.{Client, Listing, Response}
alias Canvas.Resources.{Account, Course, EnrollmentTerm}
@doc """
A paginated list of accounts that the current user can view or manage.
Typically, students and even teachers will get an empty list in response,
only account admins can view the accounts that they are in.
See:
- https://canvas.instructure.com/doc/api/accounts.html#method.accounts.index
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Accounts.list_accounts(client)
{:ok, response} = Canvas.Resources.Accounts.list_accounts(client, per_page: 50, page: 4)
"""
@spec list_accounts(Client.t(), Keyword.t()) :: {:ok | :error, Response.t()}
def list_accounts(client, options \\ []) do
url = Client.versioned("/accounts")
Listing.get(client, url, options)
|> Response.parse([%Account{}])
end
@doc """
List all accounts automatically paginating if necessary.
This function will automatically page through all pages, returning all assignments.
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Accounts.all_accounts(client)
"""
@spec all_accounts(Client.t(), Keyword.t()) :: {:ok, list(%Account{})} | {:error, Response.t()}
def all_accounts(client, options \\ []) do
Listing.get_all(__MODULE__, :list_accounts, [client, options])
end
def list_accounts_for_course_admins() do
end
def get_a_single_account() do
end
def permissions() do
end
@doc """
List accounts that are sub-accounts of the given account.
See:
- https://canvas.instructure.com/doc/api/accounts.html#method.accounts.sub_accounts
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Accounts.get_the_subaccounts(client, account_id = 1)
{:ok, response} = Canvas.Resources.Accounts.get_the_subaccounts(client, account_id = 1, per_page: 50, page: 4)
"""
@spec get_the_subaccounts(Client.t(), String.t() | integer, Keyword.t()) ::
{:ok | :error, Response.t()}
def get_the_subaccounts(client, account_id, options \\ []) do
url = Client.versioned("/accounts/#{account_id}/sub_accounts")
Listing.get(client, url, options)
|> Response.parse([%Account{}])
end
def get_the_terms_of_service() do
end
def get_help_links() do
end
@doc """
Retrieve a paginated list of courses in this account.
See:
- https://canvas.instructure.com/doc/api/accounts.html#method.accounts.courses_api
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Accounts.list_active_courses_in_an_account(client, account_id = 1)
{:ok, response} = Canvas.Resources.Accounts.list_active_courses_in_an_account(client, account_id = 1, per_page: 50, page: 4)
"""
@spec list_active_courses_in_an_account(Client.t(), String.t() | integer, Keyword.t()) ::
{:ok | :error, Response.t()}
def list_active_courses_in_an_account(client, account_id, options \\ []) do
url = Client.versioned("/accounts/#{account_id}/courses")
Listing.get(client, url, options)
|> Response.parse([%Course{term: %EnrollmentTerm{}, account: %Account{}}])
end
@doc """
List all active courses in an account automatically paginating if necessary.
This function will automatically page through all pages, returning all assignments.
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Accounts.all_active_courses_in_an_account(client, account_id = 1)
"""
@spec all_active_courses_in_an_account(Client.t(), String.t() | integer, Keyword.t()) ::
{:ok, list(%Course{})} | {:error, Response.t()}
def all_active_courses_in_an_account(client, account_id, options \\ []) do
Listing.get_all(__MODULE__, :list_active_courses_in_an_account, [client, account_id, options])
end
def update_an_account() do
end
def delete_a_user_from_the_root_account() do
end
end | lib/canvas/resources/accounts.ex | 0.814901 | 0.475666 | accounts.ex | starcoder |
defmodule MicroTimer do
@moduledoc """
A timer module with microsecond resolution.
"""
@sleep_done :___usleep_done
@type executable :: {module(), atom()} | function()
@doc """
Suspend the current process for the given `timeout` and then returns `:ok`.
`timeout` is the number of microsends to sleep as an integer.
## Examples
iex> MicroTimer.usleep(250)
:ok
"""
@spec µsleep(non_neg_integer()) :: :ok
defdelegate µsleep(timeout), to: __MODULE__, as: :usleep
@spec usleep(non_neg_integer()) :: :ok
def usleep(timeout) when is_integer(timeout) and timeout > 0 do
do_usleep(timeout)
receive do
@sleep_done -> :ok
end
end
def usleep(timeout) when is_integer(timeout) do
:ok
end
@doc """
Invokes the given `executable` after `timeout` microseconds with the list of
arguments `args`.
`executable` can either be the tuple `{Module, :function}`, an anonymous function
or a function capture.
Returns the `pid` of the timer.
See also `cancel_timer/1`.
## Examples
MicroTimer.apply_after(250, {Module, :function}, [])
MicroTimer.apply_after(250, fn a -> a + 1 end, [1])
iex> pid = MicroTimer.apply_after(250, fn arg -> arg end, [1])
iex> is_pid(pid)
true
"""
@spec apply_after(non_neg_integer(), executable, [any]) :: pid()
def apply_after(timeout, executable, args \\ [])
def apply_after(timeout, {module, function}, args)
when is_atom(module) and is_atom(function) do
spawn(fn ->
do_apply_after(timeout, {module, function}, args)
end)
end
def apply_after(time, function, args) when is_function(function) do
spawn(fn ->
do_apply_after(time, function, args)
end)
end
@doc """
Invokes the given `executable` repeatedly every `timeout` microseconds with the list of
arguments `args`.
`executable` can either be the tuple `{Module, :function}`, an anonymous function
or a function capture.
Returns the `pid` of the timer.
See also `cancel_timer/1`.
## Examples
MicroTimer.apply_every(250, {Module, :function}, [])
MicroTimer.apply_every(250, fn a -> a + 1 end, [1])
iex> pid = MicroTimer.apply_every(250, fn arg -> arg end, [1])
iex> is_pid(pid)
true
"""
@spec apply_every(non_neg_integer(), executable, [any]) :: pid()
def apply_every(timeout, executable, args \\ [])
def apply_every(timeout, {module, function}, args)
when is_atom(module) and is_atom(function) do
spawn(fn ->
do_apply_every(timeout, {module, function}, args)
end)
end
def apply_every(timeout, function, args) when is_function(function) do
spawn(fn ->
do_apply_every(timeout, function, args)
end)
end
@doc """
Send `message` to `pid` after `timeout` microseconds.
If `pid` is left empty, the `message` is sent to `self()`.
Returns the `pid` of the timer.
See also `cancel_timer/1`.
## Examples
MicroTimer.send_after(250, :msg)
MicroTimer.send_after(250, "msg", self())
iex> pid = MicroTimer.send_after(250, :msg)
iex> is_pid(pid)
true
"""
@spec send_after(non_neg_integer(), any, pid()) :: pid()
def send_after(timeout, message, pid \\ self()) do
spawn(fn ->
do_apply_after(timeout, fn -> send(pid, message) end, [])
end)
end
@doc """
Send `message` to `pid` every `timeout` microseconds.
If `pid` is left empty, the `message` is sent to `self()`.
Returns the `pid` of the timer.
See also `cancel_timer/1`.
## Examples
MicroTimer.send_every(250, :msg)
MicroTimer.send_every(250, "msg", self())
iex> pid = MicroTimer.send_every(250, :msg)
iex> is_pid(pid)
true
"""
@spec send_every(non_neg_integer(), any, pid()) :: pid()
def send_every(timeout, message, pid \\ self()) do
spawn(fn ->
do_apply_every(timeout, fn -> send(pid, message) end, [])
end)
end
@doc """
Cancel a timer `pid` created by `apply_after/2`, `apply_after/3`, `apply_every/2`
or `apply_every/3`
Always returns `true`
## Examples
timer = MicroTimer.apply_every(250, {Module, :function}, [])
MicroTimer.cancel_timer(timer)
iex> pid = MicroTimer.apply_every(250, fn arg -> arg end, [1])
iex> MicroTimer.cancel_timer(pid)
iex> Process.alive?(pid)
false
"""
@spec cancel_timer(pid()) :: true
def cancel_timer(pid) when is_pid(pid) do
Process.exit(pid, :kill)
end
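# For longer timeouts, sleep through most of the interval with the
# millisecond-resolution Process.sleep/1, then busy-wait the remainder for
# microsecond accuracy. :timer.tc/1 measures how long the sleep really
# took so the busy-wait can compensate for any oversleep.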
defp do_usleep(timeout) when timeout > 2_000 do
ms_timeout = div(timeout, 1_000) - 1
{real_sleep_time, _} =
:timer.tc(fn ->
Process.sleep(ms_timeout)
end)
do_usleep(System.monotonic_time(:microsecond), timeout - real_sleep_time)
end
defp do_usleep(timeout) do
do_usleep(System.monotonic_time(:microsecond), timeout)
end
defp do_usleep(start, timeout) do
if System.monotonic_time(:microsecond) - start >= timeout do
send(self(), @sleep_done)
else
do_usleep(start, timeout)
end
end
defp do_apply_after(timeout, {module, function}, args) do
usleep(timeout)
apply(module, function, args)
end
defp do_apply_after(timeout, function, args) do
usleep(timeout)
apply(function, args)
end
defp do_apply_every(timeout, executable, args) do
do_apply_after(timeout, executable, args)
do_apply_every(timeout, executable, args)
end
end | lib/micro_timer.ex | 0.832713 | 0.555496 | micro_timer.ex | starcoder |
defmodule SanbaseWeb.Graphql.Schema.BlockchainMetricQueries do
use Absinthe.Schema.Notation
import SanbaseWeb.Graphql.Cache, only: [cache_resolve: 1]
alias SanbaseWeb.Graphql.Resolvers.{EtherbiResolver, ClickhouseResolver, ExchangeResolver}
alias SanbaseWeb.Graphql.Complexity
alias Sanbase.Billing.Product
alias SanbaseWeb.Graphql.Middlewares.AccessControl
object :blockchain_metric_queries do
# STANDARD PLAN
@desc ~s"""
Fetch burn rate for a project within a given time period, grouped by interval.
Projects are referred to by a unique identifier (slug).
Each transaction has an equivalent burn rate record. The burn rate is calculated
by multiplying the number of tokens moved by the number of blocks in which they appeared.
Spikes in burn rate could indicate large transactions or movement of tokens that have been held for a long time.
Grouping by interval works by summing all burn rate records in the interval.
"""
field :burn_rate, list_of(:burn_rate_data) do
deprecate(~s/Use getMetric(metric: "age_destroyed") instead/)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&EtherbiResolver.token_age_consumed/3)
end
field :token_age_consumed, list_of(:token_age_consumed_data) do
deprecate(~s/Use getMetric(metric: "age_destroyed") instead/)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&EtherbiResolver.token_age_consumed/3)
end
@desc ~s"""
Fetch total amount of tokens for a project that were transacted on the blockchain, grouped by interval.
Projects are referred to by a unique identifier (slug).
This metric includes only on-chain volume, not volume in exchanges.
Grouping by interval works by summing all transaction volume records in the interval.
"""
field :transaction_volume, list_of(:transaction_volume) do
deprecate(~s/Use getMetric(metric: "transaction_volume") instead/)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&EtherbiResolver.transaction_volume/3)
end
@desc ~s"""
Fetch token age consumed in days for a project, grouped by interval.
Projects are referred to by a unique identifier (slug). The token age consumed
in days shows the average age of the tokens that were transacted for a given time period.
This metric includes only on-chain transaction volume, not volume in exchanges.
"""
field :average_token_age_consumed_in_days, list_of(:token_age) do
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&EtherbiResolver.average_token_age_consumed_in_days/3)
end
@desc ~s"""
Fetch token circulation for a project, grouped by interval.
Projects are referred to by a unique identifier (slug).
"""
field :token_circulation, list_of(:token_circulation) do
deprecate(~s/Use getMetric(metric: "circulation_1d") instead/)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
@desc "The interval should represent whole days, i.e. `1d`, `48h`, `1w`, etc."
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.token_circulation/3)
end
@desc ~s"""
Fetch token velocity for a project, grouped by interval.
Projects are referred to by a unique identifier (slug).
"""
field :token_velocity, list_of(:token_velocity) do
deprecate(~s/Use getMetric(metric: "velocity") instead/)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
@desc "The interval should represent whole days, i.e. `1d`, `48h`, `1w`, etc."
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.token_velocity/3)
end
@desc ~s"""
Fetch daily active addresses for a project within a given time period.
Projects are referred to by a unique identifier (slug).
This metric includes the number of unique addresses that participated in
the transfers of given token during the day.
Grouping by interval works by taking the mean of all daily active address
records in the interval. The default value of the interval is 1 day, which yields
the exact number of unique addresses for each day.
"""
field :daily_active_addresses, list_of(:active_addresses) do
deprecate(~s/Use getMetric(metric: "daily_active_addresses") instead/)
meta(access: :free)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.daily_active_addresses/3)
end
@desc ~s"""
Fetch the flow of funds into and out of an exchange wallet.
This query returns the difference IN-OUT calculated for each interval.
"""
field :exchange_funds_flow, list_of(:exchange_funds_flow) do
deprecate(~s/Use getMetric(metric: "exchange_balance") instead/)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&EtherbiResolver.exchange_funds_flow/3)
end
@desc "Network growth returns the newly created addresses for a project in a given timeframe"
field :network_growth, list_of(:network_growth) do
deprecate(~s/Use getMetric(metric: "network_growth") instead/)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, non_null(:interval), default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.network_growth/3)
end
@desc "Returns what percent of token supply is on exchanges"
field :percent_of_token_supply_on_exchanges, list_of(:percent_of_token_supply_on_exchanges) do
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.percent_of_token_supply_on_exchanges/3)
end
@desc """
Returns used Gas by a blockchain.
When you send tokens, interact with a contract or do anything else on the blockchain,
you must pay for that computation. That payment is calculated in Gas.
"""
field :gas_used, list_of(:gas_used) do
meta(access: :restricted)
arg(:slug, :string, default_value: "ethereum")
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.gas_used/3)
end
@desc """
Returns the first `number_of_holders` top holders for ETH or ERC20 token.
Arguments description:
* slug - a string uniquely identifying a project
* number_of_holders - take top `number_of_holders` into account when calculating.
* from - a string representation of datetime value according to the iso8601 standard, e.g. "2018-04-16T10:02:19Z"
* to - a string representation of datetime value according to the iso8601 standard, e.g. "2018-04-16T10:02:19Z"
"""
field :top_holders, list_of(:top_holders) do
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:number_of_holders, non_null(:integer),
deprecate: "pageSize argument should be used instead"
)
arg(:page, non_null(:integer), default_value: 1)
arg(:page_size, non_null(:integer), default_value: 20)
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:owners, list_of(:string))
arg(:labels, list_of(:string))
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.top_holders/3)
end
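# Illustrative paginated query for the field above (camelCase per Absinthe's
# default adapter; the result sub-fields are assumptions):
#
# {
#   topHolders(slug: "ethereum", from: "2021-01-01T00:00:00Z",
#              to: "2021-01-07T00:00:00Z", page: 1, pageSize: 20) {
#     value
#   }
# }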
@desc """
Returns the first `number_of_holders` current top holders for ETH or ERC20 token.
Arguments description:
* slug - a string uniquely identifying a project
* page and page_size - choose what top holders to return, sorted in descending order by value
"""
field :realtime_top_holders, list_of(:top_holders) do
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:page, non_null(:integer), default_value: 1)
arg(:page_size, non_null(:integer), default_value: 20)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.realtime_top_holders/3)
end
@desc """
Returns the top holders' percent of total supply - in exchanges, outside exchanges and combined.
Arguments description:
* slug - a string uniquely identifying a project
* number_of_holders - take top `number_of_holders` into account when calculating.
* from - a string representation of datetime value according to the iso8601 standard, e.g. "2018-04-16T10:02:19Z"
* to - a string representation of datetime value according to the iso8601 standard, e.g. "2018-04-16T10:02:19Z"
"""
field :top_holders_percent_of_total_supply, list_of(:top_holders_percent_of_total_supply) do
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:number_of_holders, non_null(:integer))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.top_holders_percent_of_total_supply/3)
end
# ADVANCED PLAN
@desc "Returns Realized value - sum of the acquisition costs of an asset located in a wallet.
The realized value across the whole network is computed by summing the realized values
of all wallets holding tokens at the moment."
field :realized_value, list_of(:realized_value) do
deprecate(~s/Use getMetric(metric: "realized_value_usd") instead/)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.realized_value/3)
end
@desc "Returns MVRV(Market-Value-to-Realized-Value)"
field :mvrv_ratio, list_of(:mvrv_ratio) do
deprecate(~s/Use getMetric(metric: "mvrv_usd") instead/)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, non_null(:interval), default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.mvrv_ratio/3)
end
@desc """
Returns NVT (Network-Value-to-Transactions Ratio),
calculated as Daily Market Cap / Daily Transaction Volume.
Since Daily Transaction Volume gets rather noisy and is easy to manipulate
by transferring the same tokens through a couple of addresses repeatedly,
it's not an ideal measure of a network's economic activity.
That's why we also offer another way to calculate NVT - by using Daily Token Circulation.
This method filters out excess transactions and provides a cleaner overview of
a blockchain's daily transaction throughput.
"""
field :nvt_ratio, list_of(:nvt_ratio) do
deprecate(
~s/Use getMetric(metric: "nvt") and getMetric(metric: "nvt_transaction_volume") instead/
)
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.nvt_ratio/3)
end
@desc ~s"""
Fetch daily active deposits for a project within a given time period.
Projects are referred to by a unique identifier (slug).
"""
field :daily_active_deposits, list_of(:active_deposits) do
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.daily_active_deposits/3)
end
@desc """
Fetch a list of all exchange wallets on a given blockchain.
This query requires you to have a plan extension or basic authentication.
"""
field :exchange_wallets, list_of(:wallet) do
meta(access: :extension, product: Product.product_exchange_wallets())
arg(:slug, non_null(:string))
middleware(AccessControl)
cache_resolve(&EtherbiResolver.exchange_wallets/3)
end
@desc "List all exchanges"
field :all_exchanges, list_of(:string) do
meta(access: :free)
arg(:slug, non_null(:string))
arg(:is_dex, :boolean)
cache_resolve(&ExchangeResolver.all_exchanges/3)
end
@desc """
Returns distribution of miners between mining pools.
What part of the miners are using top3, top10 and all the other pools.
Currently only ETH is supported.
"""
field :mining_pools_distribution, list_of(:mining_pools_distribution) do
meta(access: :restricted)
arg(:slug, non_null(:string))
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:interval, :interval, default_value: "1d")
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.mining_pools_distribution/3)
end
field :eth_fees_distribution, list_of(:fees_distribution) do
meta(access: :free)
arg(:from, non_null(:datetime))
arg(:to, non_null(:datetime))
arg(:limit, :integer, default_value: 20)
complexity(&Complexity.from_to_interval/3)
middleware(AccessControl)
cache_resolve(&ClickhouseResolver.eth_fees_distribution/3)
end
end
end | lib/sanbase_web/graphql/schema/queries/blockchain_metric_queries.ex | 0.863866 | 0.489686 | blockchain_metric_queries.ex | starcoder |
defmodule EctoSearcher.Searcher do
@moduledoc """
Module for searching
## Usage
```elixir
search = %{"name_eq" => "<NAME>", "description_cont" => "My president"}
base_query = Ecto.Query.from(m in MyMegaModel)
query = EctoSearcher.Searcher.search(base_query, MyMegaModel, search)
MySuperApp.Repo.all(query)
```
"""
require Ecto.Query
alias Ecto.Query
alias EctoSearcher.Mapping.Default
alias EctoSearcher.Utils.{Field, Value, Matcher, SearchCondition}
@type search_params() :: %{String.t() => String.t()}
@type searchable_fields() :: [atom()]
@doc """
Shortcut for `search/5` with `EctoSearcher.Mapping.Default` as `mapping` and `nil` as `searchable_fields`
"""
@spec search(Ecto.Queryable.t(), Ecto.Schema.t(), search_params()) :: Ecto.Queryable.t()
def search(base_query, schema, search_params) do
mapping = Default
search(base_query, schema, search_params, mapping, nil)
end
@doc """
Shortcut for `search/5` with `EctoSearcher.Mapping.Default` as mapping
"""
@spec search(Ecto.Queryable.t(), Ecto.Schema.t(), search_params(), searchable_fields()) ::
Ecto.Queryable.t()
def search(base_query, schema, search_params, searchable_fields)
when is_list(searchable_fields) do
mapping = Default
search(base_query, schema, search_params, mapping, searchable_fields)
end
@doc """
Builds search query
`search_params` should be a map with search_fields in form of `"field_matcher"` like this:
```elixir
%{
"name_eq" => "<NAME>",
"description_cont" => "My president"
}
```
`mapping` should implement `EctoSearcher.Mapping` behavior. `EctoSearcher.Mapping.Default` provides some basics.
`searchable_fields` is a list with fields (atoms) permitted for searching. If not provided (or `nil`) all fields are allowed for searching.
"""
@spec search(
Ecto.Queryable.t(),
Ecto.Schema.t(),
search_params(),
module(),
searchable_fields() | nil
) :: Ecto.Queryable.t()
def search(base_query = %Ecto.Query{}, schema, search_params, mapping, searchable_fields)
when is_atom(mapping) do
if is_map(search_params) do
build_query(base_query, schema, search_params, mapping, searchable_fields)
else
base_query
end
end
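# Illustrative call restricting the searchable fields (the schema and field
# names are assumptions):
#
#   base_query = Ecto.Query.from(m in MyMegaModel)
#   EctoSearcher.Searcher.search(base_query, MyMegaModel, %{"name_eq" => "Joe"}, [:name])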
defp build_query(base_query, schema, search_params, mapping, searchable_fields) do
searchable_fields = searchable_fields || Field.searchable_fields(schema, mapping)
search_params
|> SearchCondition.from_params(searchable_fields)
|> Enum.reduce(base_query, fn search_condition, query_with_conditions ->
put_condition(query_with_conditions, search_condition, schema, mapping)
end)
end
defp put_condition(query, search_condition, schema, mapping) do
field_query = Field.lookup(search_condition.field, schema, mapping)
casted_value = Value.cast(search_condition, schema, mapping)
match = Matcher.lookup(search_condition.matcher, mapping)
if match && field_query && casted_value do
condition = match.(field_query, casted_value)
Query.from(q in query, where: ^condition)
else
query
end
end
end | lib/ecto_searcher/searcher.ex | 0.816589 | 0.649676 | searcher.ex | starcoder |
defmodule XDR.Float do
@moduledoc """
This module manages the `Floating-Point` type based on the RFC4506 XDR Standard.
"""
@behaviour XDR.Declaration
alias XDR.Error.Float, as: FloatError
defstruct [:float]
defguard valid_float?(value) when is_float(value) or is_integer(value)
@typedoc """
`XDR.Float` structure type specification.
"""
@type t :: %XDR.Float{float: integer | float | binary}
@doc """
Create a new `XDR.Float` structure with the `float` passed.
"""
@spec new(float :: float | integer | binary) :: t
def new(float), do: %XDR.Float{float: float}
@impl XDR.Declaration
@doc """
Encodes an `XDR.Float` structure into the XDR format.
"""
@spec encode_xdr(float :: t) :: {:ok, binary} | {:error, :not_number}
def encode_xdr(%XDR.Float{float: float}) when not valid_float?(float),
do: {:error, :not_number}
def encode_xdr(%XDR.Float{float: float}), do: {:ok, <<float::big-signed-float-size(32)>>}
@impl XDR.Declaration
@doc """
Encodes an `XDR.Float` structure into the XDR format.
If the `float` is not valid, an exception is raised.
"""
@spec encode_xdr!(float :: t) :: binary
def encode_xdr!(float) do
case encode_xdr(float) do
{:ok, binary} -> binary
{:error, reason} -> raise(FloatError, reason)
end
end
@impl XDR.Declaration
@doc """
Decode the Floating-Point in XDR format to a `XDR.Float` structure.
"""
@spec decode_xdr(bytes :: binary, float :: t) :: {:ok, {t, binary}} | {:error, :not_binary}
def decode_xdr(bytes, float \\ nil)
def decode_xdr(bytes, _float) when not is_binary(bytes),
do: {:error, :not_binary}
def decode_xdr(<<float::big-signed-float-size(32), rest::binary>>, _float),
do: {:ok, {new(float), rest}}
@impl XDR.Declaration
@doc """
Decode the Floating-Point in XDR format to a `XDR.Float` structure.
If the binaries are not valid, an exception is raised.
"""
@spec decode_xdr!(bytes :: binary, float :: t) :: {t, binary}
def decode_xdr!(bytes, float \\ nil)
def decode_xdr!(bytes, float) do
case decode_xdr(bytes, float) do
{:ok, result} -> result
{:error, reason} -> raise(FloatError, reason)
end
end
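# Round-trip sketch (illustrative, not part of the original module):
#
#   {:ok, bin} = XDR.Float.new(3.5) |> XDR.Float.encode_xdr()
#   {%XDR.Float{float: 3.5}, ""} = XDR.Float.decode_xdr!(bin)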
end | lib/xdr/float.ex | 0.929103 | 0.662099 | float.ex | starcoder |
defmodule Mix.Tasks.Ggity.Visual.Geom.Line do
@shortdoc "Launch a browser and draw sample line geom plots."
@moduledoc @shortdoc
use Mix.Task
alias GGity.{Examples, Plot}
@default_browser "firefox"
@doc false
@spec run(list(any)) :: any
def run([]), do: run([@default_browser])
def run(argv) do
plots =
Enum.join(
[
basic(),
fixed_line_and_mapped_points(),
fixed_aesthetics(),
date_time(),
group_by_color(),
group_by_linetype()
],
"\n"
)
test_file = "test/visual/visual_test.html"
browser =
case argv do
["--wsl"] ->
"/mnt/c/Program Files/Mozilla Firefox/firefox.exe"
[browser] ->
browser
end
File.write!(test_file, "<html><body #{grid_style()}>\n#{plots}\n</body></html>")
open_html_file(browser, test_file)
Process.sleep(1000)
File.rm(test_file)
end
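# Illustrative invocations (the browser argument defaults to firefox):
#
#   mix ggity.visual.geom.line
#   mix ggity.visual.geom.line chromium
#   mix ggity.visual.geom.line --wsl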
defp open_html_file(browser, file) do
System.cmd(browser, [file])
end
defp grid_style do
"style='display: grid;grid-template-columns: repeat(3, 1fr)'"
end
defp basic do
Examples.economics()
|> Enum.filter(fn record -> Date.compare(record["date"], ~D[1970-12-31]) == :lt end)
|> Plot.new(%{x: "date", y: "unemploy"})
|> Plot.geom_line(size: 1)
|> Plot.labs(title: "Date data")
|> Plot.scale_x_date(date_labels: "%Y")
|> Plot.plot()
end
defp fixed_line_and_mapped_points do
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.labs(title: "Fixed linetype: :twodash", x: "Weight")
|> Plot.geom_line(linetype: :twodash, size: 1)
|> Plot.plot()
end
defp fixed_aesthetics do
Examples.economics()
|> Plot.new(%{x: "date", y: "unemploy"})
|> Plot.geom_line(color: "red", size: 1)
|> Plot.labs(title: "Fixed color: \"red\"")
|> Plot.scale_x_date(breaks: 6, date_labels: "%m/%d/%Y")
|> Plot.theme(axis_text_x: GGity.Element.Text.element_text(angle: 30))
|> Plot.plot()
end
defp date_time do
[
%{date_time: ~N[2001-01-01 00:00:00], price: 0.13},
%{date_time: ~N[2001-01-01 03:00:00], price: 0.5},
%{date_time: ~N[2001-01-01 06:00:00], price: 0.9},
%{date_time: ~N[2001-01-01 09:00:00], price: 0.63},
%{date_time: ~N[2001-01-01 12:00:00], price: 0.45},
%{date_time: ~N[2001-01-01 15:00:00], price: 0.25},
%{date_time: ~N[2001-01-01 18:00:00], price: 0.12},
%{date_time: ~N[2001-01-01 21:00:00], price: 0.13},
%{date_time: ~N[2001-01-02 00:00:00], price: 0.24},
%{date_time: ~N[2001-01-02 03:00:00], price: 0.74},
%{date_time: ~N[2001-01-02 06:00:00], price: 0.77},
%{date_time: ~N[2001-01-02 09:00:00], price: 0.63},
%{date_time: ~N[2001-01-02 12:00:00], price: 0.23},
%{date_time: ~N[2001-01-02 15:00:00], price: 0.53},
%{date_time: ~N[2001-01-02 21:00:00], price: 0.26},
%{date_time: ~N[2001-01-03 00:00:00], price: 0.27},
%{date_time: ~N[2001-01-03 03:00:00], price: 0.03},
%{date_time: ~N[2001-01-03 06:00:00], price: 0.79},
%{date_time: ~N[2001-01-03 09:00:00], price: 0.78},
%{date_time: ~N[2001-01-03 12:00:00], price: 0.08},
%{date_time: ~N[2001-01-03 18:00:00], price: 0.3},
%{date_time: ~N[2001-01-04 00:00:00], price: 0.7}
]
|> Plot.new(%{x: :date_time, y: :price})
|> Plot.geom_line(size: 1)
|> Plot.scale_x_datetime(date_labels: "%b %d H%H")
|> Plot.labs(title: "DateTime data")
|> Plot.plot()
end
defp group_by_color do
Examples.economics_long()
|> Plot.new(%{x: "date", y: "value01"})
|> Plot.labs(title: "Mapped to color")
|> Plot.geom_line(%{color: "variable"})
|> Plot.scale_x_date(breaks: 6, date_labels: "%Y")
|> Plot.plot()
end
defp group_by_linetype do
Examples.economics_long()
|> Plot.new(%{x: "date", y: "value01"})
|> Plot.labs(title: "Mapped to linetype, custom glyph")
|> Plot.geom_line(%{linetype: "variable"}, key_glyph: :path)
|> Plot.scale_x_date(breaks: 6, date_labels: "%Y")
|> Plot.plot()
end
end | lib/mix/tasks/ggity_visual_geom_line.ex | 0.839422 | 0.420838 | ggity_visual_geom_line.ex | starcoder |
defmodule Gradient.AstData do
@moduledoc """
Stores the test case data for expression line specification. To increase flexibility,
the data needs to be normalized before equality assertions. Thus we check only the
line change, not the exact value, and there is no need to update the expected values
when the file content changes.
This way of testing is useful only for more complex expressions in which we can
observe some line change. For example, look at the pipe operator cases.
"""
require Gradient.Debug
import Gradient.Debug, only: [elixir_to_ast: 1]
import Gradient.TestHelpers
alias Gradient.Types
@tokens __ENV__.file |> load_tokens()
defp pipe do
{__ENV__.function,
{__ENV__.line,
elixir_to_ast do
1
|> is_atom()
'1'
|> is_atom()
:ok
|> is_atom()
[1, 2, 3]
|> is_atom()
{1, 2, 3}
|> is_atom()
"a"
|> is_atom()
end, __ENV__.line},
{:block, 22,
[
{:call, 24, {:remote, 24, {:atom, 24, :erlang}, {:atom, 24, :is_atom}},
[{:integer, 23, 1}]},
{:call, 27, {:remote, 27, {:atom, 27, :erlang}, {:atom, 27, :is_atom}},
[{:cons, 26, {:integer, 26, 49}, {nil, 26}}]},
{:call, 30, {:remote, 30, {:atom, 30, :erlang}, {:atom, 30, :is_atom}},
[{:atom, 29, :ok}]},
{:call, 33, {:remote, 33, {:atom, 33, :erlang}, {:atom, 33, :is_atom}},
[
{:cons, 32, {:integer, 32, 1},
{:cons, 32, {:integer, 32, 2}, {:cons, 32, {:integer, 32, 3}, {nil, 32}}}}
]},
{:call, 36, {:remote, 36, {:atom, 36, :erlang}, {:atom, 36, :is_atom}},
[{:tuple, 35, [{:integer, 35, 1}, {:integer, 35, 2}, {:integer, 35, 3}]}]},
{:call, 39, {:remote, 39, {:atom, 39, :erlang}, {:atom, 39, :is_atom}},
[{:bin, 38, [{:bin_element, 38, {:string, 38, 'a'}, :default, :default}]}]}
]}}
end
defp pipe_with_fun_converted_to_erl_equivalent do
{__ENV__.function,
{__ENV__.line,
elixir_to_ast do
:ok
|> elem(0)
end, __ENV__.line},
{:call, 56, {:remote, 56, {:atom, 56, :erlang}, {:atom, 56, :element}},
[{:integer, 56, 1}, {:atom, 55, :ok}]}}
end
defp complex_list_pipe do
{__ENV__.function,
{__ENV__.line,
elixir_to_ast do
[
{1, %{a: 1}},
{2, %{a: 2}}
]
|> Enum.map(&elem(&1, 0))
end, __ENV__.line},
{:call, 80, {:remote, 80, {:atom, 80, Enum}, {:atom, 80, :map}},
[
{:cons, 76,
{:tuple, 77,
[
{:integer, 77, 1},
{:map, 77, [{:map_field_assoc, 77, {:atom, 77, :a}, {:integer, 77, 1}}]}
]},
{:cons, 77,
{:tuple, 78,
[
{:integer, 78, 2},
{:map, 78, [{:map_field_assoc, 78, {:atom, 78, :a}, {:integer, 78, 2}}]}
]}, {nil, 77}}},
{:fun, 80,
{:clauses,
[
{:clause, 80, [{:var, 0, :_@1}], [],
[
{:call, 80, {:remote, 80, {:atom, 80, :erlang}, {:atom, 80, :element}},
[{:integer, 80, 1}, {:var, 0, :_@1}]}
]}
]}}
]}}
end
defp complex_tuple_pipe do
{__ENV__.function,
{__ENV__.line,
elixir_to_ast do
{
{1, %{a: 1}},
{2, %{a: 2}}
}
|> Tuple.to_list()
end, __ENV__.line},
{:call, 119, {:remote, 119, {:atom, 119, :erlang}, {:atom, 119, :tuple_to_list}},
[
{:tuple, 115,
[
{:tuple, 116,
[
{:integer, 116, 1},
{:map, 116, [{:map_field_assoc, 116, {:atom, 116, :a}, {:integer, 116, 1}}]}
]},
{:tuple, 117,
[
{:integer, 117, 2},
{:map, 117, [{:map_field_assoc, 117, {:atom, 117, :a}, {:integer, 117, 2}}]}
]}
]}
]}}
end
@spec ast_data() :: [
{atom(), {Types.abstract_expr(), Types.tokens(), Types.options()},
Types.abstract_expr()}
]
def ast_data do
[
pipe(),
pipe_with_fun_converted_to_erl_equivalent(),
complex_list_pipe(),
complex_tuple_pipe()
]
|> Enum.map(fn {{name, _}, {start_line, ast, end_line}, expected} ->
tokens = Gradient.Tokens.drop_tokens_to_line(@tokens, start_line + 1)
{name, {ast, tokens, [line: start_line + 1, end_line: end_line]}, expected}
end)
end
def normalize_expression(expression) do
{expression, _} =
:erl_parse.mapfold_anno(
fn anno, acc ->
{{:erl_anno.line(anno) - acc, :erl_anno.column(anno)}, acc}
end,
:erl_anno.line(elem(expression, 1)),
expression
)
expression
end
end | test/support/ast_data.ex | 0.782081 | 0.60092 | ast_data.ex | starcoder |
defmodule Eljiffy do
@moduledoc """
Documentation for Eljiffy.
Eljiffy (Elixir Jiffy) is an Elixir wrapper around the Erlang JSON NIF library jiffy
(https://github.com/davisp/jiffy).
It also provides functions that decode JSON to maps directly (`decode!/1` and `decode!/2`)
rather than having to pass the `:return_maps` option explicitly.
### The opts parameter for `decode_proplist/2` is a list of terms:
- `:return_maps` - Tell Jiffy to return objects using the maps data type on VMs that support it. This raises an error on VMs that don't support maps.
- `null_term:` term - Returns the specified Term instead of null when decoding JSON. This is for people that wish to use undefined instead of null.
- `:use_nil` - Returns the atom nil instead of null when decoding JSON. This is a short hand for `{:null_term, nil}`.
- `:return_trailer` - If any non-whitespace is found after the first JSON term is decoded the return value of `decode_proplist/2` becomes `{:has_trailer, firstTerm, restData}`. This is useful to decode multiple terms in a single binary.
- `:dedupe_keys` - If a key is repeated in a JSON object this flag will ensure that the parsed object only contains a single entry containing the last value seen. This mirrors the parsing behavior of virtually every other JSON parser.
- `:copy_strings` - Normally when strings are decoded they are created as sub-binaries of the input data. With some workloads this can lead to an undesirable bloating of memory when a few small strings in JSON keep a reference to the full JSON document alive. Setting this option will instead allocate new binaries for each string to avoid keeping the original JSON document around after garbage collection.
- `bytes_per_red: n where n >= 0` - This controls the number of bytes that Jiffy will process as an equivalent to a reduction. Each 20 reductions we consume 1% of our allocated time slice for the current process. When the Erlang VM indicates we need to return from the NIF.
- `bytes_per_iter: n where n >= 0` - Backwards compatible option that is converted into the `bytes_per_red` value.
### The opts parameter for `encode!/2` is a list of terms:
- `:uescape` - Escapes UTF-8 sequences to produce a 7-bit clean output
- `:pretty` - Produce JSON using two-space indentation
- `:force_utf8` - Force strings to encode as UTF-8 by fixing broken surrogate pairs and/or using the replacement character to remove broken UTF-8 sequences in data.
- `:use_nil` - Encodes the atom nil as null.
- `:escape_forward_slashes` - Escapes the / character which can be useful when encoding URLs in some cases.
- `bytes_per_red:` n - Refer to the decode options
- `bytes_per_iter:` n - Refer to the decode options
"""
@doc """
Transforms a JSON string into a key/value list in EEP 18 format
## Examples
iex> jsonData = ~s({\"people\": [{\"name\": \"Joe\"}, {\"name\": \"Robert\"}, {\"name\": \"Mike\"}]})
iex> Eljiffy.decode_proplist(jsonData)
{[
{"people", [
{[{"name", "Joe"}]},
{[{"name", "Robert"}]},
{[{"name", "Mike"}]}
]}
]}
"""
def decode_proplist(data) do
:jiffy.decode(data)
end
@doc """
Does the same thing as `decode_proplist/1`, but accepts decode options (see [opts](#module-the-opts-parameter-for-decode-2-is-a-list-of-terms))
"""
def decode_proplist(data, opts) do
:jiffy.decode(data, opts)
end
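# Illustrative use of the `:return_trailer` option described in the moduledoc:
#
#   {:has_trailer, first, rest} =
#     Eljiffy.decode_proplist(~s({"a": 1} {"b": 2}), [:return_trailer])
#   # `first` is the first decoded term; `rest` is the remaining binary.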
@doc """
Transforms an EEP 18 format key/value list or map into a JSON string
Accepts encode options (see [opts](#module-the-opts-parameter-for-encode-2-is-a-list-of-terms))
## Examples
iex> term = %{langs: [%{elixir: %{beam: :true}}, %{erlang: %{beam: :true}}, %{rust: %{beam: :false}}]}
iex> Eljiffy.encode!(term)
~s({\"langs\":[{\"elixir\":{\"beam\":true}},{\"erlang\":{\"beam\":true}},{\"rust\":{\"beam\":false}}]})
"""
def encode!(data, opts \\ []) do
:jiffy.encode(data, opts)
end
@doc """
Transforms an EEP 18 format key/value list or map into a JSON string
Accepts encode options (see [opts](#module-the-opts-parameter-for-encode-2-is-a-list-of-terms))
## Examples
iex> term = %{langs: [%{elixir: %{beam: :true}}, %{erlang: %{beam: :true}}, %{rust: %{beam: :false}}]}
iex> Eljiffy.encode(term)
{:ok, ~s({\"langs\":[{\"elixir\":{\"beam\":true}},{\"erlang\":{\"beam\":true}},{\"rust\":{\"beam\":false}}]})}
iex> term = <<255>>
iex> Eljiffy.encode(term)
{:error, %ErlangError{original: {:invalid_string, <<255>>}}}
"""
def encode(data, opts \\ []) do
{:ok, encode!(data, opts)}
rescue
exception -> {:error, exception}
end
@doc """
Transforms a JSON string into a map
Accepts decode options (see [opts](#module-the-opts-parameter-for-decode-2-is-a-list-of-terms))
## Examples
iex> jsonData = ~s({\"people\": [{\"name\": \"Joe\"}, {\"name\": \"Robert\"}, {\"name\": \"Mike\"}]})
iex> Eljiffy.decode_maps(jsonData)
%{"people" => [
%{"name" => "Joe"},
%{"name" => "Robert"},
%{"name" => "Mike"}
]}
"""
@doc since: "1.1.0"
@deprecated "use decode!/2 instead"
def decode_maps(data, opts \\ []) do
:jiffy.decode(data, [:return_maps] ++ opts)
end
@doc """
Transforms a JSON string into a map
Accepts decode options (see [opts](#module-the-opts-parameter-for-decode-2-is-a-list-of-terms))
## Examples
iex> jsonData = ~s({\"people\": [{\"name\": \"Joe\"}, {\"name\": \"Robert\"}, {\"name\": \"Mike\"}]})
iex> Eljiffy.decode!(jsonData)
%{"people" => [
%{"name" => "Joe"},
%{"name" => "Robert"},
%{"name" => "Mike"}
]}
"""
def decode!(data, opts \\ []) do
:jiffy.decode(data, [:return_maps] ++ opts)
end
@doc """
Transforms a JSON string into a map
Accepts decode options (see [opts](#module-the-opts-parameter-for-decode-2-is-a-list-of-terms))
## Examples
iex> jsonData = ~s({\"people\": [{\"name\": \"Joe\"}, {\"name\": \"Robert\"}, {\"name\": \"Mike\"}]})
iex> Eljiffy.decode(jsonData)
{:ok, %{"people" => [
%{"name" => "Joe"},
%{"name" => "Robert"},
%{"name" => "Mike"}
]}}
iex> jsonData = ~s(invalid_json)
iex> Eljiffy.decode(jsonData)
{:error, %ErlangError{original: {1, :invalid_json}}}
"""
def decode(data, opts \\ []) do
{:ok, decode!(data, opts)}
rescue
exception -> {:error, exception}
end
end | lib/eljiffy.ex | 0.921145 | 0.651743 | eljiffy.ex | starcoder |
defmodule MarsRoverKata.Input do
@moduledoc """
Converting input instruction strings into actionable terms.
The input should be something like:
5:3
0:0 0:1
1:1:E
FBFBFBF
Where
- the first line represents the maximum X and Y of the grid;
- the second line represents a list of obstacle points;
- the third line represents the current position and orientation of the rover;
- the last line represents the actionable instruction terms.
"""
alias MarsRoverKata.Point
alias MarsRoverKata.Position
@type t :: %__MODULE__{
max_x: integer(),
max_y: integer(),
obstacles: list(Point.t()),
position: Position.t(),
instructions: list(:F | :B)
}
defstruct max_x: 0,
max_y: 0,
obstacles: [],
position: %Position{},
instructions: []
@spec parse(String.t()) :: {:ok, __MODULE__.t()} | {:error, String.t()}
def parse(input) when is_binary(input) and byte_size(input) > 0 do
[grid_dimension | [obstacles | [position | [sets | _rest]]]] = String.split(input, "\n")
[x, y] = parse_grid_dimension(grid_dimension)
{:ok,
%__MODULE__{
max_x: x,
max_y: y,
obstacles: parse_obstacles(obstacles),
instructions: parse_instructions(sets),
position: parse_position(position)
}}
end
def parse(_) do
{:error, "invalid input data"}
end
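# Illustrative round trip using the moduledoc's sample input:
#
#   {:ok, input} = MarsRoverKata.Input.parse("5:3\n0:0 0:1\n1:1:E\nFBFBFBF")
#   input.instructions
#   #=> [:F, :B, :F, :B, :F, :B, :F]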
defp parse_grid_dimension(line) do
line
|> String.split(":", trim: true)
|> Enum.map(&parse_int/1)
end
defp parse_instructions(line) do
line
|> String.split("", trim: true)
|> Enum.map(&to_atom/1)
end
defp parse_position(line) do
[x, y, direction] = String.split(line, ":")
%Position{
point: %Point{
x: parse_int(x),
y: parse_int(y)
},
direction: to_atom(direction)
}
end
defp parse_obstacles(line) do
line
|> String.split(" ", trim: true)
|> Enum.map(fn obstacle ->
[x, y] = String.split(obstacle, ":")
%Point{
x: parse_int(x),
y: parse_int(y)
}
end)
end
defp to_atom(string) do
string
|> String.upcase()
|> String.to_atom()
end
defp parse_int(string) do
{int, _} = Integer.parse(string)
int
end
end | lib/mars_rover_kata/input.ex | 0.845863 | 0.612063 | input.ex | starcoder |
defmodule Telegraf.Transport.UnixSocket do
@children_opts_definition [
socket_path: [
type: :string,
doc: "Path to the unix socket.",
default: "/tmp/telegraf.sock"
],
pool_size: [
type: :pos_integer,
doc: "The size of the pool tcp sockets. Defaults to `System.schedulers_online()`."
]
]
@moduledoc """
Send events to telegraf via a Unix Domain Socket.
It uses `NimblePool` to create a pool of open tcp sockets connected
to the `socket_path`.
It expects the telegraf daemon to have the [Socket Listener Input Plugin](https://github.com/influxdata/telegraf/blob/release-1.18/plugins/inputs/socket_listener/README.md)
configured to listen for messages.
```
# telegraf.conf
[[inputs.socket_listener]]
service_address = "/tmp/telegraf.sock"
```
## Usage
Add to your supervision tree:
{Telegraf, name: MyTelegraf, transport: #{inspect(__MODULE__)}}
With custom options:
{Telegraf,
name: MyTelegraf,
transport: #{inspect(__MODULE__)},
transport_options: [socket_path: "/tmp/cool.sock"]}
## Supported options
#{NimbleOptions.docs(@children_opts_definition)}
"""
@behaviour NimblePool
@behaviour Telegraf.Transport
@impl Telegraf.Transport
def children(name, opts) do
opts = validate_children_options!(opts)
socket_path = Keyword.fetch!(opts, :socket_path)
pool_size = Keyword.fetch!(opts, :pool_size)
[
{NimblePool,
worker: {__MODULE__, %{socket_path: socket_path}},
pool_size: pool_size,
lazy: true,
name: pool_name(name)}
]
end
@impl Telegraf.Transport
def send(name, message, opts \\ []) do
pool_timeout = Keyword.get(opts, :pool_timeout, 5000)
NimblePool.checkout!(
pool_name(name),
:checkout,
fn _from, socket ->
return = :gen_tcp.send(socket, message)
{return, socket}
end,
pool_timeout
)
end
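# Illustrative call (assumes the MyTelegraf supervision setup from the
# moduledoc; the message is a sample InfluxDB line-protocol metric):
#
#   Telegraf.Transport.UnixSocket.send(MyTelegraf, "weather temperature=82i\n")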
@impl NimblePool
def init_worker(%{socket_path: socket_path} = pool_state) do
parent = self()
async = fn ->
{:ok, socket} =
:gen_tcp.connect({:local, socket_path}, 0, [
:binary,
active: false,
packet: :raw
])
:ok = :gen_tcp.controlling_process(socket, parent)
socket
end
{:async, async, pool_state}
end
@impl NimblePool
def handle_checkout(:checkout, _from, socket, pool_state) do
{:ok, socket, socket, pool_state}
end
@impl NimblePool
def terminate_worker(_reason, socket, pool_state) do
:gen_tcp.close(socket)
{:ok, pool_state}
end
# credo:disable-for-next-line Credo.Check.Warning.UnsafeToAtom
defp pool_name(name), do: Module.concat(name, Pool)
defp validate_children_options!(opts) do
opts = Keyword.put_new(opts, :pool_size, System.schedulers_online())
case NimbleOptions.validate(opts, @children_opts_definition) do
{:ok, opts} ->
opts
{:error, %NimbleOptions.ValidationError{message: message}} ->
raise ArgumentError,
"invalid configuration given to #{inspect(__MODULE__)}.children/2, " <> message
end
end
end | lib/telegraf/transport/unix_socket.ex | 0.800107 | 0.710013 | unix_socket.ex | starcoder |
defmodule Bagg do
alias Bagg.{Aggday, Datapoint}
@valid_aggdays for {name, 1} <- Aggday.__info__(:functions), do: Atom.to_string(name)
@type aggregate_opt() ::
{:aggday, atom() | String.t()}
| {:kyoom, boolean()}
| {:odom, boolean()}
@type aggregate_error() :: {:invalid_aggday, any()} | :no_datapoints
@spec aggregate_goal(map()) ::
{:ok, [%Datapoint{}]}
| {:error, aggregate_error()}
def aggregate_goal(%{
"datapoints" => datapoints,
"odom" => odom,
"kyoom" => kyoom,
"aggday" => aggday
})
when is_list(datapoints) do
datapoints =
datapoints
|> Enum.sort_by(fn %{"timestamp" => t, "daystamp" => d} -> {d, t} end)
|> Enum.map(&Datapoint.new/1)
aggregate(datapoints,
odom: odom,
kyoom: kyoom,
aggday: aggday
)
end
def aggregate_goal(%{"odom" => _, "kyoom" => _, "aggday" => _}) do
{:error, :no_datapoints}
end
@spec aggregate([%Datapoint{}], [aggregate_opt()]) ::
{:ok, [%Datapoint{}]}
| {:error, aggregate_error()}
def aggregate(data, opts \\ []) do
kyoom = Keyword.get(opts, :kyoom, false)
odom = Keyword.get(opts, :odom, false)
aggday = to_string(Keyword.get(opts, :aggday, if(kyoom, do: :sum, else: :last)))
cond do
Enum.empty?(data) ->
{:error, :no_datapoints}
aggday not in @valid_aggdays ->
{:error, {:invalid_aggday, aggday}}
true ->
aggday = String.to_existing_atom(aggday)
data = if odom, do: odomify(data), else: data
data =
data
|> Enum.group_by(& &1.date)
|> Enum.map(fn {date, datapoints} ->
aggregate_day(date, datapoints, aggday)
end)
|> Enum.sort(&compare_dates/2)
data = if kyoom, do: kyoomify(data), else: data
{:ok, data}
end
end
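# Illustrative call (datapoints are %Datapoint{} structs as built above;
# assumes Aggday exports a sum/1 aggregator):
#
#   {:ok, series} = Bagg.aggregate(datapoints, kyoom: true, aggday: :sum)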
defp aggregate_day(date, datapoints, aggday) do
values = Enum.map(datapoints, & &1.value)
aggregated = apply(Aggday, aggday, [values])
hashtags =
Enum.reduce(datapoints, MapSet.new(), fn %{hashtags: hashtags}, acc ->
MapSet.union(hashtags, acc)
end)
%Datapoint{
date: date,
value: aggregated,
hashtags: hashtags
}
end
defp compare_dates(%Datapoint{date: first}, %Datapoint{date: second}) do
Date.compare(first, second) == :lt
end
defp odomify([first | rest]) do
%{out: out} =
Enum.reduce(rest, %{out: [first], prev: first, curradd: 0}, fn
datapoint, %{out: out, prev: prev, curradd: curradd} ->
curradd = if datapoint.value == 0, do: curradd + prev.value, else: curradd
new_datapoint = %Datapoint{datapoint | value: datapoint.value + curradd}
%{curradd: curradd, prev: datapoint, out: [new_datapoint | out]}
end)
Enum.reverse(out)
end
defp kyoomify(data) do
%{out: out} =
Enum.reduce(data, %{pre: 0, out: []}, fn
datapoint, %{pre: pre, out: out} ->
datapoint = %Datapoint{datapoint | value: datapoint.value + pre}
%{pre: datapoint.value, out: [datapoint | out]}
end)
Enum.reverse(out)
end
end | lib/bagg.ex | 0.767254 | 0.475544 | bagg.ex | starcoder |
defmodule Nebulex.Adapter.Entry do
@moduledoc """
Specifies the entry API required from adapters.
This behaviour specifies all read/write key-based functions,
the ones applied to a specific cache entry.
"""
@typedoc "Proxy type to the adapter meta"
@type adapter_meta :: Nebulex.Adapter.adapter_meta()
@typedoc "Proxy type to the cache key"
@type key :: Nebulex.Cache.key()
@typedoc "Proxy type to the cache value"
@type value :: Nebulex.Cache.value()
@typedoc "Proxy type to the cache options"
@type opts :: Nebulex.Cache.opts()
@typedoc "Proxy type to the cache entries"
@type entries :: Nebulex.Cache.entries()
@typedoc "TTL for a cache entry"
@type ttl :: timeout
@typedoc "Write command"
@type on_write :: :put | :put_new | :replace
@doc """
Gets the value for a specific `key` in `cache`.
See `c:Nebulex.Cache.get/2`.
"""
@callback get(adapter_meta, key, opts) :: value
@doc """
Gets a collection of entries from the Cache, returning them as `Map.t()` of
the values associated with the set of keys requested.
For every key that does not hold a value or does not exist, that key is
simply ignored. Because of this, the operation never fails.
See `c:Nebulex.Cache.get_all/2`.
"""
@callback get_all(adapter_meta, [key], opts) :: map
@doc """
Puts the given `value` under `key` into the `cache`.
Returns `true` if the `value` with key `key` is successfully inserted;
otherwise `false` is returned.
The `ttl` argument sets the time-to-live for the stored entry. If it is not
set, the entry has no time-to-live and therefore never expires.
## OnWrite
The `on_write` argument supports the following values:
* `:put` - If the `key` already exists, it is overwritten. Any previous
time-to-live associated with the key is discarded on successful `write`
operation.
* `:put_new` - It only stores the entry if the `key` does not already exist,
otherwise, `false` is returned.
* `:replace` - Alters the value stored under the given `key`, but only
if the key already exists into the cache, otherwise, `false` is
returned.
See `c:Nebulex.Cache.put/3`, `c:Nebulex.Cache.put_new/3`,
`c:Nebulex.Cache.replace/3`.
"""
@callback put(adapter_meta, key, value, ttl, on_write, opts) :: boolean
@doc """
Puts the given `entries` (key/value pairs) into the `cache`.
Returns `true` if all the keys were inserted. If no key was inserted
(at least one key already existed), `false` is returned.
The `ttl` argument sets the time-to-live for the stored entry. If it is not
set, the entry has no time-to-live and therefore never expires.
The given `ttl` is applied to all keys.
## OnWrite
The `on_write` argument supports the following values:
* `:put` - If the `key` already exists, it is overwritten. Any previous
time-to-live associated with the key is discarded on successful `write`
operation.
* `:put_new` - It only stores the entry if the `key` does not already exist,
otherwise, `false` is returned.
Ideally, this operation should be atomic, so all given keys are set at once.
But it depends purely on the adapter's implementation and the backend used
internally by the adapter. Hence, it is recommended to check out the
adapter's documentation.
See `c:Nebulex.Cache.put_all/2`.
"""
@callback put_all(adapter_meta, entries, ttl, on_write, opts) :: boolean
@doc """
Deletes a single entry from cache.
See `c:Nebulex.Cache.delete/2`.
"""
@callback delete(adapter_meta, key, opts) :: :ok
@doc """
Returns and removes the entry with key `key` in the cache.
See `c:Nebulex.Cache.take/2`.
"""
@callback take(adapter_meta, key, opts) :: value
@doc """
Updates the counter mapped to the given `key`.
If `amount` > 0, the counter is incremented by the given `amount`.
If `amount` < 0, the counter is decremented by the given `amount`.
If `amount` == 0, the counter is not updated.
See `c:Nebulex.Cache.incr/3`.
See `c:Nebulex.Cache.decr/3`.
"""
@callback update_counter(adapter_meta, key, amount, ttl, default, opts) ::
integer
when amount: integer, default: integer
@doc """
Returns whether the given `key` exists in cache.
See `c:Nebulex.Cache.has_key?/1`.
"""
@callback has_key?(adapter_meta, key) :: boolean
@doc """
Returns the TTL (time-to-live) for the given `key`. If the `key` does not
exist, then `nil` is returned.
See `c:Nebulex.Cache.ttl/1`.
"""
@callback ttl(adapter_meta, key) :: ttl | nil
@doc """
Returns `true` if the given `key` exists and the new `ttl` was successfully
updated, otherwise, `false` is returned.
See `c:Nebulex.Cache.expire/2`.
"""
@callback expire(adapter_meta, key, ttl) :: boolean
@doc """
Returns `true` if the given `key` exists and the last access time was
successfully updated, otherwise, `false` is returned.
See `c:Nebulex.Cache.touch/1`.
"""
@callback touch(adapter_meta, key) :: boolean
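# Minimal sketch of implementing one of these callbacks (illustrative only;
# a real adapter must implement the full behaviour and its companion modules):
#
#   defmodule MyApp.TestAdapter.Entry do
#     @behaviour Nebulex.Adapter.Entry
#
#     @impl true
#     def get(%{backend: table}, key, _opts) do
#       case :ets.lookup(table, key) do
#         [{^key, value}] -> value
#         [] -> nil
#       end
#     end
#
#     # ... remaining callbacks elided ...
#   end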
end | lib/nebulex/adapter/entry.ex | 0.9345 | 0.47025 | entry.ex | starcoder |
defmodule ExAlgo.Stack.MinMaxStack do
@moduledoc """
A min-max stack. In addition to being a LIFO, this stack keeps track of its
smallest and largest values and can report them efficiently. Besides `push`,
`pop`, and `peek`, a `MinMaxStack` also supports `pop_minimum` and `pop_maximum`.
"""
defstruct container: []
@type item() :: any()
@type history() :: [item()]
@type frame() :: %{top: item(), minimum: item(), maximum: item()}
@type t() :: %__MODULE__{container: [frame()]}
@doc """
Create a new empty stack
## Example
iex> MinMaxStack.new()
%MinMaxStack{container: []}
"""
def new, do: %__MODULE__{}
@doc """
Create a new min-max stack from an enumerable.
Note that the stack container has the order inversed as each element of the
iterable is pushed into the stack, thereby putting the last element on top.
## Example
iex> MinMaxStack.from([])
%MinMaxStack{container: []}
iex> MinMaxStack.from(1..3)
%MinMaxStack{container: [
%{current: 3, maximum: 3, minimum: 1},
%{current: 2, maximum: 2, minimum: 1},
%{current: 1, maximum: 1, minimum: 1}
]}
iex> MinMaxStack.from([7, -1, 5])
%MinMaxStack{container: [
%{current: 5, maximum: 7, minimum: -1},
%{current: -1, maximum: 7, minimum: -1},
%{current: 7, maximum: 7, minimum: 7}
]}
"""
@spec from([item()]) :: t()
def from(enumerable), do: Enum.reduce(enumerable, new(), &push(&2, &1))
@doc """
Returns the current item (aka top) of the stack.
## Example
iex> MinMaxStack.new() |> MinMaxStack.current()
nil
iex> stack =
...> MinMaxStack.new()
...> |> MinMaxStack.push(10)
...> |> MinMaxStack.push(-23)
...> |> MinMaxStack.push(5)
iex> stack |> MinMaxStack.current()
5
"""
@spec current(t()) :: item()
def current(stack), do: stack |> extract(:current)
@doc """
Returns the minimum item of the stack.
## Example
iex> MinMaxStack.new() |> MinMaxStack.minimum()
nil
iex> stack =
...> MinMaxStack.new()
...> |> MinMaxStack.push(10)
...> |> MinMaxStack.push(-23)
...> |> MinMaxStack.push(5)
iex> stack |> MinMaxStack.minimum()
-23
"""
@spec minimum(t()) :: item()
def minimum(stack), do: stack |> extract(:minimum)
@doc """
Returns the maximum item of the stack.
## Example
iex> MinMaxStack.new() |> MinMaxStack.maximum()
nil
iex> stack =
...> MinMaxStack.new()
...> |> MinMaxStack.push(10)
...> |> MinMaxStack.push(-23)
...> |> MinMaxStack.push(5)
iex> stack |> MinMaxStack.maximum()
10
"""
@spec maximum(t()) :: item()
def maximum(stack), do: stack |> extract(:maximum)
@doc """
Pushes the value into the stack.
## Example
iex> stack = MinMaxStack.new() |> MinMaxStack.push(10)
iex> stack
%MinMaxStack{container: [%{current: 10, maximum: 10, minimum: 10}]}
iex> stack = stack |> MinMaxStack.push(-1)
iex> stack
%MinMaxStack{container: [
%{current: -1, maximum: 10, minimum: -1},
%{current: 10, maximum: 10, minimum: 10}
]}
iex> stack = stack |> MinMaxStack.push(7)
iex> stack
%MinMaxStack{container: [
%{current: 7, maximum: 10, minimum: -1},
%{current: -1, maximum: 10, minimum: -1},
%{current: 10, maximum: 10, minimum: 10}
]}
"""
@spec push(t(), item()) :: t()
def push(%__MODULE__{container: container} = stack, item) do
new_frame = %{
current: item,
minimum: min(item, minimum(stack) || item),
maximum: max(item, maximum(stack) || item)
}
%__MODULE__{container: [new_frame | container]}
end
@doc """
Returns the top frame of the stack.
## Example
iex> MinMaxStack.new() |> MinMaxStack.peek()
nil
iex> stack =
...> MinMaxStack.new()
...> |> MinMaxStack.push(10)
...> |> MinMaxStack.push(-23)
...> |> MinMaxStack.push(5)
iex> stack |> MinMaxStack.peek()
%{current: 5, minimum: -23, maximum: 10}
"""
@spec peek(t()) :: frame() | nil
def peek(%__MODULE__{container: []}), do: nil
def peek(%__MODULE__{container: [current | _]}), do: current
@doc """
Pops the top-most frame from the stack.
## Example
iex> MinMaxStack.new() |> MinMaxStack.pop()
nil
iex> MinMaxStack.from([4, 7, 0, -3]) |> MinMaxStack.pop()
{-3, %MinMaxStack{container: [
%{current: 0, minimum: 0, maximum: 7},
%{current: 7, minimum: 4, maximum: 7},
%{current: 4, minimum: 4, maximum: 4}
]}}
iex> MinMaxStack.from([4, 7, 0, 30]) |> MinMaxStack.pop()
{30, %MinMaxStack{container: [
%{current: 0, minimum: 0, maximum: 7},
%{current: 7, minimum: 4, maximum: 7},
%{current: 4, minimum: 4, maximum: 4}
]}}
"""
@spec pop(t()) :: {item(), t()} | nil
def pop(%__MODULE__{container: []}), do: nil
def pop(%__MODULE__{container: [%{current: val} | rest]}),
do: {val, %__MODULE__{container: rest}}
@doc """
Pops the minimum value from the stack. Note that this will only remove once the
minimum value is reached. In case of multiple repetition of that minimum value
it will only pop once. Therefore, when there is duplicate values, `pop_minimum`
will not change the minimum attirbute of the frame.
It will return a tuple containing `{minimum_value, popped_values, new_stack}`
## Example
iex> MinMaxStack.new() |> MinMaxStack.pop_minimum()
nil
iex> MinMaxStack.from([4, -7, 0, -3]) |> MinMaxStack.pop_minimum()
{
-7,
[-7, 0, -3],
%MinMaxStack{container: [%{current: 4, minimum: 4, maximum: 4}]}
}
iex> MinMaxStack.from([4, 7, 0, 0, 30]) |> MinMaxStack.pop_minimum()
{0, [0, 30], %MinMaxStack{container: [
%{current: 0, minimum: 0, maximum: 7},
%{current: 7, minimum: 4, maximum: 7},
%{current: 4, minimum: 4, maximum: 4}
]}}
iex> MinMaxStack.from([-4, 7, 0, 0, 30]) |> MinMaxStack.pop_minimum()
{-4, [-4, 7, 0, 0, 30], %MinMaxStack{container: []}}
"""
@spec pop_minimum(t()) :: {item(), history(), t()} | nil
def pop_minimum(%__MODULE__{container: []}), do: nil
def pop_minimum(stack) do
do_pop_minimum(stack, [])
end
defp do_pop_minimum(
%__MODULE__{container: [%{current: minimum, minimum: minimum} | rest]},
history
) do
{minimum, [minimum | history], %__MODULE__{container: rest}}
end
defp do_pop_minimum(%__MODULE__{container: [%{current: current} | rest]}, history) do
do_pop_minimum(%__MODULE__{container: rest}, [current | history])
end
@doc """
Pops the maximum value from the stack. Note that this will only remove once the
maximum value is reached. In case of multiple repetition of that maximum value
it will only pop once. Therefore, when there is duplicate values, `pop_maximum`
will not change the maximum attirbute of the frame.
It will return a tuple containing `{maximum_value, popped_values, new_stack}`
## Example
iex> MinMaxStack.new() |> MinMaxStack.pop_maximum()
nil
iex> MinMaxStack.from([4, 17, 0, -3]) |> MinMaxStack.pop_maximum()
{
17,
[17, 0, -3],
%MinMaxStack{container: [%{current: 4, minimum: 4, maximum: 4}]}
}
iex> MinMaxStack.from([4, 7, 10, 10, -30]) |> MinMaxStack.pop_maximum()
{10, [10, -30], %MinMaxStack{container: [
%{current: 10, minimum: 4, maximum: 10},
%{current: 7, minimum: 4, maximum: 7},
%{current: 4, minimum: 4, maximum: 4}
]}}
iex> MinMaxStack.from([45, 7, 10, 10, -30]) |> MinMaxStack.pop_maximum()
{45, [45, 7, 10, 10, -30], %MinMaxStack{container: []}}
"""
@spec pop_maximum(t()) :: {item(), history(), t()} | nil
def pop_maximum(%__MODULE__{container: []}), do: nil
def pop_maximum(stack) do
do_pop_maximum(stack, [])
end
defp do_pop_maximum(
%__MODULE__{container: [%{current: maximum, maximum: maximum} | rest]},
history
) do
{maximum, [maximum | history], %__MODULE__{container: rest}}
end
defp do_pop_maximum(%__MODULE__{container: [%{current: current} | rest]}, history) do
do_pop_maximum(%__MODULE__{container: rest}, [current | history])
end
defp extract(%__MODULE__{container: []}, _), do: nil
defp extract(%__MODULE__{container: [%{minimum: val} | _]}, :minimum), do: val
defp extract(%__MODULE__{container: [%{maximum: val} | _]}, :maximum), do: val
defp extract(%__MODULE__{container: [%{current: val} | _]}, :current), do: val
end | lib/ex_algo/stack/min_max_stack.ex | 0.951718 | 0.462959 | min_max_stack.ex | starcoder |
defmodule Terp.TypeSystem.Type do
@moduledoc """
Type
Constructors:
- Tconst -> Constants
- Tvar -> type variables
- Tarrow -> arrow type; function
- Tlist -> list
"""
alias __MODULE__
alias Terp.TypeSystem.Environment
defstruct [:constructor, :t, :vars, :type_constructor]
@type t :: %__MODULE__{}
@spec bool() :: Type.t
def bool() do
%Type{constructor: :Tconst, t: :Bool}
end
@spec int() :: Type.t
def int() do
%Type{constructor: :Tconst, t: :Int}
end
@spec string() :: Type.t
def string() do
%Type{constructor: :Tconst, t: :String}
end
@spec function(Type.t, Type.t) :: Type.t
def function(%Type{} = t1, %Type{} = t2) do
%Type{
constructor: :Tarrow,
t: {t1, t2},
}
end
@spec list(Type.t) :: Type.t
def list(%Type{} = x) do
%Type{constructor: :Tlist, t: x}
end
@spec tuple(Type.t, Type.t) :: Type.t
def tuple(%Type{} = x, %Type{} = y) do
%Type{constructor: :Ttuple, t: {x, y}}
end
@spec var(String.t | atom()) :: Type.t
def var(x) do
%Type{constructor: :Tvar, t: x}
end
@doc """
Define a sum type.
Expects the type name, a list of type variables (as atoms),
and a list of tuples containing {type, type vars}.
"""
@spec sum_type(String.t, [String.t], [{String.t, [String.t]}]) :: Type.t
def sum_type(name, type_vars, constructors) do
ts = for {name, args} <- constructors do
constructor = to_string(name)
# This to_type won't work for HKTs
types = Enum.map(args, &to_type/1)
all_args_in_type_vars = types
|> Enum.all?(fn type ->
Enum.member?(type_vars, type.t) || type.constructor !== :Tvar
end)
if all_args_in_type_vars do
%Type{constructor: constructor,
t: types,
vars: args}
else
{:error, :invalid_type_var}
end
end
%Type{type_constructor: name, t: ts, vars: type_vars}
end
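# Illustrative (a Maybe-style sum type; the names are assumptions):
#
#   Type.sum_type("Maybe", ["a"], [{"Just", ["a"]}, {"Nothing", []}])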
def constructor_for_type(name) do
type = Environment.contents.type_defs
|> Enum.find(fn {_k, v} ->
Enum.member?(Enum.map(v.t, &(&1.constructor)), name)
end)
case type do
nil ->
{:error, {:type, :not_a_constructor}}
{_n, t} ->
{:ok, t}
end
end
def value_constructor(name) do
case (constructor_for_type(name)) do
{:error, _e} = error->
error
{:ok, t} ->
c = t.t
|> Enum.find(&(to_string(&1.constructor) == name))
case c do
nil -> {:error, {:type, :no_matching_constructor}}
type -> {:ok, type}
end
end
end
# to_type/1
def to_type(%Type{} = x), do: x
def to_type("Int"), do: int()
def to_type("Bool"), do: bool()
def to_type("String"), do: string()
def to_type(x), do: var(x)
# to_type/2
def to_type("List", x), do: list(to_type(x))
def to_type(constructor, vars) do
case Environment.lookup_def(constructor) do
{:error, e} ->
{:error, e}
{:ok, t} ->
replace_type_vars({t, vars})
end
end
# to_type/3
def to_type("Tuple", x, y), do: tuple(to_type(x), to_type(y))
def to_type("Arrow", x, y), do: function(to_type(x), to_type(y))
def to_type(:__arrow, x, y) do
left = if is_list(x) do
apply(Type, :to_type, x)
else
to_type(x)
end
right = if is_list(y) do
apply(Type, :to_type, y)
else
to_type(y)
end
function(left, right)
end
def replace_type_vars({type, new_vars}) do
zipped = Enum.zip(type.vars, List.wrap(new_vars))
updated_type = Enum.reduce(zipped, type, fn (var, type) -> replace_type_var(type, var) end)
updated_type
end
def replace_type_var(%Type{constructor: :Tvar, t: t} = type, {old_var, new_var}) do
if t == old_var, do: to_type(new_var), else: type
end
def replace_type_var(%Type{constructor: :Tlist, t: t} = type, vars) do
subbed_t = replace_type_var(t, vars)
%{type | t: subbed_t}
end
def replace_type_var(%Type{constructor: :Ttuple, t: {t1, t2}} = type, vars) do
subbed_t1 = replace_type_var(t1, vars)
subbed_t2 = replace_type_var(t2, vars)
%{type | t: {subbed_t1, subbed_t2}}
end
def replace_type_var(%Type{constructor: :Tarrow, t: {t1, t2}} = type, vars) do
subbed_t1 = replace_type_var(t1, vars)
subbed_t2 = replace_type_var(t2, vars)
%{type | t: {subbed_t1, subbed_t2}}
end
def replace_type_var(%Type{type_constructor: nil, t: ts} = type, vars) when is_list(ts) do
updated = Enum.map(ts, &replace_type_var(&1, vars))
updated_vars = Enum.map(type.vars, &(if &1 == elem(vars, 0), do: elem(vars, 1), else: &1))
%{type | t: updated, vars: updated_vars}
end
def replace_type_var(%Type{type_constructor: _t, t: ts} = type, vars) when is_list(ts) do
updated_data_constructors = Enum.map(ts, &replace_type_var(&1, vars))
# TODO can only handle 1 type variable currently
updated_vars = Enum.map(type.vars, &(if &1 == elem(vars, 0), do: elem(vars, 1), else: &1))
%{type | t: updated_data_constructors, vars: updated_vars}
end
def replace_type_var(type, _vars), do: type
defimpl String.Chars do
def to_string(%Type{constructor: :Tconst, t: t}), do: Kernel.to_string(t)
def to_string(%Type{constructor: :Tvar, t: t}), do: Kernel.to_string(t)
def to_string(%Type{constructor: :Tlist, t: x}), do: "[#{Kernel.to_string(x)}]"
def to_string(%Type{constructor: :Ttuple, t: {x, y}}), do: "{#{Kernel.to_string(x)}, #{Kernel.to_string(y)}}"
def to_string(%Type{constructor: :Tarrow, t: {x, y}}), do: "(-> #{Kernel.to_string(x)} #{Kernel.to_string(y)})"
def to_string(%Type{constructor: nil, type_constructor: t, vars: vars}) do
var_string = vars
|> Enum.map(&Kernel.to_string/1)
|> Enum.join(" ")
"[#{Enum.join([Kernel.to_string(t), var_string], " ")}]"
end
def to_string(%Type{constructor: c, t: ts}) do
var_string = ts
|> Enum.map(&Kernel.to_string/1)
|> Enum.join(" ")
if var_string == "" do
"[#{Kernel.to_string(c)}]"
else
"[#{Kernel.to_string(c)} #{var_string}]"
end
end
end
end | lib/type_system/type.ex | 0.53048 | 0.550487 | type.ex | starcoder |