defmodule Cldr.DateTime do
@moduledoc """
Provides localization and formatting of a `DateTime`
struct or any map with the keys `:year`, `:month`,
`:day`, `:calendar`, `:hour`, `:minute`, `:second` and optionally `:microsecond`.
`Cldr.DateTime` provides support for the built-in calendar
`Calendar.ISO` or any calendars defined with
[ex_cldr_calendars](https://hex.pm/packages/ex_cldr_calendars).
CLDR provides standard format strings for `DateTime` which
are represented by the names `:short`, `:medium`, `:long`
and `:full`. This allows for locale-independent
formatting since each locale will define the underlying
format string as appropriate.
"""
alias Cldr.DateTime.Format
alias Cldr.LanguageTag
@style_types [:short, :medium, :long, :full]
@default_type :medium
defmodule Styles do
@moduledoc false
defstruct Module.get_attribute(Cldr.DateTime, :style_types)
end
@doc """
Formats a DateTime according to a format string
as defined in CLDR and described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html).
## Arguments
* `datetime` is a `%DateTime{}` or `%NaiveDateTime{}` struct or any map that contains the keys
`:year`, `:month`, `:day`, `:calendar`, `:hour`, `:minute` and `:second` with optional
`:microsecond`.
* `backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend/0`.
* `options` is a keyword list of options for formatting.
## Options
* `format:` `:short` | `:medium` | `:long` | `:full` or a format string or
any of the keys returned by `Cldr.DateTime.available_format_names`.
The default is `:medium`.
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`.
* `number_system:` a number system into which the formatted date digits should
be transliterated.
* `era: :variant` will use a variant for the era if one is available in the locale.
In the "en" locale, for example, `era: :variant` will return "BCE" instead of "BC".
* `period: :variant` will use a variant for the time period and flexible time period if
one is available in the locale. For example, in the "en" locale `period: :variant` will
return "pm" instead of "PM".
## Returns
* `{:ok, formatted_datetime}` or
* `{:error, reason}`
## Examples
iex> {:ok, datetime} = DateTime.from_naive(~N[2000-01-01 23:59:59.0], "Etc/UTC")
iex> Cldr.DateTime.to_string datetime
{:ok, "Jan 1, 2000, 11:59:59 PM"}
iex> Cldr.DateTime.to_string datetime, MyApp.Cldr, locale: "en"
{:ok, "Jan 1, 2000, 11:59:59 PM"}
iex> Cldr.DateTime.to_string datetime, MyApp.Cldr, format: :long, locale: "en"
{:ok, "January 1, 2000 at 11:59:59 PM UTC"}
iex> Cldr.DateTime.to_string datetime, MyApp.Cldr, format: :hms, locale: "en"
{:ok, "23:59:59"}
iex> Cldr.DateTime.to_string datetime, MyApp.Cldr, format: :full, locale: "en"
{:ok, "Saturday, January 1, 2000 at 11:59:59 PM GMT"}
iex> Cldr.DateTime.to_string datetime, MyApp.Cldr, format: :full, locale: "fr"
{:ok, "samedi 1 janvier 2000 à 23:59:59 UTC"}
"""
@spec to_string(map, Cldr.backend() | Keyword.t(), Keyword.t()) ::
{:ok, String.t()} | {:error, {module, String.t()}}
def to_string(datetime, backend \\ Cldr.Date.default_backend(), options \\ [])
def to_string(%{calendar: Calendar.ISO} = datetime, backend, options) do
%{datetime | calendar: Cldr.Calendar.Gregorian}
|> to_string(backend, options)
end
def to_string(datetime, options, []) when is_list(options) do
to_string(datetime, Cldr.Date.default_backend(), options)
end
def to_string(%{calendar: calendar} = datetime, backend, options)
when is_atom(backend) and is_list(options) do
options = normalize_options(backend, options)
format_backend = Module.concat(backend, DateTime.Formatter)
number_system = Keyword.get(options, :number_system)
with {:ok, locale} <- Cldr.validate_locale(options[:locale], backend),
{:ok, cldr_calendar} <- type_from_calendar(calendar),
{:ok, _} <- Cldr.Number.validate_number_system(locale, number_system, backend),
{:ok, format_string} <- format_string(options[:format], locale, cldr_calendar, backend),
{:ok, formatted} <- format_backend.format(datetime, format_string, locale, options) do
{:ok, formatted}
end
rescue
e in [Cldr.DateTime.UnresolvedFormat] ->
{:error, {e.__struct__, e.message}}
end
def to_string(datetime, _backend, _options) do
error_return(datetime, [:year, :month, :day, :hour, :minute, :second, :calendar])
end
defp normalize_options(backend, []) do
{locale, _backend} = Cldr.locale_and_backend_from(nil, backend)
number_system = Cldr.Number.System.number_system_from_locale(locale, backend)
[locale: locale, number_system: number_system, format: @default_type]
end
defp normalize_options(backend, options) do
{locale, _backend} = Cldr.locale_and_backend_from(options[:locale], backend)
format = options[:format] || options[:style] || @default_type
locale_number_system = Cldr.Number.System.number_system_from_locale(locale, backend)
number_system = Keyword.get(options, :number_system, locale_number_system)
options
|> Keyword.put(:locale, locale)
|> Keyword.put(:format, format)
|> Keyword.delete(:style)
|> Keyword.put_new(:number_system, number_system)
end
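# Illustration (sketch; the actual locale and number system values depend on
# the backend configuration):
#
#   normalize_options(MyApp.Cldr, format: :long)
#   #=> [format: :long, locale: %Cldr.LanguageTag{...}, number_system: :latn]
#
# so the downstream clauses can rely on :locale, :format and :number_system
# always being present in the options.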
@doc false
# Returns the CLDR calendar type for a calendar
def type_from_calendar(Cldr.Calendar.Gregorian = calendar) do
{:ok, calendar.cldr_calendar_type()}
end
def type_from_calendar(calendar) do
with {:ok, calendar} <- Cldr.Calendar.validate_calendar(calendar) do
{:ok, calendar.cldr_calendar_type()}
end
end
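# Example: type_from_calendar(Cldr.Calendar.Gregorian) returns
# {:ok, :gregorian} (the CLDR calendar type reported by ex_cldr_calendars).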
@doc """
Formats a DateTime according to a format string
as defined in CLDR and described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html)
returning a formatted string or raising on error.
## Arguments
* `datetime` is a `%DateTime{}` or `%NaiveDateTime{}` struct or any map that contains the keys
`:year`, `:month`, `:day`, `:calendar`, `:hour`, `:minute` and `:second` with optional
`:microsecond`.
* `backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend/0`.
* `options` is a keyword list of options for formatting.
## Options
* `format:` `:short` | `:medium` | `:long` | `:full`, a format string, or
any of the keys returned by `Cldr.DateTime.available_format_names`.
The default is `:medium`.
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`.
* `number_system:` a number system into which the formatted date digits should
be transliterated.
* `era: :variant` will use a variant for the era if one is available in the locale.
In the "en" locale, for example, `era: :variant` will return "BCE" instead of "BC".
* `period: :variant` will use a variant for the time period and flexible time period if
one is available in the locale. For example, in the "en" locale `period: :variant` will
return "pm" instead of "PM".
## Returns
* `formatted_datetime` or
* raises an exception
## Examples
iex> {:ok, datetime} = DateTime.from_naive(~N[2000-01-01 23:59:59.0], "Etc/UTC")
iex> Cldr.DateTime.to_string! datetime, MyApp.Cldr, locale: "en"
"Jan 1, 2000, 11:59:59 PM"
iex> Cldr.DateTime.to_string! datetime, MyApp.Cldr, format: :long, locale: "en"
"January 1, 2000 at 11:59:59 PM UTC"
iex> Cldr.DateTime.to_string! datetime, MyApp.Cldr, format: :full, locale: "en"
"Saturday, January 1, 2000 at 11:59:59 PM GMT"
iex> Cldr.DateTime.to_string! datetime, MyApp.Cldr, format: :full, locale: "fr"
"samedi 1 janvier 2000 à 23:59:59 UTC"
"""
@spec to_string!(map, Cldr.backend() | Keyword.t(), Keyword.t()) :: String.t() | no_return
def to_string!(datetime, backend \\ Cldr.Date.default_backend(), options \\ [])
def to_string!(datetime, options, []) when is_list(options) do
to_string!(datetime, Cldr.Date.default_backend(), options)
end
def to_string!(datetime, backend, options) do
case to_string(datetime, backend, options) do
{:ok, string} -> string
{:error, {exception, message}} -> raise exception, message
end
end
# Standard format
defp format_string(style, %LanguageTag{cldr_locale_name: locale_name}, cldr_calendar, backend)
when style in @style_types do
with {:ok, styles} <- Format.date_time_formats(locale_name, cldr_calendar, backend) do
{:ok, Map.get(styles, style)}
end
end
# Look up for the format in :available_formats
defp format_string(style, %LanguageTag{cldr_locale_name: locale_name}, cldr_calendar, backend)
when is_atom(style) do
with {:ok, styles} <-
Format.date_time_available_formats(locale_name, cldr_calendar, backend),
format_string <- Map.get(styles, style) do
if format_string do
{:ok, format_string}
else
{:error,
{Cldr.DateTime.InvalidStyle,
"Invalid datetime style #{inspect(style)}. " <>
"The valid styles are #{inspect(styles)}."}}
end
end
end
# Format with a number system
defp format_string(%{number_system: number_system, format: style}, locale, calendar, backend) do
{:ok, format_string} = format_string(style, locale, calendar, backend)
{:ok, %{number_system: number_system, format: format_string}}
end
# Straight up format string
defp format_string(format_string, _locale, _calendar, _backend)
when is_binary(format_string) do
{:ok, format_string}
end
defp error_return(map, requirements) do
requirements =
requirements
|> Enum.map(&inspect/1)
|> Cldr.DateTime.Formatter.join_requirements()
{:error,
{ArgumentError,
"Invalid DateTime. DateTime is a map that contains at least #{requirements}. " <>
"Found: #{inspect(map)}"}}
end
end
# Source file: lib/cldr/datetime.ex
defmodule Guss.RequestHeaders do
@moduledoc """
Conveniences for working with canonical request headers.
Normally you do not need to access this module directly. It is
meant to be used by the signing processes.
If you want to get a list of signed headers for your request,
see `Guss.Resource.signed_headers/1`.
"""
@doc """
Convert the enumerable to a dasherized list, suitable for URL signing.
The enumerable will have the following transformations applied:
* Header keys are downcased.
* Underscores (`"_"`) are replaced with dashes (`"-"`).
* Leading and trailing whitespace is removed.
* Keys with empty values will be removed.
* Atom values will be dasherized like header keys. This is useful for some built-in values,
like `:public_read`.
* Integer values are converted to strings.
* Enumerable values will be expanded following the same transformation rules. See the examples
for more details.
The result is a list of `{key, value}` tuples for each request header, sorted by key name.
## Examples
iex> Guss.RequestHeaders.dasherize(x_foo_bar: "qux")
[{"x-foo-bar", "qux"}]
iex> Guss.RequestHeaders.dasherize(x: [foo_bar: "qux"])
[{"x-foo-bar", "qux"}]
iex> Guss.RequestHeaders.dasherize(x: [foo: [bar: "qux"]])
[{"x-foo-bar", "qux"}]
iex> Guss.RequestHeaders.dasherize(x: [meta: [int: 42, atom: :foo_bar]])
[{"x-meta-atom", "foo-bar"}, {"x-meta-int", "42"}]
iex> Guss.RequestHeaders.dasherize(content_type: "text/plain", content_md5: "3a0ef89...")
[{"content-md5", "3a0ef89..."}, {"content-type", "text/plain"}]
iex> Guss.RequestHeaders.dasherize(X: [{:user, "Bob"}, {"User", "Alice"}])
[{"x-user", "Bob"}, {"x-user", "Alice"}]
iex> Guss.RequestHeaders.dasherize(x: [vendor: [id: "guss"], goog: [acl: :public_read]])
[{"x-goog-acl", "public-read"}, {"x-vendor-id", "guss"}]
iex> Guss.RequestHeaders.dasherize(%{"X" => %{"Goog" => %{"Acl" => "public-read", "Meta" => %{"Value" => 1}}}})
[{"x-goog-acl", "public-read"}, {"x-goog-meta-value", "1"}]
iex> Guss.RequestHeaders.dasherize(%{"X" => %{"Goog" => %{"Meta" => %{" Value " => 1}}}})
[{"x-goog-meta-value", "1"}]
"""
def dasherize(data) when is_map(data) and data == %{}, do: []
def dasherize(data) when is_map(data), do: data |> Enum.into([]) |> do_dasherize()
def dasherize(data) when is_list(data), do: data |> do_dasherize()
# Starts collapsing items. Empty lists are ignored.
defp do_dasherize([]), do: []
defp do_dasherize(enum), do: dasherize_items(enum, [])
# Input values exhausted
defp dasherize_items([], acc), do: acc |> ordered_sort()
# Expands nested values
defp dasherize_items([{key, val} | rest], acc) when is_list(val) or is_map(val) do
dasherize_items(rest, dasherize_nested(key_name(key), val, acc))
end
# Ignores empty values
defp dasherize_items([{_, val} | rest], acc) when is_nil(val) or val == "" do
dasherize_items(rest, acc)
end
# Dasherizes atom values
defp dasherize_items([{key, val} | rest], acc) when is_atom(val) do
dasherize_items(rest, [{key_name(key), to_dashed(val)} | acc])
end
# Dasherizes key and stringifies values
defp dasherize_items([{key, val} | rest], acc) do
dasherize_items(rest, [{key_name(key), to_string(val)} | acc])
end
defp dasherize_nested(prefix, enum, acc) do
Enum.reduce(enum, acc, fn {key, val}, acc ->
next_key = "#{prefix}-#{key_name(key)}"
case val do
val when is_map(val) -> dasherize_nested(next_key, Enum.into(val, []), acc)
val when is_list(val) -> dasherize_nested(next_key, val, acc)
val when is_atom(val) -> [{next_key, to_dashed(val)} | acc]
val -> [{next_key, to_string(val)} | acc]
end
end)
end
defp to_dashed(str) when is_atom(str), do: to_dashed(Atom.to_string(str))
defp to_dashed(str) when is_binary(str), do: String.replace(str, "_", "-")
defp key_name(key), do: key |> to_dashed() |> String.trim() |> String.downcase()
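# `ordered_sort/1` receives the accumulator in reverse insertion order,
# restores the original order, then sorts by {key, original_index} so that
# duplicate keys keep their relative order (a stable sort). For example, items
# inserted as [{"x", "1"}, {"a", "2"}, {"x", "3"}] come out as
# [{"a", "2"}, {"x", "1"}, {"x", "3"}].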
defp ordered_sort(items) do
items
|> Enum.reverse()
|> Enum.with_index()
|> Enum.sort_by(fn {{k, _v}, i} -> {k, i} end)
|> Enum.unzip()
|> elem(0)
end
@doc """
Eliminates duplicate keys in the enumerable.
Duplicate keys will be replaced with a single key and a
comma-separated list of values.
The result is a list sorted alphabetically by key. Values will
retain their ordering in the original list.
## Examples
iex> Guss.RequestHeaders.deduplicate([{"x", "Foo"}, {"x", "Bar"}])
[{"x", "Foo,Bar"}]
iex> Guss.RequestHeaders.deduplicate([{"x", "this"}, {"bar", "qux"}, {"x", "that"}])
[{"bar", "qux"}, {"x", "this,that"}]
"""
def deduplicate(enumerable) do
for {k, v} <- Enum.group_by(enumerable, &elem(&1, 0), &elem(&1, 1)) do
{k, join_values(v, ",")}
end
|> Enum.into([])
|> Enum.sort_by(&elem(&1, 0))
end
defp join_values(items, joiner) when is_list(items) do
items |> Enum.map_join(joiner, &String.trim/1)
end
end
# Source file: lib/guss/request_headers.ex
defmodule VintageNetBridge do
@moduledoc """
Configure network bridges with VintageNet
Configurations for this technology are maps with a `:type` field set to
`VintageNetBridge`. The following additional fields are supported:
* `:vintage_net_bridge` - Bridge options
* `:interfaces` - Set to a list of interface names to add to the bridge.
This option is required for the bridge to be useful.
* `:forward_delay`
* `:priority`
* `:hello_time`
* `:max_age`
* `:path_cost`
* `:path_priority`
* `:hairpin`
* `:stp`
Here's an example configuration for setting up a bridge:
```elixir
%{
type: VintageNetBridge,
vintage_net_bridge: %{
interfaces: ["eth0", "wlan0"]
}
}
```
See [brctl(8)](https://www.man7.org/linux/man-pages/man8/brctl.8.html) for
more information on individual options.
"""
@behaviour VintageNet.Technology
alias VintageNet.Interface.RawConfig
alias VintageNet.IP.{IPv4Config, DhcpdConfig, DnsdConfig}
alias VintageNetBridge.Server
@impl true
def normalize(config), do: config
@impl true
def to_raw_config(ifname, config, opts) do
normalized_config = normalize(config)
bridge_config = normalized_config[:vintage_net_bridge]
brctl = Keyword.fetch!(opts, :bin_brctl)
interfaces = Map.fetch!(bridge_config, :interfaces)
up_cmds = [
{:run, brctl, ["addbr", ifname]}
]
down_cmds = [
{:run, brctl, ["delbr", ifname]}
]
bridge_up_cmds = Enum.flat_map(bridge_config, &config_to_cmd(&1, brctl, ifname))
addif_up_cmds =
Enum.map(interfaces, fn addif ->
{:run_ignore_errors, brctl, ["addif", ifname, addif]}
end)
%RawConfig{
ifname: ifname,
type: __MODULE__,
source_config: normalized_config,
up_cmds: up_cmds ++ bridge_up_cmds ++ addif_up_cmds,
down_cmds: down_cmds,
required_ifnames: [],
child_specs: [{Server, %{brctl: brctl, bridge_ifname: ifname, interfaces: interfaces}}]
}
|> IPv4Config.add_config(normalized_config, opts)
|> DhcpdConfig.add_config(normalized_config, opts)
|> DnsdConfig.add_config(normalized_config, opts)
end
@impl true
def ioctl(_ifname, _command, _args) do
{:error, :unsupported}
end
@impl true
def check_system(_opts) do
{:error, "unimplemented"}
end
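# Each bridge option maps to one brctl invocation. For example (sketch; the
# brctl path comes from the :bin_brctl option):
#
#   config_to_cmd({:stp, true}, "/sbin/brctl", "br0")
#   #=> [{:run, "/sbin/brctl", ["stp", "br0", "yes"]}]
#
# Unknown options fall through to the final clause and produce no commands.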
defp config_to_cmd({:forward_delay, value}, brctl, ifname) do
[{:run, brctl, ["setfd", ifname, to_string(value)]}]
end
defp config_to_cmd({:priority, value}, brctl, ifname) do
[{:run, brctl, ["setbridgeprio", ifname, to_string(value)]}]
end
defp config_to_cmd({:hello_time, value}, brctl, ifname) do
[{:run, brctl, ["sethello", ifname, to_string(value)]}]
end
defp config_to_cmd({:max_age, value}, brctl, ifname) do
[{:run, brctl, ["setmaxage", ifname, to_string(value)]}]
end
defp config_to_cmd({:path_cost, value}, brctl, ifname) do
[{:run, brctl, ["setpathcost", ifname, to_string(value)]}]
end
defp config_to_cmd({:path_priority, value}, brctl, ifname) do
[{:run, brctl, ["setportprio", ifname, to_string(value)]}]
end
defp config_to_cmd({:hairpin, {port, value}}, brctl, ifname) do
[{:run, brctl, ["hairpin", ifname, to_string(port), bool_to_yn(value)]}]
end
defp config_to_cmd({:stp, value}, brctl, ifname) do
[{:run, brctl, ["stp", ifname, bool_to_yn(value)]}]
end
defp config_to_cmd(_other, _brctl, _ifname), do: []
defp bool_to_yn(true), do: "yes"
defp bool_to_yn(false), do: "no"
end
# Source file: lib/vintage_net_bridge.ex
defmodule OT.Text.Transformation do
require Logger
@moduledoc """
The transformation of two concurrent operations such that they satisfy the
[TP1][tp1] property of operational transformation.
[tp1]: https://en.wikipedia.org/wiki/Operational_transformation#Convergence_properties
"""
alias OT.Text.{Component, Operation, Scanner}
@doc """
Transform an operation against another operation.
Given an operation A that occurred at the same time as operation B against the
same text state, transform the components of operation A such that the state
of the text after applying operation A and then operation B is the same as
after applying operation B and then the transformation of operation A against
operation B:
*S ○ Oa ○ transform(Ob, Oa) = S ○ Ob ○ transform(Oa, Ob)*
This function also takes a third `side` argument that indicates which
operation came later. This is important when deciding whether it is acceptable
to break up insert components from one operation or the other.
"""
@spec transform(Operation.t, Operation.t, OT.Type.side) :: Operation.t
def transform(op_a, op_b, side) do
{op_a, op_b}
|> next
|> do_transform(side)
end
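# Component shapes used by the side-aware clauses below (sketch): a
# non-negative integer retains that many characters, %{i: "abc"} inserts a
# string, and %{d: "abc"} deletes one. (transform/2 further down instead
# encodes deletes as negative integers.) Example call for two concurrent
# inserts at position 0:
#
#   transform([%{i: "a"}], [%{i: "b"}], :left)
#
# where :left gives op_a's insert precedence over op_b's.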
defp transform_loop(_, _, nil, nil, operation1Prime, operation2Prime, _, _) do
[operation1Prime, operation2Prime]
end
# op1 == :insert
defp transform_loop(op1s, op2s, op1=%{i: _}, op2, operation1Prime, operation2Prime, op1_position, op2_position) do
operation1Prime = List.insert_at(operation1Prime, -1, op1)
operation2Prime = List.insert_at(operation2Prime, -1, Component.length(op1))
op1_position = op1_position + 1
op1 = Enum.at(op1s, op1_position)
transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
end
# op2 == :insert
defp transform_loop(op1s, op2s, op1, op2=%{i: _}, operation1Prime, operation2Prime, op1_position, op2_position) do
operation1Prime = List.insert_at(operation1Prime, -1, Component.length(op2))
operation2Prime = List.insert_at(operation2Prime, -1, op2)
op2_position = op2_position + 1
op2 = Enum.at(op2s, op2_position)
transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
end
# op1: retain, op2: retain
defp transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position) when is_integer(op1) and is_integer(op2) and 0 <= op1 and 0 <= op2 do
[minl, op1, op2, op1_position, op2_position] = cond do
Component.length(op1) > Component.length(op2) ->
minl = op2
op1 = op1 - op2
op2_position = op2_position + 1
op2 = Enum.at(op2s, op2_position)
[minl, op1, op2, op1_position, op2_position]
Component.length(op1) == Component.length(op2) ->
minl = op2
op1_position = op1_position + 1
op2_position = op2_position + 1
op1 = Enum.at(op1s, op1_position)
op2 = Enum.at(op2s, op2_position)
[minl, op1, op2, op1_position, op2_position]
true ->
minl = op1
op2 = op2 - op1
op1_position = op1_position + 1
op1 = Enum.at(op1s, op1_position)
[minl, op1, op2, op1_position, op2_position]
end
operation1Prime = List.insert_at(operation1Prime, -1, minl)
operation2Prime = List.insert_at(operation2Prime, -1, minl)
transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
end
# op1: delete, op2: delete
defp transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
when is_integer(op1) and is_integer(op2) and op1 < 0 and op2 < 0 do
cond do
Component.length(op1) > Component.length(op2) ->
op1 = op1 - op2
op2_position = op2_position + 1
op2 = Enum.at(op2s, op2_position)
transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
Component.length(op1) == Component.length(op2) ->
op1_position = op1_position + 1
op2_position = op2_position + 1
op1 = Enum.at(op1s, op1_position)
op2 = Enum.at(op2s, op2_position)
transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
true ->
op2 = op2 - op1
op1_position = op1_position + 1
op1 = Enum.at(op1s, op1_position)
transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
end
end
# op1: delete, op2: retain
defp transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
when is_integer(op1) and is_integer(op2) and op1 < 0 and 0 <= op2 do
[minl, op1, op2, op1_position, op2_position] = cond do
Component.length(op1) > Component.length(op2) ->
minl = op2
op1 = op1 + op2
op2_position = op2_position + 1
op2 = Enum.at(op2s, op2_position)
[minl, op1, op2, op1_position, op2_position]
Component.length(op1) == Component.length(op2) ->
minl = op2
op1_position = op1_position + 1
op2_position = op2_position + 1
op1 = Enum.at(op1s, op1_position)
op2 = Enum.at(op2s, op2_position)
[minl, op1, op2, op1_position, op2_position]
true ->
minl = -op1
op2 = op2 + op1
op1_position = op1_position + 1
op1 = Enum.at(op1s, op1_position)
[minl, op1, op2, op1_position, op2_position]
end
operation1Prime = List.insert_at(operation1Prime, -1, -minl)
transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
end
# op1: retain, op2: delete
defp transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
when is_integer(op1) and is_integer(op2) and 0 <= op1 and op2 < 0 do
[minl, op1, op2, op1_position, op2_position] = cond do
Component.length(op1) > Component.length(op2) ->
minl = -op2
op1 = op1 + op2
op2_position = op2_position + 1
op2 = Enum.at(op2s, op2_position)
[minl, op1, op2, op1_position, op2_position]
Component.length(op1) == Component.length(op2) ->
minl = op1
op1_position = op1_position + 1
op2_position = op2_position + 1
op1 = Enum.at(op1s, op1_position)
op2 = Enum.at(op2s, op2_position)
[minl, op1, op2, op1_position, op2_position]
true ->
minl = op1
op2 = op2 + op1
op1_position = op1_position + 1
op1 = Enum.at(op1s, op1_position)
[minl, op1, op2, op1_position, op2_position]
end
operation2Prime = List.insert_at(operation2Prime, -1, -minl)
transform_loop(op1s, op2s, op1, op2, operation1Prime, operation2Prime, op1_position, op2_position)
end
# Unexpected condition
defp transform_loop(op1s, op2s, op1, op2, _, _, _, _) do
Logger.debug("INVALID op1s: #{inspect op1s}, op2s: #{inspect op2s}, op1: #{inspect op1}, op2: #{inspect op2}")
raise "The two operations aren't compatible or "
end
@spec transform(Operation.t, Operation.t) :: [Operation.t]
def transform(op1s, op2s) do
op1 = Enum.at(op1s, 0)
op2 = Enum.at(op2s, 0)
transform_loop(op1s, op2s, op1, op2, [], [], 0, 0)
end
@spec do_transform(Scanner.output, OT.Type.side, Operation.t) :: Operation.t
defp do_transform(next_pair, side, result \\ [])
# Operation A is exhausted
defp do_transform({{nil, _}, _}, _, result) do
result
end
# Operation B is exhausted
defp do_transform({{head_a, tail_a}, {nil, _}}, _, result) do
result
|> Operation.append(head_a)
|> Operation.join(tail_a)
end
# insert / insert / left
defp do_transform({{head_a = %{i: _}, tail_a}, {head_b = %{i: _}, tail_b}}, :left, result) do
{tail_a, [head_b | tail_b]}
|> next
|> do_transform(:left, Operation.append(result, head_a))
end
# insert / insert / right
defp do_transform({{head_a = %{i: _}, tail_a}, {head_b = %{i: _}, tail_b}}, :right, result) do
{[head_a | tail_a], tail_b}
|> next
|> do_transform(:right, Operation.append(result, Component.length(head_b)))
end
# insert / retain
defp do_transform({{head_a = %{i: _}, tail_a}, {head_b, tail_b}}, side, result) when is_integer(head_b) do
{tail_a, [head_b | tail_b]}
|> next
|> do_transform(side, Operation.append(result, head_a))
end
# insert / delete
defp do_transform({{head_a = %{i: _}, tail_a}, {head_b = %{d: _}, tail_b}}, side, result) do
{tail_a, [head_b | tail_b]}
|> next
|> do_transform(side, Operation.append(result, head_a))
end
# retain / insert
defp do_transform({{head_a, tail_a}, {head_b = %{i: _}, tail_b}}, side, result)
when is_integer(head_a) do
{[head_a | tail_a], tail_b}
|> next
|> do_transform(side, Operation.append(result, Component.length(head_b)))
end
# retain / retain
defp do_transform({{head_a, tail_a}, {head_b, tail_b}}, side, result)
when is_integer(head_a) and is_integer(head_b) do
{tail_a, tail_b}
|> next
|> do_transform(side, Operation.append(result, head_a))
end
# retain / delete
defp do_transform({{head_a, tail_a}, {%{d: _}, tail_b}}, side, result)
when is_integer(head_a) do
{tail_a, tail_b}
|> next
|> do_transform(side, result)
end
# delete / insert
defp do_transform({{head_a = %{d: _}, tail_a}, {head_b = %{i: _}, tail_b}}, side, result) do
{[head_a | tail_a], tail_b}
|> next
|> do_transform(side, Operation.append(result, Component.length(head_b)))
end
# delete / retain
defp do_transform({{head_a = %{d: _}, tail_a}, {head_b, tail_b}}, side, result) when is_integer(head_b) do
{tail_a, tail_b}
|> next
|> do_transform(side, Operation.append(result, head_a))
end
# delete / delete
defp do_transform({{%{d: _}, tail_a}, {%{d: _}, tail_b}}, side, result) do
{tail_a, tail_b}
|> next
|> do_transform(side, result)
end
@spec next(Scanner.input) :: Scanner.output
defp next(scanner_input), do: Scanner.next(scanner_input, :insert)
end
# Source file: lib/ot/text/transformation.ex
defmodule Fika.Compiler.TypeChecker.Match do
alias Fika.Compiler.TypeChecker.Types, as: T
@moduledoc """
This module takes care of the type checking needed for pattern matching.
This is currently a naive algorithm with scope for optimization,
but it should do for now. Here's how the algorithm works:
1. Expand all unions in the RHS and convert it into a list of possible types
2. Remove all types from this list which are matched by the LHS
3. Return {:ok, env, unmatched_types} when a match happens,
Return :error if no match happens
"""
# Returns:
# {:ok, env, unmatched_types} | :error
def match_case(env, lhs_ast, rhs_types) when is_list(rhs_types) do
find_unmatched(env, lhs_ast, rhs_types)
end
def match_case(env, lhs_ast, rhs_types) do
match_case(env, lhs_ast, expand_unions(rhs_types))
end
# Returns {:ok, env} | :error
def match(env, lhs_ast, rhs_type) do
case match_case(env, lhs_ast, rhs_type) do
{:ok, env, []} -> {:ok, env}
_ -> :error
end
end
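# Illustration (sketch; the AST and type shapes follow the clauses below): an
# identifier pattern matches any type and binds it in the env, so nothing is
# left unmatched:
#
#   match_case(env, {:identifier, 0, :x}, %T.Union{types: [:Int, :String]})
#   #=> {:ok, env_with_x_bound, []}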
def expand_unions(%T.Union{types: types}) do
Enum.flat_map(types, &expand_unions(&1))
end
def expand_unions(%T.Tuple{elements: types}) do
types
|> do_expand_all()
|> Enum.map(&%T.Tuple{elements: &1})
end
def expand_unions(%T.Record{fields: key_values}) do
{keys, values} =
Enum.reduce(key_values, {[], []}, fn {k, v}, {ks, vs} ->
{[k | ks], [v | vs]}
end)
keys = Enum.reverse(keys)
values = Enum.reverse(values)
values
|> do_expand_all()
|> Enum.map(fn values ->
fields = Enum.zip(keys, values)
%T.Record{fields: fields}
end)
end
def expand_unions(x) do
[x]
end
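# Example: a tuple with a union element expands into one tuple per union
# member (step 1 of the algorithm described in the moduledoc):
#
#   expand_unions(%T.Tuple{elements: [:Int, %T.Union{types: [:a, :b]}]})
#   #=> [%T.Tuple{elements: [:Int, :a]}, %T.Tuple{elements: [:Int, :b]}]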
def do_expand_all([]) do
[[]]
end
def do_expand_all([type | rest]) do
branches = expand_unions(type)
next_branches = do_expand_all(rest)
Enum.flat_map(branches, fn branch ->
Enum.map(next_branches, fn next_branch ->
[branch | next_branch]
end)
end)
end
defp find_unmatched(env, lhs_ast, all_rhs_types) do
find_unmatched(env, lhs_ast, all_rhs_types, [], false)
end
defp find_unmatched(_, _, [], _, false) do
:error
end
defp find_unmatched(env, _, [], acc, true) do
{:ok, env, Enum.reverse(acc)}
end
defp find_unmatched(env, lhs_ast, [type | rest], acc, matched?) do
case do_match_case(env, lhs_ast, type) do
{:ok, env} ->
find_unmatched(env, lhs_ast, rest, acc, true)
{:keep, env} ->
find_unmatched(env, lhs_ast, rest, [type | acc], true)
:error ->
find_unmatched(env, lhs_ast, rest, [type | acc], matched?)
end
end
defp do_match_case_all(env, [], [], status) do
{status, env}
end
defp do_match_case_all(env, [lhs_exp | lhs_exps], [type | rhs_types], status) do
case do_match_case(env, lhs_exp, type) do
:error ->
:error
{new_status, env} ->
status =
cond do
new_status == :keep or status == :keep -> :keep
true -> :ok
end
do_match_case_all(env, lhs_exps, rhs_types, status)
end
end
defp do_match_case(env, {:identifier, _, name}, rhs) do
env =
update_in(env, [:scope, name], fn
nil -> rhs
%T.Union{types: types} -> T.Union.new([rhs | T.Union.to_list(types)])
type -> T.Union.new([rhs, type])
end)
{:ok, env}
end
defp do_match_case(env, {:atom, _, lhs_atom}, rhs_atom) when lhs_atom == rhs_atom do
{:ok, env}
end
defp do_match_case(env, {:integer, _, _}, :Int) do
{:keep, env}
end
defp do_match_case(env, {:string, _, _}, :String) do
{:keep, env}
end
defp do_match_case(env, {:tuple, _, lhs_exps}, %T.Tuple{elements: rhs_types})
when length(lhs_exps) == length(rhs_types) do
do_match_case_all(env, lhs_exps, rhs_types, nil)
end
defp do_match_case(env, {:record, _, _, lhs_k_v}, %T.Record{fields: rhs_k_v}) do
rhs = Map.new(rhs_k_v)
# TODO: Use key instead of identifier after fixing GH #65
Enum.reduce_while(lhs_k_v, {nil, env}, fn {{:identifier, _, lhs_k}, lhs_v}, {status, env} ->
rhs_v = Map.get(rhs, lhs_k)
if rhs_v do
case do_match_case(env, lhs_v, rhs_v) do
:error ->
{:halt, :error}
{new_status, env} ->
status =
cond do
new_status == :keep or status == :keep -> :keep
true -> :ok
end
{:cont, {status, env}}
end
else
{:halt, :error}
end
end)
end
defp do_match_case(_, _, _) do
:error
end
end
# Source file: lib/fika/compiler/type_checker/match.ex
defmodule UeberauthToken.Strategy do
@moduledoc """
A workflow for validation of oauth2 tokens on the resource server.
The strategy `handle_callback/1` function is invoked for
token validation in either of the following cases:
1. As a plug in a plug pipeline which assigns an ueberauth struct to `%Conn{}`
pipeline :api do
plug :accepts, ["json"]
plug UeberauthToken.Plug, provider: UeberauthToken.TestProvider
end
As a plug, the callback phase of ueberauth is adapted to validate the oauth2 access token.
The ueberauth struct is returned in the assigns fields of the struct in one of the two
following ways:
# Failed validation
Plug.Conn{assigns: %{ueberauth_failure: %Ueberauth.Failure{}}}
# Successful validation
Plug.Conn{assigns: %{ueberauth_auth: %Ueberauth.Auth{}}}
2. As a `token_auth/3` function call which returns an ueberauth struct.
token_auth(token, provider, [])
The `token_auth/3` validation function returns one of the following forms:
# Failed validation
%Ueberauth.Failure{}
# Successful validation
%Ueberauth.Auth{}
See full description of the config options in `UeberauthToken.Config` @moduledoc.
## Defining a provider module
A provider module must be specified in order for UeberauthToken to know what
authorization server provider to validate against. The provider must
implement the callbacks specified in the module `UeberauthToken.Strategy`.
The following functions should be implemented by the provider module:
@callback get_payload(token :: String.t(), opts :: list()) :: {:ok, map()} | {:error, map()}
@callback valid_token?(token :: String.t(), opts :: list) :: boolean()
@callback get_uid(conn :: Conn.t()) :: any()
@callback get_credentials(conn :: Conn.t()) :: Credentials.t()
@callback get_info(conn :: Conn.t()) :: Info.t()
@callback get_extra(conn :: Conn.t()) :: Extra.t()
@callback get_ttl(conn :: Conn.t()) :: integer()
For a basic example of token validation in a plug pipeline, see `UeberauthToken.Plug`
For a basic example of token validation as a function, see `UeberauthToken.token_auth/3`
"""
alias Ueberauth.Strategy.Helpers
alias Ueberauth.Auth
alias Ueberauth.Auth.{Credentials, Extra, Info}
alias UeberauthToken.Config
alias Plug.Conn.TokenParsingError
alias Plug.Conn
@behaviour Ueberauth.Strategy
@ttl_offset 1_000
@doc false
def handle_request!(%Conn{} = conn), do: conn
@doc """
Handles the callback as follows:
1. Extracts token from "Bearer token" if it is in that format
2. Tries to get the token data from the cache if
- The cache is turned on
- The token is present in the cache already
- If this stage successfully retrieves the token, then subsequent steps will be skipped.
3. By way of a callback function, it seeks data for populating the
ueberauth struct using the token. The callback function must be provided
through an provider in the config or can be provided manually in the
`conn.assigns` field.
4. The provider will cache the data if the `use_cache` configuration
option is set to true.
"""
@spec handle_callback!(Conn.t()) :: Conn.t()
def handle_callback!(
%Conn{
private: %{
ueberauth_token: %{
provider: provider,
token: %{"authorization" => raw_token}
}
}
} = conn
) do
case raw_token do
nil ->
error = Helpers.error("token", "#{validation_error_msg(provider)}. Bearer token empty")
rework_error_struct(Helpers.set_errors!(conn, [error]), provider)
raw_token ->
do_handle_callback(conn, raw_token)
end
end
def handle_callback!(
%Conn{
req_headers: req_headers,
private: %{
ueberauth_token: %{
provider: provider
}
}
} = conn
) do
req_headers = Enum.into(req_headers, %{})
case Map.has_key?(req_headers, "authorization") do
true ->
do_handle_callback(conn, req_headers["authorization"])
false ->
error =
Helpers.error(
"token",
"#{validation_error_msg(provider)}. The authorization request header is missing"
)
rework_error_struct(Helpers.set_errors!(conn, [error]), provider)
end
end
defp do_handle_callback(conn, bearer_token) when is_binary(bearer_token) do
access_token = extract_token(bearer_token)
conn =
with %Conn{
private: %{
ueberauth_token: %{
payload: _payload
}
}
} = conn <- try_use_potentially_cached_data(conn, access_token) do
conn
else
%Conn{} = conn ->
get_payload_and_return_conn(conn, access_token)
{:error, error} ->
error = Helpers.error(error.key, error.message)
rework_error_struct(Helpers.set_errors!(conn, [error]), provider(conn))
end
conn
end
@doc """
Clean up private fields after construction of the Ueberauth struct
"""
def handle_cleanup!(%Conn{private: %{ueberauth_token: _}} = conn) do
%{conn | private: Map.delete(conn.private, :ueberauth_token)}
|> handle_cleanup!()
end
def handle_cleanup!(%Conn{} = conn) do
conn
end
@doc false
def uid(%Conn{} = conn), do: provider(conn).get_uid(conn)
@doc false
def credentials(%Conn{} = conn), do: provider(conn).get_credentials(conn)
@doc false
def info(%Conn{} = conn), do: provider(conn).get_info(conn)
@doc false
def extra(%Conn{} = conn), do: provider(conn).get_extra(conn)
@doc false
def auth(%Conn{} = conn) do
Kernel.struct(
Auth,
provider: provider(conn),
strategy: __MODULE__,
uid: uid(conn),
info: info(conn),
extra: extra(conn),
credentials: credentials(conn)
)
end
@doc false
def valid_token?(token, provider, opts \\ []) when is_binary(token) and is_atom(provider) do
provider.valid_token?(token, opts)
end
# private
def extract_token(access_token) when is_binary(access_token) do
try do
["", test] = String.split(access_token, "Bearer ")
test
rescue
exception ->
reraise(
%TokenParsingError{
access_token: access_token,
original_exception: exception
},
__STACKTRACE__
)
end
end
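# Example: extract_token("Bearer abc123") returns "abc123"; any other shape
# raises Plug.Conn.TokenParsingError wrapping the original exception.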
defp get_payload_and_return_conn(%Conn{assigns: %{ueberauth_failure: _}} = conn, _) do
conn
end
defp get_payload_and_return_conn(
%Conn{
private: %{
ueberauth_token: ueberauth_token
}
} = conn,
access_token
) do
case provider(conn).get_payload(access_token) do
{:ok, payload} ->
maybe_put_cached_data(conn, access_token, payload)
Conn.put_private(conn, :ueberauth_token, Map.put(ueberauth_token, :payload, payload))
{:error, error} ->
error = Helpers.error(error.key, error.message)
rework_error_struct(Helpers.set_errors!(conn, [error]), provider(conn))
end
end
defp maybe_put_cached_data(conn, access_token, payload) do
with true <- Config.use_cache?(provider(conn)),
{:ok, nil} <- Cachex.get(Config.cache_name(provider(conn)), access_token) do
Cachex.put(
Config.cache_name(provider(conn)),
access_token,
payload,
ttl: provider(conn).get_ttl(payload) - @ttl_offset
)
else
# Not using cache at all.
false ->
:ok
# Token already cached, do not interfere with existing ttl
{:ok, _payload} ->
:ok
end
end
defp try_use_potentially_cached_data(
%Conn{
private: %{
ueberauth_token: ueberauth_token
}
} = conn,
access_token
) do
with true <- Config.use_cache?(provider(conn)),
{:ok, nil} <- Cachex.get(Config.cache_name(provider(conn)), access_token) do
conn
else
{:ok, payload} ->
Conn.put_private(conn, :ueberauth_token, Map.put(ueberauth_token, :payload, payload))
false ->
conn
_ ->
conn
end
end
defp provider(%Conn{private: %{ueberauth_token: %{provider: provider}}}) do
provider
end
defp provider(%Conn{}) do
raise("No provider found, a provider module must be specified")
end
defp put_strategy(%Conn{assigns: %{ueberauth_failure: failure}} = conn) do
Conn.assign(conn, :ueberauth_failure, Map.put(failure, :strategy, __MODULE__))
end
defp put_provider(%Conn{assigns: %{ueberauth_failure: failure}} = conn, provider) do
Conn.assign(conn, :ueberauth_failure, Map.put(failure, :provider, provider))
end
def rework_error_struct(%Conn{} = conn, provider) do
conn
|> put_strategy()
|> put_provider(provider)
end
defp validation_error_msg(provider) when is_atom(provider) do
provider = String.replace(Macro.underscore(provider), "/", "_")
"""
Token validation failed for a token against the #{provider} provider
"""
end
@doc """
To get the payload.
Callback function to be implemented by the provider
The payload in turn is put into a private field `:ueberauth_token`.
The payload is the map from which other callback functions will
need to build the `:ueberauth` structs.
"""
@callback get_payload(token :: String.t(), opts :: list()) ::
{:ok, map()} | {:error, %{key: String.t(), message: String.t()}}
@doc """
Verifies a token.
Callback function to be implemented by the provider.
"""
@callback valid_token?(token :: String.t(), opts :: list) :: boolean()
@doc """
To populate the ueberauth uid struct from the payload in
`:ueberauth_token` private conn field.
Callback function to be implemented by the provider
"""
@callback get_uid(conn :: %Conn{private: %{ueberauth_token: %{payload: map()}}}) :: any()
@doc """
To populate the ueberauth credentials struct from the payload in
`:ueberauth_token` private conn field.
Callback function to be implemented by the provider
"""
@callback get_credentials(conn :: %Conn{private: %{ueberauth_token: %{payload: map()}}}) ::
Credentials.t()
@doc """
To populate the ueberauth info struct from the payload in
`:ueberauth_token` private conn field.
Callback function to be implemented by the provider
"""
@callback get_info(conn :: %Conn{private: %{ueberauth_token: %{payload: map()}}}) :: Info.t()
@doc """
To populate the ueberauth extra struct from the payload in
`:ueberauth_token` private conn field.
Callback function to be implemented by the provider
"""
@callback get_extra(conn :: %Conn{private: %{ueberauth_token: %{payload: map()}}}) :: Extra.t()
@doc """
To get the ttl from the ueberauth struct. The ttl
must be returned n milliseconds.
Callback function to be implemented by the provider
"""
@callback get_ttl(payload :: map()) :: integer()
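# A minimal provider sketch (the module name and payload fields are
# hypothetical; a real provider would call out to its authorization server):
#
#   defmodule MyApp.TokenProvider do
#     def get_payload(token, _opts \\ []), do: {:ok, %{"uid" => token, "expires_in" => 3600}}
#     def valid_token?(_token, _opts \\ []), do: true
#     def get_uid(conn), do: conn.private.ueberauth_token.payload["uid"]
#     def get_ttl(payload), do: payload["expires_in"] * 1_000
#     # ...plus get_credentials/1, get_info/1 and get_extra/1
#   end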
end
# Source file: lib/ueberauth_token/strategy.ex
defmodule PinElixir.Customer do
import PinElixir.Utils.RequestOptions
import PinElixir.Utils.Response
@pin_url Application.get_env :pin_elixir, :pin_url
@moduledoc """
Module handling customer operations
"""
@doc """
Given an email and card_map, creates a customer. The map may contain a card or a card token.
```
Customer.create("<EMAIL>", %{card_token: "abc_a123" })
```
OR
```
card_map = %{
number: 4200000000000000,
expiry_month: "10",
expiry_year: 2016,
cvc: 456,
name: "<NAME>",
address_line1: "The Game Keepers Cottage",
address_city: "Hogwarts",
address_postcode: "H0G",
address_state: "WA",
address_country: "Straya"
}
Customer.create("<EMAIL>", %{card: card_map})
```
returns a tuple
```
{:ok,
%{customer: %{card: %{address_city: "Hogwarts", address_country: "Straya",
address_line1: "The Game Keepers Cottage", address_line2: nil,
address_postcode: "H0G", address_state: "WA",
customer_token: "<KEY>",
display_number: "XXXX-XXXX-XXXX-0000", expiry_month: 10,
expiry_year: 2016, name: "<NAME>", primary: true, scheme: "visa",
token: "<KEY>"},
created_at: "2015-11-15T08:40:50Z", email: "<EMAIL>",
token: "<KEY>"}}}
```
OR
```
{:error, error_map}
```
"""
def create(email, %{card: card}) do
Poison.encode!(%{email: email, card: card})
|> post_to_api
|> handle_create_customer_response
end
def create(email, %{card_token: card_token}) do
Poison.encode!(%{email: email, card_token: card_token})
|> post_to_api
|> handle_create_customer_response
end
defp handle_create_customer_response(%{status_code: 201, body: body}) do
decoded = decode(body)
{:ok, %{customer: decoded.response}}
end
defp handle_create_customer_response(%{status_code: 422, body: body}) do
body |> to_error_tuple
end
@doc """
Given a customer token, deletes the customer
return a tuple
```
{:ok}
```
OR
```
{:error, error_map}
```
"""
def delete(token) do
HTTPotion.delete(customer_url <> "/#{token}", with_auth)
|> handle_delete
end
defp handle_delete(%{status_code: 200}), do: :ok
defp handle_delete(%{status_code: 422, body: body}) do
body |> to_error_tuple
end
@doc """
Retrieves all customers
Returns a tuple
```
{:ok,
%{customers: [%{card: %{address_city: "Hogwarts", address_country: "Straya",
address_line1: "The Game Keepers Cottage", address_line2: nil,
address_postcode: "H0G", address_state: "WA",
customer_token: "<KEY>",
display_number: "XXXX-XXXX-XXXX-0000", expiry_month: 10,
expiry_year: 2016, name: "<NAME>", primary: true, scheme: "visa",
token: "<KEY>"},
created_at: "2015-11-15T08:40:50Z", email: "<EMAIL>",
token: "<KEY>"}
],
pagination: %{count: 3, current: 1, next: nil, pages: 1, per_page: 25,
previous: nil}}}
```
OR
{:error, error_map}
"""
def get do
HTTPotion.get(customer_url, with_auth)
|> handle_get_all
end
@doc """
Given a customer token, retrieves customer details
Returns a tuple
```
{:ok,
%{card: %{address_city: "Hogwarts", address_country: "Straya",
address_line1: "The Game Keepers Cottage", address_line2: nil,
address_postcode: "H0G", address_state: "WA",
customer_token: "<KEY>",
display_number: "XXXX-XXXX-XXXX-0000", expiry_month: 10, expiry_year: 2016,
name: "<NAME>", primary: true, scheme: "visa",
token: "<KEY>"}, created_at: "2015-11-15T08:40:50Z",
email: "<EMAIL>", token: "<KEY>"}}
```
OR
{:error, error_map}
"""
def get(id) do
HTTPotion.get(customer_url <> "/#{id}", with_auth)
|> handle_get
end
defp handle_get(%{status_code: 200, body: body}) do
decoded = decode(body)
{:ok, decoded.response}
end
defp handle_get(%{status_code: _status_code, body: body}) do
body |> to_error_tuple
end
defp handle_get_all(%{status_code: 200, body: body}) do
decoded = decode(body)
mapped = %{pagination: decoded.pagination, customers: decoded.response}
{:ok, mapped}
end
defp handle_get_all(%{status_code: _status_code, body: body}) do
body |> to_error_tuple
end
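# The Pin host is read from application config at compile time, e.g. in
# config.exs (hypothetical host):
#
#   config :pin_elixir, pin_url: "test-api.pin.net.au/1"
#
# which makes customer_url/0 return "https://test-api.pin.net.au/1/customers".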
defp customer_url do
"https://#{@pin_url}/customers"
end
defp post_to_api(json) do
HTTPotion.post(customer_url, with_auth([headers: ["Content-Type": "application/json"], body: json]))
end
end
# Source file: lib/customers/customer.ex
defmodule Screens.Util do
@moduledoc false
def format_time(t) do
t |> DateTime.truncate(:second) |> DateTime.to_iso8601()
end
@spec time_period(DateTime.t()) :: :peak | :off_peak
def time_period(utc_time) do
{:ok, dt} = DateTime.shift_zone(utc_time, "America/New_York")
day_of_week = dt |> DateTime.to_date() |> Date.day_of_week()
weekday? = day_of_week in 1..5
t = {dt.hour, dt.minute}
am_rush? = t >= {7, 0} and t < {9, 0}
pm_rush? = t >= {16, 0} and t <= {18, 30}
rush_hour? = am_rush? or pm_rush?
if(weekday? and rush_hour?, do: :peak, else: :off_peak)
end
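# Example (sketch): 12:00 UTC on Tuesday 2021-06-15 is 08:00 in New York
# (EDT, UTC-4), inside the 07:00-09:00 weekday AM rush, so that instant is
# classified as :peak.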
@doc """
Similar to Enum.group_by, except it returns a list of {key, value} tuples instead of a map to maintain order.
Order of the groups is determined by the position of the first occurrence of a member of that group.
iex> Screens.Util.group_by_with_order(0..10, &rem(&1, 3))
[
{0, [0, 3, 6, 9]},
{1, [1, 4, 7, 10]},
{2, [2, 5, 8]}
]
iex> Screens.Util.group_by_with_order(
[%{group_id: 2, val: :a}, %{group_id: 1, val: :b}, %{group_id: 2, val: :c}, %{group_id: 1, val: :d}],
& &1.group_id
)
[
{2, [%{group_id: 2, val: :a}, %{group_id: 2, val: :c}]},
{1, [%{group_id: 1, val: :b}, %{group_id: 1, val: :d}]},
]
"""
@spec group_by_with_order(Enumerable.t(), (any() -> any())) :: [{any(), [any()]}]
def group_by_with_order(enumerable, key_fun) do
enumerable
|> Enum.reduce([], fn entry, acc ->
key = key_fun.(entry)
group =
acc
|> List.keyfind(key, 0, {nil, []})
|> elem(1)
List.keystore(acc, key, 0, {key, [entry | group]})
end)
|> Enum.map(fn {key, group} -> {key, Enum.reverse(group)} end)
end
@doc """
Gets the keys of a struct given the module where the struct is defined.
Converts the keys to strings by default.
"""
@spec struct_keys(module(), keyword()) :: list(atom()) | list(String.t())
def struct_keys(mod, opts \\ []) do
keys =
mod
|> Map.from_struct()
|> Map.keys()
if Keyword.get(opts, :as_strings, true) do
Enum.map(keys, &Atom.to_string/1)
else
keys
end
end
@doc """
Similar to Enum.unzip, except it expects an enumerable of 3-element instead of 2-element tuples.
"""
@spec unzip3(Enum.t()) :: {[Enum.element()], [Enum.element()], [Enum.element()]}
def unzip3(enumerable) do
{list1, list2, list3} =
Enum.reduce(enumerable, {[], [], []}, fn {el1, el2, el3}, {list1, list2, list3} ->
{[el1 | list1], [el2 | list2], [el3 | list3]}
end)
{:lists.reverse(list1), :lists.reverse(list2), :lists.reverse(list3)}
end
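# Example: unzip3([{1, :a, "x"}, {2, :b, "y"}]) #=> {[1, 2], [:a, :b], ["x", "y"]}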
@doc """
Returns a list of elements in an enumerable that occur before the given target value,
or an empty list if the target is not present in the enumerable.
"""
@spec slice_before(Enum.t(), any()) :: list()
def slice_before(enumerable, target) do
case Enum.find_index(enumerable, &(&1 == target)) do
nil -> []
i -> Enum.take(enumerable, i)
end
end
@doc """
Returns a list of elements in an enumerable that occur after the given target value,
or an empty list if the target is not present in the enumerable.
"""
@spec slice_after(Enum.t(), any()) :: list()
def slice_after(list, target) do
case Enum.find_index(list, &(&1 == target)) do
nil -> []
i -> Enum.drop(list, i + 1)
end
end
end
# Source file: lib/screens/util.ex
defmodule Hippocrene.Article do
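@moduledoc """
A small DSL for building article documents as trees of tagged tuples
(for example `{:par, [...]}`), plus helpers for title, date and author metadata.
"""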
defstruct title: "", date: {1970, 1, 1}, author: "", body: []
def title(title), do: {:title, title}
def date(year, month, day), do: {:date, {year, month, day}}
def date(date) when is_tuple(date), do: {:date, date}
def author(author), do: {:author, author}
defmacro begin(do: {:__block__, _, blocks}), do: blocks
defmacro begin(do: blocks), do: blocks
defmacro begin(blocks), do: blocks
@tags [:body, :par, :cite, :code, :item, :table]
Enum.each @tags, fn (tag) ->
defmacro unquote(tag)(do: {:__block__, _, blocks}) do
tag = unquote(tag)
quote do: {unquote(tag), unquote(blocks)}
end
defmacro unquote(tag)(do: line) do
tag = unquote(tag)
quote do: {unquote(tag), [unquote line]}
end
defmacro unquote(tag)(line) do
tag = unquote(tag)
quote do: {unquote(tag), [unquote line]}
end
end
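# Each generated macro wraps its contents in a tagged tuple, e.g. (illustrative):
#
#   par do
#     "first line"
#     "second line"
#   end
#   #=> {:par, ["first line", "second line"]}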
@tags_with_one_arg [:section, :code]
Enum.each @tags_with_one_arg, fn (tag) ->
defmacro unquote(tag)(arg, do: {:__block__, _, blocks}) do
tag = unquote(tag)
quote do: {unquote(tag), unquote(arg), unquote(blocks)}
end
defmacro unquote(tag)(arg, do: line) do
tag = unquote(tag)
quote do: {unquote(tag), unquote(arg), [unquote line]}
end
defmacro unquote(tag)(arg, line) do
tag = unquote(tag)
quote do: {unquote(tag), unquote(arg), [unquote line]}
end
end
# define list(:bullet) and list(:numbered)
@list_styles [:bullet, :numbered, "bullet", "numbered"]
Enum.each @list_styles, fn (style) ->
style_atom = if is_atom(style) do
style
else
String.to_atom style
end
defmacro list(unquote(style), do: {:__block__, _, blocks}) do
style_atom = unquote(style_atom)
quote do: {unquote(style_atom), unquote(blocks)}
end
defmacro list(unquote(style), do: line) do
style_atom = unquote(style_atom)
quote do: {unquote(style_atom), [unquote line]}
end
end
def header(headers), do: {:header, headers}
def th(headers), do: {:header, headers}
def row(data), do: {:row, data}
def td(data), do: {:row, data}
end
# Source file: lib/hippocrene/article.ex
defmodule Shared.Month do
defmodule InvalidMonthIndex do
defexception [:message]
end
if Code.ensure_loaded?(Jason.Encoder) do
@derive Jason.Encoder
end
@enforce_keys [:year, :month]
defstruct [:year, :month]
@type t :: %__MODULE__{
year: integer,
month: integer
}
@doc ~S"""
## Examples
iex> Month.new(2019, 7)
{:ok, %Month{year: 2019, month: 7}}
iex> Month.new!(2019, 7)
%Month{year: 2019, month: 7}
iex> Month.new(2021, 13)
{:error, :invalid_month_index}
iex> Month.new(2023, 0)
{:error, :invalid_month_index}
iex> Month.new(2019, -5)
{:error, :invalid_month_index}
"""
def new(year, month)
def new(year, month) when is_integer(year) and month in 1..12,
do: {:ok, %__MODULE__{year: year, month: month}}
def new(_, _), do: {:error, :invalid_month_index}
@doc ~S"""
## Examples
iex> Month.new!(2019, 7)
%Month{year: 2019, month: 7}
iex> Month.new!(2019, -7)
** (Shared.Month.InvalidMonthIndex) Month must be an integer between 1 and 12, but was -7
"""
def new!(year, month) do
case new(year, month) do
{:ok, month} ->
month
{:error, :invalid_month_index} ->
raise InvalidMonthIndex,
"Month must be an integer between 1 and 12, but was " <> inspect(month)
end
end
@doc ~S"""
## Examples:
iex> Month.from_day(%Date{year: 2018, month: 5, day: 17})
{:ok, ~m[2018-05]}
iex> Month.from_day(%Date{year: 2018, month: 13, day: 17})
{:error, :invalid_month_index}
iex> Month.from_day(%Date{year: 2018, month: 0, day: 17})
{:error, :invalid_month_index}
iex> Month.from_day(%Date{year: 2018, month: -1, day: 17})
{:error, :invalid_month_index}
"""
def from_day(%Date{year: year, month: month}) do
new(year, month)
end
@doc ~S"""
## Examples
iex> Month.from_day!(%Date{year: 2018, month: 5, day: 17})
%Month{year: 2018, month: 5}
iex> Month.from_day!(%Date{year: 2018, month: 13, day: 17})
** (Shared.Month.InvalidMonthIndex) Month must be an integer between 1 and 12, but was 13
"""
def from_day!(%Date{year: year, month: month}) do
new!(year, month)
end
@doc ~S"""
## Examples:
iex> Month.parse("2019-10")
{:ok, %Month{year: 2019, month: 10}}
iex> Month.parse("2019-1")
{:ok, %Month{year: 2019, month: 1}}
iex> Month.parse("2019-00")
{:error, :invalid_month_index}
iex> Month.parse("2019-13")
{:error, :invalid_month_index}
iex> Month.parse("foo")
{:error, :invalid_month_format}
"""
def parse(<<year::bytes-size(4)>> <> "-" <> <<month::bytes-size(2)>>) do
new(String.to_integer(year), String.to_integer(month))
end
def parse(<<year::bytes-size(4)>> <> "-" <> <<month::bytes-size(1)>>) do
new(String.to_integer(year), String.to_integer(month))
end
def parse(_str), do: {:error, :invalid_month_format}
@doc ~S"""
## Examples
iex> Month.name(@fifth_month_of_2020)
"Mai"
"""
def name(%__MODULE__{month: month}), do: Timex.month_name(month)
@doc ~S"""
## Examples
iex> Month.first_day(@third_month_of_2018)
%Date{year: 2018, month: 3, day: 1}
"""
def first_day(%__MODULE__{} = month) do
{first_day, _} = to_dates(month)
first_day
end
@doc ~S"""
## Examples
iex> Month.last_day(@third_month_of_2018)
%Date{year: 2018, month: 3, day: 31}
"""
def last_day(%__MODULE__{} = month) do
{_, last} = to_dates(month)
last
end
@doc ~S"""
## Examples
iex> Month.to_range(@third_month_of_2018)
#DateRange<~D[2018-03-01], ~D[2018-03-31]>
"""
def to_range(%__MODULE__{} = month) do
{first_day, last_day} = to_dates(month)
Date.range(first_day, last_day)
end
@doc ~S"""
## Examples
iex> Month.to_dates(@third_month_of_2018)
{~D[2018-03-01], ~D[2018-03-31]}
"""
def to_dates(%__MODULE__{year: year, month: month}) do
{:ok, first_day} = Date.new(year, month, 1)
last_day = Timex.end_of_month(year, month)
{first_day, last_day}
end
@doc ~S"""
## Examples
iex> Month.add(@third_month_of_2018, 9)
%Month{year: 2018, month: 12}
iex> Month.add(@third_month_of_2018, 10)
%Month{year: 2019, month: 1}
iex> Month.add(@third_month_of_2018, 22)
%Month{year: 2020, month: 1}
iex> Month.add(@third_month_of_2018, -2)
%Month{year: 2018, month: 1}
iex> Month.add(@third_month_of_2018, -3)
%Month{year: 2017, month: 12}
iex> Month.add(@third_month_of_2018, -15)
%Month{year: 2016, month: 12}
iex> Month.add(@third_month_of_2018, 0)
%Month{year: 2018, month: 3}
"""
def add(%__MODULE__{year: year, month: month}, months_to_add) when is_integer(months_to_add) do
zero_based_month_index = month - 1
amount_of_months_since_anno_domini = year * 12 + zero_based_month_index + months_to_add
{amount_of_years, amount_of_months} = divmod(amount_of_months_since_anno_domini, 12)
%__MODULE__{year: amount_of_years, month: amount_of_months + 1}
end
defp divmod(dividend, divisor) do
{div(dividend, divisor), mod(dividend, divisor)}
end
defp mod(x, y) when x > 0, do: rem(x, y)
defp mod(x, y) when x < 0, do: rem(x, y) + y
defp mod(0, _y), do: 0
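# mod/2 is a floored modulo so negative month offsets wrap into the previous
# year correctly: mod(-3, 12) #=> 9, whereas rem(-3, 12) would give -3.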
@doc ~S"""
## Examples:
iex> @third_month_of_2018 |> Month.earlier_than?(@third_month_of_2019)
true
iex> @third_month_of_2018 |> Month.earlier_than?(@third_month_of_2017)
false
iex> @third_month_of_2018 |> Month.earlier_than?(@fourth_month_of_2018)
true
iex> @third_month_of_2018 |> Month.earlier_than?(@second_month_of_2019)
true
iex> @third_month_of_2018 |> Month.earlier_than?(@third_month_of_2018)
false
iex> @third_month_of_2018 |> Month.earlier_than?(@second_month_of_2018)
false
"""
def earlier_than?(%__MODULE__{year: year, month: month}, %__MODULE__{
year: other_year,
month: other_month
}) do
year < other_year || (year == other_year && month < other_month)
end
@doc ~S"""
## Examples:
iex> @third_month_of_2018 |> Month.equal_or_earlier_than?(@third_month_of_2019)
true
iex> @third_month_of_2018 |> Month.equal_or_earlier_than?(@third_month_of_2017)
false
iex> @third_month_of_2018 |> Month.equal_or_earlier_than?(@fourth_month_of_2018)
true
iex> @third_month_of_2018 |> Month.equal_or_earlier_than?(@second_month_of_2019)
true
iex> @third_month_of_2018 |> Month.equal_or_earlier_than?(@third_month_of_2018)
true
iex> @third_month_of_2018 |> Month.equal_or_earlier_than?(@second_month_of_2018)
false
"""
def equal_or_earlier_than?(%__MODULE__{} = month, %__MODULE__{} = other_month) do
month == other_month || earlier_than?(month, other_month)
end
@doc ~S"""
## Examples:
iex> @third_month_of_2018 |> Month.compare(@third_month_of_2018)
:eq
iex> @second_month_of_2018 |> Month.compare(@third_month_of_2018)
:lt
iex> @fifth_month_of_2020 |> Month.compare(@third_month_of_2018)
:gt
"""
def compare(%__MODULE__{year: year, month: month}, %__MODULE__{
year: year,
month: month
}),
do: :eq
def compare(%__MODULE__{} = first, %__MODULE__{} = second) do
if first |> earlier_than?(second) do
:lt
else
:gt
end
end
def compare(%Date{} = first, %__MODULE__{} = second) do
first |> from_day!() |> compare(second)
end
def compare(%__MODULE__{} = first, %Date{} = second) do
compare(first, from_day!(second))
end
@doc ~S"""
## Examples
iex> ~m[2018-05]
%Month{year: 2018, month: 5}
"""
def sigil_m(string, []) do
with {:ok, month} <- parse(string) do
month
else
_ -> raise "Invalid month"
end
end
defimpl String.Chars, for: Shared.Month do
alias Shared.Month
def to_string(%Month{year: year, month: month}) do
"#{year}-#{format_month(month)}"
end
defp format_month(month) do
month
|> Integer.to_string()
|> String.pad_leading(2, "0")
end
end
defimpl Inspect, for: Shared.Month do
alias Shared.Month
def inspect(%Month{year: year, month: month} = month_struct, _)
when is_integer(year) and is_integer(month) do
"~m[" <> to_string(month_struct) <> "]"
end
def inspect(%Month{year: year, month: month}, _) do
"#Month" <>
"<year: " <>
Inspect.inspect(year, %Inspect.Opts{}) <>
", month: " <> Inspect.inspect(month, %Inspect.Opts{}) <> ">"
end
end
defimpl Shared.Zeitvergleich, for: Shared.Month do
alias Shared.Month
def frueher_als?(%Month{} = self, %Month{} = other) do
Month.compare(self, other) == :lt
end
def zeitgleich?(%Month{} = self, %Month{} = other) do
Month.compare(self, other) == :eq
end
def frueher_als_oder_zeitgleich?(%Month{} = self, %Month{} = other) do
self |> frueher_als?(other) || self |> zeitgleich?(other)
end
end
end | lib/month.ex | 0.802942 | 0.65747 | month.ex | starcoder |
defmodule Shipping.HandlingEvents do
@moduledoc """
The Handling Events Aggregate. Its root is the module `Shipping.HandlingEvents.HandlingEvent`.

From the DDD book: "[An AGGREGATE is] a cluster of associated objects that
are treated as a unit for the purpose of data changes. External references are
restricted to one member of the AGGREGATE, designated as the root."
"""
import Ecto.Query, warn: false
alias Shipping.Repo
alias Shipping.HandlingEvents.HandlingEvent
@doc """
Returns the list of all handling_events.
## Examples
iex> list_handling_events()
[%HandlingEvent{}, ...]
"""
def list_handling_events do
Repo.all(HandlingEvent)
end
@doc """
Gets all handling events for a tracking id and returns them in
completion_time order with the newest first.
Raises `Ecto.NoResultsError` if the Handling event does not exist.
## Examples
iex> get_all_with_tracking_id!(123)
[%HandlingEvent{}]
iex> get_all_with_tracking_id!(456)
** (Ecto.NoResultsError)
"""
def get_all_with_tracking_id!(tracking_id) do
Repo.get_by_tracking_id!(HandlingEvent, tracking_id)
|> Enum.sort(&(&1.completion_time >= &2.completion_time))
end
@doc """
Gets a single handling_event.
Raises `Ecto.NoResultsError` if the Handling event does not exist.
## Examples
iex> get_handling_event!(123)
%HandlingEvent{}
iex> get_handling_event!(456)
** (Ecto.NoResultsError)
"""
def get_handling_event!(id), do: Repo.get!(HandlingEvent, id)
@doc """
Creates a handling_event.
## Examples
iex> create_handling_event(%{field: value})
{:ok, %HandlingEvent{}}
iex> create_handling_event(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_handling_event(attrs \\ %{}) do
HandlingEvent.new()
|> HandlingEvent.changeset(attrs)
|> Repo.insert()
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking handling_event changes.
## Examples
iex> change_handling_event(handling_event)
%Ecto.Changeset{source: %HandlingEvent{}}
"""
def change_handling_event(%HandlingEvent{} = handling_event) do
HandlingEvent.changeset(handling_event, %{})
end
end | apps/shipping/lib/shipping/handling_events/handling_events.ex | 0.845608 | 0.51946 | handling_events.ex | starcoder |
defmodule Ecto.Adapters.Postgres do
@moduledoc """
Adapter module for PostgreSQL.
It handles and pools the connections to the postgres
database using `postgrex` with `poolboy`.
## Options
Postgrex options split in different categories described
below. All options should be given via the repository
configuration.
### Connection options
* `:hostname` - Server hostname
* `:port` - Server port (default: 5432)
* `:username` - Username
* `:password` - Password
* `:parameters` - Keyword list of connection parameters
* `:ssl` - Set to true if ssl should be used (default: false)
* `:ssl_opts` - A list of ssl options, see Erlang's `ssl` docs
### Pool options
* `:size` - The number of connections to keep in the pool
* `:max_overflow` - The maximum overflow of connections (see poolboy docs)
* `:lazy` - If false all connections will be started immediately on Repo startup (default: true)
### Storage options
* `:template` - the template to create the database from (default: "template0")
* `:encoding` - the database encoding (default: "UTF8")
* `:lc_collate` - the collation order (default: "en_US.UTF-8")
* `:lc_ctype` - the character classification (default: "en_US.UTF-8")
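### Example configuration

A hedged sketch of a repository configuration using the options above; the
application name, repo module and credential values are placeholders:

```elixir
config :my_app, MyApp.Repo,
  adapter: Ecto.Adapters.Postgres,
  hostname: "localhost",
  port: 5432,
  username: "postgres",
  password: "postgres",
  database: "my_app_dev",
  size: 10
```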
"""
use Ecto.Adapters.SQL, :postgrex
@behaviour Ecto.Adapter.Storage
## Storage API
@doc false
def storage_up(opts) do
database = Keyword.fetch!(opts, :database)
template = Keyword.get(opts, :template, "template0")
encoding = Keyword.get(opts, :encoding, "UTF8")
lc_collate = Keyword.get(opts, :lc_collate, "en_US.UTF-8")
lc_ctype = Keyword.get(opts, :lc_ctype, "en_US.UTF-8")
output =
run_with_psql opts,
"CREATE DATABASE " <> database <> " " <>
"TEMPLATE=#{template} ENCODING='#{encoding}' " <>
"LC_COLLATE='#{lc_collate}' LC_CTYPE='#{lc_ctype}'"
cond do
String.length(output) == 0 -> :ok
String.contains?(output, "already exists") -> {:error, :already_up}
true -> {:error, output}
end
end
@doc false
def storage_down(opts) do
output = run_with_psql(opts, "DROP DATABASE #{opts[:database]}")
cond do
String.length(output) == 0 -> :ok
String.contains?(output, "does not exist") -> {:error, :already_down}
true -> {:error, output}
end
end
defp run_with_psql(database, sql_command) do
  # Rebinding a variable inside `if` blocks does not leak out of the block,
  # so collect the optional env-var prefixes in a list instead.
  env =
    [
      database[:password] && ~s(PGPASSWORD=#{database[:password]} ),
      database[:username] && ~s(PGUSER=#{database[:username]} ),
      database[:port] && ~s(PGPORT=#{database[:port]} )
    ]
    |> Enum.reject(&is_nil/1)
    |> Enum.join()

  command =
    env <>
      ~s(psql --quiet ) <>
      ~s(template1 ) <>
      ~s(--host #{database[:hostname]} ) <>
      ~s(-c "#{sql_command};" )

  command
  |> String.to_charlist()
  |> :os.cmd()
  |> List.to_string()
end
end | lib/ecto/adapters/postgres.ex | 0.790934 | 0.411347 | postgres.ex | starcoder |
defmodule Freshcom.Request do
@moduledoc """
Use this module to wrap and modify request data to pass in to API functions.
## Fields
- `requester_id` - The user's ID that is making this request.
- `client_id` - The app's ID that is making the request on behalf of the user.
- `account_id` - The target account's ID.
- `filter` - A filter to apply if you are calling an API function that list some resources. Please see `Freshcom.Filter` for the format of the filter to provide.
All other fields are self-explanatory. Not all fields are used by all API functions;
for example, providing a pagination to a function that creates a single resource
has no effect.
Fields in the form of `_****_` are not meant to be used directly, and you should
never set them to user-provided data. These fields are used by the internal system.
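
## Example

A minimal sketch of building and amending a request; the ID values and the
`"name"` field are illustrative only:

```elixir
req = %Freshcom.Request{requester_id: "usr_123", account_id: "acc_456"}
req = Freshcom.Request.put(req, :data, "name", "Example Product")
req.data
#=> %{"name" => "Example Product"}
```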
"""
use TypedStruct
typedstruct do
field :requester_id, String.t()
field :client_id, String.t()
field :account_id, String.t()
field :data, map(), default: %{}
field :identifier, map(), default: %{}
field :filter, list(), default: []
field :search, String.t()
field :pagination, map() | nil, default: %{size: 25, number: 1}
field :sort, list(), default: []
field :include, [String.t()]
field :locale, String.t()
field :_requester_, map()
field :_client_, map()
field :_role_, String.t()
field :_account_, map()
field :_default_locale_, String.t()
field :_identifiable_keys_, atom | [String.t()], default: :all
field :_include_filters_, map(), default: %{}
field :_filterable_keys_, atom | [String.t()], default: :all
field :_searchable_keys_, [String.t()], default: []
field :_sortable_keys_, [String.t()], default: :all
end
def put(req, root_key, key, value) do
root_value =
req
|> Map.get(root_key)
|> Map.put(key, value)
Map.put(req, root_key, root_value)
end
def put(req, key, value), do: Map.put(req, key, value)
end | lib/freshcom/core/request.ex | 0.761849 | 0.482917 | request.ex | starcoder |
defmodule Quack.Formatter do
@moduledoc """
Module responsible for formatting and composing messages to be sent to Slack.
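
For example, `create_message/1` turns a Logger event tuple into a Slack payload
(a sketch; the timestamp tuple follows `Logger.Formatter`):

```elixir
Quack.Formatter.create_message({:info, "Server started", {{2018, 11, 5}, {15, 4, 46, 613}}, ""})
#=> %{text: "*Incoming Log*", attachments: [%{color: "#00B4FF", ...}]}
```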
"""
import Logger.Formatter, only: [format_date: 1, format_time: 1]
@doc """
Function to compose a new message based on event data
"""
def create_message({level, message, timestamp, ""}) do
%{
text: "*Incoming Log*",
attachments: [
%{
author_name: "Quack - Elixir logging for Slack",
author_link: "https://github.com/azohra/quack",
color: get_colour(level),
fields: [
%{
title: "Priority",
value: Atom.to_string(level),
short: false
},
%{
title: "Timestamp",
value: parse_ts(timestamp),
short: false
},
%{
title: "Message",
value: to_preformatted(message)
}
]
}
]
}
end
def create_message({level, message, timestamp, metadata}) do
%{
text: "*Incoming Log*",
attachments: [
%{
author_name: "Quack - Elixir logging for Slack",
author_link: "https://github.com/azohra/quack",
color: get_colour(level),
fields: [
%{
title: "Priority",
value: Atom.to_string(level),
short: false
},
%{
title: "Timestamp",
value: parse_ts(timestamp),
short: false
},
%{
title: "Metadata",
value: metadata
},
%{
title: "Message",
value: to_preformatted(message)
}
]
}
]
}
end
@doc """
Function to format text as code
iex> Quack.Formatter.to_code("example")
"`example`"
"""
def to_code(text), do: ~s(`#{text}`)
@doc """
Function to format text as preformatted
iex> Quack.Formatter.to_preformatted("example")
"```example```"
"""
def to_preformatted(text), do: ~s(```#{text}```)
@doc """
Function to format text as bold
iex> Quack.Formatter.to_bold("example")
"*example*"
"""
def to_bold(text), do: ~s(*#{text}*)
@doc """
Function to format text as italics
iex> Quack.Formatter.to_italics("example")
"_example_"
"""
def to_italics(text), do: ~s(_#{text}_)
@doc """
Function to format text as a quote
iex> Quack.Formatter.to_quote("example")
">example"
"""
def to_quote(text), do: ~s(>#{text})
@doc """
Function to format a timestamp as a string
iex>Quack.Formatter.parse_ts({{2018, 11, 5}, {15, 4, 46, 613}})
"2018-11-05 15:04:46.613"
"""
def parse_ts({date, time}) do
  d =
    date
    |> format_date()
    |> to_string()

  t =
    time
    |> format_time()
    |> to_string()

  d <> " " <> t
end
# Function to return a colour based on logger level
defp get_colour(:debug), do: "#9215E8"
defp get_colour(:info), do: "#00B4FF"
defp get_colour(:warn), do: "#E8BD08"
defp get_colour(:error), do: "#FF3B0A"
end | lib/quack/formatter.ex | 0.786991 | 0.706494 | formatter.ex | starcoder |
defmodule Hyperex.Flattree do
@moduledoc """
A Flat Tree is a deterministic way of using a list as an index
for nodes in a tree. Essentially a simpler way of representing the
position of nodes.
A Flat Tree is also referred to as 'bin numbers', described in
RFC 7574: https://datatracker.ietf.org/doc/html/rfc7574#section-4.2
As an example (from the RFC), here's a tree with a width of 8 leaves
and a depth of 3:
```text
3 7
|-------|--------|
2 3 11
|----|----| |----|----|
1 1 5 9 13
|-|-| |-|-| |-|-| |-|-|
Depth 0 0 2 4 6 8 10 12 14
C0 C1 C2 C3 C4 C5 C6 C7
The flat tree is the list [0..14]. The content/leaves are C0..C7
```
Using the flat tree, we can see that index:
- 7 represents all the content (C0..C7)
- 1 represents C0 and C1
- 3 represents C0..C3
... etc ...
Even numbers are always leaves at depth 0.
Odd numbers are parents at depths > 0.
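
## Example

Navigating the 8-leaf tree above; the values follow from the definitions below:

```
iex> Hyperex.Flattree.parent(0)
1
iex> Hyperex.Flattree.sibling(0)
2
iex> Hyperex.Flattree.children(3)
{1, 5}
iex> Hyperex.Flattree.full_roots(6)
[1, 4]
```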
"""
use Bitwise, only_operators: true
@doc """
Calculate the index given the depth and offset in the tree
"""
@spec index(depth :: non_neg_integer, offset :: non_neg_integer) :: non_neg_integer
def index(depth, offset) do
offset <<< (depth + 1) ||| (1 <<< depth) - 1
end
@doc """
Find the depth of the tree for a given index in the array.
Zero-based index
```
Ex:
depth(1) == 1
depth(5) == 1
depth(3) == 2
```
"""
@spec depth(index :: non_neg_integer) :: non_neg_integer
def depth(index) do
walk_depth(index + 1, 0)
end
defp walk_depth(index, depth) do
case index &&& 1 do
0 ->
i = index >>> 1
walk_depth(i, depth + 1)
_ ->
depth
end
end
@doc """
Return the offset for an index from the left side of the tree.
```text
For example: (0, 1, 3, 7) have an offset of 0
(Tree is rotated to right in diagram)
(0)┐
(1)┐
2─┘ │
(3)┐
4─┐ │ │
5─┘ │
6─┘ │
(7)
While (2, 5, 11) have an offset of 1:
0──┐
1──┐
(2)─┘ │
3──┐
4──┐ │ │
(5)─┘ │
6──┘ │
7
8──┐ │
9──┐ │
10──┘ │ │
(11)─┘
12──┐ │
13──┘
14──┘
```
"""
@spec offset(index :: non_neg_integer) :: non_neg_integer
def offset(index) when (index &&& 1) == 0 do
index >>> 1
end
def offset(index) do
d = depth(index)
v = div(index + 1, 1 <<< d)
v >>> 1
end
@doc """
Return the parent of the given index
```text
Given:
1
/ \\
0 2
1 = parent(2)
```
"""
@spec parent(index :: non_neg_integer) :: non_neg_integer
def parent(index) do
d = depth(index)
index(d + 1, offset(index) >>> 1)
end
@doc """
Return the index of node that shares a parent
```text
Given:
1
/ \\
0 2
0 = sibling(2)
```
"""
@spec sibling(index :: non_neg_integer) :: non_neg_integer
def sibling(index) do
d = depth(index)
index(d, :erlang.bxor(offset(index), 1))
end
@doc """
Return the uncle of the index. The uncle is the parent's sibling
```text
3
/ \\
1 5
/ \\ / \\
0 2 4 6
5 = uncle(0)
1 = uncle(4)
```
"""
@spec uncle(index :: non_neg_integer) :: non_neg_integer
def uncle(index) do
d = depth(index)
index(d + 1, :erlang.bxor(offset(parent(index)), 1))
end
@doc """
Return the children of a given index
If the given index is a leaf or depth == 0 (still a leaf) return: `:none`
"""
@spec children(non_neg_integer) :: :none | {non_neg_integer, non_neg_integer}
def children(index) do
get_children(index, depth(index))
end
# No children of a leaf
defp get_children(index, _) when (index &&& 1) == 0, do: :none
# No children at depth 0
defp get_children(_, 0), do: :none
defp get_children(index, depth) do
off = offset(index) * 2
{index(depth - 1, off), index(depth - 1, off + 1)}
end
@doc """
Get the child to the left of the given index
If the index is a leaf, or depth == 0, return :none
"""
@spec left_child(index :: non_neg_integer) :: :none | non_neg_integer
def left_child(index) do
d = depth(index)
get_left_child(index, d)
end
defp get_left_child(index, _) when (index &&& 1) == 0, do: :none
defp get_left_child(_, 0), do: :none
defp get_left_child(index, depth) do
index(depth - 1, offset(index) <<< 1)
end
@doc """
Get the right child for the given index
If the index is a leaf, or depth == 0, return :none
"""
@spec right_child(index :: pos_integer) :: :none | pos_integer
def right_child(index) do
d = depth(index)
get_right_child(index, d)
end
defp get_right_child(index, _) when (index &&& 1) == 0, do: :none
defp get_right_child(_, 0), do: :none
defp get_right_child(index, depth) do
index(depth - 1, (offset(index) <<< 1) + 1)
end
@doc """
Return the whole span for the given index, from left to right
"""
@spec spans(index :: pos_integer) :: {:none | pos_integer, :none | pos_integer}
def spans(index) do
{left_span(index), right_span(index)}
end
@doc """
Get the left most child from the index. Note, this could be
a 'grandchild'.
If depth is 0, return :none
"""
@spec left_span(index :: pos_integer) :: :none | pos_integer
def left_span(index) do
d = depth(index)
case d do
0 ->
:none
_ ->
offset(index) * (2 <<< d)
end
end
@doc """
Get the right most child from the index. Note, this could be
a grandchild
If depth = 0, return :none
"""
@spec right_span(index :: integer) :: :none | integer
def right_span(index) do
d = depth(index)
case d do
0 ->
:none
_ ->
(offset(index) + 1) * (2 <<< d) - 2
end
end
@doc """
Return the count of all nodes in the subtree at the given index.
Note, the count *includes* the node at the index.
For example
3 = count(1)
covers the nodes 0, 1 and 2 (the subtree rooted at index 1)
"""
@spec count(index :: pos_integer) :: pos_integer
def count(index) do
(2 <<< depth(index)) - 1
end
@doc """
Return a list of indices that represent the full nodes (and subtrees)
to the left of the given index. Note, the given index must be a leaf (even) index.
For example, given:
```text
3
|----|----|
1 5
|---|---| |---|---|
0 2 4 6
[1,4] = full_roots(6)
[1] = full_roots(4)
[] = full_roots(0)
```
"""
@spec full_roots(index :: pos_integer) :: list | {:error, :only_leaf_indices_allowed}
def full_roots(index) when (index &&& 1) == 1, do: {:error, :only_leaf_indices_allowed}
def full_roots(index), do: walk_roots(index >>> 1, 0, 1, [])
defp walk_roots(0, _, _, nodes), do: Enum.reverse(nodes)
defp walk_roots(index, offset, factor, nodes) do
next_factor = determine_factor(factor, index)
walk_roots(
index - next_factor,
offset + 2 * next_factor,
1,
[
offset + next_factor - 1 | nodes
]
)
end
defp determine_factor(factor, index) when factor * 2 <= index do
determine_factor(factor * 2, index)
end
defp determine_factor(factor, _), do: factor
end | lib/hyperex/flattree.ex | 0.927831 | 0.961353 | flattree.ex | starcoder |
defmodule JuliaPort do
@moduledoc """
Example project that invokes Julia functions from Elixir for scientific computing, using ports and metaprogramming.
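
## Example

An illustrative session; it assumes a `julia` executable is available on `$PATH`:

```
port = JuliaPort.init()
JuliaPort.simple_test(port) # prints the result of 1+2
JuliaPort.terminate(port)
```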
"""
alias JuliaPort.GenFunction
use GenFunction, rand: 2, sum: 1, *: 2
use GenFunction, init_network: 1, train: 3, net_eval: 2
use GenFunction, load_data: 1, lr_train: 2, lr_test: 3
@doc """
open a port to start a julia process
"""
def init() do
Port.open({:spawn, "julia"}, [:binary])
end
@doc """
close a port to end a julia process
"""
def terminate(port) do
send(port, {self(), :close})
end
@doc """
example to print julia version
"""
def print_version(port) do
port_send(port, "VERSION")
IO.puts(port_receive(port, true))
end
@doc """
example to do arithmetics
"""
def simple_test(port) do
port_send(port, "1+2")
IO.puts(port_receive(port, true))
end
@doc """
example to do linear algebra
"""
def complex_test(port) do
rand(port, :a, 3, 3)
rand(port, :b, 3, 3)
JuliaPort.*(port, :c, :a, :b)
port_receive(port, false)
sum(port, :d, :c)
IO.puts(port_receive(port, true))
end
@doc """
example to do neural network
prerequisite: [`BackpropNeuralNet`](https://github.com/compressed/BackpropNeuralNet.jl) installed
"""
def real_test(port) do
port_send(port, "using BackpropNeuralNet")
init_network(port, :net, [2, 3, 2])
port_receive(port, false)
train(port, :result1, :net, [0.15, 0.7], [0.1, 0.9])
IO.puts(port_receive(port, true))
net_eval(port, :result2, :net, [0.15, 0.7])
IO.puts(port_receive(port, true))
end
@doc """
example to do linear regression
"""
def script_test(port) do
include_script(port, "./julia/lr.jl")
load_data(port, {:x_train, :y_train}, "./data/train")
load_data(port, {:x_test, :y_test}, "./data/test")
lr_train(port, :beta, :x_train, :y_train)
port_receive(port, false)
lr_test(port, :error, :x_test, :y_test, :beta)
IO.puts(port_receive(port, true))
end
@doc """
send a command through a port
"""
def port_send(port, command) do
send(port, {self(), {:command, command <> "\n"}})
end
@doc """
include a script in julia repl
"""
def include_script(port, path) do
port_send(port, "include(\"" <> path <> "\")")
end
@doc """
Receive messages from a port.
Remark: a trick that uses Ω and ω as end-of-output signals.
"""
def port_receive(port, verbose?) do
port_send(port, ":Ω")
loop(verbose?, "")
end
@doc """
Helper function to receive messages.
remark: one may modify this function to customise output format
"""
def loop(verbose?, data) do
receive do
{_pid, {:data, raw}} ->
data_new = String.replace(raw, "\n", "ω") |> String.trim() |> String.replace("ω", " ")
cond do
String.contains?(data_new, "Ω") ->
if verbose?, do: "received data: " <> String.trim(data)
data_new == ":" or data_new == "" ->
loop(verbose?, data)
true ->
loop(verbose?, data <> data_new)
end
_ ->
raise "receive error"
end
end
end | lib/julia_port.ex | 0.757884 | 0.61855 | julia_port.ex | starcoder |
defmodule ExifParser do
@moduledoc """
Parse EXIF/TIFF metadata from JPEG and TIFF files.
Exif/TIFF refers to the metadata added to JPEG images. It is encoded as part of the JPEG file.
There are multiple so-called "Image File Directories" or IFD that store information about the image.
+ IFD0 generally stores the image, EXIF and GPS metadata
+ IFD1 when available stores the information about a thumbnail image.
## Usage
### Read from jpeg file
Read data from a binary jpeg file.
```
iex(1)> {:ok, tags} = ExifParser.parse_jpeg_file("/path/to/file.jpg")
{:ok,
%{
ifd0: %{
date_time: "2008:07:31 10:05:49",
exif: %{color_space: 1, pixel_x_dimension: 100, pixel_y_dimension: 77},
orientation: 1,
resolution_unit: 2,
software: "GIMP 2.4.5",
x_resolution: 300.0,
y_resolution: 300.0
},
ifd1: %{
compression: 6,
jpeg_interchange_format: 282,
jpeg_interchange_format_length: 2022,
resolution_unit: 2,
x_resolution: 72.0,
y_resolution: 72.0
}
}}
```
A specific tag data can be retrived by
```
iex(2)> tags.ifd0.date_time
"2008:07:31 10:05:49"
iex(3)> tags.ifd0.exif.color_space
1
```
### Read from tiff file
Data can also be read from binary tiff files.
```
iex(2)> {:ok, tags} = ExifParser.parse_tiff_file("/home/sri/exif_tests/test1.tiff")
{:ok,
%{
ifd0: %{
bits_per_sample: '\b\b\b\b',
compression: 5,
extra_samples: 1,
image_length: 38,
image_width: 174,
orientation: 1,
photometric_interpretation: 2,
planar_configuration: 1,
predictor: 2,
rows_per_strip: 38,
sample_format: [1, 1, 1, 1],
samples_per_pixel: 4,
strip_byte_counts: 6391,
strip_offsets: 8
}}
```
"""
@max_length 2 * (65536 + 2)
# jpeg constants
@jpeg_start_of_image 0xFFD8
@jpeg_app1 0xFFE1
alias ExifParser.Header
alias ExifParser.ImageFileDirectory, as: IFD
alias ExifParser.CustomLocationTag, as: CLT
defmodule Options do
@moduledoc """
Options that are passed to the API.
Currently only two options are used.
### prettify
This enables makes the tag output pretty.
The values can be set to false to get data used to parse.
**Default: true**
### tag_offsets_and_names
This lets the user parse custom tags at custom memory locations.
```
%ExifParser.Options {
tag_offsets_and_names: [{MEMORY_LOCATION, :custom_tag_name}]
}
```
"""
defstruct prettify: true,
tag_offsets_and_names: nil
@type t :: %__MODULE__{
prettify: Boolean,
tag_offsets_and_names: map
}
end
@doc """
EXIF/TIFF data can be loaded from tiff binary files
```
ExifParser.parse_tiff_file("/path/to/file.tiff")
```
returns
```
{:ok, tags}
```
"""
def parse_tiff_file(filepath, options \\ %ExifParser.Options{}) do
with {:ok, buffer} <- File.open(filepath, [:read], &IO.binread(&1, @max_length)),
{:ok, tiff} <- parse_tiff_binary(buffer, options) do
{:ok, tiff}
else
err -> err
end
end
@doc """
EXIF/TIFF data can be loaded from tiff binary buffers
"""
def parse_tiff_binary(
<<header::binary-size(8), _rest::binary>> = start_of_tiff,
options \\ %ExifParser.Options{}
) do
with {:ok, header} <- Header.parse(header),
tags <-
IFD.parse_tiff_body(
header.identifier,
start_of_tiff,
header.ifd_offset,
options.prettify
),
custom_tags <-
CLT.parse_custom_tags(
options.tag_offsets_and_names,
header.identifier,
start_of_tiff,
options.prettify
) do
case custom_tags do
nil -> {:ok, tags}
custom_tags -> {:ok, tags, custom_tags}
end
else
err -> err
end
end
@doc """
EXIF/TIFF data can be loaded from jpeg binary files
```
ExifParser.parse_jpeg_file("/path/to/file.jpeg")
```
returns
```
{:ok, tags}
```
"""
def parse_jpeg_file(filepath, options \\ %ExifParser.Options{}) do
with {:ok, buffer} <- File.open(filepath, [:read], &IO.binread(&1, @max_length)),
{:ok, buffer} <- find_app1(buffer),
{:ok, tiff} <- parse_tiff_binary(buffer, options) do
{:ok, tiff}
else
err -> err
end
end
defp find_app1(<<@jpeg_app1::16, _length::16, "Exif"::binary, 0::16, rest::binary>>),
do: {:ok, rest}
defp find_app1(<<@jpeg_start_of_image::16, rest::binary>>), do: find_app1(rest)
defp find_app1(<<0xFF::8, _num::8, len::16, rest::binary>>) do
# Not app1, skip it
# the len desciption is part of the length
len = len - 2
<<_skip::size(len)-unit(8), rest::binary>> = rest
find_app1(rest)
end
defp find_app1(_), do: {:error, "Can't find app1 in jpeg image"}
end | lib/exif_parser.ex | 0.674372 | 0.840848 | exif_parser.ex | starcoder |
defmodule RatchetWrench.Model do
@moduledoc """
Define struct module of record in database.
## Examples
```elixir
defmodule Data do
use RatchetWrench.Model
schema do
uuid :data_id
pk [:data_id]
attributes data_id: {"STRING", nil},
string: {"STRING", ""},
bool: {"BOOL", nil },
int: {"INT64", nil},
float: {"FLOAT64", nil},
date: {"DATE", nil},
time_stamp: {"TIMESTAMP", nil}
end
end
```
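
Defining a schema also generates introspection helpers; an illustrative sketch,
with the table name derived via `Inflex.pluralize/1`:

```elixir
Data.__uuid__()       #=> :data_id
Data.__pk__()         #=> [:data_id]
Data.__table_name__() #=> the pluralized, underscored module name (e.g. "datas")
```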
"""
defmacro __using__(_) do
quote do
table_name =
__MODULE__
|> Atom.to_string()
|> String.split(".")
|> List.last()
|> Macro.underscore()
|> String.downcase()
|> Inflex.pluralize()
default_table_name = "#{table_name}"
Module.put_attribute(__MODULE__, :table_name, default_table_name)
Module.register_attribute(__MODULE__, :uuid, accumulate: false)
Module.register_attribute(__MODULE__, :pk, accumulate: false)
Module.register_attribute(__MODULE__, :interleave, accumulate: false)
Module.register_attribute(__MODULE__, :attributes, accumulate: true)
import RatchetWrench.Model
end
end
defmacro schema(do: block) do
do_schema(block)
end
defp do_schema(block) do
quote do
unquote(block)
table_name = Module.get_attribute(__ENV__.module, :table_name)
Module.put_attribute(__ENV__.module, :table_name, table_name)
uuid = Module.get_attribute(__ENV__.module, :uuid)
Module.put_attribute(__ENV__.module, :uuid, uuid)
pk = Module.get_attribute(__ENV__.module, :pk)
Module.put_attribute(__ENV__.module, :pk, pk)
interleave = Module.get_attribute(__ENV__.module, :interleave)
if interleave == nil do
interleave = []
Module.put_attribute(__ENV__.module, :interleave, interleave)
else
Module.put_attribute(__ENV__.module, :interleave, interleave)
end
Module.eval_quoted(__ENV__, [
RatchetWrench.Model.__defstruct__(__ENV__.module),
RatchetWrench.Model.__valid_define_uuid__!(__ENV__.module),
RatchetWrench.Model.__valid_define_pk__!(__ENV__.module),
RatchetWrench.Model.__valid_define_interleave__!(__ENV__.module),
RatchetWrench.Model.__def_helper_funcs__(__ENV__.module)
])
end
end
def __defstruct__(target) do
quote bind_quoted: [target: target] do
attributes = Module.get_attribute(target, :attributes)
fields = attributes |> Enum.map(fn {name, {_type, default}} -> {name, default} end)
defstruct fields
end
end
def __valid_define_uuid__!(mod) do
attributes = Module.get_attribute(mod, :attributes)
uuid = Module.get_attribute(mod, :uuid)
unless defined_column?(attributes, uuid) do
raise "Not define uuid in #{mod} module schema"
end
end
def __valid_define_pk__!(mod) do
attributes = Module.get_attribute(mod, :attributes)
pk = Module.get_attribute(mod, :pk)
if pk == nil do
raise "Must set pk in #{mod} module schema"
end
result =
Enum.map(pk, fn key ->
defined_column?(attributes, key)
end)
|> Enum.all?()
if result == false do
raise "Not define colum name in #{mod} module schema pk"
end
end
def __valid_define_interleave__!(mod) do
attributes = Module.get_attribute(mod, :attributes)
interleave = Module.get_attribute(mod, :interleave)
result =
Enum.map(interleave, fn key ->
defined_column?(attributes, key)
end)
|> Enum.all?()
if result == false do
raise "Not define colum name in #{mod} module schema interleave"
end
end
def defined_column?(attributes, target) do
result =
attributes
|> Enum.map(fn {name, {_type, _default}} -> "#{name}" == "#{target}" end)
|> Enum.any?()
if result == false do
false
else
true
end
end
def __def_helper_funcs__(mod) do
table_name = Module.get_attribute(mod, :table_name)
attributes = Module.get_attribute(mod, :attributes)
uuid = Module.get_attribute(mod, :uuid)
pk = Module.get_attribute(mod, :pk)
interleave = Module.get_attribute(mod, :interleave)
quote do
def __table_name__, do: unquote(table_name)
def __attributes__, do: unquote(attributes)
def __uuid__, do: unquote(uuid)
def __pk__, do: unquote(pk)
def __interleave__, do: unquote(interleave)
end
end
defmacro table_name(table_name) do
quote bind_quoted: [table_name: table_name] do
RatchetWrench.Model.__table_name__(__MODULE__, table_name)
end
end
def __table_name__(mod, table_name) do
Module.put_attribute(mod, :table_name, table_name)
end
defmacro attributes(decl) do
{list_of_attrs, _} = Code.eval_quoted(decl)
for attr <- list_of_attrs do
quote do: attribute([unquote(attr)])
end
end
defmacro attribute(decl) do
quote bind_quoted: [decl: decl] do
{name, type, default} =
case decl do
[{name, {type, default}}] -> {name, type, default}
end
RatchetWrench.Model.__attribute__(__MODULE__, name, type, default)
end
end
def __attribute__(mod, name, type, default) do
Module.put_attribute(mod, :attributes, {name, {type, default}})
end
defmacro uuid(uuid) do
quote bind_quoted: [uuid: uuid] do
RatchetWrench.Model.__uuid__(__MODULE__, uuid)
end
end
def __uuid__(mod, uuid) do
Module.put_attribute(mod, :uuid, uuid)
end
defmacro pk(pk) do
quote bind_quoted: [pk: pk] do
RatchetWrench.Model.__pk__(__MODULE__, pk)
end
end
def __pk__(mod, pk) do
Module.put_attribute(mod, :pk, pk)
end
defmacro interleave(interleave) do
quote bind_quoted: [interleave: interleave] do
RatchetWrench.Model.__interleave__(__MODULE__, interleave)
end
end
def __interleave__(mod, interleave) do
Module.put_attribute(mod, :interleave, interleave)
end
end | lib/ratchet_wrench/model.ex | 0.743354 | 0.519887 | model.ex | starcoder |
defmodule Combinators do
@moduledoc """
This module provides fundamental combinators for matching and parsing
strings. All public functions in this module return a function which takes a
`State`, and optionally, a `label` and a `visitor` function.
`State`: struct with the original string and an offset from where the new
matching should start.
`label`: An identifier for this combinator. Defaults to the name of the
combinator function
`visitor`: A function that transforms the node created by this combinator.
Defaults to `nil` (no transformation is done)
Return value: An anonymous function. These anonymous functions simply return
`nil` if no match was found. When a match has been found it will return a
2-tuple `{nodes, new_state}`.
`nodes` is a list where the head is the label of the combinator and the tail is
a list of consumed substring by that combinator.
`new_state` is the `State` struct with the original string and a new offset.
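
## Example

A small sketch, assuming the companion `State` struct (with `State.peek/2` and
`State.read/2`) used throughout this module:

```
parser = Combinators.seq([Combinators.str("foo"), Combinators.char("0-9")])
parser.(%State{string: "foo7", offset: 0})
#=> {[:seq, [:lit_str, "foo"], [:char, "7"]], %State{string: "foo7", offset: 4}}
```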
"""
@type state :: %State{string: binary, offset: integer}
@doc """
Match a literal string and return a new `State` with the next offset
"""
@spec str(string :: binary, visitor :: (any -> any) | nil) :: (state -> {[any], state} | nil)
def str(string, label \\ :lit_str, visitor \\ nil) do
fn state ->
len = String.length(string)
chunk = State.peek(state, len)
if chunk == string do
{[label, hd(apply_visitor([chunk], visitor))], State.read(state, len)}
end
end
end
@doc """
Attempt to match a single character against the given regex range or
character class
"""
@spec char(pattern :: binary, visitor :: (any -> any) | nil) :: (state -> {[any], state} | nil)
def char(pattern, label \\ :char, visitor \\ nil) do
fn state ->
chunk = State.peek(state, 1)
if chunk =~ ~r{[#{pattern}]} do
{[label, hd(apply_visitor([chunk], visitor))], State.read(state, 1)}
end
end
end
def opt(parser, label \\ :opt, _visitor \\ nil) do
fn state ->
case parser.(state) do
{node, new_state} -> {node, new_state}
_ -> {[label, []], state}
end
end
end
@doc """
Match all the given combinators sequentially. If any of the combinators fails
to parse, that is, it returns `nil`, this function will also return `nil`.
One way to look at it as as a chain of logical conjunction:
`parser_1 ∧ parser_2 ∧ ... ∧ parser_n`
"""
@spec seq(parsers :: [function], visitor :: (any -> any) | nil) ::
(state -> {[any], state} | nil)
def seq(parsers, label \\ :seq, visitor \\ nil) do
fn state ->
{nodes, new_state} =
Enum.reduce_while(parsers, {[], state}, fn parser, {acc_nodes, acc_state} ->
case parser.(acc_state) do
{node, new_state} -> {:cont, {acc_nodes ++ [node], new_state}}
nil -> {:halt, {acc_nodes, nil}}
end
end)
if new_state do
{[label | apply_visitor(nodes, visitor)], new_state}
end
end
end
@doc """
Repetition of minimum `n` occurrences in the string that satisfies the given
combinator. The returned function greedily matches until no more matches are
found for the given combinator. If at least `n` matches were found it's a
success, else it's a failure and the inner function returns `nil`.

Negative values of `n` are rejected: `rep` returns `nil` directly.
"""
@spec rep(parser :: function, integer, atom, (any -> any) | nil) ::
        (state -> {[any], state} | nil) | nil
def rep(parser, n, label \\ :rep, visitor \\ nil)

def rep(_parser, n, _label, _visitor) when n < 0, do: nil

def rep(parser, n, label, visitor) do
  fn state ->
    {_, new_state, nodes, count} = rep_recurse(parser, state, [], 0)

    if count >= n do
      {[label | apply_visitor(nodes, visitor)], new_state}
    end
  end
end
defp rep_recurse(parser, nil, nodes, count) do
{parser, nil, nodes, count}
end
defp rep_recurse(parser, state, nodes, count) do
result = parser.(state)
case result do
{node, new_state} -> rep_recurse(parser, new_state, nodes ++ [node], count + 1)
nil -> {parser, state, nodes, count}
end
end
@doc """
Given a list of combinators returns success (2-tuple) if at least one of them
satisfies the string starting at the given offset, else it's a failure
(`nil`). All the combinators passed to this function start from the same
offset in the string.
One way to look at this combinator is as a chain of logical disjunction:
`parser_1 ∨ parser_2 ∨ ... ∨ parser_n`
"""
@spec alt(parsers :: [function], visitor :: (any -> any) | nil) ::
(state -> {[any], state} | nil)
def alt(parsers, _label \\ :alt, _visitor \\ nil) do
fn state ->
Enum.find_value(parsers, fn parser ->
parser.(state)
end)
end
end
def ref(name) do
fn state ->
apply(__MODULE__, name, [state])
end
end
defp apply_visitor(nodes, visitor) when is_function(visitor) do
Enum.map(nodes, visitor)
end
defp apply_visitor(nodes, _), do: nodes
end
defmodule Combinators.Builtin do
import Combinators
# Some operators to alleviate verbosity
def a <|> b when is_binary(a) and is_binary(b) do
alt([str(a), str(b)])
end
def a <|> b when is_binary(a) and is_function(b) do
alt([str(a), b])
end
def a <|> b when is_function(a) and is_binary(b) do
alt([a, str(b)])
end
def a <|> b when is_function(a) and is_function(b) do
alt([a, b])
end
def zero, do: str("0")
def non_zero_digit, do: char("1-9")
def digit, do: zero() <|> non_zero_digit()
def positive_integer do
seq([non_zero_digit(), digits()])
end
def negative_integer do
seq([str("-"), non_zero_digit(), digits()])
end
def integer do
alt([zero(), negative_integer(), positive_integer()])
end
def digits, do: rep(digit(), 1)
# `\R` is not a valid escape inside a character class; `\\s` (whitespace)
# matches the intent of a whitespace parser.
def ws, do: rep(char("\\s"), 1)
# The combinators below are unimplemented stubs.
def sep_by(_separator), do: nil
def many1, do: nil
def choice, do: nil
def between, do: nil
def one_of, do: nil
end | lib/combinators.ex | 0.838382 | 0.873269 | combinators.ex | starcoder |
defmodule ID3.Picture do
@moduledoc """
A struct representing a picture in the ID3 tag.
See https://docs.rs/id3/0.3.0/id3/frame/struct.Picture.html
### PictureType
Due to limitations of `rust-id3`, multiple pictures with the same `picture_type` are not supported.
When writing, a picture with the same `picture_type` as one already in the tag will overwrite that picture.
When reading, `rust-id3` only returns one of the pictures of the same type.
"""
defstruct mime_type: nil, picture_type: nil, description: "", data: nil
@typedoc """
A structure representing an ID3 picture frame's contents.
- `data` is a binary of unsigned char(8bit)s.
- `picture_type` falls back to `:Other` unless a valid type is given.
"""
@type t :: %__MODULE__{
mime_type: String.t(),
picture_type: picture_type,
description: String.t(),
data: :binary
}
@picture_types ~W(
Other Icon OtherIcon
CoverFront CoverBack
Leaflet Media LeadArtist
Conductor Band Composer Lyricist
RecordingLocation DuringRecording DuringPerformance
ScreenCapture BrightFish
Illustration BandLogo PublisherLogo
)a
@typedoc """
Types of pictures used in APIC frames.
"""
@type picture_type ::
:Other
| :Icon
| :OtherIcon
| :CoverFront
| :CoverBack
| :Leaflet
| :Media
| :LeadArtist
| :Conductor
| :Band
| :Composer
| :Lyricist
| :RecordingLocation
| :DuringRecording
| :DuringPerformance
| :ScreenCapture
| :BrightFish
| :Illustration
| :BandLogo
| :PublisherLogo
@doc """
Creates a new Picture.
Needs the data binary and the MIME type of the data.
## Options
- `picture_type`: ID3 tags can have a picture for each type. One of `t:Picture.picture_type/0`.
- `description`: Text description about the picture.
## Examples
iex> data = File.read!("full/path/to/foo.jpg")
iex> ID3.Picture.new(data, "image/jpeg", picture_type: :CoverFront)
{:ok,
%ID3.Picture{
data: <<255, 167, ...>>,
description: "",
mime_type: "image/jpeg",
picture_type: :CoverFront
}
}
"""
@spec new(binary, String.t(), options :: [option]) :: {:ok, t()} | :error
when option: {:picture_type, picture_type()} | {:description, String.t()}
def new(data, mime_type, options \\ [])
when data |> is_binary and mime_type |> is_binary and options |> is_list do
picture_type = options |> Keyword.get(:picture_type, :Other)
description = options |> Keyword.get(:description, "")
with true <- @picture_types |> Enum.member?(picture_type),
true <- description |> String.valid?() do
picture = %__MODULE__{
picture_type: picture_type,
mime_type: mime_type,
description: description,
data: data
}
{:ok, picture}
else
_ -> :error
end
end
end | lib/id3/picture.ex | 0.85186 | 0.511839 | picture.ex | starcoder |
defmodule GenRegex.Interpreter do
@moduledoc """
This is the interpreter module.
It reduces the parsed structures to string generator nodes, which in turn will be used to generate the final string.
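
## Example

A hedged sketch; the AST shape is assumed from the interpreter clauses below:

```
GenRegex.Interpreter.read({:word, [atom: :h, atom: :i]})
#=> %GenRegex.Generator{type: :word, value: ["h", "i"], ...}
```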
"""
alias GenRegex.Generator
def read(ast, parent \\ nil), do: interpret(ast, parent)
defp interpret({:word, [head | tail] = elems}, parent) do
elems =
if head == {:atom, :^} and parent == nil do
tail
else
elems
end
result =
elems
|> List.wrap()
|> Enum.map(&interpret(&1, :word))
|> Enum.map(fn item ->
case item do
%Generator{type: :set, value: value} -> value
%Generator{type: :negset, value: value} -> value
item -> item
end
end)
%Generator{
type: :word,
value: result
}
end
defp interpret({:option, choices}, _parent) do
result =
choices
|> Enum.map(&interpret(&1, :option))
|> List.flatten()
%Generator{
type: :option,
value: result
}
end
defp interpret({:choice, choice}, _parent), do: interpret(choice, :choice)
defp interpret({:set, items}, :word), do: interpret(items, :word)
defp interpret({:set, items}, :set), do: interpret(items, :set)
defp interpret({:set, items}, _parent), do: do_interpret_set(:set, items)
defp interpret({:negset, items}, :word), do: interpret(items, :word)
defp interpret({:negset, items}, :negset), do: interpret(items, :negset)
defp interpret({:negset, items}, _parent), do: do_interpret_set(:negset, items)
defp interpret({:wildcard, :.}, _parent),
do: %Generator{
type: :wildcard,
value: nil
}
defp interpret({:atom, val}, _parent), do: to_string(val)
defp interpret({:repexpr, [expr, min, max]}, _parent) do
min = to_integer(min, :repexpr)
max = to_integer(max, :repexpr)
[result] =
expr
|> List.wrap()
|> interpret(:repexpr)
result
|> Map.put(:min, min)
|> Map.put(:max, max)
end
defp interpret({:range, val}, :set) do
[first, last] =
val
|> to_string()
|> String.split("-")
|> Enum.map(&:binary.decode_unsigned(&1))
%Generator{
type: :range,
value: first..last
}
end
defp interpret({:range, val}, :negset), do: interpret({:range, val}, :set)
defp interpret({:range, val}, :word), do: to_string(val)
defp interpret({:range, val}, _) do
%Generator{
type: :word,
value: to_string(val)
}
end
defp interpret({:escape, _seq} = input, parent) do
{set_type, result} = do_escape(input)
case parent do
:set ->
result
:negset ->
result
:choice ->
result
_ ->
%Generator{
type: set_type,
value: result
}
end
end
defp interpret(ast, _) when is_number(ast), do: ast
defp interpret(ast, _) when is_binary(ast), do: ast
defp interpret(ast, _) when is_nil(ast), do: ast
defp interpret(ast, parent) do
result =
ast
|> Enum.map(&interpret(&1, parent))
result
end
defp do_interpret_set(type, items) do
result =
items
|> Enum.uniq()
|> Enum.map(&interpret(&1, :set))
|> Enum.map(fn item ->
case item do
%Generator{type: :wildcard} -> :wildcard
%Generator{type: :set, value: value} -> value
item -> item
end
end)
|> List.wrap()
|> List.flatten()
%Generator{
type: type,
value: result
}
end
defp do_escape({:escape, '\\d'}) do
{:set,
%Generator{
max: 1,
min: 1,
type: :set,
value: [
%Generator{max: 1, min: 1, type: :range, value: 48..57}
]
}
}
end
defp do_escape({:escape, '\\D'}) do
{:set, result} = do_escape({:escape, '\\d'})
{:negset, Map.put(result, :type, :negset)}
end
defp do_escape({:escape, '\\w'}) do
{:set,
%Generator{
max: 1,
min: 1,
type: :set,
value: [
%Generator{max: 1, min: 1, type: :range, value: 48..57},
%Generator{max: 1, min: 1, type: :range, value: 97..122},
%Generator{max: 1, min: 1, type: :range, value: 65..90},
"_"
]
}
}
end
defp do_escape({:escape, '\\W'}) do
{:set, result} = do_escape({:escape, '\\w'})
{:negset, Map.put(result, :type, :negset)}
end
defp do_escape({:escape, '\\s'}) do
{:set,
%Generator{
max: 1,
min: 1,
type: :set,
value: [" ", "\t", "\r", "\n", "\v", "\f"]
}
}
end
defp do_escape({:escape, '\\S'}) do
{:set, result} = do_escape({:escape, '\\s'})
{:negset, Map.put(result, :type, :negset)}
end
defp do_escape({:escape, char}) do
{:set,
{
:atom,
Macro.unescape_string(to_string(char))
}
|> interpret(:escape)
}
end
defp to_integer(nil, _parent), do: nil
defp to_integer(val, _parent)
when is_integer(val),
do: val
defp to_integer([{:word, elems}], parent) do
{num, _} =
elems
|> Enum.map(&interpret(&1, parent))
|> Enum.join()
|> Integer.parse()
num
end
defp to_integer(val, parent) do
{num, _} =
val
|> interpret(parent)
|> to_string()
|> Integer.parse()
num
end
end | lib/grammar/interpreter.ex | 0.673621 | 0.64563 | interpreter.ex | starcoder |
defmodule LargeSort.Shared.IntegerFile do
alias LargeSort.Shared.IntegerFileBehavior
@behaviour IntegerFileBehavior
@moduledoc """
Contains functionality for working with integer files
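
## Example

An illustrative round trip (the file path is a placeholder):

```
alias LargeSort.Shared.IntegerFile

1..5
|> IntegerFile.write_integers_to_stream(IntegerFile.integer_file_stream("tmp/ints.txt"))
|> Stream.run()

"tmp/ints.txt"
|> IntegerFile.integer_file_stream()
|> IntegerFile.read_stream()
|> Enum.to_list()
#=> [1, 2, 3, 4, 5]
```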
"""
@doc """
Creates a stream for an integer file that operates in line mode
Any existing file will be overwritten.
If something goes wrong when creating the file stream, this function
will throw an exception.
## Parameters
- path: the path of the file to be written to
## Returns
A stream that can be used to read from or write to the file
"""
@impl IntegerFileBehavior
@spec integer_file_stream(String.t()) :: Enumerable.t()
def integer_file_stream(path) do
create_file_directory(path)
File.stream!(path, [:utf8], :line)
end
@doc """
Writes an enumerable containing integers to a stream
## Parameters
- enumerable: the enumerable whose integers are to be written to the file
- out_stream: the stream to be written to. Actually, this doesn't necessarily
have to be a stream. Any collectable will do.
## Returns
A stream consisting of the write operations
"""
@impl IntegerFileBehavior
@spec write_integers_to_stream(Enumerable.t(), Collectable.t()) :: Enumerable.t()
def write_integers_to_stream(enumerable, out_stream) do
enumerable
|> integers_to_lines()
|> Stream.into(out_stream)
end
@doc """
Creates a stream that reads integers from an integer stream
## Parameters
- integer_stream: A stream that reads lines of integer text,
most likely lines of text from an integer file
## Returns
A stream that emits the integers in the integer file
"""
@impl IntegerFileBehavior
@spec read_stream(Enumerable.t()) :: Enumerable.t()
def read_stream(integer_stream) do
integer_stream
|> Stream.map(&String.trim/1)
|> Stream.map(&String.to_integer/1)
end
@doc """
Counts the number of lines in an raw integer text stream
This function makes no attempt to parse the integers or determine the
validity of the integers in the stream. It just counts the number of
items it encounters.
Note that when the function has completed, all the items will have been
read from the stream. So the stream is not likely to be that useful
after this function has completed and you'd have to create a new
stream with the same data to do anything else with that data.
## Parameters
- integer_stream: A stream that reads lines of integer text,
most likely lines of text from an integer file
## Returns
The number of lines found in the stream
"""
@impl IntegerFileBehavior
@spec integer_count(Enumerable.t()) :: non_neg_integer()
def integer_count(integer_stream) do
Enum.count(integer_stream)
end
# Creates the directory for a file path, if it doesn't already exist
defp create_file_directory(file_path, directory_exists \\ nil)
defp create_file_directory(file_path, nil) do
directory = Path.dirname(file_path)
create_file_directory(file_path, File.dir?(directory))
end
defp create_file_directory(_, true), do: :ok
defp create_file_directory(file_path, false) do
directory = Path.dirname(file_path)
File.mkdir_p(directory)
end
@doc """
Creates an integer file device for reading
This function assumes that the integer file exists and can be opened for reading
## Parameters
- path: The path of the file to be opened for reading
## Returns
An IO device that can be used to read from the integer file
"""
@impl IntegerFileBehavior
@spec read_device!(String.t()) :: IO.device()
def read_device!(path) do
File.open!(path, [:utf8, :read, :read_ahead])
end
@doc """
Creates an integer file device for writing
This function assumes that the integer file exists and can be opened for writing
## Parameters
- path: The path of the file to be opened for writing
## Returns
An IO device that can be used to write to the integer file
"""
@impl IntegerFileBehavior
@spec write_device!(String.t()) :: IO.device()
def write_device!(path) do
File.open!(path, [:utf8, :write, :delayed_write])
end
@doc """
Closes an integer file device
## Parameters
- device: The integer file device to be closed
## Returns
`:ok`
"""
@impl IntegerFileBehavior
@spec close_device(IO.device()) :: :ok
def close_device(device) do
File.close(device)
end
@doc """
Reads an integer from a device that contains integer file-formatted data
This function assumes that the IO device is operating in a read mode as well
as :utf8 mode.
## Parameters
- device: The IO device to be read from
## Returns
The integer that was read from the device, an `:eof` when the end of file
was encountered, or `{:error, reason}` when there was an error reading
from the device.
"""
@impl IntegerFileBehavior
@spec read_integer(IO.device()) :: integer() | IO.no_data()
def read_integer(device) do
device
|> IO.read(:line)
|> data_to_integer()
end
@doc """
Writes an integer to a device using the integer file format
This function assumes that the IO device is operating in a write mode as well
as :utf8 mode.
## Parameters
- device: The IO device to be written to
## Returns
:ok to indicate success
"""
@impl IntegerFileBehavior
@spec write_integer(IO.device(), integer()) :: :ok
def write_integer(device, integer) do
integer
# Convert the integer to a string
|> Integer.to_string()
# Concatenate the integer string with a newline character
|> Kernel.<>("\n")
# Write the resulting line to the device
|> (fn line -> IO.write(device, line) end).()
end
@doc """
Converts an enumerable containing integers
to a stream of integer file lines (including the newline
characters)
## Parameters
- integers: an enumerable containing integers to be converted
## Returns
A collection of strings that contain the integers in integer file format,
with each element an integer file line
"""
@impl IntegerFileBehavior
@spec integers_to_lines(Enum.t()) :: Enum.t()
def integers_to_lines(integers) do
integers
|> Stream.map(&Integer.to_string/1)
|> Stream.map(&(&1 <> "\n"))
end
# Converts data read from an IO device to an integer
defp data_to_integer(:eof), do: :eof
defp data_to_integer(data = {:error, _}), do: data
defp data_to_integer(data) do
data
|> String.trim()
|> String.to_integer()
end
end | largesort_shared/lib/integer_file.ex | 0.896455 | 0.538983 | integer_file.ex | starcoder |
defmodule Unicode.IndicSyllabicCategory do
@moduledoc """
Functions to introspect Unicode
indic syllabic categories for binaries
(Strings) and codepoints.
"""
@behaviour Unicode.Property.Behaviour
alias Unicode.Utils
@indic_syllabic_categories Utils.indic_syllabic_categories()
|> Utils.remove_annotations()
@doc """
Returns the map of Unicode
indic syllabic categories.
The indic syllabic category name is the map
key and a list of codepoint
ranges as tuples as the value.
"""
def indic_syllabic_categories do
@indic_syllabic_categories
end
@doc """
Returns a list of known Unicode
indic syllabic category names.
This function does not return the
names of any indic syllabic category aliases.
"""
@known_indic_syllabic_categories Map.keys(@indic_syllabic_categories)
def known_indic_syllabic_categories do
@known_indic_syllabic_categories
end
@indic_syllabic_category_alias Utils.property_value_alias()
|> Map.get("insc")
|> Utils.atomize_values()
|> Utils.downcase_keys_and_remove_whitespace()
|> Utils.add_canonical_alias()
@doc """
Returns a map of aliases for
Unicode indic syllabic categories.
An alias is an alternative name
for referring to a indic syllabic category. Aliases
are resolved by the `fetch/1` and
`get/1` functions.
"""
@impl Unicode.Property.Behaviour
def aliases do
@indic_syllabic_category_alias
end
@doc """
Returns the Unicode ranges for
a given indic syllabic category as a list of
ranges as 2-tuples.
Aliases are resolved by this function.
Returns either `{:ok, range_list}` or
`:error`.
"""
@impl Unicode.Property.Behaviour
def fetch(indic_syllabic_category) when is_atom(indic_syllabic_category) do
Map.fetch(indic_syllabic_categories(), indic_syllabic_category)
end
def fetch(indic_syllabic_category) do
indic_syllabic_category = Utils.downcase_and_remove_whitespace(indic_syllabic_category)
indic_syllabic_category = Map.get(aliases(), indic_syllabic_category, indic_syllabic_category)
Map.fetch(indic_syllabic_categories(), indic_syllabic_category)
end
@doc """
Returns the Unicode ranges for
a given indic syllabic category as a list of
ranges as 2-tuples.
Aliases are resolved by this function.
Returns either `range_list` or
`nil`.
"""
@impl Unicode.Property.Behaviour
def get(indic_syllabic_category) do
case fetch(indic_syllabic_category) do
{:ok, indic_syllabic_category} -> indic_syllabic_category
_ -> nil
end
end
@doc """
Returns the count of the number of characters
for a given indic syllabic category.
## Example
iex> Unicode.IndicSyllabicCategory.count(:bindu)
91
"""
@impl Unicode.Property.Behaviour
def count(indic_syllabic_category) do
with {:ok, indic_syllabic_category} <- fetch(indic_syllabic_category) do
Enum.reduce(indic_syllabic_category, 0, fn {from, to}, acc -> acc + to - from + 1 end)
end
end
@doc """
Returns the indic syllabic category name(s) for the
given binary or codepoint.
In the case of a codepoint, a single
indic syllabic category name is returned.
For a binary a list of distinct indic syllabic category
names represented by the lines in
the binary is returned.
"""
def indic_syllabic_category(string) when is_binary(string) do
string
|> String.to_charlist()
|> Enum.map(&indic_syllabic_category/1)
|> Enum.uniq()
end
for {indic_syllabic_category, ranges} <- @indic_syllabic_categories do
def indic_syllabic_category(codepoint) when unquote(Utils.ranges_to_guard_clause(ranges)) do
unquote(indic_syllabic_category)
end
end
def indic_syllabic_category(codepoint)
when is_integer(codepoint) and codepoint in 0..0x10FFFF do
:other
end
end | lib/unicode/indic_syllabic_category.ex | 0.892659 | 0.592195 | indic_syllabic_category.ex | starcoder |
defmodule Donut.GraphQL.Schema.Notation do
@moduledoc """
Sets up the notations for building an Absinthe schema.
"""
defmacro __using__(_options) do
quote do
use Absinthe.Schema.Notation, except: [resolve: 1]
import Donut.GraphQL.Schema.Notation, only: [
resolve: 1,
mutable: 1, mutable: 2, mutable: 3,
immutable: 1, immutable: 2,
mutable_object: 2, mutable_object: 3,
mutable_interface: 2, mutable_interface: 3
]
import Donut.GraphQL.Result
end
end
require Logger
@type parent :: map
@type args :: map
@type env :: Absinthe.Resolution.t
@type result :: { :ok, any } | { :error, any }
@type resolver :: (args, env -> result) | (parent, args, env -> result)
@doc false
@spec resolve(resolver) :: Macro.t
defmacro resolve(fun) do
quote do
Absinthe.Schema.Notation.resolve(&Donut.GraphQL.Schema.Notation.run(&1, &2, &3, unquote(fun)))
end
end
@spec resolver(parent, args, env, resolver) :: result
defp resolver(_, args, env, fun) when is_function(fun, 2), do: fun.(args, env)
defp resolver(parent, args, env, fun), do: fun.(parent, args, env)
@doc false
@spec run(parent, args, env, resolver) :: result
def run(parent, args, env = %{ definition: %{ directives: directives } }, fun) do
if Enum.any?(directives, fn
%{ schema_node: %{ identifier: :debug } } -> true
_ -> false
end) do
try do
resolver(parent, args, env, fun)
rescue
exception ->
err = Donut.GraphQL.Result.InternalError.new(:error, exception)
Logger.error(err.error_message)
{ :ok, err }
catch
type, value when type in [:exit, :throw] ->
err = Donut.GraphQL.Result.InternalError.new(type, value)
Logger.error(err.error_message)
{ :ok, err }
end
else
resolver(parent, args, env, fun)
end
end
defp document_type([]), do: nil
defp document_type([%Absinthe.Blueprint.Document.Operation{ type: type }|_]), do: type
defp document_type([_|paths]), do: document_type(paths)
defp document_type(%{ path: paths }), do: document_type(paths)
defmacro mutable(immutable_fields, _env), do: immutable_fields
def mutable(immutable_fields, mutable_fields, env) do
immutable_fields = if(is_function(immutable_fields), do: immutable_fields.(), else: immutable_fields)
case document_type(env) do
:query -> immutable_fields
:mutation ->
mutable_fields = if(is_function(mutable_fields), do: mutable_fields.(), else: mutable_fields)
Map.merge(immutable_fields, mutable_fields)
end
end
defmacro immutable(_attrs \\ [], _block), do: raise "Must be used inside a mutable object"
defmacro mutable(value), do: value
defmacro mutable(type, name, attrs, block) do
{ mutable_body, immutable } = Macro.prewalk(block, nil, fn
{ :immutable, context, body }, _ ->
block = Enum.find_value(body, nil, fn
[do: block] -> block
_ -> false
end)
{ block, { type, context, [name|body] } }
{ :mutable, _, [name] }, acc when is_atom(name) -> { String.to_atom("mutable_#{to_string(name)}"), acc }
node, acc -> { node, acc }
end)
quote do
description = @desc
unquote(immutable)
@desc description
unquote(type)(unquote(String.to_atom("mutable_#{to_string(name)}")), unquote(attrs), unquote(mutable_body))
end
end
defmacro mutable_object(name, attrs \\ [], block) do
quote do
Donut.GraphQL.Schema.Notation.mutable(:object, unquote(name), unquote(attrs), unquote(block))
end
end
defmacro mutable_interface(name, attrs \\ [], block) do
quote do
Donut.GraphQL.Schema.Notation.mutable(:interface, unquote(name), unquote(attrs), unquote(block))
end
end
end | apps/donut_graphql/lib/donut.graphql/schema.notation.ex | 0.611614 | 0.421998 | schema.notation.ex | starcoder |
defmodule Bingo.GameServer do
@moduledoc """
A game server process that holds a `Game` struct as its state.
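
## Example

Illustrative usage; it assumes the game registry, buzzword cache and
`:games_table` ETS table from the application's supervision tree are running:

```
{:ok, _pid} = Bingo.GameServer.start_link("icy-fox", 3)
Bingo.GameServer.summary("icy-fox")
#=> %{squares: ..., scores: ..., winner: ...} (shape from summarize/1)
```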
"""
use GenServer
require Logger
@timeout :timer.hours(2)
# Client (Public) Interface
@doc """
Spawns a new game server process registered under the given `game_name`.
"""
def start_link(game_name, size) do
GenServer.start_link(__MODULE__, {game_name, size}, name: via_tuple(game_name))
end
def summary(game_name) do
GenServer.call(via_tuple(game_name), :summary)
end
def mark(game_name, phrase, player) do
GenServer.call(via_tuple(game_name), {:mark, phrase, player})
end
@doc """
Returns a tuple used to register and lookup a game server process by name.
"""
def via_tuple(game_name) do
{:via, Registry, {Bingo.GameRegistry, game_name}}
end
@doc """
Returns the `pid` of the game server process registered under the
given `game_name`, or `nil` if no process is registered.
"""
def game_pid(game_name) do
game_name
|> via_tuple()
|> GenServer.whereis()
end
# Server Callbacks
def init({game_name, size}) do
buzzwords = Bingo.BuzzwordCache.get_buzzwords()
game =
case :ets.lookup(:games_table, game_name) do
[] ->
game = Bingo.Game.new(buzzwords, size)
:ets.insert(:games_table, {game_name, game})
game
[{^game_name, game}] ->
game
end
Logger.info("Spawned game server process named '#{game_name}'.")
{:ok, game, @timeout}
end
def handle_call(:summary, _from, game) do
{:reply, summarize(game), game, @timeout}
end
def handle_call({:mark, phrase, player}, _from, game) do
new_game = Bingo.Game.mark(game, phrase, player)
:ets.insert(:games_table, {my_game_name(), new_game})
{:reply, summarize(new_game), new_game, @timeout}
end
def summarize(game) do
%{
squares: game.squares,
scores: game.scores,
winner: game.winner
}
end
def handle_info(:timeout, game) do
{:stop, {:shutdown, :timeout}, game}
end
def terminate({:shutdown, :timeout}, _game) do
:ets.delete(:games_table, my_game_name())
:ok
end
def terminate(_reason, _game) do
:ok
end
defp my_game_name do
Registry.keys(Bingo.GameRegistry, self()) |> List.first()
end
end | apps/bingo/lib/bingo/game_server.ex | 0.817647 | 0.439026 | game_server.ex | starcoder |
defmodule LocationsWeb.GeoHelpers do
@moduledoc """
GEO helpers
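
## Example

A sketch with a minimal GeoJSON-style feature (field values are illustrative):

```
features = [
  %{
    "properties" => %{"osm_id" => 42, "display_name" => "Berlin"},
    "geometry" => %{"coordinates" => [13.405, 52.52]}
  }
]

LocationsWeb.GeoHelpers.get_first_encoding_result(features)
#=> {:ok, %{lat: 52.52, lon: 13.405}, 42}
```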
"""
@doc """
Returns the type of the features for a location
"""
def get_feature_type(%{geo_features: features}) when length(features) > 1 do
:multiple_features
end
def get_feature_type(%{geo_features: features}) when length(features) == 1 do
:single_feature
end
def get_feature_type(_) do
:no_features
end
@doc """
Returns the first features entry from a collection of features
"""
def get_first_encoding_result(features) do
geo_selected_id = get_first_geo__id(features)
get_location_for_selected(features, geo_selected_id)
end
@doc """
Returns the display name of a place
"""
def get_geo_place_display_name(geo_place) do
if name = geo_place["properties"]["display_name"] do
name
else
get_geo_place_id(geo_place)
end
end
@doc """
Returns the type of a place
"""
def get_geo_place_display_type(geo_place) do
if type = geo_place["properties"]["type"] do
type
else
"unknown"
end
end
@doc """
Returns the osm_id for a place
"""
def get_geo_place_id(geo_place) do
geo_place["properties"]["osm_id"]
end
@doc """
Returns the feature from the collection of features for the passed osm_id
"""
def get_location_for_selected(features, geo_id) do
  # `with` without an `else` returns the first non-matching value as-is,
  # which propagates the `{:error, reason}` tuples from the helpers below.
  with {:ok, geometry} <- get_geometry(features, geo_id),
       {:ok, geometry} <- has_geometry(geometry),
       {:ok, coordinates} <- get_coordinates(geometry) do
    geo_location = create_geo_location(coordinates)
    {:ok, geo_location, geo_id}
  end
end
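# A minimal sketch of the expected GeoJSON-like input (key names taken from
# the accessors in this module; the values are illustrative):
#
#     feature = %{
#       "properties" => %{"osm_id" => 123, "display_name" => "Berlin", "type" => "city"},
#       "geometry" => %{"coordinates" => [13.404954, 52.520008]}
#     }
#
#     LocationsWeb.GeoHelpers.get_location_for_selected([feature], 123)
#     #=> {:ok, %{lat: 52.520008, lon: 13.404954}, 123}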
defp create_geo_location([lon, lat]) do
%{
lat: lat,
lon: lon
}
end
defp get_coordinates(geometry) do
if Map.has_key?(geometry, "coordinates") do
{:ok, geometry["coordinates"]}
else
{:error, :no_coordinates}
end
end
defp has_geometry(nil) do
{:error, :no_geometry_nil}
end
defp has_geometry([]) do
{:error, :no_geometry_empty}
end
defp has_geometry(geometry) do
{:ok, geometry}
end
defp get_geometry([], _geo_id) do
{:error, :no_features}
end
defp get_geometry([feature], _geo_id) do
  get_geometry_from_feature(feature)
end
defp get_geometry(features, geo_id) do
feature =
Enum.find(features, fn place ->
geo_id == get_geo_place_id(place)
end)
get_geometry_from_feature(feature)
end
defp get_geometry_from_feature(nil) do
{:error, :no_feature}
end
defp get_geometry_from_feature(feature) do
if Map.has_key?(feature, "geometry") do
{:ok, feature["geometry"]}
else
{:error, :no_geometry}
end
end
defp get_first_geo_id([]) do
  nil
end
defp get_first_geo_id(geo_features) do
  place = hd(geo_features)
  get_geo_place_id(place)
end
end | lib/locations_web/geo_helpers/geo_helpers.ex | 0.828349 | 0.561185 | geo_helpers.ex | starcoder |
defmodule OpenTelemetry.Tracer do
@moduledoc """
This module contains macros for Tracer operations around the lifecycle of the Spans within a Trace.
The Tracer is able to start a new Span as a child of the active Span of the current process, set
a different Span to be the current Span by passing the Span's context, end a Span or run a code
block within the context of a newly started span that is ended when the code block completes.
The macros use the Tracer registered to the Application that the calling module belongs to,
assuming `OpenTelemetry.register_application_tracer/1` has been called for that Application.
If not, the default Tracer is used.
require OpenTelemetry.Tracer
OpenTelemetry.Tracer.with_span "span-1" do
... do something ...
end
"""
@type start_opts() :: %{
optional(:attributes) => OpenTelemetry.attributes(),
# TODO: sampler should be an opaque type defined in the implementation
optional(:sampler) => term(),
optional(:links) => OpenTelemetry.links(),
optional(:is_recording) => boolean(),
optional(:start_time) => :opentelemetry.timestamp(),
optional(:kind) => OpenTelemetry.span_kind()
}
@doc """
Starts a new span and does not make it the current active span of the current process.
The current active Span is used as the parent of the created Span.
"""
defmacro start_span(name, opts \\ quote(do: %{})) do
quote bind_quoted: [name: name, start_opts: opts] do
:otel_tracer.start_span(:opentelemetry.get_tracer(__MODULE__), name, Map.new(start_opts))
end
end
@doc """
Starts a new span and does not make it the current active span of the current process.
The current active Span is used as the parent of the created Span.
"""
defmacro start_span(ctx, name, opts) do
quote bind_quoted: [ctx: ctx, name: name, start_opts: opts] do
:otel_tracer.start_span(
ctx,
:opentelemetry.get_tracer(__MODULE__),
name,
Map.new(start_opts)
)
end
end
@doc """
Takes a `t:OpenTelemetry.span_ctx/0` and makes it the currently active Span.
"""
def set_current_span(span_ctx) do
:otel_tracer.set_current_span(span_ctx)
end
@doc """
Takes a `t:OpenTelemetry.ctx/0` and a `t:OpenTelemetry.span_ctx/0` and sets the Span
as the current span in the passed Context.
"""
def set_current_span(ctx, span_ctx) do
:otel_tracer.set_current_span(ctx, span_ctx)
end
@doc """
Creates a new span which is set to the currently active Span in the Context of the block.
The Span is ended automatically when the `block` completes and the Context is what it was
before the block.
See `start_span/2` and `end_span/0`.
"""
defmacro with_span(name, start_opts \\ quote(do: %{}), do: block) do
quote do
:otel_tracer.with_span(
:opentelemetry.get_tracer(__MODULE__),
unquote(name),
Map.new(unquote(start_opts)),
fn _ -> unquote(block) end
)
end
end
@doc """
Creates a new span which is set to the currently active Span in the Context of the block.
The Span is ended automatically when the `block` completes and the Context is what it was
before the block.
See `start_span/2` and `end_span/0`.
"""
defmacro with_span(ctx, name, start_opts, do: block) do
quote do
:otel_tracer.with_span(
unquote(ctx),
:opentelemetry.get_tracer(__MODULE__),
unquote(name),
Map.new(unquote(start_opts)),
fn _ -> unquote(block) end
)
end
end
@doc """
Returns the currently active `t:OpenTelemetry.span_ctx/0`.
"""
def current_span_ctx() do
:otel_tracer.current_span_ctx()
end
@doc """
Returns the `t:OpenTelemetry.span_ctx/0` active in Context `ctx`.
"""
def current_span_ctx(ctx) do
:otel_tracer.current_span_ctx(ctx)
end
@doc """
End the Span. Sets the end timestamp for the currently active Span. This has no effect on any
child Spans that may exist of this Span.
The Span in the current Context has its `is_recording` set to `false`.
"""
def end_span() do
:otel_tracer.end_span()
end
@doc """
Set an attribute with key and value on the currently active Span.
"""
@spec set_attribute(OpenTelemetry.attribute_key(), OpenTelemetry.attribute_value()) :: boolean()
def set_attribute(key, value) do
:otel_span.set_attribute(
:otel_tracer.current_span_ctx(),
key,
value
)
end
@doc """
Add a list of attributes to the currently active Span.
"""
@spec set_attributes(OpenTelemetry.attributes()) :: boolean()
def set_attributes(attributes) do
:otel_span.set_attributes(:otel_tracer.current_span_ctx(), attributes)
end
@doc """
Add an event to the currently active Span.
"""
@spec add_event(OpenTelemetry.event_name(), OpenTelemetry.attributes()) :: boolean()
def add_event(event, attributes) do
:otel_span.add_event(
:otel_tracer.current_span_ctx(),
event,
attributes
)
end
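# Sketch: record an event on the currently active span. The attribute shape
# here (a list of key/value pairs) is an assumption about what the underlying
# `:otel_span.add_event/3` call accepts:
#
#     OpenTelemetry.Tracer.add_event("cache.miss", [{"key", "user:42"}])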
@doc """
Add a list of events to the currently active Span.
"""
@spec add_events([OpenTelemetry.event()]) :: boolean()
def add_events(events) do
:otel_span.add_events(:otel_tracer.current_span_ctx(), events)
end
@doc """
Sets the Status of the currently active Span.
If used, this will override the default Span Status, which is `ok`.
"""
@spec set_status(OpenTelemetry.status()) :: boolean()
def set_status(status) do
:otel_span.set_status(:otel_tracer.current_span_ctx(), status)
end
@doc """
Updates the Span name.
It is highly discouraged to update the name of a Span after its creation. Span name is
often used to group, filter and identify the logical groups of spans. And often, filtering
logic will be implemented before the Span creation for performance reasons. Thus the name
update may interfere with this logic.
The function is called UpdateName to differentiate it from the regular
property setter. It emphasizes that this operation signifies a major change for a Span
and may lead to re-calculation of sampling or filtering decisions made previously
depending on the implementation.
"""
@spec update_name(String.t()) :: boolean()
def update_name(name) do
:otel_span.update_name(:otel_tracer.current_span_ctx(), name)
end
end | apps/opentelemetry_api/lib/open_telemetry/tracer.ex | 0.692642 | 0.502075 | tracer.ex | starcoder |
defmodule Bertex do
@moduledoc """
This is a work TOTALLY based on @mojombo and @eproxus work:
More at: https://github.com/eproxus/bert.erl and http://github.com/mojombo/bert.erl
"""
import :erlang, only: [binary_to_term: 1,
binary_to_term: 2,
term_to_binary: 1]
defprotocol Bert do
@fallback_to_any true
def encode(term)
def decode(term)
end
defimpl Bert, for: Atom do
def encode(false), do: {:bert, false}
def encode(true), do: {:bert, true}
def encode(nil), do: {:bert, nil}
def encode(atom), do: atom
def decode(atom), do: atom
end
defimpl Bert, for: List do
def encode(list) do
Enum.map(list, &Bert.encode(&1))
end
def decode(list) do
Enum.map(list, &Bert.decode(&1))
end
end
# Inspired by talentdeficit/jsex solution
defimpl Bert, for: Tuple do
def encode(tuple) do
Tuple.to_list(tuple)
|> Enum.map(&Bert.encode(&1))
|> List.to_tuple
end
def decode({:bert, nil}), do: nil
def decode({:bert, true}), do: true
def decode({:bert, false}), do: false
def decode({:bert, :dict, dict}), do: Enum.into(Bert.decode(dict), %{})
def decode({:bert, :time, mega, sec, micro}) do
unix = mega * 1000000000000 + sec * 1000000 + micro
DateTime.from_unix!(unix, :microsecond)
end
def decode(tuple) do
Tuple.to_list(tuple)
|> Enum.map(&Bert.decode(&1))
|> List.to_tuple
end
end
defimpl Bert, for: Date do
def encode(term) do
{:ok, zero} = Time.new(0, 0, 0)
{:ok, naive} = NaiveDateTime.new(term, zero)
naive
|> DateTime.from_naive!("Etc/UTC")
|> Bert.encode
end
def decode(term), do: term
end
defimpl Bert, for: NaiveDateTime do
def encode(term) do
term
|> DateTime.from_naive!("Etc/UTC")
|> Bert.encode
end
def decode(term), do: term
end
defimpl Bert, for: DateTime do
def encode(term) do
micro = DateTime.to_unix(term, :microsecond)
mega = micro |> div(1000000000000)
sec = micro |> rem(1000000000000) |> div(1000000)
micro = micro |> rem(1000000)
{:bert, :time, mega, sec, micro}
end
def decode(term), do: term
end
defimpl Bert, for: Any do
def encode(term), do: term
def decode(term), do: term
end
@doc """
iex> Bertex.encode([42, :banana, {:xy, 5, 10}, "robot", true, false])
<<131,108,0,0,0,6,97,42,100,0,6,98,97,110,97,110,97,104,3,100,0,2,120,121,97,5,97,10,109,0,0,0,5,114,111,98,111,116,104,2,100,0,4,98,101,114,116,100,0,4,116,114,117,101,104,2,100,0,4,98,101,114,116,100,0,5,102,97,108,115,101,106>>
"""
@spec encode(term) :: binary
def encode(term) do
Bert.encode(term) |> term_to_binary
end
@doc """
iex> Bertex.decode(<<131,108,0,0,0,6,97,42,100,0,6,98,97,110,97,110,97,104,3,100,0,2,120,121,97,5,97,10,109,0,0,0,5,114,111,98,111,116,104,2,100,0,4,98,101,114,116,100,0,4,116,114,117,101,104,2,100,0,4,98,101,114,116,100,0,5,102,97,108,115,101,106>>)
[42, :banana, {:xy, 5, 10}, "robot", true, false]
"""
@spec decode(binary) :: term
def decode(bin) do
binary_to_term(bin) |> Bert.decode
end
@spec safe_decode(binary) :: term
def safe_decode(bin) do
binary_to_term(bin, [:safe]) |> Bert.decode
end
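# Round-trip sketch for the DateTime encoding above:
#
#     dt = DateTime.from_unix!(1_000_000, :microsecond)
#     dt |> Bertex.encode() |> Bertex.decode()
#     #=> ~U[1970-01-01 00:00:01.000000Z]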
end | lib/bertex.ex | 0.758153 | 0.545407 | bertex.ex | starcoder |
defmodule Central.Helpers.QueryHelpers do
alias Central.Repo
# select/2 is defined below, so exclude Ecto.Query's version from the import
import Ecto.Query, warn: false, except: [select: 2, select: 3]
defmacro stddev_pop(field) do
quote do
fragment("stddev_pop(?)", unquote(field))
end
end
defmacro between(field, low, high) do
quote do
fragment("? BETWEEN ? AND ?", unquote(field), unquote(low), unquote(high))
end
end
defmacro array_remove(field, value) do
quote do
fragment("array_remove(?, ?)", unquote(field), unquote(value))
end
end
defmacro array_agg(field) do
quote do
fragment("array_agg(?)", unquote(field))
end
end
defmacro extract_year(field) do
quote do
fragment("EXTRACT(YEAR FROM ?)", unquote(field))
end
end
defmacro extract_month(field) do
quote do
fragment("EXTRACT(MONTH FROM ?)", unquote(field))
end
end
defmacro extract_week(field) do
quote do
fragment("EXTRACT(WEEK FROM ?)", unquote(field))
end
end
defmacro extract_hour(field) do
quote do
fragment("EXTRACT(HOUR FROM ?)", unquote(field))
end
end
defmacro date_trunc(period, field) do
quote do
fragment("date_trunc(?, ?)", unquote(period), unquote(field))
end
end
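# Usage sketch (table and field names are illustrative); the fragment macros
# above can be used anywhere Ecto accepts a query expression:
#
#     import Ecto.Query
#     import Central.Helpers.QueryHelpers, only: [date_trunc: 2]
#
#     from l in "logs",
#       group_by: date_trunc("day", l.inserted_at),
#       select: {date_trunc("day", l.inserted_at), count(l.id)}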
def count(table) do
Repo.aggregate(table, :count, :id)
end
@spec offset_query(Ecto.Query.t(), integer()) :: Ecto.Query.t()
def offset_query(query, amount) do
query
|> offset(^amount)
end
@spec limit_query(Ecto.Query.t(), integer() | :infinity) :: Ecto.Query.t()
def limit_query(query, :infinity), do: query
def limit_query(query, amount) do
query
|> limit(^amount)
end
@spec limit_query(Ecto.Query.t(), integer() | String.t() | nil, integer() | nil) :: Ecto.Query.t()
def limit_query(query, nil, max_amount), do: limit_query(query, max_amount)
def limit_query(query, amount, max_amount) when is_integer(amount) do
limit_query(query, min(amount, max_amount))
end
def limit_query(query, amount, max_amount) do
limit_query(query, min(amount |> String.to_integer(), max_amount))
end
@spec select(Ecto.Query.t(), String.t() | nil) :: Ecto.Query.t()
def select(query, nil), do: query
def select(query, fields) do
from stat_grids in query,
select: ^fields
end
end | lib/central/helpers/query_helpers.ex | 0.661158 | 0.538437 | query_helpers.ex | starcoder |
defmodule GoogleRoads do
@moduledoc """
Provides methods to interact with Google Roads API.
Unless otherwise noted, all the functions take the required Google
parameters as its own parameters, and all optional ones in an
`options` keyword list.
The `options` keyword can also take special entry for `headers` and
`options`, which are passed to the underlying `Request`. See the
documentation of `HTTPoison` for details.
"""
alias GoogleRoads.{Request, Response}
@type latitude :: number
@type longitude :: number
@typedoc """
A latitude/longitude pair in tuple or comma-separated string format.
"""
@type coordinate :: {latitude(), longitude()} | String.t
@typedoc """
One or more latitude/longitude pairs, separated by the pipe character ("|"),
describing the path to be snapped.
"""
@type path :: coordinate()
@type options :: keyword()
@type interpolate :: boolean
@doc """
Takes up to 100 GPS points collected along a route, and returns a similar set of data, with the points snapped to the most likely roads the vehicle was traveling along
Args:
* `path` — The path to be snapped. The path parameter accepts a
list of latitude/longitude pairs. Latitude and longitude values
should be separated by commas. Coordinates should be separated by
the pipe character: "|".
For example: path=60.170880,24.942795|60.170879,24.942796|60.170877,24.942796.
Options:
* `interpolate` — Whether to interpolate a path to include all points forming the full road-geometry. When true, additional interpolated points will also be returned, resulting in a path that smoothly follows the geometry of the road, even around corners and through tunnels. Interpolated paths will most likely contain more points than the original path. Defaults to `false`.
This function returns `{:ok, body}` if the request is successful and Google
returns data. It returns `{:error, error}` when there are HTTP errors, or
`{:error, status, error_message}` when the request succeeds but Google returns
a status code other than "OK".
## Examples
# Snap to Roads with an invalid API key
iex> {:error, status, error_message} = GoogleRoads.snap_to_roads("-35.27801,149.12958|-35.28032,149.12907|-35.28099,149.12929", true, [
...> key: "invalid key"
...> ])
iex> status
"INVALID_ARGUMENT"
iex> error_message
"API key not valid. Please pass a valid API key."
iex> {:ok, result} = GoogleRoads.snap_to_roads("-35.27801,149.12958|-35.28032,149.12907|-35.28099,149.12929", true)
iex> match?(%{"snappedPoints" => _}, result)
true
iex> is_list(result["snappedPoints"])
true
iex> Enum.count(result["snappedPoints"])
26
"""
@spec snap_to_roads(path(), interpolate(), options()) :: Response.t()
def snap_to_roads(path, interpolate \\ false, options \\ []) do
params = options
|> Keyword.merge([
path: path,
interpolate: interpolate
])
GoogleRoads.get("snapToRoads", params)
end
@doc """
Direct request to Google Roads API endpoint.
Instead of relying on the functionality this module provides, you can
use this function to make direct request to the Google Roads API.
It takes an endpoint string, and a keyword list of parameters.
## Examples
iex> {:error, status, error_message} = GoogleRoads.get("snapToRoads", [
...> path: "-35.27801,149.12958|-35.28032,149.12907|-35.28099,149.12929",
...> interpolate: true,
...> key: "invalid key",
...> ])
iex> status
"INVALID_ARGUMENT"
iex> error_message
"API key not valid. Please pass a valid API key."
iex> {:ok, result} = GoogleRoads.get("snapToRoads", [
...> path: "-35.27801,149.12958|-35.28032,149.12907|-35.28099,149.12929",
...> interpolate: true,
...> ])
iex> match?(%{"snappedPoints" => _}, result)
true
iex> {:error, reason} = GoogleRoads.get("snapToRoads", [
...> path: "-35.27801,149.12958|-35.28032,149.12907|-35.28099,149.12929",
...> interpolate: true,
...> headers: [{"Accept-Language", "pt-BR"}],
...> options: [timeout: 0]
...> ])
iex> reason
:connect_timeout
"""
@spec get(String.t, keyword()) :: Response.t()
def get(endpoint, params) do
Request.get(endpoint, params)
|> Response.wrap
end
end | lib/google_roads.ex | 0.907212 | 0.61891 | google_roads.ex | starcoder |
defmodule Mix.Tasks.Format do
use Mix.Task
@shortdoc "Formats the given files/patterns"
@moduledoc """
Formats the given files and patterns.
mix format mix.exs "lib/**/*.{ex,exs}" "test/**/*.{ex,exs}"
If any of the files is `-`, then the output is read from stdin
and written to stdout.
## Formatting options
The formatter will read a `.formatter.exs` in the current directory for
formatter configuration. Evaluating this file should return a keyword list.
Here is an example `.formatter.exs` that works as a starting point:
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]
Besides the options listed in `Code.format_string!/2`, the `.formatter.exs`
supports the following options:
* `:inputs` (a list of paths and patterns) - specifies the default inputs
to be used by this task. For example, `["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]`.
Patterns are expanded with `Path.wildcard/2`.
* `:subdirectories` (a list of paths and patterns) - specifies subdirectories
that have their own formatting rules. Each subdirectory should have a
`.formatter.exs` that configures how entries in that subdirectory should be
formatted as. Configuration between `.formatter.exs` are not shared nor
inherited. If a `.formatter.exs` lists "lib/app" as a subdirectory, the rules
in `.formatter.exs` won't be available in `lib/app/.formatter.exs`.
Note that the parent `.formatter.exs` must not specify files inside the "lib/app"
subdirectory in its `:inputs` configuration. If this happens, the behaviour of
which formatter configuration will be picked is unspecified.
* `:import_deps` (a list of dependencies as atoms) - specifies a list
of dependencies whose formatter configuration will be imported.
When specified, the formatter should run in the same directory as
the `mix.exs` file that defines those dependencies. See the "Importing
dependencies configuration" section below for more information.
* `:export` (a keyword list) - specifies formatter configuration to be exported.
See the "Importing dependencies configuration" section below.
## Task-specific options
* `--check-formatted` - checks that the file is already formatted.
This is useful in pre-commit hooks and CI scripts if you want to
reject contributions with unformatted code. However keep in mind
that the formatted output may differ between Elixir versions as
improvements and fixes are applied to the formatter.
* `--check-equivalent` - checks if the files after formatting have the
same AST as before formatting. If the ASTs are not equivalent, it is
a bug in the code formatter. This option is useful if you suspect you
have run into a formatter bug and would like confirmation.
* `--dry-run` - does not save files after formatting.
* `--dot-formatter` - path to the file with formatter configuration.
Defaults to `.formatter.exs` if one is available. See the "`.formatter.exs`"
section for more information.
If any of the `--check-*` options are given and a check fails, the formatted
contents won't be written to disk nor printed to standard output.
## When to format code
We recommend that developers format code directly in their editors, either
automatically when saving a file or via an explicit command or key binding. If
such an option is not yet available in your editor of choice, adding the required
integration is usually a matter of invoking:
cd $project && mix format $file
where `$file` refers to the current file and `$project` is the root of your
project.
It is also possible to format code across the whole project by passing a list
of patterns and files to `mix format`, as shown at the top of this task
documentation. This list can also be set in the `.formatter.exs` under the
`:inputs` key.
## Importing dependencies configuration
This task supports importing formatter configuration from dependencies.
A dependency that wants to export formatter configuration needs to have a
`.formatter.exs` file at the root of the project. In this file, the dependency
can export a `:export` option with configuration to export. For now, only one
option is supported under `:export`: `:locals_without_parens` (whose value has
the same shape as the value of the `:locals_without_parens` in `Code.format_string!/2`).
The functions listed under `:locals_without_parens` in the `:export` option of
a dependency can be imported in a project by listing that dependency in the
`:import_deps` option of the formatter configuration file of the project.
For example, consider a project `my_app` that depends on `my_dep`.
`my_dep` wants to export some configuration, so `my_dep/.formatter.exs`
would look like this:
# my_dep/.formatter.exs
[
# Regular formatter configuration for my_dep
# ...
export: [
locals_without_parens: [some_dsl_call: 2, some_dsl_call: 3]
]
]
In order to import configuration, `my_app`'s `.formatter.exs` would look like
this:
# my_app/.formatter.exs
[
import_deps: [:my_dep]
]
"""
@switches [
check_equivalent: :boolean,
check_formatted: :boolean,
dot_formatter: :string,
dry_run: :boolean
]
@manifest "cached_dot_formatter"
@manifest_vsn 1
@impl true
def run(args) do
{opts, args} = OptionParser.parse!(args, strict: @switches)
{dot_formatter, formatter_opts} = eval_dot_formatter(opts)
{formatter_opts_and_subs, _sources} =
eval_deps_and_subdirectories(dot_formatter, [], formatter_opts, [dot_formatter])
args
|> expand_args(dot_formatter, formatter_opts_and_subs)
|> Task.async_stream(&format_file(&1, opts), ordered: false, timeout: 30000)
|> Enum.reduce({[], [], []}, &collect_status/2)
|> check!()
end
@doc """
Returns formatter options to be used for the given file.
"""
def formatter_opts_for_file(file, opts \\ []) do
{dot_formatter, formatter_opts} = eval_dot_formatter(opts)
{formatter_opts_and_subs, _sources} =
eval_deps_and_subdirectories(dot_formatter, [], formatter_opts, [dot_formatter])
split = file |> Path.relative_to_cwd() |> Path.split()
find_formatter_opts_for_file(split, formatter_opts_and_subs)
end
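# Sketch: look up the effective formatter options for a file in the current
# project (the exact keys depend on the project's .formatter.exs):
#
#     Mix.Tasks.Format.formatter_opts_for_file("lib/my_app/foo.ex")
#     #=> [inputs: [...], locals_without_parens: [...]]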
defp eval_dot_formatter(opts) do
cond do
dot_formatter = opts[:dot_formatter] ->
{dot_formatter, eval_file_with_keyword_list(dot_formatter)}
File.regular?(".formatter.exs") ->
{".formatter.exs", eval_file_with_keyword_list(".formatter.exs")}
true ->
{".formatter.exs", []}
end
end
# This function reads exported configuration from the imported
# dependencies and subdirectories and deals with caching the result
# of reading such configuration in a manifest file.
defp eval_deps_and_subdirectories(dot_formatter, prefix, formatter_opts, sources) do
deps = Keyword.get(formatter_opts, :import_deps, [])
subs = Keyword.get(formatter_opts, :subdirectories, [])
if not is_list(deps) do
Mix.raise("Expected :import_deps to return a list of dependencies, got: #{inspect(deps)}")
end
if not is_list(subs) do
Mix.raise("Expected :subdirectories to return a list of directories, got: #{inspect(subs)}")
end
if deps == [] and subs == [] do
{{formatter_opts, []}, sources}
else
manifest = Path.join(Mix.Project.manifest_path(), @manifest)
maybe_cache_in_manifest(dot_formatter, manifest, fn ->
{subdirectories, sources} = eval_subs_opts(subs, prefix, sources)
{{eval_deps_opts(formatter_opts, deps), subdirectories}, sources}
end)
end
end
defp maybe_cache_in_manifest(dot_formatter, manifest, fun) do
cond do
is_nil(Mix.Project.get()) or dot_formatter != ".formatter.exs" -> fun.()
entry = read_manifest(manifest) -> entry
true -> write_manifest!(manifest, fun.())
end
end
defp read_manifest(manifest) do
with {:ok, binary} <- File.read(manifest),
{:ok, {@manifest_vsn, entry, sources}} <- safe_binary_to_term(binary),
expanded_sources = Enum.flat_map(sources, &Path.wildcard(&1, match_dot: true)),
false <- Mix.Utils.stale?([Mix.Project.config_mtime() | expanded_sources], [manifest]) do
{entry, sources}
else
_ -> nil
end
end
defp safe_binary_to_term(binary) do
{:ok, :erlang.binary_to_term(binary)}
rescue
_ -> :error
end
defp write_manifest!(manifest, {entry, sources}) do
File.mkdir_p!(Path.dirname(manifest))
File.write!(manifest, :erlang.term_to_binary({@manifest_vsn, entry, sources}))
{entry, sources}
end
defp eval_deps_opts(formatter_opts, []) do
formatter_opts
end
defp eval_deps_opts(formatter_opts, deps) do
deps_paths = Mix.Project.deps_paths()
parenless_calls =
for dep <- deps,
dep_path = assert_valid_dep_and_fetch_path(dep, deps_paths),
dep_dot_formatter = Path.join(dep_path, ".formatter.exs"),
File.regular?(dep_dot_formatter),
dep_opts = eval_file_with_keyword_list(dep_dot_formatter),
parenless_call <- dep_opts[:export][:locals_without_parens] || [],
uniq: true,
do: parenless_call
Keyword.update(
formatter_opts,
:locals_without_parens,
parenless_calls,
&(&1 ++ parenless_calls)
)
end
defp eval_subs_opts(subs, prefix, sources) do
{subs, sources} =
Enum.flat_map_reduce(subs, sources, fn sub, sources ->
prefix = Path.join(prefix ++ [sub])
{Path.wildcard(prefix), [Path.join(prefix, ".formatter.exs") | sources]}
end)
Enum.flat_map_reduce(subs, sources, fn sub, sources ->
sub_formatter = Path.join(sub, ".formatter.exs")
if File.exists?(sub_formatter) do
formatter_opts = eval_file_with_keyword_list(sub_formatter)
{formatter_opts_and_subs, sources} =
eval_deps_and_subdirectories(:in_memory, [sub], formatter_opts, sources)
{[{sub, formatter_opts_and_subs}], sources}
else
{[], sources}
end
end)
end
defp assert_valid_dep_and_fetch_path(dep, deps_paths) when is_atom(dep) do
case Map.fetch(deps_paths, dep) do
{:ok, path} ->
if File.dir?(path) do
path
else
Mix.raise(
"Unavailable dependency #{inspect(dep)} given to :import_deps in the formatter configuration. " <>
"The dependency cannot be found in the file system, please run \"mix deps.get\" and try again"
)
end
:error ->
Mix.raise(
"Unknown dependency #{inspect(dep)} given to :import_deps in the formatter configuration. " <>
"The dependency is not listed in your mix.exs for environment #{inspect(Mix.env())}"
)
end
end
defp assert_valid_dep_and_fetch_path(dep, _deps_paths) do
Mix.raise("Dependencies in :import_deps should be atoms, got: #{inspect(dep)}")
end
defp eval_file_with_keyword_list(path) do
{opts, _} = Code.eval_file(path)
unless Keyword.keyword?(opts) do
Mix.raise("Expected #{inspect(path)} to return a keyword list, got: #{inspect(opts)}")
end
opts
end
defp expand_args([], dot_formatter, formatter_opts_and_subs) do
if no_entries_in_formatter_opts?(formatter_opts_and_subs) do
Mix.raise(
"Expected one or more files/patterns to be given to mix format " <>
"or for a .formatter.exs to exist with an :inputs or :subdirectories key"
)
end
dot_formatter
|> expand_dot_inputs([], formatter_opts_and_subs, %{})
|> Enum.map(fn {file, {_dot_formatter, formatter_opts}} -> {file, formatter_opts} end)
end
defp expand_args(files_and_patterns, _dot_formatter, {formatter_opts, subs}) do
files =
for file_or_pattern <- files_and_patterns,
file <- stdin_or_wildcard(file_or_pattern),
uniq: true,
do: file
if files == [] do
Mix.raise(
"Could not find a file to format. The files/patterns given to command line " <>
"did not point to any existing file. Got: #{inspect(files_and_patterns)}"
)
end
for file <- files do
if file == :stdin do
{file, formatter_opts}
else
split = file |> Path.relative_to_cwd() |> Path.split()
{file, find_formatter_opts_for_file(split, {formatter_opts, subs})}
end
end
end
defp expand_dot_inputs(dot_formatter, prefix, {formatter_opts, subs}, acc) do
if no_entries_in_formatter_opts?({formatter_opts, subs}) do
Mix.raise("Expected :inputs or :subdirectories key in #{dot_formatter}")
end
map =
for input <- List.wrap(formatter_opts[:inputs]),
file <- Path.wildcard(Path.join(prefix ++ [input]), match_dot: true),
do: {expand_relative_to_cwd(file), {dot_formatter, formatter_opts}},
into: %{}
acc =
Map.merge(acc, map, fn file, {dot_formatter1, _}, {dot_formatter2, formatter_opts} ->
Mix.shell().error(
"Both #{dot_formatter1} and #{dot_formatter2} specify the file " <>
"#{Path.relative_to_cwd(file)} in their :inputs option. To resolve the " <>
"conflict, the configuration in #{dot_formatter1} will be ignored. " <>
"Please change the list of :inputs in one of the formatter files so only " <>
"one of them matches #{Path.relative_to_cwd(file)}"
)
{dot_formatter2, formatter_opts}
end)
Enum.reduce(subs, acc, fn {sub, formatter_opts_and_subs}, acc ->
sub_formatter = Path.join(sub, ".formatter.exs")
expand_dot_inputs(sub_formatter, [sub], formatter_opts_and_subs, acc)
end)
end
defp expand_relative_to_cwd(path) do
case File.cwd() do
{:ok, cwd} -> Path.expand(path, cwd)
_ -> path
end
end
defp find_formatter_opts_for_file(split, {formatter_opts, subs}) do
Enum.find_value(subs, formatter_opts, fn {sub, formatter_opts_and_subs} ->
if List.starts_with?(split, Path.split(sub)) do
find_formatter_opts_for_file(split, formatter_opts_and_subs)
end
end)
end
defp no_entries_in_formatter_opts?({formatter_opts, subs}) do
is_nil(formatter_opts[:inputs]) and subs == []
end
defp stdin_or_wildcard("-"), do: [:stdin]
defp stdin_or_wildcard(path), do: path |> Path.expand() |> Path.wildcard(match_dot: true)
defp read_file(:stdin) do
{IO.stream(:stdio, :line) |> Enum.to_list() |> IO.iodata_to_binary(), file: "stdin"}
end
defp read_file(file) do
{File.read!(file), file: file}
end
defp format_file({file, formatter_opts}, task_opts) do
{input, extra_opts} = read_file(file)
output = IO.iodata_to_binary([Code.format_string!(input, extra_opts ++ formatter_opts), ?\n])
check_equivalent? = Keyword.get(task_opts, :check_equivalent, false)
check_formatted? = Keyword.get(task_opts, :check_formatted, false)
dry_run? = Keyword.get(task_opts, :dry_run, false)
cond do
check_equivalent? and not equivalent?(input, output) ->
{:not_equivalent, file}
check_formatted? ->
if input == output, do: :ok, else: {:not_formatted, file}
dry_run? ->
:ok
true ->
write_or_print(file, input, output)
end
rescue
exception ->
{:exit, file, exception, __STACKTRACE__}
end
defp write_or_print(file, input, output) do
cond do
file == :stdin -> IO.write(output)
input == output -> :ok
true -> File.write!(file, output)
end
:ok
end
defp collect_status({:ok, :ok}, acc), do: acc
defp collect_status({:ok, {:exit, _, _, _} = exit}, {exits, not_equivalent, not_formatted}) do
{[exit | exits], not_equivalent, not_formatted}
end
defp collect_status({:ok, {:not_equivalent, file}}, {exits, not_equivalent, not_formatted}) do
{exits, [file | not_equivalent], not_formatted}
end
defp collect_status({:ok, {:not_formatted, file}}, {exits, not_equivalent, not_formatted}) do
{exits, not_equivalent, [file | not_formatted]}
end
defp check!({[], [], []}) do
:ok
end
defp check!({[{:exit, :stdin, exception, stacktrace} | _], _not_equivalent, _not_formatted}) do
Mix.shell().error("mix format failed for stdin")
reraise exception, stacktrace
end
defp check!({[{:exit, file, exception, stacktrace} | _], _not_equivalent, _not_formatted}) do
Mix.shell().error("mix format failed for file: #{Path.relative_to_cwd(file)}")
reraise exception, stacktrace
end
defp check!({_exits, [_ | _] = not_equivalent, _not_formatted}) do
Mix.raise("""
mix format failed due to --check-equivalent.
The following files were not equivalent:
#{to_bullet_list(not_equivalent)}
Please report this bug with the input files at github.com/elixir-lang/elixir/issues
""")
end
defp check!({_exits, _not_equivalent, [_ | _] = not_formatted}) do
Mix.raise("""
mix format failed due to --check-formatted.
The following files were not formatted:
#{to_bullet_list(not_formatted)}
""")
end
defp to_bullet_list(files) do
Enum.map_join(files, "\n", &" * #{&1}")
end
defp equivalent?(input, output) do
Code.Formatter.equivalent(input, output) == :ok
end
end | lib/mix/lib/mix/tasks/format.ex | 0.924167 | 0.583263 | format.ex | starcoder |
defmodule Cloak.AES.GCM do
@moduledoc """
A `Cloak.Cipher` which encrypts values with the AES cipher in GCM (block) mode.
Internally relies on Erlang's `:crypto.block_encrypt/4`.
## Configuration
In addition to the normal `:default` and `:tag` configuration options, this
cipher takes a `:keys` option to support using multiple AES keys at the same time.
config :cloak, Cloak.AES.GCM,
default: true,
tag: "GCM",
keys: [
%{tag: <<1>>, key: Base.decode64!("..."), default: false},
%{tag: <<2>>, key: Base.decode64!("..."), default: true}
]
If you want to store your key in the environment variable, you can use
`{:system, "VAR"}` syntax:
config :cloak, Cloak.AES.GCM,
default: true,
tag: "GCM",
keys: [
%{tag: <<1>>, key: {:system, "CLOAK_KEY_PRIMARY"}, default: true},
%{tag: <<2>>, key: {:system, "CLOAK_KEY_SECONDARY"}, default: false}
]
If you want to store your key in the OTP app environment, you can use
`{:app_env, :otp_app, :env_key}` syntax:
config :cloak, Cloak.AES.GCM,
default: true,
tag: "GCM",
keys: [
%{tag: <<1>>, key: {:app_env, :my_app, :env_primary_key}, default: true},
%{tag: <<2>>, key: {:app_env, :my_app, :env_secondary_key}, default: false}
]
### Key Configuration Options
A key may have the following attributes:
- `:tag` - The ID of the key. This is included in the ciphertext, and should be
only a single byte. See `encrypt/2` for more details.
- `:key` - The AES key to use, in binary. If you store your keys in Base64
format you will need to decode them first. The key must be 128, 192, or 256 bits
long (16, 24 or 32 bytes, respectively).
- `:default` - Boolean. Whether to use this key by default or not.
## Upgrading to a New Key
To upgrade to a new key, simply add the key to the `:keys` array, and set it
as `default: true`.
keys: [
%{tag: <<1>>, key: "old key", default: false},
%{tag: <<2>>, key: "new key", default: true}
]
After this, your new key will automatically be used for all new encryption,
while the old key will be used to decrypt legacy values.
To migrate everything proactively to the new key, see the `mix cloak.migrate`
mix task defined in `Mix.Tasks.Cloak.Migrate`.
"""
import Cloak.Tags.Encoder
import Cloak.Tags.Decoder
@behaviour Cloak.Cipher
@aad "AES256GCM"
@doc """
Callback implementation for `Cloak.Cipher.encrypt`. Encrypts a value using
AES in GCM mode.
Generates a random IV for every encryption, and prepends the key tag, IV, and Ciphertag to
the beginning of the ciphertext. The format can be diagrammed like this:
+----------------------------------------------------------+----------------------+
| HEADER | BODY |
+-------------------+---------------+----------------------+----------------------+
| Key Tag (n bytes) | IV (16 bytes) | Ciphertag (16 bytes) | Ciphertext (n bytes) |
+-------------------+---------------+----------------------+----------------------+
When this function is called through `Cloak.encrypt/1`, the module's `:tag`
will be added, and the resulting binary will be in this format:
+---------------------------------------------------------------------------------+----------------------+
| HEADER | BODY |
+----------------------+-------------------+---------------+----------------------+----------------------+
| Module Tag (n bytes) | Key Tag (n bytes) | IV (16 bytes) | Ciphertag (16 bytes) | Ciphertext (n bytes) |
+----------------------+-------------------+---------------+----------------------+----------------------+
The header information allows Cloak to know enough about each ciphertext to
ensure a successful decryption. See `decrypt/1` for more details.
**Important**: Because a random IV is used for every encryption, `encrypt/2`
will not produce the same ciphertext twice for the same value.
### Parameters
- `plaintext` - Any type of value to encrypt.
- `key_tag` - Optional. The tag of the key to use for encryption.
### Examples
iex> encrypt("The charge against me is a...") != "The charge against me is a..."
true
iex> encrypt("The charge against me is a...") != encrypt("The charge against me is a...")
true
"""
def encrypt(plain_text, key_tag \\ nil) do
perform_encryption(plain_text, iv(), find_key(key_tag))
end
defp perform_encryption(plaintext, iv, key) do
{ciphertext, ciphertag} =
:crypto.block_encrypt(
:aes_gcm,
Cloak.Ciphers.Util.key_value(key),
iv,
{@aad, plaintext}
)
encode(key.tag) <> iv <> ciphertag <> ciphertext
end
@doc """
Callback implementation for `Cloak.Cipher.decrypt/2`. Decrypts a value
encrypted with AES in GCM mode.
Uses the key tag to find the correct key for decryption, and the IV and Ciphertag included
in the header to decrypt the body of the ciphertext.
### Parameters
- `ciphertext` - Binary ciphertext generated by `encrypt/2`.
### Examples
iex> encrypt("Hello") |> decrypt
"Hello"
"""
def decrypt(message) do
%{key_tag: key_tag, remainder: remainder} = decode(message)
perform_decryption(
Cloak.Ciphers.Util.key_value(find_key(key_tag)),
remainder
)
end
defp perform_decryption(key, <<iv::binary-16, ciphertag::binary-16, ciphertext::binary>>) do
:crypto.block_decrypt(:aes_gcm, key, iv, {@aad, ciphertext, ciphertag})
end
defp iv, do: :crypto.strong_rand_bytes(16)
defp find_key(key_tag) do
Cloak.Ciphers.Util.config(__MODULE__, key_tag) || default_key()
end
defp default_key, do: Cloak.Ciphers.Util.default_key(__MODULE__)
@doc """
Callback implementation for `Cloak.Cipher.version/0`. Returns the tag of the
current default key.
"""
def version, do: default_key().tag
end | lib/cloak/ciphers/aes_gcm.ex | 0.900214 | 0.541045 | aes_gcm.ex | starcoder |
defmodule Monet.Writer do
@moduledoc """
Prepares and sends messages to the server. Should not be called directly from
outside this library.
"""
import Bitwise, only: [bsl: 2, bor: 2] # bor takes two arguments; the original listed arity 1
import Kernel, except: [send: 2] # resolve conflict
import Monet.Connection, only: [connection: 2]
@doc """
Sends `data` to the server.
MonetDB only accepts individual frames up to 8190 bytes. If our message is larger
than this, it needs to be broken up.
Each frame has a 2 byte header. 1 bit of the header is used to indicate if this
is the final frame of the message or not. The rest is used for the length.
"""
def send(conn, data) do
len = :erlang.iolist_size(data)
socket = connection(conn, :socket)
case len > 8190 do
true ->
header = <<252, 63>> # max length not fin, aka: bor(bsl(8190, 1), 0)
<<data::bytes-size(8190), rest::binary>> = :erlang.iolist_to_binary(data)
with :ok <- do_send(socket, [header, data]) do
send(conn, rest)
end
false ->
header = <<bor(bsl(len, 1), 1)::little-16>>
do_send(socket, [header, data])
end
end
defp do_send(socket, data) do
case :gen_tcp.send(socket, data) do
:ok -> :ok
{:error, err} -> {:error, Monet.Error.new(:network, err)}
end
end
@doc """
Sends a command to the server. Commands appear to be queries with just an empty
response.
"""
def command(conn, command) do
send(conn, ["X", command, "\n"])
end
@doc """
Sends a query to the server. Except for a very few things that are considered
"commands", almost everything is a query
"""
def query(conn, query) do
send(conn, [?s, query, ?;])
end
@doc """
Encodes a list of values to be sent as part of a prepare + exec flow. The types
parameter is parsed from the response of the prepare statement. See
Monet.Prepared for more information.
"""
def encode(values, types, acc \\ [])
def encode([value], [type], acc), do: [acc, encode_value(value, type)]
def encode([value | values], [type | types], acc), do: encode(values, types, [acc, encode_value(value, type), ?,,])
# should not be here, wrong number of values, let the server handle it
def encode(_, _, acc), do: acc
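# Sketch: encoding one row of values for a prepared statement. The type atoms
# come from parsing the server's prepare response; the ones shown here are
# illustrative:
#
#     iolist = Monet.Writer.encode([42, "abc", nil], [:int, :varchar, :int])
#     IO.iodata_to_binary(iolist)
#     #=> "42,'abc',NULL"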
defp encode_value(nil, _type), do: "NULL"
defp encode_value(f, _) when is_float(f), do: Float.to_string(f)
defp encode_value(n, _) when is_integer(n), do: Integer.to_string(n)
defp encode_value(%Decimal{} = d, _), do: Decimal.to_string(d)
defp encode_value(true, _), do: "true"
defp encode_value(false, _), do: "false"
defp encode_value(<<data::binary>>, :blob), do: ["blob '", Base.encode16(data), ?']
defp encode_value(<<data::binary>>, :json), do: ["json '", encode_string(data), ?']
defp encode_value(<<data::binary>>, :uuid), do: ["uuid '", data, ?']
defp encode_value(<<data::binary>>, _), do: [?', encode_string(data), ?']
defp encode_value(%Time{} = t, {:time, 3}), do: ["time(3) '", Time.to_string(t), ?']
defp encode_value(%Time{} = t, {:time, 6}), do: ["time(6) '", Time.to_string(t), ?']
defp encode_value(%Time{} = t, _), do: ["time '", Time.to_string(t), ?']
defp encode_value(%Date{} = t, _), do: ["date '", Date.to_string(t), ?']
defp encode_value(%NaiveDateTime{} = t, {:time, 3}), do: ["timestamp(3) '", NaiveDateTime.to_iso8601(t), ?']
defp encode_value(%NaiveDateTime{} = t, {:time, 6}), do: ["timestamp(6) '", NaiveDateTime.to_iso8601(t), ?']
defp encode_value(%NaiveDateTime{} = t, _), do: ["timestamp '", NaiveDateTime.to_iso8601(t), ?']
defp encode_value(%DateTime{} = t, {:time, 3}), do: ["timestamptz(3) '", DateTime.to_iso8601(t), ?']
defp encode_value(%DateTime{} = t, {:time, 6}), do: ["timestamptz(6) '", DateTime.to_iso8601(t), ?']
defp encode_value(%DateTime{} = t, _), do: ["timestamptz '", DateTime.to_iso8601(t), ?']
def encode_string(data) do
data
|> String.replace("\\", "\\\\")
|> String.replace("\'", "\\'")
end
end | lib/writer.ex | 0.605333 | 0.458167 | writer.ex | starcoder |
defmodule AWS.Personalize do
@moduledoc """
Amazon Personalize is a machine learning service that makes it easy to add
individualized recommendations to customers.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2018-05-22",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "personalize",
global?: false,
protocol: "json",
service_id: "Personalize",
signature_version: "v4",
signing_name: "personalize",
target_prefix: "AmazonPersonalize"
}
end
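# Usage sketch (the `%AWS.Client{}` field names are assumed from the
# aws-elixir client library; the ARN and credentials are illustrative):
#
#     client = %AWS.Client{
#       access_key_id: "AKIA...",
#       secret_access_key: "...",
#       region: "us-east-1"
#     }
#
#     {:ok, result, _http_response} =
#       AWS.Personalize.describe_campaign(client, %{"campaignArn" => campaign_arn})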
@doc """
Creates a batch inference job.
The operation can handle up to 50 million records and the input file must be in
JSON format. For more information, see `recommendations-batch`.
"""
def create_batch_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateBatchInferenceJob", input, options)
end
@doc """
Creates a campaign by deploying a solution version.
When a client calls the
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html) and
[GetPersonalizedRanking](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetPersonalizedRanking.html)
APIs, a campaign is specified in the request.
## Minimum Provisioned TPS and Auto-Scaling
A transaction is a single `GetRecommendations` or `GetPersonalizedRanking` call.
Transactions per second (TPS) is the throughput and unit of billing for Amazon
Personalize. The minimum provisioned TPS (`minProvisionedTPS`) specifies the
baseline throughput provisioned by Amazon Personalize, and thus, the minimum
billing charge.
If your TPS increases beyond `minProvisionedTPS`, Amazon Personalize auto-scales
the provisioned capacity up and down, but never below `minProvisionedTPS`.
There's a short time delay while the capacity is increased that might cause loss
of transactions.
The actual TPS used is calculated as the average requests/second within a
5-minute window. You pay for maximum of either the minimum provisioned TPS or
the actual TPS. We recommend starting with a low `minProvisionedTPS`, track your
usage using Amazon CloudWatch metrics, and then increase the `minProvisionedTPS`
as necessary.
## Status
A campaign can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the campaign status, call `DescribeCampaign`.
Wait until the `status` of the campaign is `ACTIVE` before asking the campaign
for recommendations.
## Related APIs
* `ListCampaigns`
* `DescribeCampaign`
* `UpdateCampaign`
* `DeleteCampaign`
"""
def create_campaign(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateCampaign", input, options)
end
@doc """
Creates an empty dataset and adds it to the specified dataset group.
Use `CreateDatasetImportJob` to import your training data to a dataset.
There are three types of datasets:
* Interactions
* Items
* Users
Each dataset type has an associated schema with required field types. Only the
`Interactions` dataset is required in order to train a model (also referred to
as creating a solution).
A dataset can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the status of the dataset, call `DescribeDataset`.
## Related APIs
* `CreateDatasetGroup`
* `ListDatasets`
* `DescribeDataset`
* `DeleteDataset`
"""
def create_dataset(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDataset", input, options)
end
@doc """
Creates an empty dataset group.
A dataset group contains related datasets that supply data for training a model.
A dataset group can contain at most three datasets, one for each type of
dataset:
* Interactions
* Items
* Users
To train a model (create a solution), a dataset group that contains an
`Interactions` dataset is required. Call `CreateDataset` to add a dataset to the
group.
A dataset group can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING
To get the status of the dataset group, call `DescribeDatasetGroup`. If the
status shows as CREATE FAILED, the response includes a `failureReason` key,
which describes why the creation failed.
You must wait until the `status` of the dataset group is `ACTIVE` before adding
a dataset to the group.
You can specify an AWS Key Management Service (KMS) key to encrypt the datasets
in the group. If you specify a KMS key, you must also include an AWS Identity
and Access Management (IAM) role that has permission to access the key.
## APIs that require a dataset group ARN in the request
* `CreateDataset`
* `CreateEventTracker`
* `CreateSolution`
## Related APIs
* `ListDatasetGroups`
* `DescribeDatasetGroup`
* `DeleteDatasetGroup`
"""
def create_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDatasetGroup", input, options)
end
@doc """
Creates a job that imports training data from your data source (an Amazon S3
bucket) to an Amazon Personalize dataset.
To allow Amazon Personalize to import the training data, you must specify an AWS
Identity and Access Management (IAM) role that has permission to read from the
data source, as Amazon Personalize makes a copy of your data and processes it in
an internal AWS system.
The dataset import job replaces any existing data in the dataset that you
imported in bulk.
## Status
A dataset import job can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
To get the status of the import job, call `DescribeDatasetImportJob`, providing
the Amazon Resource Name (ARN) of the dataset import job. The dataset import is
complete when the status shows as ACTIVE. If the status shows as CREATE FAILED,
the response includes a `failureReason` key, which describes why the job failed.
Importing takes time. You must wait until the status shows as ACTIVE before
training a model using the dataset.
## Related APIs
* `ListDatasetImportJobs`
* `DescribeDatasetImportJob`
"""
def create_dataset_import_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDatasetImportJob", input, options)
end
@doc """
Creates an event tracker that you use when adding event data to a specified
dataset group using the
[PutEvents](https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html) API.
Only one event tracker can be associated with a dataset group. You will get an
error if you call `CreateEventTracker` using the same dataset group as an
existing event tracker.
When you create an event tracker, the response includes a tracking ID, which you
pass as a parameter when you use the
[PutEvents](https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html)
operation. Amazon Personalize then appends the event data to the Interactions
dataset of the dataset group you specify in your event tracker.
The event tracker can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the status of the event tracker, call `DescribeEventTracker`.
The event tracker must be in the ACTIVE state before using the tracking ID.
## Related APIs
* `ListEventTrackers`
* `DescribeEventTracker`
* `DeleteEventTracker`
"""
def create_event_tracker(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateEventTracker", input, options)
end
@doc """
Creates a recommendation filter.
For more information, see `filter`.
"""
def create_filter(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateFilter", input, options)
end
@doc """
Creates an Amazon Personalize schema from the specified schema string.
The schema you create must be in Avro JSON format.
Amazon Personalize recognizes three schema variants. Each schema is associated
with a dataset type and has a set of required field and keywords. You specify a
schema when you call `CreateDataset`.
## Related APIs
* `ListSchemas`
* `DescribeSchema`
* `DeleteSchema`
"""
def create_schema(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateSchema", input, options)
end
@doc """
Creates the configuration for training a model.
A trained model is known as a solution. After the configuration is created, you
train the model (create a solution) by calling the `CreateSolutionVersion`
operation. Every time you call `CreateSolutionVersion`, a new version of the
solution is created.
After creating a solution version, you check its accuracy by calling
`GetSolutionMetrics`. When you are satisfied with the version, you deploy it
using `CreateCampaign`. The campaign provides recommendations to a client
through the
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html)
API.
To train a model, Amazon Personalize requires training data and a recipe. The
training data comes from the dataset group that you provide in the request. A
recipe specifies the training algorithm and a feature transformation. You can
specify one of the predefined recipes provided by Amazon Personalize.
Alternatively, you can specify `performAutoML` and Amazon Personalize will
analyze your data and select the optimum USER_PERSONALIZATION recipe for you.
Amazon Personalize doesn't support configuring the `hpoObjective` for solution
hyperparameter optimization at this time.
## Status
A solution can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the status of the solution, call `DescribeSolution`. Wait until the
status shows as ACTIVE before calling `CreateSolutionVersion`.
## Related APIs
* `ListSolutions`
* `CreateSolutionVersion`
* `DescribeSolution`
* `DeleteSolution`
* `ListSolutionVersions`
* `DescribeSolutionVersion`
"""
def create_solution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateSolution", input, options)
end
@doc """
Trains or retrains an active solution.
A solution is created using the `CreateSolution` operation and must be in the
ACTIVE state before calling `CreateSolutionVersion`. A new version of the
solution is created every time you call this operation.
## Status
A solution version can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
To get the status of the version, call `DescribeSolutionVersion`. Wait until the
status shows as ACTIVE before calling `CreateCampaign`.
If the status shows as CREATE FAILED, the response includes a `failureReason`
key, which describes why the job failed.
## Related APIs
* `ListSolutionVersions`
* `DescribeSolutionVersion`
* `ListSolutions`
* `CreateSolution`
* `DescribeSolution`
* `DeleteSolution`
"""
def create_solution_version(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateSolutionVersion", input, options)
end
@doc """
Removes a campaign by deleting the solution deployment.
The solution that the campaign is based on is not deleted and can be redeployed
when needed. A deleted campaign can no longer be specified in a
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html)
request. For more information on campaigns, see `CreateCampaign`.
"""
def delete_campaign(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteCampaign", input, options)
end
@doc """
Deletes a dataset.
You can't delete a dataset if an associated `DatasetImportJob` or
`SolutionVersion` is in the CREATE PENDING or IN PROGRESS state. For more
information on datasets, see `CreateDataset`.
"""
def delete_dataset(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDataset", input, options)
end
@doc """
Deletes a dataset group.
Before you delete a dataset group, you must delete the following:
* All associated event trackers.
* All associated solutions.
* All datasets in the dataset group.
"""
def delete_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDatasetGroup", input, options)
end
@doc """
Deletes the event tracker.
Does not delete the event-interactions dataset from the associated dataset
group. For more information on event trackers, see `CreateEventTracker`.
"""
def delete_event_tracker(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteEventTracker", input, options)
end
@doc """
Deletes a filter.
"""
def delete_filter(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteFilter", input, options)
end
@doc """
Deletes a schema.
Before deleting a schema, you must delete all datasets referencing the schema.
For more information on schemas, see `CreateSchema`.
"""
def delete_schema(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteSchema", input, options)
end
@doc """
Deletes all versions of a solution and the `Solution` object itself.
Before deleting a solution, you must delete all campaigns based on the solution.
To determine what campaigns are using the solution, call `ListCampaigns` and
supply the Amazon Resource Name (ARN) of the solution. You can't delete a
solution if an associated `SolutionVersion` is in the CREATE PENDING or IN
PROGRESS state. For more information on solutions, see `CreateSolution`.
"""
def delete_solution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteSolution", input, options)
end
@doc """
Describes the given algorithm.
"""
def describe_algorithm(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAlgorithm", input, options)
end
@doc """
Gets the properties of a batch inference job including name, Amazon Resource
Name (ARN), status, input and output configurations, and the ARN of the solution
version used to generate the recommendations.
"""
def describe_batch_inference_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeBatchInferenceJob", input, options)
end
@doc """
Describes the given campaign, including its status.
A campaign can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
When the `status` is `CREATE FAILED`, the response includes the `failureReason`
key, which describes why.
For more information on campaigns, see `CreateCampaign`.
"""
def describe_campaign(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeCampaign", input, options)
end
@doc """
Describes the given dataset.
For more information on datasets, see `CreateDataset`.
"""
def describe_dataset(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDataset", input, options)
end
@doc """
Describes the given dataset group.
For more information on dataset groups, see `CreateDatasetGroup`.
"""
def describe_dataset_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDatasetGroup", input, options)
end
@doc """
Describes the dataset import job created by `CreateDatasetImportJob`, including
the import job status.
"""
def describe_dataset_import_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDatasetImportJob", input, options)
end
@doc """
Describes an event tracker.
The response includes the `trackingId` and `status` of the event tracker. For
more information on event trackers, see `CreateEventTracker`.
"""
def describe_event_tracker(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeEventTracker", input, options)
end
@doc """
Describes the given feature transformation.
"""
def describe_feature_transformation(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeFeatureTransformation", input, options)
end
@doc """
Describes a filter's properties.
"""
def describe_filter(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeFilter", input, options)
end
@doc """
Describes a recipe.
A recipe contains three items:
* An algorithm that trains a model.
* Hyperparameters that govern the training.
* Feature transformation information for modifying the input data
before training.
Amazon Personalize provides a set of predefined recipes. You specify a recipe
when you create a solution with the `CreateSolution` API. `CreateSolution`
trains a model by using the algorithm in the specified recipe and a training
dataset. The solution, when deployed as a campaign, can provide recommendations
using the
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html)
API.
"""
def describe_recipe(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRecipe", input, options)
end
@doc """
Describes a schema.
For more information on schemas, see `CreateSchema`.
"""
def describe_schema(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSchema", input, options)
end
@doc """
Describes a solution.
For more information on solutions, see `CreateSolution`.
"""
def describe_solution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSolution", input, options)
end
@doc """
Describes a specific version of a solution.
For more information on solutions, see `CreateSolution`.
"""
def describe_solution_version(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSolutionVersion", input, options)
end
@doc """
Gets the metrics for the specified solution version.
"""
def get_solution_metrics(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetSolutionMetrics", input, options)
end
@doc """
Gets a list of the batch inference jobs that have been created from a
solution version.
"""
def list_batch_inference_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListBatchInferenceJobs", input, options)
end
@doc """
Returns a list of campaigns that use the given solution.
When a solution is not specified, all the campaigns associated with the account
are listed. The response provides the properties for each campaign, including
the Amazon Resource Name (ARN). For more information on campaigns, see
`CreateCampaign`.
"""
def list_campaigns(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListCampaigns", input, options)
end
@doc """
Returns a list of dataset groups.
The response provides the properties for each dataset group, including the
Amazon Resource Name (ARN). For more information on dataset groups, see
`CreateDatasetGroup`.
"""
def list_dataset_groups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatasetGroups", input, options)
end
@doc """
Returns a list of dataset import jobs that use the given dataset.
When a dataset is not specified, all the dataset import jobs associated with the
account are listed. The response provides the properties for each dataset import
job, including the Amazon Resource Name (ARN). For more information on dataset
import jobs, see `CreateDatasetImportJob`. For more information on datasets, see
`CreateDataset`.
"""
def list_dataset_import_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatasetImportJobs", input, options)
end
@doc """
Returns the list of datasets contained in the given dataset group.
The response provides the properties for each dataset, including the Amazon
Resource Name (ARN). For more information on datasets, see `CreateDataset`.
"""
def list_datasets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatasets", input, options)
end
@doc """
Returns the list of event trackers associated with the account.
The response provides the properties for each event tracker, including the
Amazon Resource Name (ARN) and tracking ID. For more information on event
trackers, see `CreateEventTracker`.
"""
def list_event_trackers(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListEventTrackers", input, options)
end
@doc """
Lists all filters that belong to a given dataset group.
"""
def list_filters(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListFilters", input, options)
end
@doc """
Returns a list of available recipes.
The response provides the properties for each recipe, including the recipe's
Amazon Resource Name (ARN).
"""
def list_recipes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListRecipes", input, options)
end
@doc """
Returns the list of schemas associated with the account.
The response provides the properties for each schema, including the Amazon
Resource Name (ARN). For more information on schemas, see `CreateSchema`.
"""
def list_schemas(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListSchemas", input, options)
end
@doc """
Returns a list of solution versions for the given solution.
When a solution is not specified, all the solution versions associated with the
account are listed. The response provides the properties for each solution
version, including the Amazon Resource Name (ARN). For more information on
solutions, see `CreateSolution`.
"""
def list_solution_versions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListSolutionVersions", input, options)
end
@doc """
Returns a list of solutions that use the given dataset group.
When a dataset group is not specified, all the solutions associated with the
account are listed. The response provides the properties for each solution,
including the Amazon Resource Name (ARN). For more information on solutions, see
`CreateSolution`.
"""
def list_solutions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListSolutions", input, options)
end
@doc """
Updates a campaign by either deploying a new solution or changing the value of
the campaign's `minProvisionedTPS` parameter.
To update a campaign, the campaign status must be ACTIVE or CREATE FAILED. Check
the campaign status using the `DescribeCampaign` API.
You must wait until the `status` of the updated campaign is `ACTIVE` before
asking the campaign for recommendations.
For more information on campaigns, see `CreateCampaign`.
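A usage sketch; the ARN and TPS value are illustrative, and `client` is built as
for the other calls in this module:
input = %{
  "campaignArn" => "arn:aws:personalize:us-east-1:123456789012:campaign/my-campaign",
  "minProvisionedTPS" => 10
}
{:ok, result, _http_response} = AWS.Personalize.update_campaign(client, input)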
"""
def update_campaign(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateCampaign", input, options)
end
end | lib/aws/generated/personalize.ex | 0.882668 | 0.553385 | personalize.ex | starcoder |
defmodule ForgeSdk do
@moduledoc """
Forge is a full-fledged blockchain framework for developers to build decentralized applications easily. Forge gives developers and operators the freedom to launch their own customized chains with their own application logic.
This is the Elixir / Erlang version of the SDK for the Forge framework. To develop applications on top of Forge, you need an SDK. Forge SDK is intended to make interacting with a chain built by Forge as easy as possible. All SDK APIs are organized into the following categories:
- chain APIs: provide the client wrapper for `chain` related gRPC
- wallet APIs: provide the client wrapper for `wallet` related gRPC
- state APIs: provide the client wrapper for `state` related gRPC
- subscription APIs: provide the client wrapper for `subscription` related gRPC
- transaction APIs: the gRPC for transaction is `send_tx`, this set of APIs provide helper functions to make building and sending a tx easy.
- misc APIs: parsing `configuration`, initialize sdk and more.
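A minimal end-to-end sketch (monikers and the transfer value are illustrative):
wallet = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "alice"), wallet: wallet)
other = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "bob"), wallet: other)
itx = ForgeAbi.TransferTx.new(to: other.address, value: ForgeSdk.token_to_unit(1))
hash = ForgeSdk.transfer(itx, wallet: wallet)
ForgeSdk.get_tx(hash: hash)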
"""
alias ForgeAbi.{
# other
AccountState,
AssetState,
BlockInfo,
BlockInfoSimple,
ChainInfo,
ForgeState,
NetInfo,
NodeInfo,
PageInfo,
ProtocolState,
Transaction,
TransactionInfo,
ValidatorsInfo,
WalletInfo,
# request response
RequestGetAccountState,
RequestGetAssetState,
RequestGetBlock,
RequestGetBlocks,
RequestGetTx,
RequestGetProtocolState,
RequestSendTx,
RequestSubscribe,
RequestUnsubscribe,
ResponseSubscribe
}
alias ForgeSdk.{Display, Loader, Rpc, Util, Wallet}
@doc """
Migrate a `wallet` from old address (as well as pk, sk) to a new address.
## Example
old_wallet = ForgeSdk.create_wallet()
declare_tx = ForgeAbi.DeclareTx.new(moniker: "sisyphus")
ForgeSdk.declare(declare_tx, wallet: old_wallet)
new_wallet = ForgeSdk.create_wallet()
itx = ForgeAbi.AccountMigrateTx.new(pk: new_wallet.pk, address: new_wallet.address)
ForgeSdk.account_migrate(itx, wallet: old_wallet)
"""
@spec account_migrate(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate account_migrate(itx, opts), to: Rpc
@doc """
Acquire an `asset` from an existing asset factory.
## Example
w = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "theater"), wallet: w)
w1 = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "tyr"), wallet: w1)
# Note: the application shall have already registered `Ticket` with Forge via `deploy_protocol`.
factory = %{
description: "movie ticket factory",
limit: 5,
price: ForgeAbi.token_to_unit(1),
template: ~s({
"row": "{{ row }}",
"seat": "{{ seat }}",
"time": "11:00am 04/30/2019",
"room": "4"
}),
allowed_spec_args: ["row", "seat"],
asset_name: "Ticket",
attributes: %ForgeAbi.AssetAttributes{
transferrable: true,
ttl: 3600 * 3
}
}
hash = ForgeSdk.create_asset_factory("Avengers: Endgame", factory, wallet: w)
address = ForgeSdk.get_address(hash)
specs =
Enum.map(["0", "2"], fn seat ->
apply(ForgeAbi.AssetSpec, :new, [%{data: ~s({"row": "15", "seat": "\#{seat}"})}])
end)
itx = ForgeAbi.AcquireAssetTx.new(to: address, specs: specs)
ForgeSdk.acquire_asset(itx, wallet: w1)
"""
@spec acquire_asset(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate acquire_asset(itx, opts), to: Rpc
# defdelegate consensus_upgrade(itx, opts), to: Rpc
@spec prepare_consume_asset(map(), Keyword.t()) :: Transaction.t() | {:error, term()}
defdelegate prepare_consume_asset(itx, opts), to: Rpc
@spec finalize_consume_asset(Transaction.t(), Keyword.t()) ::
{:error, any()} | Transaction.t()
defdelegate finalize_consume_asset(tx, opts), to: Rpc
@doc """
Create a new `asset`.
## Example
wallet = ForgeSdk.create_wallet()
declare_tx = ForgeAbi.DeclareTx.new(moniker: "sisyphus")
ForgeSdk.declare(declare_tx, wallet: wallet)
ticket = ForgeAbi.Ticket.new(row: "K", seat: "22", room: "3A", time: "03/04/2019 11:00am PST",
name: "Avengers: Endgame")
itx = ForgeAbi.CreateAsset.new(data: ForgeSdk.encode_any!(ticket), readonly: true,
transferrable: true, ttl: 7200)
ForgeSdk.create_asset(itx, wallet: wallet)
"""
@spec create_asset(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate create_asset(itx, opts), to: Rpc
@doc """
Create a new `asset factory`.
## Example
w = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "theater"), wallet: w)
w1 = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "tyr"), wallet: w)
# Note: the application shall have already registered `Ticket` with Forge via `deploy_protocol`.
factory = %{
description: "movie ticket factory",
limit: 5,
price: ForgeAbi.token_to_unit(1),
template: ~s({
"row": "{{ row }}",
"seat": "{{ seat }}",
"time": "11:00am 04/30/2019",
"room": "4"
}),
allowed_spec_args: ["row", "seat"],
asset_name: "Ticket",
attributes: %ForgeAbi.AssetAttributes{
transferrable: true,
ttl: 3600 * 3
}
}
ForgeSdk.create_asset_factory("Avengers: Endgame", factory, wallet: w)
"""
@spec create_asset_factory(String.t(), map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate create_asset_factory(moniker, factory, opts), to: Rpc
@doc """
Declare a `wallet` to the chain.
## Example
wallet = ForgeSdk.create_wallet()
declare_tx = ForgeAbi.DeclareTx.new(moniker: "sisyphus")
ForgeSdk.declare(declare_tx, wallet: wallet)
"""
@spec declare(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate declare(itx, opts), to: Rpc
@spec prepare_declare(map(), Keyword.t()) :: Transaction.t() | {:error, term()}
defdelegate prepare_declare(itx, opts), to: Rpc
@spec finalize_declare(Transaction.t(), Keyword.t()) :: {:error, any()} | Transaction.t()
defdelegate finalize_declare(tx, opts), to: Rpc
@doc """
Deploy a `new protocol` into the chain at a given `block height`.
## Example
itx = data |> Base.url_decode64!(padding: false) |> ForgeAbi.DeployProtocolTx.decode()
ForgeSdk.deploy_protocol(itx, wallet: wallet)
"""
@spec deploy_protocol(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate deploy_protocol(itx, opts), to: Rpc
@spec prepare_exchange(map(), Keyword.t()) :: Transaction.t() | {:error, term()}
defdelegate prepare_exchange(itx, opts), to: Rpc
@spec finalize_exchange(Transaction.t(), Keyword.t()) :: {:error, term()} | Transaction.t()
defdelegate finalize_exchange(tx, opts), to: Rpc
# defdelegate sys_upgrade(itx, opts), to: Rpc
@doc """
One wallet can poke on a **daily basis** to get some free tokens (for test chains only); `nonce` should be 0.
## Example
w = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "alice"), wallet: w)
hsh = ForgeSdk.checkin(wallet: w)
"""
defdelegate poke(itx, opts), to: Rpc
defdelegate stake(itx, opts), to: Rpc
# defdelegate sys_upgrade(itx, opts), to: Rpc
@doc """
Transfer `tokens or/and assets` from one wallet to another.
## Example
w1 = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "alice"), wallet: w1)
w2 = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "bob"), wallet: w2)
data = Google.Protobuf.Any.new(type_url: "test_asset", value: "hello world")
itx = ForgeSdk.encode_any!(TransferTx.new(to: w2.address, value: new_unit(100)))
ForgeSdk.transfer(itx, wallet: w1)
"""
@spec transfer(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate transfer(itx, opts), to: Rpc
@doc """
Update an existing `asset`.
## Example
wallet = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "alice"), wallet: wallet)
post = ForgeAbi.Post.new(title: "a new post", content: "hello world!")
itx = ForgeAbi.CreateAsset.new(data: ForgeSdk.encode_any!(post))
hash = ForgeSdk.create_asset(itx, wallet: wallet)
address = ForgeSdk.get_address(hash)
new_post = ForgeAbi.Post.new(title: "a new post", content: "Yeah!")
itx = ForgeAbi.UpdateAssetTx.new(data: ForgeSdk.encode_any!(new_post), address: address)
ForgeSdk.update_asset(itx, wallet: wallet)
ForgeSdk.get_asset_state(address: address)
"""
@spec update_asset(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate update_asset(itx, opts), to: Rpc
@doc """
Upgrade the `node` to a new version at a given `block height`.
## Example
itx = ForgeAbi.UpgradeNodeTx.new(version: "0.26.0", height: 12000)
ForgeSdk.upgrade_node(itx, wallet: wallet)
"""
@spec upgrade_node(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate upgrade_node(itx, opts), to: Rpc
@spec update_validator(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate update_validator(itx, opts), to: Rpc
@spec activate_protocol(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate activate_protocol(itx, opts), to: Rpc
@spec deactivate_protocol(map(), Keyword.t()) :: String.t() | {:error, term()}
defdelegate deactivate_protocol(itx, opts), to: Rpc
defdelegate setup_swap(itx, opts), to: Rpc
defdelegate retrieve_swap(itx, opts), to: Rpc
defdelegate revoke_swap(itx, opts), to: Rpc
defdelegate delegate(itx, opts), to: Rpc
defdelegate revoke_delegate(itx, opts), to: Rpc
defdelegate deposit_token(itx, opts), to: Rpc
defdelegate prepare_withdraw_token(itx, opts), to: Rpc
defdelegate finalize_withdraw_token(itx, opts), to: Rpc
defdelegate approve_withdraw(itx, opts), to: Rpc
defdelegate revoke_withdraw(itx, opts), to: Rpc
defdelegate refuel(opts), to: Rpc
defdelegate refuel(itx, opts), to: Rpc
# extended tx helper
defdelegate stake_for_node(address, amount, opts), to: Rpc
# chain related
@doc """
One wallet can check in on a daily basis to get some free tokens (for test chains only); `nonce` should be 0.
## Example
w = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "alice"), wallet: w)
ForgeSdk.checkin(wallet: w)
"""
@spec checkin(Keyword.t()) :: String.t() | {:error, term()}
defdelegate checkin(opts), to: Rpc
# RPC
# chain related
@doc """
Retrieve the current status of the chain.
## Example
ForgeSdk.get_chain_info()
"""
@spec get_chain_info(String.t()) :: ChainInfo.t() | {:error, term()}
defdelegate get_chain_info(conn_name \\ ""), to: Rpc
@doc """
Retrieve the current status of the node.
## Example
ForgeSdk.get_node_info()
"""
@spec get_node_info(String.t()) :: NodeInfo.t() | {:error, term()}
defdelegate get_node_info(conn_name \\ ""), to: Rpc
@doc """
Retrieve the `network info`.
## Example
ForgeSdk.get_net_info()
"""
@spec get_net_info(String.t()) :: NetInfo.t() | {:error, term()}
defdelegate get_net_info(conn_name \\ ""), to: Rpc
@doc """
Retrieve the current validator info.
## Example
ForgeSdk.get_validators_info()
"""
@spec get_validators_info(String.t()) :: ValidatorsInfo.t() | {:error, term()}
defdelegate get_validators_info(conn_name \\ ""), to: Rpc
@doc """
Forge supports `multisig` for a tx; you can use this to endorse an already signed tx.
**ExchangeTx, ConsumeAssetTx and some other txs** are using multisig technology.
## Example
w1 = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "alice"), wallet: w1)
w2 = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "bob"), wallet: w2)
data = Google.Protobuf.Any.new(type_url: "test_asset", value: "hello world")
hash = ForgeSdk.create_asset(ForgeAbi.CreateAssetTx.new(data: data), wallet: w2)
asset_address = ForgeSdk.get_address(hash)
sender_info = ForgeAbi.ExchangeInfo.new(value: ForgeSdk.token_to_unit(1))
receiver_info = ForgeAbi.ExchangeInfo.new(assets: [asset_address])
itx = ForgeAbi.ExchangeTx.new(to: w2.address, sender: sender_info, receiver: receiver_info)
tx = ForgeSdk.prepare_exchange(itx, wallet: w1)
tx1 = ForgeSdk.multisig(tx, w2)
ForgeSdk.send_tx(tx: tx1)
"""
@spec multisig(Keyword.t(), String.t() | atom()) ::
Transaction.t() | {:error, term()}
defdelegate multisig(request, conn_name \\ ""), to: Rpc
@doc """
Send tx.
## Example
w1 = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "alice"), wallet: w1)
w2 = ForgeSdk.create_wallet()
ForgeSdk.declare(ForgeAbi.DeclareTx.new(moniker: "bob"), wallet: w2)
data = Google.Protobuf.Any.new(type_url: "test_asset", value: "hello world")
itx = ForgeSdk.encode_any!(TransferTx.new(to: w2.address, value: new_unit(100)))
tx = ForgeSdk.transfer(itx, wallet: w1, send: :nosend)
hash = ForgeSdk.send_tx(tx: tx)
"""
@spec send_tx(RequestSendTx.t() | Keyword.t(), String.t() | atom()) ::
String.t() | {:error, term()}
defdelegate send_tx(request, conn_name \\ ""), to: Rpc
@doc """
Return an already processed `transaction` by its `hash`. If this API returns `nil`, most likely your tx hasn't been processed yet.
## Example
hash = ForgeSdk.send_tx(tx: tx)
ForgeSdk.get_tx(hash: hash)
"""
@spec get_tx(
RequestGetTx.t() | [RequestGetTx.t()] | Keyword.t() | [Keyword.t()],
String.t()
) :: TransactionInfo.t() | [TransactionInfo.t()] | {:error, term()}
defdelegate get_tx(requests, conn_name \\ ""), to: Rpc
defdelegate get_unconfirmed_txs(request, conn_name \\ ""), to: Rpc
@doc """
Get a block by its `height`. All txs included in this block will be returned.
## Example
req = ForgeAbi.RequestGetBlock.new(height: 1000)
ForgeSdk.get_block(req)
"""
@spec get_block(
RequestGetBlock.t() | [RequestGetBlock.t()] | Keyword.t() | [Keyword.t()],
String.t()
) :: BlockInfo.t() | [BlockInfo.t()] | {:error, term()}
defdelegate get_block(requests, conn_name \\ ""), to: Rpc
@doc """
Get a `list` of blocks between a range.
## Example
page_info = ForgeAbi.PageInfo.new
range_filter = ForgeAbi.RangeFilter.new(from: 1000, to: 1015)
req = ForgeAbi.RequestGetBlocks.new(empty_excluded: true, height_filter: range_filter,
paging: page_info)
ForgeSdk.get_blocks(req)
"""
@spec get_blocks(RequestGetBlocks.t() | Keyword.t(), String.t() | atom()) ::
{[BlockInfoSimple.t()], PageInfo.t()} | {:error, term()}
defdelegate get_blocks(request, conn_name \\ ""), to: Rpc
defdelegate search(request, conn_name \\ ""), to: Rpc
defdelegate get_config(request, conn_name \\ ""), to: Rpc
# wallet related
@doc """
This will generate a wallet with default DID type: public key type is `ED25519`, hash type is `sha3(256)`, and DID role type is account.
## Example
ForgeSdk.create_wallet()
"""
@spec create_wallet :: WalletInfo.t()
def create_wallet, do: Wallet.create(%Wallet.Type.Forge{})
@doc """
You can pass in your own `DID` type in a map when you want to create a wallet with different settings.
## Example
w1 = ForgeSdk.create_wallet()
ForgeSdk.create_wallet(moniker: "alice")
"""
@spec create_wallet(Keyword.t(), String.t() | atom()) :: WalletInfo.t() | {:error, term()}
defdelegate create_wallet(request, conn_name \\ ""), to: Rpc
@spec prepare_create_wallet(Keyword.t(), String.t() | atom()) ::
{WalletInfo.t(), Transaction.t()} | {:error, term()}
defdelegate prepare_create_wallet(request, conn_name \\ ""), to: Rpc
@spec finalize_create_wallet(Transaction.t(), Keyword.t()) :: {:error, any()} | Transaction.t()
defdelegate finalize_create_wallet(tx, opts), to: Rpc
defdelegate declare_node(request, conn_name \\ ""), to: Rpc
# state related
@doc """
Return the `state` for an account, node, validator or application address.
## Example
req = ForgeAbi.RequestGetAccountState.new(address: "z1QNTPxDUCbh68q6ci6zUmtnT2Cj8nbLw75")
ForgeSdk.get_account_state(req)
"""
@spec get_account_state(
RequestGetAccountState.t() | [RequestGetAccountState.t()] | Keyword.t() | [Keyword.t()],
String.t() | atom()
) :: AccountState.t() | nil | [AccountState.t()] | {:error, term()}
defdelegate get_account_state(request, conn_name \\ ""), to: Rpc
@doc """
Return the `state` for an asset.
## Example
req = ForgeAbi.RequestGetAssetState.new(address: "zjdjh65vHxvvWfj3xPrDoUDYp1aY6xUCV21b")
ForgeSdk.get_asset_state(req)
"""
@spec get_asset_state(
RequestGetAssetState.t() | [RequestGetAssetState.t()] | Keyword.t() | [Keyword.t()],
String.t() | atom()
) :: AssetState.t() | [AssetState.t()] | {:error, term()}
defdelegate get_asset_state(request, conn_name \\ ""), to: Rpc
@doc """
Return global state for forge.
## Example
ForgeSdk.get_forge_state()
"""
@spec get_forge_state(String.t() | atom()) :: ForgeState.t() | {:error, term()}
defdelegate get_forge_state(conn_name \\ ""), to: Rpc
@doc """
Return installed protocol state.
## Example
req = ForgeAbi.RequestGetProtocolState.new(address: "z2E3zCQTx5dPQeimQvJWz3vJvcDv9Ad6YgaPn")
ForgeSdk.get_protocol_state(req)
"""
@spec get_protocol_state(
RequestGetProtocolState.t()
| [RequestGetProtocolState.t()]
| Keyword.t()
| [Keyword.t()],
String.t()
) :: ProtocolState.t() | [ProtocolState.t()] | {:error, term()}
defdelegate get_protocol_state(request, conn_name \\ ""), to: Rpc
defdelegate get_stake_state(request, conn_name \\ ""), to: Rpc
defdelegate get_swap_state(request, conn_name \\ ""), to: Rpc
defdelegate get_delegate_state(request, conn_name \\ ""), to: Rpc
# subscription related
@doc """
Subscribe to a `topic`. You can even set a filter for the events you'd like to listen to.
## Example
req = ForgeAbi.RequestSubscribe.new(topic: "fg:t:declare")
ForgeSdk.Rpc.subscribe(req)
"""
@spec subscribe(RequestSubscribe.t() | Keyword.t(), String.t() | atom(), Keyword.t()) ::
[ResponseSubscribe.t()] | {:error, term()}
defdelegate subscribe(request, conn_name \\ "", opts \\ []), to: Rpc
@doc """
Terminate the subscription by the topic `id`.
## Example
req = ForgeAbi.RequestSubscribe.new(topic: "fg:t:declare")
stream_declare = ForgeSdk.Rpc.subscribe(req)
[topic: topic] = Enum.take(stream_declare, 1)
req = ForgeAbi.RequestUnsubscribe.new(topic: topic)
ForgeSdk.Rpc.unsubscribe(req)
"""
@spec unsubscribe(RequestUnsubscribe.t(), String.t() | atom(), Keyword.t()) ::
:ok | {:error, term()}
defdelegate unsubscribe(request, conn_name \\ "", opts \\ []), to: Rpc
# extended
# defdelegate get_nonce(address, conn_name \\ "", app_hash \\ ""), to: Rpc
# display a data structure
@doc """
Provide a display friendly result for a data structure.
## Examples
req = ForgeAbi.RequestGetAccountState.new(address: "z1QNTPxDUCbh68q6ci6zUmtnT2Cj8nbLw75")
account_state = ForgeSdk.get_account_state(req)
ForgeSdk.display(account_state)
"""
@spec display(any(), boolean()) :: any()
defdelegate display(data, expand? \\ false), to: Display
defdelegate connect(hostname, opts), to: Util
defdelegate get_conn(name \\ ""), to: Util
defdelegate get_parsed_config(name \\ ""), to: Util
defdelegate datetime_to_proto(dt), to: Util
defdelegate update_type_url(forge_state), to: Loader
defdelegate get_tx_protocols(forge_state, address), to: Loader
defdelegate get_address(hash), to: Rpc
defdelegate encode_any(data, type_url \\ nil), to: ForgeAbi
defdelegate encode_any!(data, type_url \\ nil), to: ForgeAbi
defdelegate decode_any(data), to: ForgeAbi
defdelegate decode_any!(data), to: ForgeAbi
defdelegate token_to_unit(tokens, name \\ ""), to: Util
defdelegate unit_to_token(units, name \\ ""), to: Util
defdelegate one_token(name \\ ""), to: Util
defdelegate verify_sig(tx), to: Util
defdelegate verify_multi_sig(tx), to: Util
# stats
defdelegate get_forge_stats(requests, conn_name \\ ""), to: Rpc
defdelegate list_transactions(request, conn_name \\ ""), to: Rpc
defdelegate list_assets(request, conn_name \\ ""), to: Rpc
defdelegate list_stakes(request, conn_name \\ ""), to: Rpc
defdelegate list_account(request, conn_name \\ ""), to: Rpc
defdelegate list_top_accounts(request, conn_name \\ ""), to: Rpc
defdelegate list_asset_transactions(request, conn_name \\ ""), to: Rpc
defdelegate list_blocks(request, conn_name \\ ""), to: Rpc
defdelegate list_swap(request, conn_name \\ ""), to: Rpc
defdelegate get_health_status(request, conn_name \\ ""), to: Rpc
end | lib/forge_sdk.ex | 0.881577 | 0.605974 | forge_sdk.ex | starcoder |
defmodule TaskBunny.Job do
@moduledoc """
Behaviour module for implementing a TaskBunny job.
TaskBunny job is an asynchronous background job whose execution request is
enqueued to RabbitMQ and performed in a worker process.
defmodule HelloJob do
use TaskBunny.Job
def perform(%{"name" => name}) do
IO.puts "Hello " <> name
:ok
end
end
HelloJob.enqueue(%{"name" => "Cloud"})
## Failing
TaskBunny treats the job as failed when...
- the return value of perform is not `:ok` or `{:ok, something}`
- the perform timed out
- the perform raises an exception while being executed
- the perform throws :exit signal while being executed.
TaskBunny will retry the failed job later.
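For example, a job that reports failure explicitly (`do_work/0` is illustrative):
defmodule FlakyJob do
  use TaskBunny.Job
  def perform(_payload) do
    case do_work() do
      :done -> :ok
      # Anything other than :ok or {:ok, _} marks the job as failed,
      # so TaskBunny will retry it later.
      other -> {:error, other}
    end
  end
end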
## Timeout
By default TaskBunny terminates the job when it takes more than 2 minutes.
This prevents messages from blocking a worker.
If your job is expected to take longer than 2 minutes or you want to terminate
the job earlier, override `timeout/0`.
defmodule SlowJob do
use TaskBunny.Job
def timeout, do: 300_000
def perform(_) do
slow_work()
:ok
end
end
## Retry
By default TaskBunny retries a failed job 10 times, once every five minutes.
You can change this by overriding `max_retry/0` and `retry_interval/1`.
For example, if you want the job to be retried five times and gradually
increase the interval based on failed times, you can write logic like
the following:
defmodule HttpSyncJob do
def max_retry, do: 5
def retry_interval(failed_count) do
[1, 5, 10, 30, 60]
|> Enum.map(&(&1 * 60_000))
|> Enum.at(failed_count - 1, 1000)
end
...
end
"""
@doc """
Callback to process a job.
It can take any type of argument as long as it can be serialized with Poison,
but we recommend using a map with string keys for consistency.
def perform(name) do
IO.puts name <> ", it's not a preferred way"
end
def perform(%{"name" => name}) do
IO.puts name <> ", it's a preferred way :)"
end
"""
@callback perform(any) :: :ok | {:ok, any} | {:error, term}
@doc """
Enforces job uniqueness.
When returning a string from this function,
TaskBunny enforces that only one job per queue key
can be put in the queue at the same time. Only after the job has left
the queue (after it has been executed) can a job with the same queue key
be enqueued again.
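A sketch of a job that allows only one enqueued sync per user (the job module
and payload shape are illustrative):
defmodule SyncUserJob do
  use TaskBunny.Job
  # Only one SyncUserJob per user can sit in the queue at a time.
  def queue_key(%{"user_id" => user_id}), do: "sync_user:" <> to_string(user_id)
  def perform(%{"user_id" => _user_id}), do: :ok
end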
"""
@callback queue_key(any) :: nil | String.t()
@doc """
Enforces job execution serialization.
When returning a string from this function, TaskBunny enforces that no more
than one job with the same execution key is executed concurrently. However, it
is still possible to have multiple jobs with the same execution key
enqueued, but jobs that have the same execution key will be put in a
waiting queue and processed serially.
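A sketch of a job that serializes execution per account (names are illustrative):
defmodule BillAccountJob do
  use TaskBunny.Job
  # Jobs for the same account are never executed concurrently;
  # they are processed one by one.
  def execution_key(%{"account_id" => id}), do: "bill_account:" <> to_string(id)
  def perform(%{"account_id" => _id}), do: :ok
end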
"""
@callback execution_key(any) :: nil | String.t()
@doc """
Callback executed when a process gets rejected.
It receives as input the whole error trace structure plus the original payload for inspection and recovery actions.
"""
@callback on_reject(any) :: :ok
@doc """
Callback for the timeout in milliseconds for a job execution.
Default value is 120_000 = 2 minutes.
Override the function if you want to change the value.
"""
@callback timeout() :: integer
@doc """
Callback for the max number of retries TaskBunny can make for a failed job.
Default value is 10.
Override the function if you want to change the value.
"""
@callback max_retry() :: integer
@doc """
Callback for the retry interval in milliseconds.
Default value is 300_000 = 5 minutes.
Override the function if you want to change the value.
TaskBunny will set failed count to the argument.
The value will be more than or equal to 1 and less than or equal to max_retry.
"""
@callback retry_interval(integer) :: integer
require Logger
alias TaskBunny.{Config, Queue, Job, Message, Partition, Publisher}
alias TaskBunny.{
Publisher.PublishError,
Connection.ConnectError,
Job.QueueNotFoundError
}
defmacro __using__(_options \\ []) do
quote do
@behaviour Job
@doc false
@spec enqueue(any, keyword) :: :ok | {:error, any}
def enqueue(payload \\ %{}, options \\ []) do
TaskBunny.Job.enqueue(__MODULE__, payload, options)
end
@doc false
@spec enqueue!(any, keyword) :: :ok
def enqueue!(payload \\ %{}, options \\ []) do
TaskBunny.Job.enqueue!(__MODULE__, payload, options)
end
# Makes sure that the queue only includes a unique job.
@doc false
@spec queue_key(any) :: nil | String.t()
def queue_key(_payload), do: nil
# Makes sure that only one job with the same key is
# being executed at the same time.
@doc false
@spec execution_key(any) :: nil | String.t()
def execution_key(_payload), do: nil
# Returns timeout (default 2 minutes).
# Override the method to change the timeout.
@doc false
@spec timeout() :: integer
def timeout, do: 120_000
# Retries 10 times in every 5 minutes by default.
# You have to re-create the queue after you change retry_interval.
@doc false
@spec max_retry() :: integer
def max_retry, do: 10
@doc false
@spec retry_interval(integer) :: integer
def retry_interval(_failed_count), do: 300_000
@doc false
@spec on_reject(any) :: :ok
def on_reject(_body), do: :ok
defoverridable timeout: 0,
max_retry: 0,
retry_interval: 1,
on_reject: 1,
queue_key: 1,
execution_key: 1
end
end
@doc """
Enqueues a job with payload.
You might want to use the shorter version if you have access to the job module.
# Following two calls are exactly same.
RegistrationJob.enqueue(payload)
TaskBunny.enqueue(RegistrationJob, payload)
## Options
- delay: Set time in milliseconds to schedule the job enqueue time.
- host: RabbitMQ host. By default it is automatically selected from configuration.
- queue: RabbitMQ queue. By default it is automatically selected from configuration.
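For example, to schedule a job ten seconds from now on an explicit queue
(the queue name is illustrative):
TaskBunny.Job.enqueue(RegistrationJob, %{"id" => 123}, delay: 10_000, queue: "jobs.registration")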
"""
@spec enqueue(atom, any, keyword) :: :ok | {:error, any}
def enqueue(job, payload, options \\ []) do
enqueue!(job, payload, options)
rescue
e in [ConnectError, PublishError, QueueNotFoundError] -> {:error, e}
end
@doc """
Similar to enqueue/3 but raises an exception on error.
"""
@spec enqueue!(atom, any, keyword) :: :ok
def enqueue!(job, payload, options \\ []) do
queue_data = Config.queue_for_job(job) || []
host = options[:host] || queue_data[:host] || :default
# Check the queue key; when there is a queue key and it is not
# queued, immediately add it to the queue key set to prevent
# races.
if job.queue_key(payload) != nil and Partition.queued?(job.queue_key(payload), :add) do
{:error, :duplicate}
else
{:ok, message} = Message.encode(job, payload)
case options[:queue] || queue_data[:name] do
nil -> raise QueueNotFoundError, job: job
queue -> do_enqueue(host, queue, message, options[:delay])
end
end
end
@spec do_enqueue(atom, String.t(), String.t(), nil | integer) :: :ok
defp do_enqueue(host, queue, message, nil) do
Publisher.publish!(host, queue, message)
end
defp do_enqueue(host, queue, message, delay) do
scheduled = Queue.scheduled_queue(queue)
options = [
expiration: "#{delay}"
]
Publisher.publish!(host, scheduled, message, options)
end
end | lib/task_bunny/job.ex | 0.884146 | 0.533215 | job.ex | starcoder |
defmodule Q do
@moduledoc """
Documentation for Q ( Elixir Quantum module ).
"""
@doc """
|0> qubit = ( 1, 0 )
## Examples
iex> Q.q0.array
[ 1, 0 ]
"""
def q0, do: Numexy.new( [ 1, 0 ] )
@doc """
|1> qubit = ( 0, 1 )
## Examples
iex> Q.q1.array
[ 0, 1 ]
"""
def q1, do: Numexy.new( [ 0, 1 ] )
@doc """
X gate.
## Examples
iex> Q.x( Q.q0 ).array
[ 0, 1 ]
iex> Q.x( Q.q1 ).array
[ 1, 0 ]
iex> ( Q.z( Q.q1 ) |> Q.x ).array
[ -1, 0 ]
iex> ( Q.z( Q.q1 ) |> Q.x |> Q.x ).array
[ 0, -1 ]
iex> ( Q.h( Q.q0 ) |> Q.x ).array
[ 1 / Math.sqrt( 2 ) * 1, 1 / Math.sqrt( 2 ) * 1 ]
iex> ( Q.h( Q.q1 ) |> Q.x ).array
[ 1 / Math.sqrt( 2 ) * -1, 1 / Math.sqrt( 2 ) * 1 ]
"""
def x( qbit ), do: Numexy.dot( x_matrix(), qbit )
def x_matrix(), do: Numexy.new( [ [ 0, 1 ], [ 1, 0 ] ] )
@doc """
Z gate.
## Examples
iex> Q.z( Q.q0 ).array
[ 1, 0 ]
iex> Q.z( Q.q1 ).array
[ 0, -1 ]
iex> ( Q.h( Q.q0 ) |> Q.z ).array
[ 1 / Math.sqrt( 2 ) * 1, 1 / Math.sqrt( 2 ) * -1 ]
iex> ( Q.h( Q.q1 ) |> Q.z ).array
[ 1 / Math.sqrt( 2 ) * 1, 1 / Math.sqrt( 2 ) * 1 ]
"""
def z( qbit ), do: Numexy.dot( z_matrix(), qbit )
def z_matrix(), do: Numexy.new( [ [ 1, 0 ], [ 0, -1 ] ] )
@doc """
Hadamard gate.
## Examples
iex> Q.h( Q.q0 ).array
[ 1 / Math.sqrt( 2 ) * 1, 1 / Math.sqrt( 2 ) * 1 ]
iex> Q.h( Q.q1 ).array
[ 1 / Math.sqrt( 2 ) * 1, 1 / Math.sqrt( 2 ) * -1 ]
iex> ( Q.h( Q.q0 ) |> Q.h ).array
[ 1, 0 ]
iex> ( Q.h( Q.q1 ) |> Q.h ).array
[ 0, 1 ]
iex> ( Numexy.new( [ Q.q0.array, Q.q1.array ] ) |> Q.cut( 0 ) |> Q.h |> Q.x |> Q.h ).array
[ 1, 0 ]
iex> ( Numexy.new( [ Q.q0.array, Q.q1.array ] ) |> Q.cut( 1 ) |> Q.h |> Q.x |> Q.h ).array
[ 0, -1 ]
"""
def h( qbit ), do: Numexy.dot( h_matrix(), qbit ) |> to_bit
def h_matrix(), do: 1 / Math.sqrt( 2 ) |> Numexy.mul( Numexy.new( [ [ 1, 1 ], [ 1, -1 ] ] ) )
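# Normalize floating-point artifacts of the 1/sqrt(2) arithmetic (e.g.
# 0.9999999999999998) back to the exact amplitudes 1, -1, 0.5, -0.5 and 0.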
def to_bit( 0.9999999999999998 ), do: 1
def to_bit( -0.9999999999999998 ), do: -1
def to_bit( 0.4999999999999999 ), do: 0.5
def to_bit( -0.4999999999999999 ), do: -0.5
def to_bit( 0.0 ), do: 0
def to_bit( value ) when is_list( value ) do
case value |> List.first |> is_list do
true -> value |> Enum.map( &( &1 |> Enum.map( fn n -> to_bit( n ) end ) ) )
false -> value |> Enum.map( &( to_bit( &1 ) ) )
end
end
def to_bit( %Array{ array: list, shape: _ } ), do: list |> to_bit |> Numexy.new
def to_bit( others ), do: others
@doc """
Controlled NOT gate.
## Examples
iex> Q.cnot( Q.q0, Q.q0 ).array # |00>
[ [ 1, 0 ], [ 0, 0 ] ]
iex> Q.cnot( Q.q0, Q.q1 ).array # |01>
[ [ 0, 1 ], [ 0, 0 ] ]
iex> Q.cnot( Q.q1, Q.q0 ).array # |11>
[ [ 0, 0 ], [ 0, 1 ] ]
iex> Q.cnot( Q.q1, Q.q1 ).array # |10>
[ [ 0, 0 ], [ 1, 0 ] ]
"""
def cnot( qbit1, qbit2 ), do: ( Numexy.dot( cnot_matrix(), tensordot( qbit1, qbit2 ) ) ).array |> Numexy.reshape( 2 )
def cnot_matrix() do
Numexy.new(
[
[ 1, 0, 0, 0 ],
[ 0, 1, 0, 0 ],
[ 0, 0, 0, 1 ],
[ 0, 0, 1, 0 ],
]
)
end
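@doc """
Tensor (Kronecker) product of two qubit vectors, used by `cnot/2` to build a 2-qubit state.
## Examples
iex> Q.tensordot( Q.q0, Q.q1 ).array
[ 0, 1, 0, 0 ]
"""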
def tensordot( %Array{ array: xm, shape: _xm_shape }, %Array{ array: ym, shape: _ym_shape } ) do
xv = List.flatten( xm )
yv = List.flatten( ym )
xv
|> Enum.map( fn x -> yv |> Enum.map( fn y -> x * y end ) end )
|> List.flatten
|> Numexy.new
end
@doc """
Y gate.
## Examples
iex> Q.y( Q.q0 ).array
[ 0, ComplexNum.new( 0, 1 ) ]
iex> Q.y( Q.q1 ).array
[ ComplexNum.new( 0, -1 ), 0 ]
iex> ( Q.y( Q.q0 ) |> Q.y ).array
[ 1, 0 ]
iex> ( Q.y( Q.q1 ) |> Q.y ).array
[ 0, 1 ]
"""
def y( qbit ), do: complex_dot( y_matrix(), qbit )
def y_matrix(), do: Numexy.new( [ [ 0, ComplexNum.new( 0, -1 ) ], [ ComplexNum.new( 0, 1 ), 0 ] ] )
def complex_dot( %Array{ array: xm, shape: { xm_row, nil } }, %Array{ array: ym, shape: { ym_row, nil } } ) when xm_row == ym_row do
complex_dot_vector( xm, ym ) |> Numexy.new
end
def complex_dot( %Array{ array: xm, shape: { _, xm_col } }, %Array{ array: ym, shape: { ym_row, nil } } ) when xm_col == ym_row do
( for x <- xm, y <- [ ym ], do: [ x, y ] )
|> Enum.map( fn [ x, y ] -> complex_dot_vector( x, y ) end )
|> Numexy.new
end
def complex_dot_vector( xm, ym ) do
result = Enum.zip( xm, ym )
|> Enum.reduce( 0, fn { a, b }, acc -> complex_mult( a, b ) |> complex_add( acc ) end )
if result == ComplexNum.new( 0, 0 ), do: 0, else: result
end
def complex_mult( a, b ) when is_map( a ) or is_map( b ), do: ComplexNum.mult( a, b )
def complex_mult( a, b ), do: a * b
def complex_add( a, b ) when is_map( a ) or is_map( b ), do: ComplexNum.add( a, b )
def complex_add( a, b ), do: a + b
@doc """
Cut qbit.
## Examples
iex> Q.cut( Numexy.new( [ Q.q0.array, Q.q1.array ] ), 0 ).array
[ 1, 0 ]
iex> Q.cut( Numexy.new( [ Q.q0.array, Q.q1.array ] ), 1 ).array
[ 0, 1 ]
"""
def cut( qbit, no ), do: qbit.array |> Enum.at( no ) |> Numexy.new
end | lib/q.ex | 0.660939 | 0.70581 | q.ex | starcoder |
defmodule AWS.SES do
@moduledoc """
Amazon Simple Email Service
This document contains reference information for the [Amazon Simple Email Service](https://aws.amazon.com/ses/) (Amazon SES) API, version 2010-12-01.
This document is best used in conjunction with the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/Welcome.html).
For a list of Amazon SES endpoints to use in service requests, see [Regions and Amazon SES](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/regions.html)
in the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/Welcome.html).
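A minimal usage sketch, assuming the standard `AWS.Client` setup from the
`aws-elixir` package (credentials and region are illustrative):
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
{:ok, result, _http_response} = AWS.SES.get_send_quota(client, %{})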
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "Amazon SES",
api_version: "2010-12-01",
content_type: "application/x-www-form-urlencoded",
credential_scope: nil,
endpoint_prefix: "email",
global?: false,
protocol: "query",
service_id: "SES",
signature_version: "v4",
signing_name: "ses",
target_prefix: nil
}
end
@doc """
Creates a receipt rule set by cloning an existing one.
All receipt rules and configurations are copied to the new receipt rule set and
are completely independent of the source rule set.
For information about setting up rule sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rule-set.html).
You can execute this operation no more than once per second.
"""
def clone_receipt_rule_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CloneReceiptRuleSet", input, options)
end
@doc """
Creates a configuration set.
Configuration sets enable you to publish email sending events. For information
about using configuration sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def create_configuration_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateConfigurationSet", input, options)
end
@doc """
Creates a configuration set event destination.
When you create or update an event destination, you must provide one, and only
one, destination. The destination can be CloudWatch, Amazon Kinesis Firehose, or
Amazon Simple Notification Service (Amazon SNS).
An event destination is the AWS service to which Amazon SES publishes the email
sending events associated with a configuration set. For information about using
configuration sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def create_configuration_set_event_destination(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"CreateConfigurationSetEventDestination",
input,
options
)
end
@doc """
Creates an association between a configuration set and a custom domain for open
and click event tracking.
By default, images and links used for tracking open and click events are hosted
on domains operated by Amazon SES. You can configure a subdomain of your own to
handle these events. For information about using custom domains, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/configure-custom-open-click-domains.html).
"""
def create_configuration_set_tracking_options(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"CreateConfigurationSetTrackingOptions",
input,
options
)
end
@doc """
Creates a new custom verification email template.
For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def create_custom_verification_email_template(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"CreateCustomVerificationEmailTemplate",
input,
options
)
end
@doc """
Creates a new IP address filter.
For information about setting up IP address filters, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-ip-filters.html).
You can execute this operation no more than once per second.
"""
def create_receipt_filter(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateReceiptFilter", input, options)
end
@doc """
Creates a receipt rule.
For information about setting up receipt rules, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def create_receipt_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateReceiptRule", input, options)
end
@doc """
Creates an empty receipt rule set.
For information about setting up receipt rule sets, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rule-set.html).
You can execute this operation no more than once per second.
"""
def create_receipt_rule_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateReceiptRuleSet", input, options)
end
@doc """
Creates an email template.
Email templates enable you to send personalized email to one or more
destinations in a single API operation. For more information, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-personalized-email-api.html).
You can execute this operation no more than once per second.
"""
def create_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateTemplate", input, options)
end
@doc """
Deletes a configuration set.
Configuration sets enable you to publish email sending events. For information
about using configuration sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def delete_configuration_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteConfigurationSet", input, options)
end
@doc """
Deletes a configuration set event destination.
Configuration set event destinations are associated with configuration sets,
which enable you to publish email sending events. For information about using
configuration sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def delete_configuration_set_event_destination(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DeleteConfigurationSetEventDestination",
input,
options
)
end
@doc """
Deletes an association between a configuration set and a custom domain for open
and click event tracking.
By default, images and links used for tracking open and click events are hosted
on domains operated by Amazon SES. You can configure a subdomain of your own to
handle these events. For information about using custom domains, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/configure-custom-open-click-domains.html).
Deleting this kind of association causes emails that are sent using the
specified configuration set to capture open and click events using the
standard, Amazon SES-operated domains.
"""
def delete_configuration_set_tracking_options(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DeleteConfigurationSetTrackingOptions",
input,
options
)
end
@doc """
Deletes an existing custom verification email template.
For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def delete_custom_verification_email_template(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DeleteCustomVerificationEmailTemplate",
input,
options
)
end
@doc """
Deletes the specified identity (an email address or a domain) from the list of
verified identities.
You can execute this operation no more than once per second.
"""
def delete_identity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteIdentity", input, options)
end
@doc """
Deletes the specified sending authorization policy for the given identity (an
email address or a domain).
This API returns successfully even if a policy with the specified name does not
exist.
This API is for the identity owner only. If you have not verified the identity,
this API will return an error.
Sending authorization is a feature that enables an identity owner to authorize
other senders to use its identities. For information about using sending
authorization, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def delete_identity_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteIdentityPolicy", input, options)
end
@doc """
Deletes the specified IP address filter.
For information about managing IP address filters, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-ip-filters.html).
You can execute this operation no more than once per second.
"""
def delete_receipt_filter(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteReceiptFilter", input, options)
end
@doc """
Deletes the specified receipt rule.
For information about managing receipt rules, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def delete_receipt_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteReceiptRule", input, options)
end
@doc """
Deletes the specified receipt rule set and all of the receipt rules it contains.
The currently active rule set cannot be deleted.
For information about managing receipt rule sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def delete_receipt_rule_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteReceiptRuleSet", input, options)
end
@doc """
Deletes an email template.
You can execute this operation no more than once per second.
"""
def delete_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteTemplate", input, options)
end
@doc """
Deprecated.
Use the `DeleteIdentity` operation to delete email addresses and domains.
"""
def delete_verified_email_address(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteVerifiedEmailAddress", input, options)
end
@doc """
Returns the metadata and receipt rules for the receipt rule set that is
currently active.
For information about setting up receipt rule sets, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rule-set.html).
You can execute this operation no more than once per second.
"""
def describe_active_receipt_rule_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeActiveReceiptRuleSet", input, options)
end
@doc """
Returns the details of the specified configuration set.
For information about using configuration sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html).
You can execute this operation no more than once per second.
"""
def describe_configuration_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConfigurationSet", input, options)
end
@doc """
Returns the details of the specified receipt rule.
For information about setting up receipt rules, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def describe_receipt_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeReceiptRule", input, options)
end
@doc """
Returns the details of the specified receipt rule set.
For information about managing receipt rule sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def describe_receipt_rule_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeReceiptRuleSet", input, options)
end
@doc """
Returns the email sending status of the Amazon SES account for the current
region.
You can execute this operation no more than once per second.
"""
def get_account_sending_enabled(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAccountSendingEnabled", input, options)
end
@doc """
Returns the custom email verification template for the template name you
specify.
For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def get_custom_verification_email_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetCustomVerificationEmailTemplate", input, options)
end
@doc """
Returns the current status of Easy DKIM signing for an entity.
For domain name identities, this operation also returns the DKIM tokens that are
required for Easy DKIM signing, and whether Amazon SES has successfully verified
that these tokens have been published.
This operation takes a list of identities as input and returns the following
information for each:
* Whether Easy DKIM signing is enabled or disabled.
* A set of DKIM tokens that represent the identity. If the identity
is an email address, the tokens represent the domain of that address.
* Whether Amazon SES has successfully verified the DKIM tokens
published in the domain's DNS. This information is only returned for domain name
identities, not for email addresses.
This operation is throttled at one request per second and can only get DKIM
attributes for up to 100 identities at a time.
For more information about creating DNS records using DKIM tokens, go to the
[Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim-dns-records.html).
"""
def get_identity_dkim_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetIdentityDkimAttributes", input, options)
end
@doc """
Returns the custom MAIL FROM attributes for a list of identities (email
addresses and/or domains).
This operation is throttled at one request per second and can only get custom
MAIL FROM attributes for up to 100 identities at a time.
"""
def get_identity_mail_from_domain_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetIdentityMailFromDomainAttributes",
input,
options
)
end
@doc """
Given a list of verified identities (email addresses and/or domains), returns a
structure describing identity notification attributes.
This operation is throttled at one request per second and can only get
notification attributes for up to 100 identities at a time.
For more information about using notifications with Amazon SES, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notifications.html).
"""
def get_identity_notification_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetIdentityNotificationAttributes", input, options)
end
@doc """
Returns the requested sending authorization policies for the given identity (an
email address or a domain).
The policies are returned as a map of policy names to policy contents. You can
retrieve a maximum of 20 policies at a time.
This API is for the identity owner only. If you have not verified the identity,
this API will return an error.
Sending authorization is a feature that enables an identity owner to authorize
other senders to use its identities. For information about using sending
authorization, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def get_identity_policies(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetIdentityPolicies", input, options)
end
@doc """
Given a list of identities (email addresses and/or domains), returns the
verification status and (for domain identities) the verification token for each
identity.
The verification status of an email address is "Pending" until the email address
owner clicks the link within the verification email that Amazon SES sent to that
address. If the email address owner clicks the link within 24 hours, the
verification status of the email address changes to "Success". If the link is
not clicked within 24 hours, the verification status changes to "Failed". In
that case, if you still want to verify the email address, you must restart the
verification process from the beginning.
For domain identities, the domain's verification status is "Pending" as Amazon
SES searches for the required TXT record in the DNS settings of the domain. When
Amazon SES detects the record, the domain's verification status changes to
"Success". If Amazon SES is unable to detect the record within 72 hours, the
domain's verification status changes to "Failed." In that case, if you still
want to verify the domain, you must restart the verification process from the
beginning.
This operation is throttled at one request per second and can only get
verification attributes for up to 100 identities at a time.
"""
def get_identity_verification_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetIdentityVerificationAttributes", input, options)
end
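# Sketch (illustrative identities; keys follow the SES
# `GetIdentityVerificationAttributes` API shape):
#
#   input = %{"Identities" => ["example.com", "user@example.com"]}
#   get_identity_verification_attributes(client, input)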
@doc """
Provides the sending limits for the Amazon SES account.
You can execute this operation no more than once per second.
"""
def get_send_quota(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetSendQuota", input, options)
end
@doc """
Provides sending statistics for the current AWS Region.
The result is a list of data points, representing the last two weeks of sending
activity. Each data point in the list contains statistics for a 15-minute period
of time.
You can execute this operation no more than once per second.
"""
def get_send_statistics(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetSendStatistics", input, options)
end
@doc """
Displays the template object (which includes the Subject line, HTML part and
text part) for the template you specify.
You can execute this operation no more than once per second.
"""
def get_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetTemplate", input, options)
end
@doc """
Provides a list of the configuration sets associated with your Amazon SES
account in the current AWS Region.
For information about using configuration sets, see [Monitoring Your Amazon SES Sending
Activity](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html)
in the *Amazon SES Developer Guide.*
You can execute this operation no more than once per second. This operation
returns up to 1,000 configuration sets each time it is run. If your Amazon SES
account has more than 1,000 configuration sets, this operation also returns a
`NextToken` element. You can then execute the `ListConfigurationSets` operation
again, passing the value of the `NextToken` element in the `NextToken`
parameter to retrieve additional results.
"""
def list_configuration_sets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListConfigurationSets", input, options)
end
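# Paging sketch (hypothetical: where exactly `NextToken` appears in the decoded
# response depends on the request layer, so this only shows the call pattern;
# `next_token` stands in for the value taken from the previous page):
#
#   {:ok, first_page, _resp} = list_configuration_sets(client, %{})
#   list_configuration_sets(client, %{"NextToken" => next_token})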
@doc """
Lists the existing custom verification email templates for your account in the
current AWS Region.
For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def list_custom_verification_email_templates(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"ListCustomVerificationEmailTemplates",
input,
options
)
end
@doc """
Returns a list containing all of the identities (email addresses and domains)
for your AWS account in the current AWS Region, regardless of verification
status.
You can execute this operation no more than once per second.
"""
def list_identities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListIdentities", input, options)
end
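# Sketch (parameter names follow the SES `ListIdentities` API; all optional):
#
#   list_identities(client, %{"IdentityType" => "Domain", "MaxItems" => 10})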
@doc """
Returns a list of sending authorization policies that are attached to the given
identity (an email address or a domain).
This API returns only a list. If you want the actual policy content, you can use
`GetIdentityPolicies`.
This API is for the identity owner only. If you have not verified the identity,
this API will return an error.
Sending authorization is a feature that enables an identity owner to authorize
other senders to use its identities. For information about using sending
authorization, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def list_identity_policies(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListIdentityPolicies", input, options)
end
@doc """
Lists the IP address filters associated with your AWS account in the current AWS
Region.
For information about managing IP address filters, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-ip-filters.html).
You can execute this operation no more than once per second.
"""
def list_receipt_filters(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListReceiptFilters", input, options)
end
@doc """
Lists the receipt rule sets that exist under your AWS account in the current AWS
Region.
If there are additional receipt rule sets to be retrieved, you will receive a
`NextToken` that you can provide to the next call to `ListReceiptRuleSets` to
retrieve the additional entries.
For information about managing receipt rule sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def list_receipt_rule_sets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListReceiptRuleSets", input, options)
end
@doc """
Lists the email templates present in your Amazon SES account in the current AWS
Region.
You can execute this operation no more than once per second.
"""
def list_templates(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTemplates", input, options)
end
@doc """
Deprecated.
Use the `ListIdentities` operation to list the email addresses and domains
associated with your account.
"""
def list_verified_email_addresses(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListVerifiedEmailAddresses", input, options)
end
@doc """
Adds or updates the delivery options for a configuration set.
"""
def put_configuration_set_delivery_options(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutConfigurationSetDeliveryOptions", input, options)
end
@doc """
Adds or updates a sending authorization policy for the specified identity (an
email address or a domain).
This API is for the identity owner only. If you have not verified the identity,
this API will return an error.
Sending authorization is a feature that enables an identity owner to authorize
other senders to use its identities. For information about using sending
authorization, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html).
You can execute this operation no more than once per second.
"""
def put_identity_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutIdentityPolicy", input, options)
end
@doc """
Reorders the receipt rules within a receipt rule set.
All of the rules in the rule set must be represented in this request. That is,
this API will return an error if the reorder request doesn't explicitly position
all of the rules.
For information about managing receipt rule sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def reorder_receipt_rule_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ReorderReceiptRuleSet", input, options)
end
@doc """
Generates and sends a bounce message to the sender of an email you received
through Amazon SES.
You can only use this API on an email up to 24 hours after you receive it.
You cannot use this API to send generic bounces for mail that was not received
by Amazon SES.
For information about receiving email through Amazon SES, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email.html).
You can execute this operation no more than once per second.
"""
def send_bounce(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendBounce", input, options)
end
@doc """
Composes an email message to multiple destinations.
The message body is created using an email template.
In order to send email using the `SendBulkTemplatedEmail` operation, your call
to the API must meet the following requirements:
* The call must refer to an existing email template. You can create
email templates using the `CreateTemplate` operation.
* The message must be sent from a verified email address or domain.
* If your account is still in the Amazon SES sandbox, you may only
send to verified addresses or domains, or to email addresses associated with the
Amazon SES Mailbox Simulator. For more information, see [Verifying Email Addresses and
Domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html)
in the *Amazon SES Developer Guide.*
* The maximum message size is 10 MB.
* Each `Destination` parameter must include at least one recipient
email address. The recipient address can be a To: address, a CC: address, or a
BCC: address. If a recipient email address is invalid (that is, it is not in the
format *UserName@[SubDomain.]Domain.TopLevelDomain*), the entire message will be
rejected, even if the message contains other recipients that are valid.
* The message may not include more than 50 recipients, across the
To:, CC: and BCC: fields. If you need to send an email message to a larger
audience, you can divide your recipient list into groups of 50 or fewer, and
then call the `SendBulkTemplatedEmail` operation several times to send the
message to each group.
* The number of destinations you can contact in a single call to the
API may be limited by your account's maximum sending rate.
"""
def send_bulk_templated_email(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendBulkTemplatedEmail", input, options)
end
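# A hedged input sketch (keys follow the SES `SendBulkTemplatedEmail` API;
# addresses, template name and data are placeholders):
#
#   input = %{
#     "Source" => "sender@example.com",
#     "Template" => "MyTemplate",
#     "DefaultTemplateData" => ~s({"name": "friend"}),
#     "Destinations" => [
#       %{
#         "Destination" => %{"ToAddresses" => ["a@example.com"]},
#         "ReplacementTemplateData" => ~s({"name": "Ana"})
#       }
#     ]
#   }
#   send_bulk_templated_email(client, input)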
@doc """
Adds an email address to the list of identities for your Amazon SES account in
the current AWS Region and attempts to verify it.
As a result of executing this operation, a customized verification email is sent
to the specified address.
To use this operation, you must first create a custom verification email
template. For more information about creating and using custom verification
email templates, see [Using Custom Verification Email Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def send_custom_verification_email(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendCustomVerificationEmail", input, options)
end
@doc """
Composes an email message and immediately queues it for sending.
In order to send email using the `SendEmail` operation, your message must meet
the following requirements:
* The message must be sent from a verified email address or domain.
If you attempt to send email using a non-verified address or domain, the
operation will result in an "Email address not verified" error.
* If your account is still in the Amazon SES sandbox, you may only
send to verified addresses or domains, or to email addresses associated with the
Amazon SES Mailbox Simulator. For more information, see [Verifying Email Addresses and
Domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html)
in the *Amazon SES Developer Guide.*
* The maximum message size is 10 MB.
* The message must include at least one recipient email address. The
recipient address can be a To: address, a CC: address, or a BCC: address. If a
recipient email address is invalid (that is, it is not in the format
*UserName@[SubDomain.]Domain.TopLevelDomain*), the entire message will be rejected, even if the message contains other recipients that are valid.
* The message may not include more than 50 recipients, across the
To:, CC: and BCC: fields. If you need to send an email message to a larger
audience, you can divide your recipient list into groups of 50 or fewer, and
then call the `SendEmail` operation several times to send the message to each
group.
For every message that you send, the total number of recipients (including each
recipient in the To:, CC: and BCC: fields) is counted against the maximum number
of emails you can send in a 24-hour period (your *sending quota*). For more
information about sending quotas in Amazon SES, see [Managing Your Amazon SES
Sending
Limits](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/manage-sending-limits.html)
in the *Amazon SES Developer Guide.*
"""
def send_email(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendEmail", input, options)
end
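# A hedged example of the `SendEmail` input shape (keys follow the SES API;
# addresses are placeholders and the return value depends on
# `Request.request_post/5`):
#
#   input = %{
#     "Source" => "sender@example.com",
#     "Destination" => %{"ToAddresses" => ["recipient@example.com"]},
#     "Message" => %{
#       "Subject" => %{"Data" => "Hello"},
#       "Body" => %{"Text" => %{"Data" => "Hello from SES"}}
#     }
#   }
#   send_email(client, input)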
@doc """
Composes an email message and immediately queues it for sending.
This operation is more flexible than the `SendEmail` API operation. When you use
the `SendRawEmail` operation, you can specify the headers of the message as well
as its content. This flexibility is useful, for example, when you want to send a
multipart MIME email (such as a message that contains both a text and an HTML
version). You can also use this operation to send messages that include
attachments.
The `SendRawEmail` operation has the following requirements:
* You can only send email from [verified email addresses or domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html).
If you try to send email from an address that isn't verified, the operation
results in an "Email address not verified" error.
* If your account is still in the [Amazon SES sandbox](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/request-production-access.html),
you can only send email to other verified addresses in your account, or to
addresses that are associated with the [Amazon SES mailbox simulator](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/mailbox-simulator.html).
* The maximum message size, including attachments, is 10 MB.
* Each message has to include at least one recipient address. A
recipient address includes any address on the To:, CC:, or BCC: lines.
* If you send a single message to more than one recipient address,
and one of the recipient addresses isn't in a valid format (that is, it's not in
the format *UserName@[SubDomain.]Domain.TopLevelDomain*), Amazon SES rejects the entire message, even if the other addresses are valid.
* Each message can include up to 50 recipient addresses across the
To:, CC:, or BCC: lines. If you need to send a single message to more than 50
recipients, you have to split the list of recipient addresses into groups of
less than 50 recipients, and send separate messages to each group.
* Amazon SES allows you to specify 8-bit Content-Transfer-Encoding
for MIME message parts. However, if Amazon SES has to modify the contents of
your message (for example, if you use open and click tracking), 8-bit content
isn't preserved. For this reason, we highly recommend that you encode all
content that isn't 7-bit ASCII. For more information, see [MIME
Encoding](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-raw.html#send-email-mime-encoding)
in the *Amazon SES Developer Guide*.
Additionally, keep the following considerations in mind when using the
`SendRawEmail` operation:
* Although you can customize the message headers when using the
`SendRawEmail` operation, Amazon SES will automatically apply its own
`Message-ID` and `Date` headers; if you passed these headers when creating the
message, they will be overwritten by the values that Amazon SES provides.
* If you are using sending authorization to send on behalf of
another user, `SendRawEmail` enables you to specify the cross-account identity
for the email's Source, From, and Return-Path parameters in one of two ways: you
can pass optional parameters `SourceArn`, `FromArn`, and/or `ReturnPathArn` to
the API, or you can include the following X-headers in the header of your raw
email:
* `X-SES-SOURCE-ARN`
* `X-SES-FROM-ARN`
* `X-SES-RETURN-PATH-ARN`
Don't include these X-headers in the DKIM signature. Amazon SES removes these
before it sends the email.
If you only specify the `SourceIdentityArn` parameter, Amazon SES sets the From
and Return-Path addresses to the same identity that you specified.
For more information about sending authorization, see the [Using Sending Authorization with Amazon
SES](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html)
in the *Amazon SES Developer Guide.*
* For every message that you send, the total number of recipients
(including each recipient in the To:, CC: and BCC: fields) is counted against
the maximum number of emails you can send in a 24-hour period (your *sending
quota*). For more information about sending quotas in Amazon SES, see [Managing Your Amazon SES Sending
Limits](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/manage-sending-limits.html)
in the *Amazon SES Developer Guide.*
"""
def send_raw_email(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendRawEmail", input, options)
end
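# Sketch (assumes `raw_message` holds a complete MIME message; the SES API
# expects the `RawMessage.Data` value base64-encoded):
#
#   input = %{"RawMessage" => %{"Data" => Base.encode64(raw_message)}}
#   send_raw_email(client, input)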
@doc """
Composes an email message using an email template and immediately queues it for
sending.
In order to send email using the `SendTemplatedEmail` operation, your call to
the API must meet the following requirements:
* The call must refer to an existing email template. You can create
email templates using the `CreateTemplate` operation.
* The message must be sent from a verified email address or domain.
* If your account is still in the Amazon SES sandbox, you may only
send to verified addresses or domains, or to email addresses associated with the
Amazon SES Mailbox Simulator. For more information, see [Verifying Email Addresses and
Domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html)
in the *Amazon SES Developer Guide.*
* The maximum message size is 10 MB.
* Calls to the `SendTemplatedEmail` operation may only include one
`Destination` parameter. A destination is a set of recipients who will receive
the same version of the email. The `Destination` parameter can include up to 50
recipients, across the To:, CC: and BCC: fields.
* The `Destination` parameter must include at least one recipient
email address. The recipient address can be a To: address, a CC: address, or a
BCC: address. If a recipient email address is invalid (that is, it is not in the
format *UserName@[SubDomain.]Domain.TopLevelDomain*), the entire message will be rejected, even if the message contains other recipients that are valid.
If your call to the `SendTemplatedEmail` operation includes all of the required
parameters, Amazon SES accepts it and returns a Message ID. However, if Amazon
SES can't render the email because the template contains errors, it doesn't send
the email. Additionally, because it already accepted the message, Amazon SES
doesn't return a message stating that it was unable to send the email.
For these reasons, we highly recommend that you set up Amazon SES to send you
notifications when Rendering Failure events occur. For more information, see
[Sending Personalized Email Using the Amazon SES
API](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-personalized-email-api.html)
in the *Amazon Simple Email Service Developer Guide*.
"""
def send_templated_email(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendTemplatedEmail", input, options)
end
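# Illustrative call (template name and data are placeholders; per the SES API,
# `TemplateData` is a JSON-encoded string):
#
#   input = %{
#     "Source" => "sender@example.com",
#     "Destination" => %{"ToAddresses" => ["recipient@example.com"]},
#     "Template" => "MyTemplate",
#     "TemplateData" => ~s({"name": "Alejandro"})
#   }
#   send_templated_email(client, input)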
@doc """
Sets the specified receipt rule set as the active receipt rule set.
To disable email receiving through Amazon SES completely, call this API with
`RuleSetName` set to null.
For information about managing receipt rule sets, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rule-sets.html).
You can execute this operation no more than once per second.
"""
def set_active_receipt_rule_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetActiveReceiptRuleSet", input, options)
end
@doc """
Enables or disables Easy DKIM signing of email sent from an identity.
If Easy DKIM signing is enabled for a domain, then Amazon SES uses DKIM to sign
all email that it sends from addresses on that domain. If Easy DKIM signing is
enabled for an email address, then Amazon SES uses DKIM to sign all email it
sends from that address.
For email addresses (for example, `<EMAIL>`), you can only enable DKIM
signing if the corresponding domain (in this case, `example.com`) has been set
up to use Easy DKIM.
You can enable DKIM signing for an identity at any time after you start the
verification process for the identity, even if the verification process isn't
complete.
You can execute this operation no more than once per second.
For more information about Easy DKIM signing, go to the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim.html).
"""
def set_identity_dkim_enabled(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetIdentityDkimEnabled", input, options)
end
@doc """
Given an identity (an email address or a domain), enables or disables whether
Amazon SES forwards bounce and complaint notifications as email.
Feedback forwarding can only be disabled when Amazon Simple Notification Service
(Amazon SNS) topics are specified for both bounces and complaints.
Feedback forwarding does not apply to delivery notifications. Delivery
notifications are only available through Amazon SNS.
You can execute this operation no more than once per second.
For more information about using notifications with Amazon SES, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notifications.html).
"""
def set_identity_feedback_forwarding_enabled(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"SetIdentityFeedbackForwardingEnabled",
input,
options
)
end
@doc """
Given an identity (an email address or a domain), sets whether Amazon SES
includes the original email headers in the Amazon Simple Notification Service
(Amazon SNS) notifications of a specified type.
You can execute this operation no more than once per second.
For more information about using notifications with Amazon SES, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notifications.html).
"""
def set_identity_headers_in_notifications_enabled(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"SetIdentityHeadersInNotificationsEnabled",
input,
options
)
end
@doc """
Enables or disables the custom MAIL FROM domain setup for a verified identity
(an email address or a domain).
To send emails using the specified MAIL FROM domain, you must add an MX record
to your MAIL FROM domain's DNS settings. If you want your emails to pass Sender
Policy Framework (SPF) checks, you must also add or update an SPF record. For
more information, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/mail-from-set.html).
You can execute this operation no more than once per second.
"""
def set_identity_mail_from_domain(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetIdentityMailFromDomain", input, options)
end
@doc """
Sets an Amazon Simple Notification Service (Amazon SNS) topic to use when
delivering notifications.
When you use this operation, you specify a verified identity, such as an email
address or domain. When you send an email that uses the chosen identity in the
Source field, Amazon SES sends notifications to the topic you specified. You can
send bounce, complaint, or delivery notifications (or any combination of the
three) to the Amazon SNS topic that you specify.
You can execute this operation no more than once per second.
For more information about feedback notification, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notifications.html).
"""
def set_identity_notification_topic(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetIdentityNotificationTopic", input, options)
end
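# Sketch (identity and ARN are placeholders; `NotificationType` is one of
# "Bounce", "Complaint" or "Delivery" per the SES API):
#
#   input = %{
#     "Identity" => "example.com",
#     "NotificationType" => "Bounce",
#     "SnsTopic" => "arn:aws:sns:us-east-1:123456789012:ses-bounces"
#   }
#   set_identity_notification_topic(client, input)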
@doc """
Sets the position of the specified receipt rule in the receipt rule set.
For information about managing receipt rules, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def set_receipt_rule_position(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetReceiptRulePosition", input, options)
end
@doc """
Creates a preview of the MIME content of an email when provided with a template
and a set of replacement data.
You can execute this operation no more than once per second.
"""
def test_render_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TestRenderTemplate", input, options)
end
@doc """
Enables or disables email sending across your entire Amazon SES account in the
current AWS Region.
You can use this operation in conjunction with Amazon CloudWatch alarms to
temporarily pause email sending across your Amazon SES account in a given AWS
Region when reputation metrics (such as your bounce or complaint rates) reach
certain thresholds.
You can execute this operation no more than once per second.
"""
def update_account_sending_enabled(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateAccountSendingEnabled", input, options)
end
@doc """
Updates the event destination of a configuration set.
Event destinations are associated with configuration sets, which enable you to
publish email sending events to Amazon CloudWatch, Amazon Kinesis Firehose, or
Amazon Simple Notification Service (Amazon SNS). For information about using
configuration sets, see [Monitoring Your Amazon SES Sending Activity](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/monitor-sending-activity.html)
in the *Amazon SES Developer Guide.*
When you create or update an event destination, you must provide one, and only
one, destination. The destination can be Amazon CloudWatch, Amazon Kinesis
Firehose, or Amazon Simple Notification Service (Amazon SNS).
You can execute this operation no more than once per second.
"""
def update_configuration_set_event_destination(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"UpdateConfigurationSetEventDestination",
input,
options
)
end
@doc """
Enables or disables the publishing of reputation metrics for emails sent using a
specific configuration set in a given AWS Region.
Reputation metrics include bounce and complaint rates. These metrics are
published to Amazon CloudWatch. By using CloudWatch, you can create alarms when
bounce or complaint rates exceed certain thresholds.
You can execute this operation no more than once per second.
"""
def update_configuration_set_reputation_metrics_enabled(
%Client{} = client,
input,
options \\ []
) do
Request.request_post(
client,
metadata(),
"UpdateConfigurationSetReputationMetricsEnabled",
input,
options
)
end
@doc """
Enables or disables email sending for messages sent using a specific
configuration set in a given AWS Region.
You can use this operation in conjunction with Amazon CloudWatch alarms to
temporarily pause email sending for a configuration set when the reputation
metrics for that configuration set (such as your bounce or complaint rate)
exceed certain thresholds.
You can execute this operation no more than once per second.
"""
def update_configuration_set_sending_enabled(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"UpdateConfigurationSetSendingEnabled",
input,
options
)
end
@doc """
Modifies an association between a configuration set and a custom domain for open
and click event tracking.
By default, images and links used for tracking open and click events are hosted
on domains operated by Amazon SES. You can configure a subdomain of your own to
handle these events. For information about using custom domains, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/configure-custom-open-click-domains.html).
"""
def update_configuration_set_tracking_options(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"UpdateConfigurationSetTrackingOptions",
input,
options
)
end
@doc """
Updates an existing custom verification email template.
For more information about custom verification email templates, see [Using Custom Verification Email
Templates](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/custom-verification-emails.html)
in the *Amazon SES Developer Guide*.
You can execute this operation no more than once per second.
"""
def update_custom_verification_email_template(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"UpdateCustomVerificationEmailTemplate",
input,
options
)
end
@doc """
Updates a receipt rule.
For information about managing receipt rules, see the [Amazon SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/receiving-email-managing-receipt-rules.html).
You can execute this operation no more than once per second.
"""
def update_receipt_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateReceiptRule", input, options)
end
@doc """
Updates an email template.
Email templates enable you to send personalized email to one or more
destinations in a single API operation. For more information, see the [Amazon SES Developer
Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-personalized-email-api.html).
You can execute this operation no more than once per second.
"""
def update_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateTemplate", input, options)
end
@doc """
Returns a set of DKIM tokens for a domain identity.
When you execute the `VerifyDomainDkim` operation, the domain that you specify
is added to the list of identities that are associated with your account. This
is true even if you haven't already associated the domain with your account by
using the `VerifyDomainIdentity` operation. However, you can't send email from
the domain until you either successfully [verify it](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-domains.html)
or you successfully [set up DKIM for it](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim.html).
You use the tokens that are generated by this operation to create CNAME records.
When Amazon SES detects that you've added these records to the DNS configuration
for a domain, you can start sending email from that domain. You can start
sending email even if you haven't added the TXT record provided by the
`VerifyDomainIdentity` operation to the DNS configuration for your domain. All
email that you send from the domain is authenticated using DKIM.
To create the CNAME records for DKIM authentication, use the following values:
* **Name**: *token*._domainkey.*example.com*
* **Type**: CNAME
* **Value**: *token*.dkim.amazonses.com
In the preceding example, replace *token* with one of the tokens that are
generated when you execute this operation. Replace *example.com* with your
domain. Repeat this process for each token that's generated by this operation.
You can execute this operation no more than once per second.
"""
def verify_domain_dkim(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "VerifyDomainDkim", input, options)
end
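# Sketch (domain is a placeholder; the response carries the DKIM tokens used
# for the CNAME records described above):
#
#   verify_domain_dkim(client, %{"Domain" => "example.com"})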
@doc """
Adds a domain to the list of identities for your Amazon SES account in the
current AWS Region and attempts to verify it.
For more information about verifying domains, see [Verifying Email Addresses and Domains](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-addresses-and-domains.html)
in the *Amazon SES Developer Guide.*
You can execute this operation no more than once per second.
"""
def verify_domain_identity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "VerifyDomainIdentity", input, options)
end
@doc """
Deprecated.
Use the `VerifyEmailIdentity` operation to verify a new email address.
"""
def verify_email_address(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "VerifyEmailAddress", input, options)
end
@doc """
Adds an email address to the list of identities for your Amazon SES account in
the current AWS region and attempts to verify it.
As a result of executing this operation, a verification email is sent to the
specified address.
You can execute this operation no more than once per second.
"""
def verify_email_identity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "VerifyEmailIdentity", input, options)
end
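# Sketch (address is a placeholder):
#
#   verify_email_identity(client, %{"EmailAddress" => "user@example.com"})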
end
# source: lib/aws/generated/ses.ex
defmodule Kojin.Pod.PodObject do
@moduledoc """
Module for defining plain old data objects, independent of target language
"""
alias Kojin.Pod.{PodField, PodObject, PodTypeRef, PodTypes}
use TypedStruct
@typedoc """
A plain old data object, with an `id`, a `doc` comment and a
list of fields.
"""
typedstruct do
field(:id, atom, enforce: true)
field(:doc, String.t())
field(:fields, list(PodField.t()), default: [])
field(:properties, map(), default: %{})
end
@doc """
Creates a `Kojin.Pod.PodObject` given:
- `id`: Identifier for the object
- `doc`: Documentation on the object type
- `fields`: List of fields in the object
## Examples
iex> alias Kojin.Pod.{PodObject, PodField, PodType}
...> import Kojin.Pod.{PodObject, PodField}
...> point = pod_object(:point, "A 2 dimensional point", [
...> pod_field(:x, "Abcissa", :int32),
...> pod_field(:y, "Ordinate", :int32)
...> ])
...> (%PodObject{
...> id: :point,
...> doc: "A 2 dimensional point",
...> fields: [
...> %PodField{
...> id: :x,
...> doc: "Abcissa",
...> type: %PodType{ id: :int32 }
...> },
...> %PodField{
...> id: :y,
...> doc: "Ordinate",
...> type: %PodType{ id: :int32 }
...> }
...> ]
...> } = point) && true
true
Converts list of field parameters into list of fields:
iex> alias Kojin.Pod.{PodObject, PodField, PodType}
...> import Kojin.Pod.{PodObject}
...> point = pod_object(:point, "A 2 dimensional point", [
...> [:x, "Abcissa", :int32],
...> [:y, "Ordinate", :int32]
...> ])
...> (%PodObject{
...> id: :point,
...> doc: "A 2 dimensional point",
...> fields: [
...> %PodField{
...> id: :x,
...> doc: "Abcissa",
...> type: %PodType{ id: :int32 }
...> },
...> %PodField{
...> id: :y,
...> doc: "Ordinate",
...> type: %PodType{ id: :int32 }
...> }
...> ]
...> } = point) && true
true
"""
def pod_object(id, doc, fields, opts \\ []) when is_atom(id) and is_binary(doc) do
opts =
Kojin.check_args(
[
properties: %{}
],
opts
)
%PodObject{
id: id,
doc: doc,
fields: fields |> Enum.map(fn field -> PodField.pod_field(field) end),
properties: opts[:properties]
}
end
@doc """
Returns all distinct types referenced in the `PodObject` (non-recursive).
Note: an array is not itself represented as a type; only its element type is
collected.
## Examples
iex> import Kojin.Pod.{PodObject, PodField, PodArray}
...> all_types(pod_object(:x, "x", [ pod_field(:f, "f", array_of(:t))]))
MapSet.new([Kojin.Pod.PodTypes.pod_type(:t)])
"""
def all_types(%PodObject{} = pod_object) do
pod_object.fields
|> Enum.reduce(MapSet.new(), fn pod_field, acc ->
# put in the referred type if there is one, or the standard type
MapSet.put(acc, PodTypes.ref_type(pod_field.type) || pod_field.type)
end)
end
@doc """
Returns all distinct ref types referenced in the `PodObject` (non-recursive)
"""
def all_ref_types(%PodObject{} = pod_object) do
for(
%PodTypeRef{} = elm <- Enum.map(all_types(pod_object), fn t -> PodTypes.ref_type(t) end),
do: elm
)
|> MapSet.new()
end
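# A small usage sketch of the two queries above (reusing the `:point` doctest
# example; `all_ref_types/1` is non-empty only for fields whose types are
# `PodTypeRef`s):
#
#   point = pod_object(:point, "A 2 dimensional point", [[:x, "Abcissa", :int32]])
#   all_types(point)     #=> MapSet containing the `:int32` PodType
#   all_ref_types(point) #=> empty MapSet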
end
# source: lib/kojin/pod/pod_object.ex
defmodule Validation do
@moduledoc """
> **Easy. Simple. Powerful.**
>
> Elixir Validation library with +25 fully tested rules. *(+30 coming up soon!)*
[![Build Status](https://travis-ci.org/elixir-validation/validation.svg?branch=master)](https://travis-ci.org/elixir-validation/validation)
[![Build status](https://ci.appveyor.com/api/projects/status/xu5j5rrlx76hlcsu?svg=true)](https://ci.appveyor.com/project/elixir-validation/validation)
[![Coverage Status](https://coveralls.io/repos/github/elixir-validation/validation/badge.svg?branch=master)](https://coveralls.io/github/elixir-validation/validation?branch=master)
[![Inline docs](https://inch-ci.org/github/elixir-validation/validation.svg?branch=master)](https://inch-ci.org/github/elixir-validation/validation)
[![Hex Version](https://img.shields.io/hexpm/v/validation.svg)](https://hex.pm/packages/validation)
[![hex.pm downloads](https://img.shields.io/hexpm/dt/validation.svg)](https://hex.pm/packages/validation)
[![HitCount](http://hits.dwyl.io/elixir-validation/validation.svg)](https://github.com/elixir-validation/validation)
[![License](https://img.shields.io/badge/License-MIT-green.svg)](https://opensource.org/licenses/MIT)
\*\* Library under development! [Roadmap: upcoming new rules](https://github.com/elixir-validation/validation#roadmap-30-coming-up-soon).
# Demo
```
# alias Validation, as: V
# valid data will return true
V.email?("<EMAIL>")
V.uuid?("e4eaaaf2-d142-11e1-b3e4-080027620cdd")
V.credit_card?("4882743696073832")
V.mac_address?("12-77-0E-42-E4-65")
V.consonant?("bcdfgh")
V.country_code?("US")
# invalid data will return false
V.language_code?("qq") # could be something like "en", "pt" or "de"
V.vowel?("bcdf") # could be something like "aeiou"
V.yes?("nope") # could be something like "yes", "yeah", "yeap"
V.odd?(2) # could be something like 3 or 5...
```
"""
@doc """
Validates whether the input is alpha.
# true
V.alpha?("")
V.alpha?("a")
V.alpha?("john")
V.alpha?("doe")
V.alpha?("foobar")
# false
V.alpha?("123")
V.alpha?("number 100%")
V.alpha?("@#$")
V.alpha?("_")
V.alpha?("dgç")
"""
@spec alpha?(String.t) :: boolean
def alpha?(input) when is_binary(input) do
Validation.Rules.Alpha.validate?(input)
end
@doc """
Validates whether the input is alpha, excluding specific characters.
# true
V.alpha?("john_", "_")
V.alpha?("google.com", ".")
V.alpha?("<NAME>", " ")
# false
V.alpha?("john_123", "$")
V.alpha?("google.com321", "*")
V.alpha?("<NAME>", "_")
"""
@spec alpha?(String.t, String.t) :: boolean
def alpha?(input, excluded_characters) when is_binary(input) and is_binary(excluded_characters) do
Validation.Rules.Alpha.validate?(input, excluded_characters)
end
@doc """
Validates whether the input is alphanumeric.
# true
V.alphanumeric?("foo123")
V.alphanumeric?("100number")
# false
V.alphanumeric?("number 100%")
V.alphanumeric?("foo_bar")
"""
@spec alphanumeric?(String.t) :: boolean
def alphanumeric?(input) when is_binary(input) do
Validation.Rules.Alphanumeric.validate?(input)
end
@doc """
Validates whether the input is alphanumeric, excluding specific characters.
# true
V.alphanumeric?("foo 123", " ")
V.alphanumeric?("foo_123", "_")
# false
V.alphanumeric?("number 100%", "%")
V.alphanumeric?("foo_bar", "%")
"""
@spec alphanumeric?(String.t, String.t) :: boolean
def alphanumeric?(input, excluded_characters) when is_binary(input) and is_binary(excluded_characters) do
Validation.Rules.Alphanumeric.validate?(input, excluded_characters)
end
@doc """
Validates ranges.
# true
V.between?(15, 10, 20)
V.between?(70, 0, 100)
# false
V.between?(15, 20, 30)
V.between?(70, 300, 999)
"""
@spec between?(number, number, number) :: boolean
def between?(value, min, max) when is_number(value) and is_number(min) and is_number(max) do
Validation.Rules.Between.validate?(value, min, max)
end
@doc """
Validates whether the input is a valid CNH (brazilian driver license ID).
# true
V.cnh?("02650306461")
V.cnh?("04397322870")
V.cnh?("04375701302")
V.cnh?("02996843266")
V.cnh?("04375700501")
# false
V.cnh?("")
V.cnh?("0000000000")
V.cnh?("9999999999")
V.cnh?("0265131640")
V.cnh?("0439732280")
"""
@spec cnh?(String.t) :: boolean
def cnh?(input) when is_binary(input) do
Validation.Rules.CNH.validate?(input)
end
@doc """
Validates whether the input is a valid CNPJ (brazilian company ID).
# true
V.cnpj?("32.063.364/0001-07")
V.cnpj?("27.355.204/0001-00")
V.cnpj?("36.310.327/0001-07")
V.cnpj?("37550610000179")
V.cnpj?("12774546000189")
# false
V.cnpj?("12.345.678/9012-34")
V.cnpj?("11.111.111/1111-11")
V.cnpj?("00000000000000")
V.cnpj?("99-010-0.")
"""
@spec cnpj?(String.t) :: boolean
def cnpj?(input) when is_binary(input) do
Validation.Rules.CNPJ.validate?(input)
end
@doc """
Validates country codes like US, DE or BR according to [ISO 639](https://en.wikipedia.org/wiki/ISO_639).
# true
V.country_code?("US", :alpha2)
V.country_code?("USA", :alpha3)
V.country_code?("BR", :alpha2)
V.country_code?("BRA", :alpha3)
V.country_code?("DE", :alpha2)
V.country_code?("DEU", :alpha3)
V.country_code?("076", :numeric) # Brazil
# false
V.country_code?("USAAAAA", :unknown_type)
The rules use data from [iso-codes](https://salsa.debian.org/iso-codes-team/iso-codes).
"""
@spec country_code?(String.t, atom) :: boolean
def country_code?(input, type \\ :alpha2) when is_binary(input) and is_atom(type) do
Validation.Rules.CountryCode.validate?(input, type)
end
@doc """
Validates whether the input is a valid CPF (brazilian ID).
# true
V.cpf?("350.45261819")
V.cpf?("693-319-118-40")
V.cpf?("11598647644")
V.cpf?("86734718697")
V.cpf?("3.6.8.8.9.2.5.5.4.8.8")
# false
V.cpf?("")
V.cpf?("01234567890")
V.cpf?("000.000.000-00")
V.cpf?("111.222.444-05")
V.cpf?("999999999.99")
V.cpf?("8.8.8.8.8.8.8.8.8.8.8")
V.cpf?("693-319-110-40")
"""
@spec cpf?(String.t) :: boolean
def cpf?(input) when is_binary(input) do
Validation.Rules.CPF.validate?(input)
end
@doc """
Validates whether the input is a valid credit card.
# true
V.credit_card?("2223000048400011")
V.credit_card?("2222 4000 4124 0011")
V.credit_card?("4024.0071.5336.1885")
# false
V.credit_card?("it isnt my credit c)ard number")
V.credit_card?("1234 1234 1234 1234")
V.credit_card?("1234.1234.1234.12__34")
"""
@spec credit_card?(String.t) :: boolean
def credit_card?(input) when is_binary(input) do
Validation.Rules.CreditCard.validate?(input)
end
@doc """
Validates whether the input is a valid American Express (amex) credit card.
# true
V.credit_card_amex?("340-3161-9380-9364")
# false
V.credit_card_amex?("5376 7473 9720 8720") # master
V.credit_card_amex?("4024.0071.5336.1885") # visa
"""
@spec credit_card_amex?(String.t) :: boolean
def credit_card_amex?(input) when is_binary(input) do
Validation.Rules.CreditCard.Amex.validate?(input)
end
@doc """
Validates whether the input is a valid Diners credit card.
# true
V.credit_card_diners?("30351042633884")
# false
V.credit_card_diners?("5376 7473 9720 8720") # master
V.credit_card_diners?("4024.0071.5336.1885") # visa
"""
@spec credit_card_diners?(String.t) :: boolean
def credit_card_diners?(input) when is_binary(input) do
Validation.Rules.CreditCard.Diners.validate?(input)
end
@doc """
Validates whether the input is a valid Discover credit card.
# true
V.credit_card_discover?("6011000990139424")
# false
V.credit_card_discover?("5376 7473 9720 8720") # master
V.credit_card_discover?("4024.0071.5336.1885") # visa
"""
@spec credit_card_discover?(String.t) :: boolean
def credit_card_discover?(input) when is_binary(input) do
Validation.Rules.CreditCard.Discover.validate?(input)
end
@doc """
Validates whether the input is a valid Jcb credit card.
# true
V.credit_card_jcb?("3566002020360505")
# false
V.credit_card_jcb?("5376 7473 9720 8720") # master
V.credit_card_jcb?("4024.0071.5336.1885") # visa
"""
@spec credit_card_jcb?(String.t) :: boolean
def credit_card_jcb?(input) when is_binary(input) do
Validation.Rules.CreditCard.Jcb.validate?(input)
end
@doc """
Validates whether the input is a valid Master credit card.
# true
V.credit_card_master?("5376 7473 9720 8720")
# false
V.credit_card_master?("340-3161-9380-9364") # amex
V.credit_card_master?("4024.0071.5336.1885") # visa
"""
@spec credit_card_master?(String.t) :: boolean
def credit_card_master?(input) when is_binary(input) do
Validation.Rules.CreditCard.Master.validate?(input)
end
@doc """
Validates whether the input is a valid VISA credit card.
# true
V.credit_card_visa?("4024 007 193 879")
V.credit_card_visa?("4024.0071.5336.1885")
# false
V.credit_card_visa?("340-3161-9380-9364") # amex
V.credit_card_visa?("5376 7473 9720 8720") # master
"""
@spec credit_card_visa?(String.t) :: boolean
def credit_card_visa?(input) when is_binary(input) do
Validation.Rules.CreditCard.Visa.validate?(input)
end
@doc """
Validates a currency code like USD, EUR or GBP according to [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217).
# true
V.currency_code?("USD")
V.currency_code?("EUR")
V.currency_code?("GBP")
# false
V.currency_code?("QQQ")
V.currency_code?("2---1")
V.currency_code?("nope")
The rules use data from [iso-codes](https://salsa.debian.org/iso-codes-team/iso-codes).
"""
@spec currency_code?(String.t) :: boolean
def currency_code?(input) when is_binary(input) do
Validation.Rules.CurrencyCode.validate?(input)
end
@doc """
Validates whether the input has only consonants.
# true
V.consonant?("w")
V.consonant?("y")
V.consonant?("qrst")
V.consonant?("bcdfghklmnp")
# false
V.consonant?("a")
V.consonant?("ul")
V.consonant?("aeiou")
V.consonant?("Foo")
"""
@spec consonant?(String.t) :: boolean
def consonant?(input) when is_binary(input) do
Validation.Rules.Consonant.validate?(input)
end
@doc """
Validates whether the input has only consonants, excluding specific characters.
# true
V.consonant?("www%", "%")
V.consonant?("bcd_fgh", "_")
V.consonant?("www www__www", " _")
# false
V.consonant?("awww%", "%")
V.consonant?("uwwwq", "_")
"""
@spec consonant?(String.t, String.t) :: boolean
def consonant?(input, excluded_characters) when is_binary(input) and is_binary(excluded_characters) do
Validation.Rules.Consonant.validate?(input, excluded_characters)
end
@doc """
Validates whether the input contains only digits.
# true
V.digit?("165")
V.digit?("01650")
V.digit?("01")
# false
V.digit?("")
V.digit?("-1.1")
V.digit?("-12")
V.digit?("1.0")
"""
@spec digit?(String.t) :: boolean
def digit?(input) when is_binary(input) do
Validation.Rules.Digit.validate?(input)
end
@doc """
Validates whether the input contains only digits, excluding specific characters.
# true
V.digit?("1.0", ".")
V.digit?("16-50", "-")
# false
V.digit?("")
V.digit?("1.%0", ".")
V.digit?("3333316-5.0/", "-.")
"""
@spec digit?(String.t, String.t) :: boolean
def digit?(input, excluded_characters) when is_binary(input) and is_binary(excluded_characters) do
Validation.Rules.Digit.validate?(input, excluded_characters)
end
@doc """
Validates whether the input is a valid email. See [RFC822](http://www.ex-parrot.com/~pdw/Mail-RFC822-Address.html)
# true
V.email?("<EMAIL>")
V.email?("<EMAIL>")
V.email?("<EMAIL>")
# false
V.email?("plainaddress")
V.email?("#@%^%#$@#$@#.com")
V.email?("<EMAIL>.")
V.email?(".<EMAIL>")
V.email?("@domain.com")
"""
@spec email?(String.t) :: boolean
def email?(input) when is_binary(input) do
Validation.Rules.Email.validate?(input)
end
@doc """
Validates whether the input is even.
# true
V.even?(0)
V.even?(2)
V.even?(4)
V.even?(-2)
V.even?(9999998)
# false
V.even?(1)
V.even?(5)
V.even?(-3)
V.even?(9999999)
"""
@spec even?(Integer.t) :: boolean
def even?(input) when is_integer(input) do
Validation.Rules.Even.validate?(input)
end
@doc """
Validates a language code according to [ISO 639](https://en.wikipedia.org/wiki/ISO_639):
# true
V.language_code?("en")
V.language_code?("pt")
V.language_code?("en", :alpha2)
V.language_code?("pt", :alpha2)
V.language_code?("it", :alpha2)
V.language_code?("eng", :alpha3)
V.language_code?("por", :alpha3)
V.language_code?("ita", :alpha3)
# false
V.language_code?("hi", :alpha2)
V.language_code?("foo", :alpha3)
You can choose between alpha-2 and alpha-3, alpha-2 is set by default.
The rules use data from [iso-codes](https://salsa.debian.org/iso-codes-team/iso-codes).
"""
@spec language_code?(String.t, atom) :: boolean
def language_code?(input, type \\ :alpha2) when is_binary(input) and is_atom(type) do
Validation.Rules.LanguageCode.validate?(input, type)
end
@doc """
Validates whether the input is lowercase.
# true
V.lowercase?("")
V.lowercase?("lowercase")
V.lowercase?("lowercase-with-dashes")
V.lowercase?("lowercase with spaces")
V.lowercase?("lowercase with specials characters like ã ç ê")
# false
V.lowercase?("UPPERCASE")
V.lowercase?("CamelCase")
V.lowercase?("First Character Uppercase")
V.lowercase?("With Numbers 1 2 3")
"""
@spec lowercase?(String.t) :: boolean
def lowercase?(input) when is_binary(input) do
Validation.Rules.Lowercase.validate?(input)
end
@doc """
Validates whether the input is a valid UUID. It also supports validation of specific versions `1`, `3`, `4` and `5`.
# true
V.uuid?("e4eaaaf2-d142-11e1-b3e4-080027620cdd")
V.uuid?("11a38b9a-b3da-360f-9353-a5a725514269")
V.uuid?("25769c6c-d34d-4bfe-ba98-e0ee856f3e7a", 4)
# false
V.uuid?("00000000-0000-aaaa-bbbb-cccccccccccc")
"""
@spec uuid?(String.t, integer) :: boolean
def uuid?(input, version \\ nil) when is_binary(input) and (is_integer(version) or is_nil(version)) do
# the doc above advertises version-specific checks, so pass `version` through
# (assumes `Validation.Rules.UUID.validate?/2` accepts it)
Validation.Rules.UUID.validate?(input, version)
end
@doc """
Validates whether the input passes the Luhn checksum.
# true
V.luhn?("9773725370")
V.luhn?("2222400041240011")
V.luhn?("340316193809364")
# false
V.luhn?("")
V.luhn?("true")
V.luhn?("8888888888888887")
"""
@spec luhn?(String.t) :: boolean
def luhn?(input) when is_binary(input) do
Validation.Rules.Luhn.validate?(input)
end
@doc """
Validates whether the input is a valid MAC address.
# true
V.mac_address?("00:11:22:33:44:55")
V.mac_address?("66-77-88-99-aa-bb")
V.mac_address?("AF:0F:bd:12:44:ba")
# false
V.mac_address?("")
V.mac_address?("00-1122:33:44:55")
"""
@spec mac_address?(String.t) :: boolean
def mac_address?(input) when is_binary(input) do
Validation.Rules.MacAddress.validate?(input)
end
@doc """
Validates if value is considered as "No".
# true
V.no?("N")
V.no?("n")
V.no?("Nay")
V.no?("No")
V.no?("Nope")
V.no?("Not")
# false
V.no?("Donnot")
V.no?("Never")
V.no?("Niet")
V.no?("Noooooooo")
V.no?("Não")
V.no?("não")
"""
@spec no?(String.t) :: boolean
def no?(input) when is_binary(input) do
Validation.Rules.No.validate?(input)
end
@doc """
Validates whether the input is odd.
# true
V.odd?(1)
V.odd?(5)
V.odd?(-11)
V.odd?(9999999)
# false
V.odd?(0)
V.odd?(8)
V.odd?(-100)
V.odd?(9999998)
"""
@spec odd?(Integer.t) :: boolean
def odd?(input) when is_integer(input) do
Validation.Rules.Odd.validate?(input)
end
@doc """
Validates subdivision country codes (states, provinces, territories and other sub-regions) according to [ISO 3166-2](https://en.wikipedia.org/wiki/ISO_3166-2).
# true
V.subdivision_code?("US", "TX")
V.subdivision_code?("BR", "SP")
V.subdivision_code?("CA", "BC")
# false
V.subdivision_code?("US", "AA")
V.subdivision_code?("BR", "BB")
V.subdivision_code?("CA", "CC")
The rules use data from [iso-codes](https://salsa.debian.org/iso-codes-team/iso-codes).
"""
@spec subdivision_code?(String.t, String.t) :: boolean
def subdivision_code?(country, subdivision) when is_binary(country) and is_binary(subdivision) do
Validation.Rules.SubdivisionCode.validate?(country, subdivision)
end
@doc """
Validates a top-level domain according to [IANA](https://data.iana.org/TLD/tlds-alpha-by-domain.txt).
# true
V.tld?("com")
V.tld?("br")
V.tld?("cafe")
V.tld?("democrat")
# false
V.tld?("1.0")
V.tld?("wrongtld")
"""
@spec tld?(String.t) :: boolean
def tld?(input) when is_binary(input) do
Validation.Rules.Tld.validate?(input)
end
@doc """
Validates whether the input is uppercase.
# true
V.uppercase?("")
V.uppercase?("UPPERCASE")
V.uppercase?("UPPERCASE-WITH-DASHES")
V.uppercase?("UPPERCASE WITH SPACES")
V.uppercase?("UPPERCASE WITH NUMBERS 123")
# false
V.uppercase?("lowercase")
V.uppercase?("CamelCase")
V.uppercase?("First Character Uppercase")
V.uppercase?("With Numbers 1 2 3")
"""
@spec uppercase?(String.t) :: boolean
def uppercase?(input) when is_binary(input) do
Validation.Rules.Uppercase.validate?(input)
end
@doc """
Validates whether the input has only vowels.
# true
V.vowel?("a")
V.vowel?("o")
V.vowel?("u")
V.vowel?("aeiou")
# false
V.vowel?("b")
V.vowel?("ul")
V.vowel?("16")
V.vowel?("\\r")
"""
@spec vowel?(String.t) :: boolean
def vowel?(input) when is_binary(input) do
Validation.Rules.Vowel.validate?(input)
end
@doc """
Validates whether the input has only vowels, excluding specific characters.
# true
V.vowel?("aaaa==", "=")
V.vowel?("aeoiu!!--", "!-")
# false
V.vowel?("b==aaaa", "=")
V.vowel?("bc==aeoiu", "b!-")
"""
@spec vowel?(String.t, String.t) :: boolean
def vowel?(input, excluded_characters) when is_binary(input) and is_binary(excluded_characters) do
Validation.Rules.Vowel.validate?(input, excluded_characters)
end
@doc """
Validates if value is considered as "Yes".
# true
V.yes?("Y")
V.yes?("Yea")
V.yes?("Yeah")
V.yes?("Yep")
V.yes?("Yes")
V.yes?("yes")
# false
V.yes?("let\\"s thrown an error")
V.yes?("Yydoesnotmatter")
V.yes?("Si")
V.yes?("Sim")
V.yes?("Yoo")
V.yes?("Yiip")
"""
@spec yes?(String.t) :: boolean
def yes?(input) when is_binary(input) do
Validation.Rules.Yes.validate?(input)
end
end
# source: lib/validation.ex
defmodule Analytics.Mixpanel.Events do
@moduledoc """
This module provides a struct that accumulates user events and helper to submit data to Mixpanel.
"""
alias Analytics.Mixpanel.Events
@track_endpoint "track"
defstruct client: Analytics.Mixpanel.Client, events: [], distinct_id: nil, ip: nil, token: nil
@doc """
Creates a new `Events` struct that is used to submit events for a client identified with `distinct_id`.
"""
def new(distinct_id), do: %{new() | distinct_id: distinct_id}
def new do
[client: client, token: token] = Analytics.Mixpanel.config()
%Events{client: client, token: token}
end
@doc """
The IP address associated with a given profile, which Mixpanel
uses to guess user geographic location. Ignored if not set.
"""
def set_ip(%Events{} = batch_request, ip), do: %{batch_request | ip: ip_to_string(ip)}
defp ip_to_string({a, b, c, d}), do: "#{a}.#{b}.#{c}.#{d}"
defp ip_to_string(ip), do: ip
@doc """
Appends an event to a `Events` struct with a pre-defined `distinct_id`.
Events struct must be created with `new/1` in order to use this function.
## Arguments
* `event` - A name for the event;
* `properties` - A collection of properties associated with this event, where `:time` (a timestamp) overrides \
the event time (otherwise Mixpanel uses the time the event arrives at their back-end), \
`distinct_id` identifies the user and `:token` overrides the Mixpanel API key.
"""
def track(%Events{distinct_id: distinct_id} = batch_request, event, properties \\ %{})
when is_map(properties) and not is_nil(distinct_id) do
%{batch_request | events: [{distinct_id, event, properties} | batch_request.events]}
end
@doc """
Appends an event to a `Events` struct with a specific `distinct_id`. This is useful when you want
to submit events to more than user per request.
## Arguments
* `distinct_id` - Distinct ID that identifies user on Mixpanel;
* `event` - A name for the event;
* `properties` - A collection of properties associated with this event, where `:time` (a timestamp) overrides \
the event time (otherwise Mixpanel uses the time the event arrives at their back-end), \
`distinct_id` identifies the user and `:token` overrides the Mixpanel API key.
"""
def track_for_user(%Events{} = batch_request, distinct_id, event, properties \\ %{})
when is_map(properties) do
%{batch_request | events: [{distinct_id, event, properties} | batch_request.events]}
end
@doc """
Submits events tracked for a user.
"""
def submit(%Events{} = batch_request) do
%{client: client, events: events, ip: ip, token: token} = batch_request
event_template =
Map.new()
|> Map.put("token", token)
|> maybe_put("ip", ip)
payload =
events
|> Enum.reverse()
|> Enum.map(fn {distinct_id, event, properties} ->
properties =
event_template
|> maybe_put("distinct_id", distinct_id)
|> Map.merge(properties)
|> maybe_normalize_time()
%{event: event, properties: properties}
end)
client.send_batch(@track_endpoint, payload)
end
defp maybe_put(map, _key, nil), do: map
defp maybe_put(map, key, value), do: Map.put(map, key, value)
defp maybe_normalize_time(%{time: time} = properties), do: Map.put(properties, :time, normalize_time(time))
defp maybe_normalize_time(%{"time" => time} = properties), do: Map.put(properties, "time", normalize_time(time))
defp maybe_normalize_time(properties), do: properties
defp normalize_time(nil), do: nil
defp normalize_time(timestamp) when is_integer(timestamp), do: timestamp
defp normalize_time(%DateTime{} = datetime), do: DateTime.to_unix(datetime)
end
defmodule Elm.Platform.Parser do
use Combine, parsers: [:text]
alias Data.Json.Decode
alias Data.Function
alias Elm.Docs.Binop
alias Elm.Docs.Value
alias Elm.Docs.Union
alias Elm.Docs.Alias
alias Elm.Docs.Module
alias Elm.Searchable
alias Elm.Version
alias Elm.Name
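  @doc """
  Extracts the module name declared in Elm source text, falling back to
  "Main" when no `module`, `port module`, or `effect module` declaration
  can be parsed.

  A sketch of the expected behaviour (the source snippet is illustrative):

      Elm.Platform.Parser.module_name("module Foo.Bar exposing (..)")
      #=> "Foo.Bar"
  """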
def module_name(text) do
text
|> run(module_name_parser())
|> with_default("Main")
end
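
  @doc """
  Converts the declared module name into its expected file path, so source
  declaring `module Foo.Bar` maps to `"Foo/Bar.elm"`.
  """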
def module_path(text) do
String.replace(module_name(text), ".", "/") <> ".elm"
end
defp module_name_parser() do
choice([
ignore(string("module")),
ignore(string("port")) |> ignore(spaces()) |> ignore(string("module")),
ignore(string("effect")) |> ignore(spaces()) |> ignore(string("module"))
])
|> ignore(spaces())
|> map(sep_by1(word_of(~r/[A-Z][a-zA-Z0-9_]+/), char(".")), &Enum.join(&1, "."))
end
defp run(text, parser) do
case Combine.parse(text, parser) do
{:error, _} -> nil
stuff -> List.first(stuff)
end
end
defp with_default(nil, a), do: a
defp with_default(a, _a), do: a
def searchables_json(body) do
Decode.decode_string(body, searchables_decoder())
end
def searchables_decoder() do
version =
Decode.and_then(Decode.string(), fn string ->
case Version.from_string(string) do
{:ok, version} -> Decode.succeed(version)
:error -> Decode.fail("Expecting a version MAJOR.MINOR.PATCH")
end
end)
name =
Decode.and_then(Decode.string(), fn string ->
case Name.from_string(string) do
{:ok, name} -> Decode.succeed(name)
:error -> Decode.fail("Expecting a name USER/PROJECT")
end
end)
searchable =
Decode.succeed(Function.curry(&%Searchable{name: &1, summary: &2, versions: &3}))
|> Decode.and_map(Decode.field("name", name))
|> Decode.and_map(Decode.field("summary", Decode.string()))
|> Decode.and_map(Decode.field("versions", Decode.list(version)))
Decode.list(searchable)
end
def docs_json(body) do
Decode.decode_string(body, docs_decoder())
end
defp docs_decoder() do
Decode.one_of([
docs_decoder_18(),
docs_decoder_19()
])
end
defp docs_decoder_18() do
cased =
Decode.succeed(Function.curry(&{&1, &2}))
|> Decode.and_map(Decode.index(0, Decode.string()))
|> Decode.and_map(Decode.index(1, Decode.list(Decode.string())))
associativity =
Decode.and_then(Decode.string(), fn
"left" -> Decode.succeed(:left)
"non" -> Decode.succeed(:none)
"right" -> Decode.succeed(:right)
_ -> Decode.fail("expecting one of the following values: left, non, right")
end)
fix =
Decode.one_of([
Decode.succeed(Function.curry(&{&1, &2}))
|> Decode.and_map(Decode.field("associativity", associativity))
|> Decode.and_map(Decode.field("precedence", Decode.integer())),
Decode.succeed(nil)
])
value =
Decode.succeed(Function.curry(&%{name: &1, comment: &2, type: &3, fix: &4}))
|> Decode.and_map(Decode.field("name", Decode.string()))
|> Decode.and_map(Decode.field("comment", Decode.string()))
|> Decode.and_map(Decode.field("type", Decode.string()))
|> Decode.and_map(fix)
union =
Decode.succeed(Function.curry(&%{name: &1, comment: &2, args: &3, cases: &4}))
|> Decode.and_map(Decode.field("name", Decode.string()))
|> Decode.and_map(Decode.field("comment", Decode.string()))
|> Decode.and_map(Decode.field("args", Decode.list(Decode.string())))
|> Decode.and_map(Decode.field("cases", Decode.list(cased)))
aliasd =
Decode.succeed(Function.curry(&%{name: &1, comment: &2, args: &3, type: &4}))
|> Decode.and_map(Decode.field("name", Decode.string()))
|> Decode.and_map(Decode.field("comment", Decode.string()))
|> Decode.and_map(Decode.field("args", Decode.list(Decode.string())))
|> Decode.and_map(Decode.field("type", Decode.string()))
old_docs =
Decode.succeed(
Function.curry(&%{name: &1, comment: &2, aliases: &3, types: &4, values: &5})
)
|> Decode.and_map(Decode.field("name", Decode.string()))
|> Decode.and_map(Decode.field("comment", Decode.string()))
|> Decode.and_map(Decode.field("aliases", Decode.list(aliasd)))
|> Decode.and_map(Decode.field("types", Decode.list(union)))
|> Decode.and_map(Decode.field("values", Decode.list(value)))
old_docs
|> Decode.map(&convert_old_docs/1)
|> Decode.list()
end
defp convert_old_docs(old_docs) do
%Module{
name: old_docs.name,
comment: old_docs.comment,
unions:
Enum.map(old_docs.types, fn t ->
%Union{
name: t.name,
comment: t.comment,
args: t.args,
tags: t.cases
}
end),
aliases:
Enum.map(old_docs.aliases, fn a ->
%Alias{
name: a.name,
comment: a.comment,
args: a.args,
type: a.type
}
end),
values:
old_docs.values
|> Enum.filter(fn v -> is_nil(v.fix) end)
|> Enum.map(fn v -> %Value{name: v.name, comment: v.comment, type: v.type} end),
binops:
old_docs.values
|> Enum.filter(fn v -> not is_nil(v.fix) end)
|> Enum.map(fn v ->
%Binop{
name: v.name,
comment: v.name,
type: v.type,
associativity: elem(v.fix, 0),
precedence: elem(v.fix, 1)
}
end)
}
end
defp docs_decoder_19() do
associativity =
Decode.and_then(Decode.string(), fn
"left" -> Decode.succeed(:left)
"non" -> Decode.succeed(:none)
"right" -> Decode.succeed(:right)
_ -> Decode.fail("expecting one of the following values: left, non, right")
end)
binop =
Decode.succeed(
Function.curry(
&%Binop{name: &1, comment: &2, type: &3, associativity: &4, precedence: &5}
)
)
|> Decode.and_map(Decode.field("name", Decode.string()))
|> Decode.and_map(Decode.field("comment", Decode.string()))
|> Decode.and_map(Decode.field("type", Decode.string()))
|> Decode.and_map(Decode.field("associativity", associativity))
|> Decode.and_map(Decode.field("precedence", Decode.integer()))
value =
Decode.succeed(Function.curry(&%Value{name: &1, comment: &2, type: &3}))
|> Decode.and_map(Decode.field("name", Decode.string()))
|> Decode.and_map(Decode.field("comment", Decode.string()))
|> Decode.and_map(Decode.field("type", Decode.string()))
tag =
Decode.succeed(Function.curry(&{&1, &2}))
|> Decode.and_map(Decode.index(0, Decode.string()))
|> Decode.and_map(Decode.index(1, Decode.list(Decode.string())))
union =
Decode.succeed(Function.curry(&%Union{name: &1, comment: &2, args: &3, tags: &4}))
|> Decode.and_map(Decode.field("name", Decode.string()))
|> Decode.and_map(Decode.field("comment", Decode.string()))
|> Decode.and_map(Decode.field("args", Decode.list(Decode.string())))
|> Decode.and_map(Decode.field("cases", Decode.list(tag)))
aliasd =
Decode.succeed(Function.curry(&%Alias{name: &1, comment: &2, args: &3, type: &4}))
|> Decode.and_map(Decode.field("name", Decode.string()))
|> Decode.and_map(Decode.field("comment", Decode.string()))
|> Decode.and_map(Decode.field("args", Decode.list(Decode.string())))
|> Decode.and_map(Decode.field("type", Decode.string()))
moduled =
Decode.succeed(
Function.curry(
&%Module{name: &1, comment: &2, unions: &3, aliases: &4, values: &5, binops: &6}
)
)
|> Decode.and_map(Decode.field("name", Decode.string()))
|> Decode.and_map(Decode.field("comment", Decode.string()))
|> Decode.and_map(Decode.field("unions", Decode.list(union)))
|> Decode.and_map(Decode.field("aliases", Decode.list(aliasd)))
|> Decode.and_map(Decode.field("values", Decode.list(value)))
|> Decode.and_map(Decode.field("binops", Decode.list(binop)))
Decode.list(moduled)
end
end
defmodule WorkTime do
@moduledoc """
Documentation for `WorkTime`.
"""
@doc """
## Examples
iex> WorkTime.parse("6:00 PM")
{:ok, %WorkTime{hour: 6, minute: 0, ampm: :PM}}
iex> WorkTime.parse("13:00 PM")
{:error, "hour greater than 12"}
"""
defstruct hour: nil,
minute: nil,
ampm: nil
def parse(time) do
result = Regex.run(~r/(\d?\d):(\d\d) (AM|PM)/, time, capture: :all_but_first)
case result do
[_, _, _] -> form_result(result)
_ -> {:error, "not all fragments provided"}
end
end
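
  @doc """
  Returns the number of hours worked between `start_time` and `end_time`,
  rounding any partial hour up. An `:AM` end time after a `:PM` start is
  treated as crossing midnight.

  A sketch of the expected behaviour (times are illustrative):

      {:ok, start_time} = WorkTime.parse("6:00 PM")
      {:ok, end_time} = WorkTime.parse("8:30 PM")
      WorkTime.difference(start_time, end_time)
      #=> 3
  """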
def difference(%WorkTime{} = start_time, %WorkTime{} = end_time) do
spans_two_days = start_time.ampm != end_time.ampm && end_time.ampm == :AM
minutes_worked = canonicalize(end_time, spans_two_days) - canonicalize(start_time)
hours = div(minutes_worked, 60)
round_up = if rem(minutes_worked, 60) > 0, do: 1, else: 0
hours + round_up
end
def canonicalize(%WorkTime{} = time, shift \\ false) do
hour = if shift, do: time.hour + 24, else: time.hour
    time.minute + hour * 60 + if(time.ampm == :PM, do: 12 * 60, else: 0)
end
defp form_result([hours, minutes, ampm]) do
with {:ok, h} <- Integer.parse(hours, 10) |> elem(0) |> validate_hour,
{:ok, m} <- Integer.parse(minutes, 10) |> elem(0) |> validate_mins,
{:ok, a} <- ampm |> String.to_atom() |> validate_ampm,
do: {:ok, %WorkTime{hour: h, minute: m, ampm: a}}
end
defp validate_hour(hour) when hour < 1 do
{:error, "hour less than 1"}
end
defp validate_hour(hour) when hour > 12 do
{:error, "hour greater than 12"}
end
defp validate_hour(hour) do
{:ok, hour}
end
defp validate_mins(mins) when mins < 0 do
{:error, "minute less than 0"}
end
defp validate_mins(mins) when mins > 59 do
{:error, "minute greater than 59"}
end
defp validate_mins(mins) do
{:ok, mins}
end
defp validate_ampm(ampm) do
{:ok, ampm}
end
end
defmodule AWS.Cloudsearchdomain do
@moduledoc """
You use the AmazonCloudSearch2013 API to upload documents to a search
domain and search those documents.
The endpoints for submitting `UploadDocuments`, `Search`, and `Suggest`
requests are domain-specific. To get the endpoints for your domain, use the
Amazon CloudSearch configuration service `DescribeDomains` action. The
domain endpoints are also displayed on the domain dashboard in the Amazon
CloudSearch console. You submit suggest requests to the search endpoint.
For more information, see the [Amazon CloudSearch Developer
Guide](http://docs.aws.amazon.com/cloudsearch/latest/developerguide).
"""
@doc """
Retrieves a list of documents that match the specified search criteria. How
you specify the search criteria depends on which query parser you use.
Amazon CloudSearch supports four query parsers:
<ul> <li>`simple`: search all `text` and `text-array` fields for the
specified string. Search for phrases, individual terms, and prefixes. </li>
<li>`structured`: search specific fields, construct compound queries using
Boolean operators, and use advanced features such as term boosting and
proximity searching.</li> <li>`lucene`: specify search criteria using the
Apache Lucene query parser syntax.</li> <li>`dismax`: specify search
criteria using the simplified subset of the Apache Lucene query parser
syntax defined by the DisMax query parser.</li> </ul> For more information,
see [Searching Your
Data](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/searching.html)
in the *Amazon CloudSearch Developer Guide*.
The endpoint for submitting `Search` requests is domain-specific. You
submit search requests to a domain's search endpoint. To get the search
endpoint for your domain, use the Amazon CloudSearch configuration service
`DescribeDomains` action. A domain's endpoints are also displayed on the
domain dashboard in the Amazon CloudSearch console.
"""
def search(client, cursor \\ nil, expr \\ nil, facet \\ nil, filter_query \\ nil, highlight \\ nil, partial \\ nil, query, query_options \\ nil, query_parser \\ nil, return \\ nil, size \\ nil, sort \\ nil, start \\ nil, stats \\ nil, options \\ []) do
path_ = "/2013-01-01/search?format=sdk&pretty=true"
headers = []
query_ = []
query_ = if !is_nil(stats) do
[{"stats", stats} | query_]
else
query_
end
query_ = if !is_nil(start) do
[{"start", start} | query_]
else
query_
end
query_ = if !is_nil(sort) do
[{"sort", sort} | query_]
else
query_
end
query_ = if !is_nil(size) do
[{"size", size} | query_]
else
query_
end
query_ = if !is_nil(return) do
[{"return", return} | query_]
else
query_
end
query_ = if !is_nil(query_parser) do
[{"q.parser", query_parser} | query_]
else
query_
end
query_ = if !is_nil(query_options) do
[{"q.options", query_options} | query_]
else
query_
end
query_ = if !is_nil(query) do
[{"q", query} | query_]
else
query_
end
query_ = if !is_nil(partial) do
[{"partial", partial} | query_]
else
query_
end
query_ = if !is_nil(highlight) do
[{"highlight", highlight} | query_]
else
query_
end
query_ = if !is_nil(filter_query) do
[{"fq", filter_query} | query_]
else
query_
end
query_ = if !is_nil(facet) do
[{"facet", facet} | query_]
else
query_
end
query_ = if !is_nil(expr) do
[{"expr", expr} | query_]
else
query_
end
query_ = if !is_nil(cursor) do
[{"cursor", cursor} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
  Retrieves autocomplete suggestions for a partial query string. You can use
  suggestions to display likely matches before users finish
  typing. In Amazon CloudSearch, suggestions are based on the contents of a
particular text field. When you request suggestions, Amazon CloudSearch
finds all of the documents whose values in the suggester field start with
the specified query string. The beginning of the field must match the query
string to be considered a match.
For more information about configuring suggesters and retrieving
suggestions, see [Getting
Suggestions](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/getting-suggestions.html)
in the *Amazon CloudSearch Developer Guide*.
The endpoint for submitting `Suggest` requests is domain-specific. You
submit suggest requests to a domain's search endpoint. To get the search
endpoint for your domain, use the Amazon CloudSearch configuration service
`DescribeDomains` action. A domain's endpoints are also displayed on the
domain dashboard in the Amazon CloudSearch console.
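
  ## Example

  A minimal sketch; `client` is assumed to be an `%AWS.Client{}` configured
  for the domain's search endpoint, and the suggester name is illustrative:

      {:ok, body, _response} =
        AWS.Cloudsearchdomain.suggest(client, "sea", "my_suggester")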
"""
def suggest(client, query, size \\ nil, suggester, options \\ []) do
path_ = "/2013-01-01/suggest?format=sdk&pretty=true"
headers = []
query_ = []
query_ = if !is_nil(suggester) do
[{"suggester", suggester} | query_]
else
query_
end
query_ = if !is_nil(size) do
[{"size", size} | query_]
else
query_
end
query_ = if !is_nil(query) do
[{"q", query} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Posts a batch of documents to a search domain for indexing. A document
batch is a collection of add and delete operations that represent the
documents you want to add, update, or delete from your domain. Batches can
be described in either JSON or XML. Each item that you want Amazon
CloudSearch to return as a search result (such as a product) is represented
as a document. Every document has a unique ID and one or more fields that
contain the data that you want to search and return in results. Individual
documents cannot contain more than 1 MB of data. The entire batch cannot
exceed 5 MB. To get the best possible upload performance, group add and
  delete operations in batches that are close to the 5 MB limit. Submitting a
large volume of single-document batches can overload a domain's document
service.
The endpoint for submitting `UploadDocuments` requests is domain-specific.
To get the document endpoint for your domain, use the Amazon CloudSearch
configuration service `DescribeDomains` action. A domain's endpoints are
also displayed on the domain dashboard in the Amazon CloudSearch console.
For more information about formatting your data for Amazon CloudSearch, see
[Preparing Your
Data](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/preparing-data.html)
in the *Amazon CloudSearch Developer Guide*. For more information about
uploading data for indexing, see [Uploading
Data](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/uploading-data.html)
in the *Amazon CloudSearch Developer Guide*.
"""
def upload_documents(client, input, options \\ []) do
path_ = "/2013-01-01/documents/batch?format=sdk"
{headers, input} =
[
{"contentType", "Content-Type"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "cloudsearch"}
host = build_host("cloudsearchdomain", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
defmodule HtmlSanitizeEx.Scrubber.HTML5 do
@moduledoc """
Allows all HTML5 tags to support user input.
Sanitizes all malicious content.
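
  ## Example

  A sketch of direct use (most callers go through `HtmlSanitizeEx.html5/1`,
  which delegates to this scrubber):

      HtmlSanitizeEx.Scrubber.scrub(
        ~s(<p onclick="evil()">hi</p>),
        HtmlSanitizeEx.Scrubber.HTML5
      )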
"""
require HtmlSanitizeEx.Scrubber.Meta
alias HtmlSanitizeEx.Scrubber.Meta
# Removes any CDATA tags before the traverser/scrubber runs.
Meta.remove_cdata_sections_before_scrub()
Meta.strip_comments()
@valid_schemes ["http", "https", "mailto"]
Meta.allow_tag_with_uri_attributes("a", ["href"], @valid_schemes)
Meta.allow_tag_with_these_attributes("a", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"target",
"ping",
"rel",
"media",
"hreflang",
"type"
])
Meta.allow_tag_with_these_attributes("b", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("blockquote", [
"accesskey",
"cite",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("br", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("caption", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("code", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("del", [
"accesskey",
"cite",
"datetime",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("div", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("em", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h1", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h2", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h3", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h4", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h5", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("h6", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("head", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("header", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("hgroup", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("hr", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("html", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"manifest"
])
Meta.allow_tag_with_these_attributes("i", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_uri_attributes("iframe", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("iframe", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"name",
"sandbox",
"seamless",
"width",
"height"
])
Meta.allow_tag_with_uri_attributes(
"img",
["src", "lowsrc", "srcset"],
@valid_schemes
)
Meta.allow_tag_with_these_attributes("img", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"alt crossorigin usemap ismap width height"
])
Meta.allow_tag_with_uri_attributes("input", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("input", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"accept",
"alt",
"autocomplete",
"autofocus",
"checked",
"dirname",
"disabled",
"form",
"formaction",
"formenctype",
"formmethod",
"formnovalidate",
"formtarget",
"height",
"inputmode",
"list",
"max",
"maxlength",
"min",
"multiple",
"name",
"pattern",
"placeholder",
"readonly",
"required",
"size",
"step",
"type",
"value",
"width"
])
Meta.allow_tag_with_these_attributes("ins", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"cite",
"datetime"
])
Meta.allow_tag_with_these_attributes("kbd", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("keygen", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"autofocus",
"challenge",
"disabled",
"form",
"keytype",
"name"
])
Meta.allow_tag_with_these_attributes("label", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"form",
"for"
])
Meta.allow_tag_with_these_attributes("legend", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("li", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"value"
])
# Meta.allow_tag_with_uri_attributes "link", ["href"], @valid_schemes
# Meta.allow_tag_with_these_attributes "link", ["href rel media hreflang type sizes"]
Meta.allow_tag_with_these_attributes("map", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"name"
])
Meta.allow_tag_with_these_attributes("mark", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("menu", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"type",
"label"
])
Meta.allow_tag_with_these_attributes("meta", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"name",
"http-equiv",
"content",
"charset"
])
Meta.allow_tag_with_these_attributes("meter", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"value",
"min",
"max",
"low",
"high",
"optimum"
])
Meta.allow_tag_with_these_attributes("nav", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
# Meta.allow_tag_with_these_attributes "noscript"
Meta.allow_tag_with_these_attributes("object", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"data",
"type",
"typemustmatch",
"name",
"usemap",
"form",
"width",
"height"
])
Meta.allow_tag_with_these_attributes("ol", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"reversed",
"start"
])
Meta.allow_tag_with_these_attributes("optgroup", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"disabled",
"label"
])
Meta.allow_tag_with_these_attributes("option", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"disabled",
"label",
"selected",
"value"
])
Meta.allow_tag_with_these_attributes("output", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"for",
"form",
"name"
])
Meta.allow_tag_with_these_attributes("p", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("param", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"name",
"value"
])
Meta.allow_tag_with_these_attributes("pre", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("progress", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"value",
"max"
])
Meta.allow_tag_with_these_attributes("q", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"cite"
])
Meta.allow_tag_with_these_attributes("rp", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("rt", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("ruby", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("s", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("samp", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
# Meta.allow_tag_with_these_attributes "script", ["src async defer type charset"]
Meta.allow_tag_with_these_attributes("section", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("select", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"autofocus",
"disabled",
"form",
"multiple",
"name",
"required",
"size"
])
Meta.allow_tag_with_these_attributes("small", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_uri_attributes("source", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("source", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"type",
"media"
])
Meta.allow_tag_with_these_attributes("span", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("strong", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("sub", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("summary", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("sup", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("table", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("tbody", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("td", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"colspan",
"rowspan",
"headers"
])
Meta.allow_tag_with_these_attributes("textarea", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"autocomplete",
"autofocus",
"cols",
"dirname",
"disabled",
"form",
"inputmode",
"maxlength",
"name",
"placeholder",
"readonly",
"required",
"rows",
"wrap"
])
Meta.allow_tag_with_these_attributes("tfoot", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("th", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"colspan",
"rowspan",
"headers",
"scope",
"abbr"
])
Meta.allow_tag_with_these_attributes("thead", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("time", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"datetime",
"pubdate"
])
Meta.allow_tag_with_these_attributes("title", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("tr", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_uri_attributes("track", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("track", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"default",
"kind",
"label",
"srclang"
])
Meta.allow_tag_with_these_attributes("u", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("ul", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_these_attributes("var", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tag_with_uri_attributes("video", ["src"], @valid_schemes)
Meta.allow_tag_with_these_attributes("video", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate",
"crossorigin",
"poster",
"preload",
"autoplay",
"mediagroup",
"loop",
"muted",
"controls",
"width",
"height"
])
Meta.allow_tag_with_these_attributes("wbr", [
"accesskey",
"class",
"contenteditable",
"contextmenu",
"dir",
"draggable",
"dropzone",
"hidden",
"id",
"inert",
"itemid",
"itemprop",
"itemref",
"itemscope",
"itemtype",
"lang",
"role",
"spellcheck",
"tabindex",
"title",
"translate"
])
Meta.allow_tags_with_style_attributes([
"a",
"blockquote",
"br",
"code",
"del",
"em",
"h1",
"h2",
"h3",
"h4",
"h5",
"h6",
"head",
"header",
"hgroup",
"hr",
"html",
"i",
"iframe",
"img",
"input",
"ins",
"kbd",
"keygen",
"label",
"legend",
"li",
"link",
"map",
"mark",
"menu",
"meta",
"meter",
"nav",
"noscript",
"object",
"ol",
"optgroup",
"option",
"output",
"p",
"param",
"pre",
"progress",
"q",
"rp",
"rt",
"ruby",
"s",
"samp",
"script",
"section",
"select",
"small",
"source",
"span",
"strong",
"sub",
"summary",
"sup",
"table",
"tbody",
"td",
"textarea",
"tfoot",
"th",
"thead",
"time",
"title",
"tr",
"track",
"u",
"ul",
"var",
"video",
"wbr"
])
# style tags
def scrub({"style", attributes, [text]}) do
{"style", scrub_attributes("style", attributes), [scrub_css(text)]}
end
defp scrub_attributes("style", attributes) do
Enum.map(attributes, fn attr -> scrub_attribute("style", attr) end)
|> Enum.reject(&is_nil(&1))
end
def scrub_attribute("style", {"media", value}), do: {"media", value}
def scrub_attribute("style", {"type", value}), do: {"type", value}
def scrub_attribute("style", {"scoped", value}), do: {"scoped", value}
defp scrub_css(text) do
HtmlSanitizeEx.Scrubber.CSS.scrub(text)
end
Meta.strip_everything_not_covered()
end
defmodule Timex.AmbiguousDateTime do
@moduledoc """
Represents a DateTime which is ambiguous due to timezone rules.
## Ambiguity #1 - Non-existent times
Let's use American daylight savings time rules as our example here,
using America/Chicago as our example. Central Standard Time for that
zone ends at 2:00 AM, but Central Daylight Time does not begin until
3:00 AM, this is because at 2:00 AM, our clocks "spring forward" - which
is just an easy way of remembering that the offset goes from -6 from UTC,
to -5 from UTC. Since there is no timezone period associated with the hours
of 2-3 AM in the America/Chicago zone (it's neither CST nor CDT during that hour),
one has to decide what the intent is. Timex makes the call that shifting to the
  next period (i.e. "spring forward" using our example above) makes the most logical
sense when working with non-existent time periods.
TL;DR - Timex will "spring forward" or "fall back", depending on what the zone change
happens to be for the non-existent time. Using America/Chicago as an example, if you
try to create a DateTime for 2 AM on March 13, 2016, Timex will give you back 3 AM on
March 13, 2016, because the zone is in the middle of changing from CST to CDT, and the
earliest representable time in CDT is 3 AM.
## Ambiguity #2 - Times with more than one valid zone period
This one is the reason why this module exists. There are times, though rare, where more
than one zone applies to a given date and time. For example, Asia/Taipei, on December 31st,
1895, from 23:54:00 to 23:59:59, two timezone periods are active LMT, and JWST, because that
  locale was switching to JWST from LMT. Because of this, it's impossible to know programmatically
which zone is desired. The programmer must make a choice on which zone they want to use.
For this use case, Timex will return an AmbiguousDateTime any time you try to create a DateTime,
or shift a DateTime, to an ambiguous time period. It has two fields, :before, containing a DateTime
configured in the timezone occurring before the ambiguous period, and :after, containing a DateTime
configured in the timezone occurring after the ambiguous period. It is up to you as the programmer to
decide which DateTime is the one to use, but my recommendation is to choose :after, unless you have a
specific reason to use :before.
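
  ## Example

  A sketch of handling an ambiguous result (the zone and timestamp mirror
  the Asia/Taipei example above):

      case Timex.to_datetime({{1895, 12, 31}, {23, 55, 0}}, "Asia/Taipei") do
        %Timex.AmbiguousDateTime{after: aft} -> aft
        %DateTime{} = datetime -> datetime
      end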
"""
defstruct before: nil,
after: nil,
type: :ambiguous
@type t :: %__MODULE__{
:before => DateTime.t(),
:after => DateTime.t(),
:type => :ambiguous | :gap
}
defimpl Inspect do
alias Timex.AmbiguousDateTime
def inspect(datetime, %{:structs => false} = opts) do
Inspect.Algebra.to_doc(datetime, opts)
end
def inspect(%AmbiguousDateTime{before: before, after: aft, type: :gap}, _opts) do
"#<Gap(#{inspect(before)} ~ #{inspect(aft)})>"
end
def inspect(%AmbiguousDateTime{before: before, after: aft}, _opts) do
"#<Ambiguous(#{inspect(before)} ~ #{inspect(aft)})>"
end
end
end
defmodule Defconst do
@moduledoc """
  Define constants and enums that can be used in guards
## Define a contant
defmodule ConstType do
use Defconst
defconst :one, 1
defconst :two, 2
end
## Define an enum with default values
defmodule EnumType1 do
use Defconst
defenum [
:zero,
:one,
:two
]
end
## Define an enum with explicit values
defmodule EnumType2 do
use Defconst
defenum [
{:one, 1},
{:nine, 9},
:ten
]
end
"""
@doc false
defmacro __using__(_opts) do
caller_module = __CALLER__.module
quote do
import unquote(__MODULE__)
Module.register_attribute(unquote(caller_module), :constants, accumulate: true)
@before_compile unquote(__MODULE__)
end
end
@doc false
defmacro __before_compile__(env) do
constants =
env.module
|> Module.get_attribute(:constants)
|> Enum.reverse()
constant_map = Enum.into(constants, %{})
value_map =
Enum.reduce(constants, %{}, fn {constant, value}, map ->
member = Map.get(map, value)
new_member =
case member do
nil -> [constant]
_ -> member ++ [constant]
end
Map.put(map, value, new_member)
end)
quote do
def _constants(), do: unquote(constants)
@doc """
      Returns all constants as a list of tuples
## Examples:
iex> #{__MODULE__}.constants
#{unquote(constants) |> Kernel.inspect()}
"""
def constants(), do: unquote(constants)
@doc """
      Returns the constant for the specified value
## Parameters:
* value: value of a constant
## Examples:
iex> #{__MODULE__}.constant_of(#{
unquote(constants)
|> Keyword.values()
|> List.first()
|> Kernel.inspect()
})
#{unquote(constants) |> Keyword.keys() |> List.first() |> Kernel.inspect()}
"""
def constant_of(value) do
constants = unquote(Macro.escape(value_map))[value]
case constants do
[constant] -> constant
_ -> constants
end
end
@doc """
      Returns the value for the specified constant
## Parameters:
* constant: defined constant
## Examples:
      iex> #{__MODULE__}.value_of(#{
unquote(constants)
|> Keyword.keys()
|> List.first()
|> Kernel.inspect()
})
#{unquote(constants) |> Keyword.values() |> List.first() |> Kernel.inspect()}
"""
def value_of(constant) do
unquote(Macro.escape(constant_map))[constant]
end
end
end
@doc """
Define constant
## Examples:
iex> defmodule ConstType do
...> use Defconst
...>
...> defconst :one, 1
...> defconst :two, 2
...> end
iex> defmodule ConstUse do
...> require ConstType
...>
...> def const_value(x) do
...> case x do
...> ConstType.one -> "one"
...> ConstType.two -> "two"
...> _ -> "unknown"
...> end
...> end
...>
...> def const_guard(x) when x == ConstType.two do
...> "two"
...> end
...> end
iex> ConstType.constants
[{:one, 1}, {:two, 2}]
iex> ConstUse.const_value(1)
"one"
iex> ConstUse.const_guard(2)
"two"
"""
defmacro defconst(name, value) do
caller_module = __CALLER__.module
var = Macro.var(name, __MODULE__)
quote do
Module.put_attribute(unquote(caller_module), :constants, {unquote(name), unquote(value)})
@doc """
Returns #{unquote(value)}
## Examples:
iex> #{__MODULE__}.#{unquote(name)}()
#{unquote(value) |> Kernel.inspect()}
"""
defmacro unquote(var), do: unquote(value)
end
end
@doc """
Defines an enum with specified constant names and optional values
## Examples:
iex> defmodule EnumType1 do
...> use Defconst
...>
...> defenum [
...> :zero,
...> :one,
...> :two
...> ]
...> end
iex> defmodule EnumUse1 do
...> require EnumType1
...>
...> def enum_value(x) do
...> case x do
...> EnumType1.zero -> "zero"
...> EnumType1.one -> "one"
...> EnumType1.two -> "two"
...> _ -> "unknown"
...> end
...> end
...>
...> def enum_guard(x) when x == EnumType1.two do
...> "two"
...> end
...> end
iex> EnumType1.constants
[zero: 0, one: 1, two: 2]
iex> EnumUse1.enum_value(1)
"one"
iex> EnumUse1.enum_guard(2)
"two"
iex> defmodule EnumType2 do
...> use Defconst
...>
...> defenum [
...> {:zero, "zero"},
...> {:one, 1},
...> {:nine, 9},
...> :ten
...> ]
...> end
iex> defmodule EnumUse2 do
...> require EnumType2
...>
...> def enum_value(x) do
...> case x do
...> EnumType2.zero -> "zero"
...> EnumType2.one -> "one"
...> EnumType2.nine -> "nine"
...> EnumType2.ten -> "ten"
...> _ -> "unknown"
...> end
...> end
...>
...> def enum_guard(x) when x == EnumType2.ten do
...> "ten"
...> end
...> end
iex> EnumType2.constants
[zero: "zero", one: 1, nine: 9, ten: 10]
iex> EnumUse2.enum_value(1)
"one"
iex> EnumUse2.enum_guard(10)
"ten"
"""
defmacro defenum(constants, quoted_generator \\ quote(do: Defconst.Enum.DefaultGenerator))
defmacro defenum(constants, quoted_generator) do
# Expand quoted module
generator = Macro.expand(quoted_generator, __CALLER__)
constants
|> normalize(generator)
|> Enum.reverse()
|> Enum.map(fn {name, value} ->
quote do
defconst(unquote(name), unquote(value))
end
end)
end
  defp normalize(constants, generator) do
    {result, _} = Enum.reduce(constants, {[], 0}, &normalize_constant(generator, &1, &2))
    result
  end

  defp normalize_constant(generator, {constant_name, value} = constant, {accumulator, _index}) do
    {[constant | accumulator], generator.next_value(constant_name, value)}
  end

  defp normalize_constant(generator, constant_name, {accumulator, index}) do
    {[{constant_name, index} | accumulator], generator.next_value(constant_name, index)}
  end
end
defmodule ExHealth do
@moduledoc """
[![CircleCI](https://circleci.com/gh/Kartstig/ex_health/tree/master.svg?style=svg&circle-token=<PASSWORD>)](https://circleci.com/gh/Kartstig/ex_health/tree/master) [![codecov](https://codecov.io/gh/Kartstig/ex_health/branch/master/graph/badge.svg)](https://codecov.io/gh/Kartstig/ex_health)
ExHealth is a simple extensible health check utility that monitors your applications.
By itself, ExHealth is a supervised GenServer that periodically performs a set
of checks, but you can easily configure your it to serve JSON responses that
look like:
{
last_check:"2018-09-18T06:43:53.773719Z",
result:{
check_results:[
[
"Database",
"ok"
],
[
"PhoenixExampleWeb_Endpoint",
"ok"
]
],
msg:"healthy"
}
}
# Getting Started
Configuration for ExHealth must be present the Application environment. This
can be done by updating the `:ex_health` values in your `config/config.exs`:
config :ex_health,
module: MyApplication.HealthChecks,
interval_ms: 1000
Then you must define a module `MyApplication.HealthChecks` with some checks:
defmodule MyApplication.HealthChecks do
process_check(MyApplication.CacheServer)
test "Redis" do
MyRedis.ping() # This should return :ok | {:error, "Message"}
end
end
# Integrating with Phoenix
To integrate with [Phoenix](https://hexdocs.pm/phoenix/Phoenix.html)
or any other web framework, you can take advantage of `ExHealth.Plug`
which handles serving a JSON response for you.
See `ExHealth.Plug` for instructions.
"""
use Application
@function_prefix "hc__"
@doc """
Defines a healthcheck function.
Takes the following arguments:
1. `name` - a string for the name of the health check
2. `block` - block that returns `:ok | true | {:error, "Reason"}`
## Examples:
defmodule MyApp.HealthChecks do
health_check("Database") do
MyDB.ping() # This should return :ok | true | {:error, "Reason"}
end
end
"""
defmacro health_check(name, do: block) do
function_name = String.to_atom("#{@function_prefix}" <> name)
quote do
def unquote(function_name)() do
try do
unquote(block)
rescue
_ -> {:error, "Error in HealthCheck"}
end
end
end
end
@doc """
Defines a healthcheck function for a given process.
Returns `:ok` if the process has one of the following statuses:
- `:running`
- `:waiting`
See [Process.info/1](https://hexdocs.pm/elixir/Process.html#info/1) for more
information about process status.
## Examples:
defmodule MyApp.HealthChecks do
process_check(MyApp.SomeImportantService)
end
"""
defmacro process_check({_, _, module_list} = _module) do
{module, _} = Code.eval_string(Enum.join(module_list, "."))
function_name = String.to_atom("#{@function_prefix}" <> Enum.join(module_list, "_"))
quote do
def unquote(function_name)() do
        with pid when not is_nil(pid) <- Process.whereis(unquote(module)),
             info when is_list(info) <- Process.info(pid),
             {:ok, status} <- Keyword.fetch(info, :status),
             # a bare expression never aborts a `with`, so match on `true` to
             # send an unexpected status to the else clause
             true <- status in [:running, :waiting] do
:ok
else
nil -> {:error, "no proc"}
_ -> {:error, "process not running/waiting"}
end
end
end
end
defimpl Jason.Encoder, for: [Tuple] do
def encode(tuple, opts) do
Jason.Encode.list(Tuple.to_list(tuple), opts)
end
end
@doc """
Starts the application with empty state
"""
def start() do
start(:normal, state: %ExHealth.Status{})
end
def start(_type, args) do
import Supervisor.Spec
configure(args)
initial_state = load_config()
children = [
supervisor(ExHealth.HealthServer, [initial_state])
]
opts = [strategy: :one_for_one, name: ExHealth.Supervisor]
Supervisor.start_link(children, opts)
end
@doc """
Synchronously fetches the latest status from `ExHealth.HealthServer`
## Examples:
iex(1)> ExHealth.status()
%ExHealth.Status{
checks: [
%ExHealth.Check{
mfa: {ExHealth.SelfCheck, :hc__ExHealth_HealthServer, []},
name: "ExHealth_HealthServer"
}
],
interval_ms: 15000,
last_check: nil,
result: %{check_results: [], msg: :pending}
}
"""
@spec status() :: ExHealth.Status.t()
def status() do
GenServer.call(ExHealth.HealthServer, :status)
end
@doc """
Stops the Application
"""
@spec stop() :: :ok
def stop() do
Supervisor.stop(ExHealth.Supervisor, :normal)
end
defp configure([]), do: nil
defp configure([{k, v} | remainder]) do
Application.put_env(:ex_health, k, v)
configure(remainder)
end
defp extract_health_checks(module) do
hc_regex = ~r/#{@function_prefix}.*/
module.__info__(:functions)
|> Enum.filter(fn {func, _arr} ->
Atom.to_string(func) =~ hc_regex
end)
end
@spec extract_and_transform(module()) :: list(ExHealth.Check.t())
defp extract_and_transform(module) do
function_list = extract_health_checks(module)
for {func, _arr} <- function_list do
%ExHealth.Check{name: remove_function_prefix(func), mfa: {module, func, []}}
end
end
@spec load_config() :: ExHealth.Status.t()
defp load_config() do
:ok =
case Application.load(:ex_health) do
:ok ->
:ok
{:error, {:already_loaded, :ex_health}} ->
:ok
end
module = Application.get_env(:ex_health, :module)
interval_ms = Application.get_env(:ex_health, :interval_ms, 15_000)
mfas =
case module do
nil -> extract_and_transform(ExHealth.SelfCheck)
mod -> extract_and_transform(mod)
end
%ExHealth.Status{
checks: mfas,
interval_ms: interval_ms
}
end
defp remove_function_prefix(function) do
name_with_prefix = Atom.to_string(function)
prefix = String.length(@function_prefix)
String.slice(name_with_prefix, prefix, String.length(name_with_prefix))
end
end
# source: lib/ex_health.ex
defmodule Statistics.Distributions.T do
alias Statistics.Math
alias Statistics.Math.Functions
@moduledoc """
Student's t distribution.
This distribution is always centered around 0.0 and allows a *degrees of freedom* parameter.
"""
@doc """
The probability density function
## Examples
iex> Statistics.Distributions.T.pdf(3).(0)
0.3675525969478612
iex> Statistics.Distributions.T.pdf(1).(3.2)
0.028319384891796327
"""
@spec pdf(number) :: fun
def pdf(df) do
fac = Functions.gamma((df + 1) / 2) / (Math.sqrt(df * Math.pi()) * Functions.gamma(df / 2))
exp = (df + 1) / 2 * -1
fn x -> fac * Math.pow(1 + x * x / df, exp) end
end
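# The factor above implements the Student's t density in closed form:
#   f(x) = Γ((df + 1) / 2) / (sqrt(df * π) * Γ(df / 2)) * (1 + x²/df)^(-(df + 1) / 2)
# Sanity check: for df = 3, f(0) = Γ(2) / (sqrt(3π) * Γ(1.5)) ≈ 0.3676,
# which matches the doctest value.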
@doc """
The cumulative density function
NOTE: this currently uses the very slow Simpson's Rule to execute
a numerical integration of the `pdf` function to approximate
the CDF. This leads to a trade-off between precision and speed.
A robust implementation of the 2F1 hypergeometric function is
required to properly calculate the CDF of the t distribution.
## Examples
iex> Statistics.Distributions.T.cdf(3).(0)
0.4909182507070275
"""
@spec cdf(number) :: fun
def cdf(df) do
cpdf = pdf(df)
fn x -> Functions.simpson(cpdf, -10000, x, 10000) end
end
# when a robust hyp2F1 materialises, use this implementation
# defp cdf_hyp2f1(x, df) do
# p1 = 0.5 + x * Functions.gamma((df+1)/2)
# p2n = Math.hyp2f1(0.5, ((df+1)/2), 1.5, -1*Math.pow(x,2)/df)
# p2d = Math.sqrt(Math.pi*df) * Functions.gamma(df/2)
# p1 * (p2n / p2d)
# end
@doc """
The percentile-point function
NOTE: this is very slow due to the current implementation of the CDF
"""
@spec ppf(number) :: fun
def ppf(df) do
fn x ->
ppf_tande(x, cdf(df), 4)
end
end
# trial-and-error method which refines guesses
# to arbitrary number of decimal places
defp ppf_tande(x, pcdf, precision) do
ppf_tande(x, pcdf, -10, precision + 2, 0)
end
defp ppf_tande(_, _, g, precision, precision) do
g
end
defp ppf_tande(x, pcdf, g, precision, p) do
increment = 100 / Math.pow(10, p)
guess = g + increment
if x < pcdf.(guess) do
ppf_tande(x, pcdf, g, precision, p + 1)
else
ppf_tande(x, pcdf, guess, precision, p)
end
end
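# The recursion above is a digit-by-digit search: starting from g = -10 it
# advances in steps of 100 / 10^p until the CDF first exceeds x, then narrows
# the step by a factor of 10. With the default precision of 4 (six levels of
# refinement) the final step size is 0.001.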
@doc """
Draw a random number from a t distribution with specified degrees of freedom
"""
@spec rand(number) :: number
def rand(df), do: randf(pdf(df))
defp randf(rpdf) do
# t-dist is fatter-tailed than normal
x = Math.rand() * 50 - 25
if rpdf.(x) > Math.rand() do
x
else
# keep trying
randf(rpdf)
end
end
end
# source: lib/statistics/distributions/t.ex
defmodule SpringConfig do
@moduledoc """
Consume configuration from a Spring Cloud Config Server in Elixir.
"""
use PatternTap
@default_opts []
@spec get(atom(), any(), keyword()) :: any()
@doc """
Finds and returns `key` in the configuration registry. If `key` is not found, `default` is returned.
Available options are:
- `ensure_started`: Uses `Application.ensure_all_started/2` to start the SpringConfig application
temporarily in case the process is not part of the supervision tree. Default `false`.
"""
def get(key, default \\ nil, opts \\ []) when is_atom(key) do
opts = Keyword.merge(@default_opts, opts)
if opts[:ensure_started] do
{:ok, _} = Application.ensure_all_started(:spring_config)
end
key |> to_string() |> String.split(".") |> Enum.map(&String.to_atom/1) |> do_get(default)
end
@spec get!(atom(), keyword()) :: any()
@doc """
Similar to `get/3` but raises if `key` is not found.
"""
def get!(key, opts \\ []) when is_atom(key) do
case get(key, :default_value, opts) do
:default_value ->
raise "Key #{key} not found in configuration entries"
value ->
value
end
end
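# Note on the select below: `keys ++ :"$1"` builds an improper-list match
# pattern, so any stored key list that begins with `keys` matches, binding the
# remaining key segments to :"$1" and the stored value to :"$2".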
defp do_get(keys, default) do
case :ets.select(:spring_config, [{{keys ++ :"$1", :"$2"}, [], [{{:"$1", :"$2"}}]}]) do
[] ->
default
result ->
transform(result)
end
end
defp transform(entries) do
Enum.reduce(entries, %{}, fn
{[], value}, _out ->
value
{keys, value}, map ->
map
|> create_path(keys)
|> put_in(keys, value)
end)
end
defp create_path(map, keys) do
keys
|> Enum.reduce({map, []}, fn
key, {map, []} ->
{Map.put_new(map, key, nil), [key]}
key, {map, prev_keys} ->
new_map =
update_in(map, prev_keys, fn
nil -> %{key => nil}
val -> val
end)
{new_map, prev_keys ++ [key]}
end)
|> tap({map, _} ~> map)
end
end
# source: lib/spring_config.ex
defmodule BinanceFutures.RateLimiter do
@moduledoc """
Rate Limiter handles Binance Futures API limits.
More info could be found here:
https://binance-docs.github.io/apidocs/futures/en/#limits
Binance API has two types of limits.
- `weight` limit - you have N available weight by IP address per time frame
- `orders` limit - you have N available calls to `ORDERS` API's per API Key, per time frame.
Time frames for now are:
- `1M` - 1 Minute. Applicable for `weight` and `orders` limits.
- `10S` - 10 Seconds. Applicable for `orders` limits only.
By default it will only collect already-used rates from API requests,
and wouldn't be able to provide any remaining rate limit information.
## Already used rate limits
To get already used limits for your IP/API Key you could use:
- `BinanceFutures.RateLimiter.get/0` - Shows all available limits information (including remaining if fetched).
- `BinanceFutures.RateLimiter.get_weight/0` - Shows used `weight` limits by time frame.
- `BinanceFutures.RateLimiter.get_orders/0` - Shows used `orders` limits by time frame.
## Remaining limits
If you need to keep track of remaining rate limits, you have to call
`BinanceFutures.RateLimiter.fetch_limits/0` function.
## Example
iex(1)> BinanceFutures.RateLimiter.fetch_limits
:ok
It will spend some of your `weight` limit by calling the
`BinanceFutures.USDM.MarketData.exchange_info/0` function,
but it will also capture the remaining rate limits for your account.
After this call you will be able to keep track on remaining limits by using:
- `BinanceFutures.RateLimiter.remaining/0` - Shows all remaining limits.
- `BinanceFutures.RateLimiter.remaining_orders/0` - Shows remaining orders limits by time frames.
- `BinanceFutures.RateLimiter.remaining_weight/0` - Shows remaining weight limits by time frames.
"""
use GenServer
alias BinanceFutures.USDM.MarketData
@weight_header "X-MBX-USED-WEIGHT-"
@order_header "X-MBX-ORDER-COUNT-"
@typedoc """
Limit type.
Contain time frame as key, example: `1M`, `10S`
And actual limit as value: `1`, `2400`
## Example
%{"1M" => 2399}
%{"10S" => 300, "1M" => 1200}
"""
@type limit :: %{optional(binary) => non_neg_integer}
@typedoc """
Limits for `weight` and `orders` types that fetched from
`BinanceFutures.USDM.MarketData.exchange_info/0`
## Example
%{orders: %{"10S" => 300, "1M" => 1200}, weight: %{"1M" => 2400}}
"""
@type limits :: %{weight: limit(), orders: limit()}
defmodule State do
@typedoc """
Rate limiter state.
- `limits` - Limits pulled from exchange info.
- `weight` - Used `weight` limits.
- `orders` - Used `orders` limits.
"""
@type t :: %__MODULE__{
limits: BinanceFutures.RateLimiter.limits(),
weight: BinanceFutures.RateLimiter.limit(),
orders: BinanceFutures.RateLimiter.limit(),
}
defstruct limits: %{weight: %{}, orders: %{}},
weight: %{},
orders: %{}
end
@doc false
def start_link(_opts \\ []),
do: GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
@doc """
Parses used limits from given headers.
Don't use this function if you don't know what you are doing!
This function is used in all REST API calls.
"""
@spec set(HTTPoison.Base.headers()) :: :ok
def set(headers),
do: GenServer.cast(__MODULE__, {:set, headers})
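# For illustration (header values are hypothetical): headers such as
#   [{"X-MBX-USED-WEIGHT-1M", "12"}, {"X-MBX-ORDER-COUNT-10S", "3"}]
# are parsed by the {:set, headers} handler below into
#   weight: %{"1M" => 12} and orders: %{"10S" => 3}.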
@doc """
Get all available limits information.
Note that if you didn't call `BinanceFutures.RateLimiter.fetch_limits/0`
`limits` field will be empty.
## Example
iex(1)> BinanceFutures.RateLimiter.get()
%{limits: %{orders: %{}, weight: %{}}, orders: %{}, weight: %{"1M" => 1}}
iex(2)> BinanceFutures.RateLimiter.fetch_limits()
:ok
iex(3)> BinanceFutures.RateLimiter.get()
%{
limits: %{orders: %{"10S" => 300, "1M" => 1200}, weight: %{"1M" => 2400}},
orders: %{},
weight: %{"1M" => 2}
}
"""
@spec get() :: State.t()
def get(),
do: GenServer.call(__MODULE__, :get)
@doc """
Gets already used `weight` limit.
## Example
iex(1)> BinanceFutures.RateLimiter.get_weight()
%{}
iex(2)> BinanceFutures.USDM.MarketData.server_time()
{:ok, 1616347174621}
iex(3)> BinanceFutures.RateLimiter.get_weight()
%{"1M" => 2}
"""
@spec get_weight() :: limit()
def get_weight(),
do: GenServer.call(__MODULE__, :get_weight)
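@doc """
Gets already used `orders` limit, keyed by time frame, mirroring `get_weight/0`.
"""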
@spec get_orders() :: limit()
def get_orders(),
do: GenServer.call(__MODULE__, :get_orders)
@doc """
Fetches Binance Futures API limits.
Uses `BinanceFutures.USDM.MarketData.exchange_info/0` function for pulling information.
Returns `{:error, term}` in case of some issues with API call.
## Example
iex(1)> BinanceFutures.RateLimiter.fetch_limits()
:ok
"""
@spec fetch_limits() :: :ok | {:error, term}
def fetch_limits(),
do: GenServer.call(__MODULE__, :fetch_limits)
@doc """
Gets remaining limits information.
By default it does not calculate any remaining limits.
To make it happen you have to call `BinanceFutures.RateLimiter.fetch_limits/0`
before calling `BinanceFutures.RateLimiter.remaining/0`
## Example
iex(1)> BinanceFutures.RateLimiter.remaining()
%{orders: %{}, weight: %{}}
iex(2)> BinanceFutures.USDM.MarketData.server_time()
{:ok, 1616347615118}
iex(3)> BinanceFutures.RateLimiter.remaining()
%{orders: %{}, weight: %{}}
iex(4)> BinanceFutures.RateLimiter.fetch_limits()
:ok
iex(5)> BinanceFutures.RateLimiter.remaining()
%{orders: %{"10S" => 300, "1M" => 1200}, weight: %{"1M" => 2399}}
"""
@spec remaining() :: %{orders: limit(), weight: limit()}
def remaining(),
do: GenServer.call(__MODULE__, :remaining)
@doc """
Gets remaining `weight` limits.
By default it does not calculate any remaining limits.
To make it happen you have to call `BinanceFutures.RateLimiter.fetch_limits/0`
before calling `BinanceFutures.RateLimiter.remaining_weight/0`
## Example
iex(1)> BinanceFutures.RateLimiter.remaining_weight()
%{}
iex(2)> BinanceFutures.USDM.MarketData.server_time()
{:ok, 1616347833596}
iex(3)> BinanceFutures.RateLimiter.remaining_weight()
%{}
iex(4)> BinanceFutures.RateLimiter.fetch_limits()
:ok
iex(5)> BinanceFutures.RateLimiter.remaining_weight()
%{"1M" => 2398}
"""
@spec remaining_weight() :: limit()
def remaining_weight(),
do: GenServer.call(__MODULE__, :remaining_weight)
@doc """
Gets remaining `orders` limits.
By default it does not calculate any remaining limits.
To make it happen you have to call `BinanceFutures.RateLimiter.fetch_limits/0`
before calling `BinanceFutures.RateLimiter.remaining_orders/0`
"""
@spec remaining_orders() :: limit()
def remaining_orders(),
do: GenServer.call(__MODULE__, :remaining_orders)
## Callbacks
@impl true
def init(_) do
{:ok, %State{}}
end
@impl true
def handle_cast({:set, headers}, %State{} = state) do
weight =
headers
|> Enum.filter(fn {name, _} -> String.starts_with?(name, @weight_header) end)
|> Enum.map(fn {name, weight} ->
{String.replace(name, @weight_header, ""), String.to_integer(weight)}
end)
|> Enum.into(%{})
orders =
headers
|> Enum.filter(fn {name, _} -> String.starts_with?(name, @order_header) end)
|> Enum.map(fn {name, weight} ->
{String.replace(name, @order_header, ""), String.to_integer(weight)}
end)
|> Enum.into(%{})
{:noreply, %State{state | weight: weight, orders: orders}}
end
@impl true
def handle_call(:fetch_limits, _from, %State{limits: limits} = state) do
case MarketData.exchange_info() do
{:ok, data} ->
updated_limits =
data
|> Map.get("rateLimits")
|> Enum.reduce(limits, &pick_limits/2)
{:reply, :ok, %State{state | limits: updated_limits}}
{:error, err} ->
{:reply, {:error, err}, state}
end
end
@impl true
def handle_call(:get, _from, %State{} = state),
do: {:reply, Map.from_struct(state), state}
@impl true
def handle_call(:get_weight, _from, %State{weight: weight} = state),
do: {:reply, weight, state}
@impl true
def handle_call(:get_orders, _from, %State{orders: orders} = state),
do: {:reply, orders, state}
@impl true
def handle_call(
:remaining,
_from,
%State{
limits: %{orders: orders_limits, weight: weight_limits},
weight: weight,
orders: orders
} = state
) do
orders =
orders_limits
|> Enum.map(fn {name, limit} ->
{name, limit - Map.get(orders, name, 0)}
end)
|> Enum.into(%{})
weight =
weight_limits
|> Enum.map(fn {name, limit} ->
{name, limit - Map.get(weight, name, 0)}
end)
|> Enum.into(%{})
res = %{
orders: orders,
weight: weight
}
{:reply, res, state}
end
@impl true
def handle_call(
:remaining_weight,
_from,
%State{
limits: %{weight: weight_limits},
weight: weight
} = state
) do
res =
weight_limits
|> Enum.map(fn {name, limit} ->
{name, limit - Map.get(weight, name, 0)}
end)
|> Enum.into(%{})
{:reply, res, state}
end
@impl true
def handle_call(
:remaining_orders,
_from,
%State{
limits: %{orders: orders_limits},
orders: orders
} = state
) do
res =
orders_limits
|> Enum.map(fn {name, limit} ->
{name, limit - Map.get(orders, name, 0)}
end)
|> Enum.into(%{})
{:reply, res, state}
end
defp pick_limits(
%{
"rateLimitType" => "REQUEST_WEIGHT",
"intervalNum" => interval_num,
"interval" => "MINUTE",
"limit" => limit
},
%{weight: weight} = limits
) do
weight = Map.put(weight, "#{interval_num}M", limit)
%{limits | weight: weight}
end
defp pick_limits(
%{
"rateLimitType" => "ORDERS",
"intervalNum" => interval_num,
"interval" => interval,
"limit" => limit
},
%{orders: orders} = limits
) do
orders = Map.put(orders, "#{interval_num}#{String.first(interval)}", limit)
%{limits | orders: orders}
end
defp pick_limits(_, limits),
do: limits
end
# source: lib/binance_futures/rate_limiter.ex
defmodule Glide do
@external_resource "README.md"
@moduledoc @external_resource
|> File.read!()
|> String.split("<!-- MDOC !-->")
|> Enum.fetch!(1)
@doc """
Create a value from a generator.
Mostly wraps StreamData, dispatching to a Glide generator function when one exists.
```
Glide.val(:uuid)
Glide.val(:integer)
Glide.val(:string, :alphanumeric)
```
"""
def val(_data, options \\ [])
def val(%StreamData{} = data, fun) do
fun =
if fun == [] do
&Function.identity/1
else
fun
end
pick(data) |> fun.()
end
def val(nil, _) do
val(nil_())
end
def val(name, args) when is_atom(name) do
gen(name, args) |> val
end
@doc """
Create a generator.
Mostly wraps StreamData, dispatching to a Glide generator function when one exists.
```
Glide.gen(:uuid)
Glide.gen(:integer)
Glide.gen(:string, :alphanumeric)
```
"""
def gen(name, args \\ [])
def gen(nil, _) do
nil_()
end
def gen(:member_of, args) do
StreamData.member_of(args)
end
def gen(name, args) when is_list(args) do
if function_exported?(__MODULE__, name, length(args)) do
apply(__MODULE__, name, args)
else
apply(StreamData, name, args)
end
end
def gen(name, arg) do
gen(name, [arg])
end
@doc """
Creates generator for optional data
When creating a value from this generator it will either be `nil` or of the type
passed in as the argument.
```
Glide.optional(Glide.string(:ascii))
```
"""
def optional(data) do
StreamData.one_of([nil_(), data])
end
@doc """
Create generator for nil constant
Can also be called with Glide.gen(nil)
"""
def nil_ do
StreamData.constant(nil)
end
@doc """
Create generator for a struct
```
struct_of(User, %{
name: string(:ascii),
address: optional(string(:ascii))
})
```
"""
def struct_of(struct, data) do
StreamData.map(StreamData.fixed_map(data), fn map -> struct!(struct, map) end)
end
@doc """
Merges two StreamData structs; the underlying values should
implement the Glide.Merge protocol.
By default implemented for Map and Keyword lists.
"""
def merge(%StreamData{} = data1, %StreamData{} = data2) do
StreamData.bind(data1, fn d1 ->
StreamData.bind(data2, fn d2 ->
StreamData.constant(Glide.Merge.merge(d1, d2))
end)
end)
end
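# merge/2 is written in monadic style: both generators are bound, the two
# realized values are merged via the Glide.Merge protocol, and the result is
# re-wrapped as a constant generator.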
def merge(values) when is_list(values) do
fold_gen(values, &merge(&1, &2))
end
@doc """
Concatenates two StreamData structs; the underlying values should
implement the Glide.Concat protocol.
Will cast any non-StreamData value to a StreamData.constant.
By default implemented for List and String.
"""
def concat(%StreamData{} = data1, %StreamData{} = data2) do
StreamData.bind(data1, fn d1 ->
StreamData.bind(data2, fn d2 ->
StreamData.constant(Glide.Concat.concat(d1, d2))
end)
end)
end
def concat(%StreamData{} = data, value) when is_binary(value) do
concat(data, StreamData.constant(value))
end
def concat(value, %StreamData{} = data) when is_binary(value) do
concat(StreamData.constant(value), data)
end
def concat(value1, value2) when is_binary(value2) and is_binary(value1) do
concat(StreamData.constant(value1), StreamData.constant(value2))
end
def concat(values) when is_list(values) do
fold_gen(values, &concat(&1, &2))
end
@doc """
iex> Glide.fold_gen(["1","2","3"], fn doc, acc ->
...> Glide.concat([doc, "!", acc])
...> end) |> Glide.val
"1!2!3"
"""
def fold_gen(docs, folder_fun)
def fold_gen([], _folder_fun), do: StreamData.constant([])
def fold_gen([%StreamData{} = data], _folder_fun), do: data
def fold_gen([value], _folder_fun), do: StreamData.constant(value)
def fold_gen([doc | docs], folder_fun) when is_function(folder_fun, 2),
do: folder_fun.(doc, fold_gen(docs, folder_fun))
@doc """
Generates a version 4 (random) UUID generator
iex> Glide.uuid() |> Glide.val
"30192e4a-6d03-4f9a-86cb-f301454447c2"
"""
def uuid() do
StreamData.map(binuuid(), fn binuuid ->
{:ok, uuid} = encode(binuuid)
uuid
end)
end
@doc """
Generates a version 4 (random) UUID in the binary format generator
iex> Glide.binuuid() |> Glide.val
<<130, 204, 75, 232, 2, 161, 72, 182, 138, 181, 5, 244, 199, 120, 124, 155>>
"""
def binuuid() do
StreamData.map(StreamData.binary(length: 16), fn <<u0::48, _::4, u1::12, _::2, u2::62>> ->
<<u0::48, fc00:e968:6179::de52:7100, u1::12, 2::2, u2::62>>
end)
end
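# The rewrite above pins the version nibble to 4 and the variant bits to
# 0b10 (2::2), as RFC 4122 requires for random (version 4) UUIDs.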
@time_zones ["Etc/UTC"]
@doc """
Generates a Date by default somewhere between 1970..2050
iex> Glide.date(1980..1985) |> Glide.val
~D[1984-09-18]
"""
def date(range \\ 1970..2050) do
StreamData.tuple(
{StreamData.integer(range), StreamData.integer(1..12), StreamData.integer(1..31)}
)
|> StreamData.bind_filter(fn tuple ->
case Date.from_erl(tuple) do
{:ok, date} -> {:cont, StreamData.constant(date)}
_ -> :skip
end
end)
end
@doc """
Generates a Time
iex> Glide.time() |> Glide.val
~T[14:57:31]
"""
def time do
StreamData.tuple(
{StreamData.integer(0..23), StreamData.integer(0..59), StreamData.integer(0..59)}
)
|> StreamData.map(&Time.from_erl!/1)
end
@doc """
Generates a NaiveDateTime whose date part falls by default between 1970..2050
iex> Glide.naive_datetime() |> Glide.val
~N[2050-01-22 03:54:58]
"""
def naive_datetime do
StreamData.tuple({date(), time()})
|> StreamData.map(fn {date, time} ->
{:ok, naive_datetime} = NaiveDateTime.new(date, time)
naive_datetime
end)
end
@doc """
Generates a DateTime whose date part falls by default between 1970..2050
iex> Glide.datetime() |> Glide.val
~U[2050-09-02 22:08:07Z]
"""
def datetime do
StreamData.tuple({naive_datetime(), StreamData.member_of(@time_zones)})
|> StreamData.map(fn {naive_datetime, time_zone} ->
DateTime.from_naive!(naive_datetime, time_zone)
end)
end
@doc """
Generates a seed
"""
def seed(start \\ 0) do
:rand.seed(:exs1024, start)
end
@doc """
Generate value from StreamData
Will use a preset seed (e.g. by ExUnit) if available, otherwise will
create a new seed.
See https://hexdocs.pm/stream_data/ExUnitProperties.html#pick/1
"""
def pick(data, start \\ 0) do
exported_seed =
case :rand.export_seed() do
:undefined ->
# use provided seed if not preseeded (by ExUnit)
seed(start)
seed ->
seed
end
seed = :rand.seed_s(exported_seed)
{size, seed} = :rand.uniform_s(100, seed)
%StreamData.LazyTree{root: root} = StreamData.__call__(data, seed, size)
{_, {seed, _}} = seed
:rand.seed(:exs1024, seed)
root
end
# See https://github.com/elixir-ecto/ecto/blob/v3.6.2/lib/ecto/uuid.ex#L1
defp encode(
<<a1::4, a2::4, a3::4, a4::4, a5::4, a6::4, a7::4, a8::4, b1::4, b2::4, b3::4, b4::4,
c1::4, c2::4, c3::4, c4::4, d1::4, d2::4, d3::4, d4::4, e1::4, e2::4, e3::4, e4::4,
e5::4, e6::4, e7::4, e8::4, e9::4, e10::4, e11::4, e12::4>>
) do
<<e(a1), e(a2), e(a3), e(a4), e(a5), e(a6), e(a7), e(a8), ?-, e(b1), e(b2), e(b3), e(b4), ?-,
e(c1), e(c2), e(c3), e(c4), ?-, e(d1), e(d2), e(d3), e(d4), ?-, e(e1), e(e2), e(e3), e(e4),
e(e5), e(e6), e(e7), e(e8), e(e9), e(e10), e(e11), e(e12)>>
catch
:error -> :error
else
encoded -> {:ok, encoded}
end
@compile {:inline, e: 1}
defp e(0), do: ?0
defp e(1), do: ?1
defp e(2), do: ?2
defp e(3), do: ?3
defp e(4), do: ?4
defp e(5), do: ?5
defp e(6), do: ?6
defp e(7), do: ?7
defp e(8), do: ?8
defp e(9), do: ?9
defp e(10), do: ?a
defp e(11), do: ?b
defp e(12), do: ?c
defp e(13), do: ?d
defp e(14), do: ?e
defp e(15), do: ?f
end
# source: lib/glide.ex
alias InterpreterTerms.SymbolMatch, as: Sym
alias InterpreterTerms.WordMatch, as: Word
defmodule GraphReasoner.QueryMatching.TriplesBlock do
@moduledoc """
Parses information from a TriplesBlock SymbolMatch.
The idea behind this module is to keep it as simple as possible,
mainly focussing on abstracting the verbose EBNF.
"""
@doc """
Assuming the supplied SymbolMatch contains only one simple triple,
extract
- subject: the VarOrTerm element
- predicate: the PathPrimary element
- object: the VarOrTerm element
"""
def single_triple!(
%Sym{ symbol: :TriplesBlock, submatches: [
%Sym{ symbol: :TriplesSameSubjectPath, submatches: [
subjectVarOrTerm,
%Sym{ symbol: :PropertyListPathNotEmpty, submatches: [
%Sym{ symbol: :VerbPath, submatches: [
%Sym{ symbol: :Path, submatches: [
%Sym{ symbol: :PathAlternative, submatches: [
%Sym{ symbol: :PathSequence, submatches: [
%Sym{ symbol: :PathEltOrInverse, submatches: [
%Sym{ symbol: :PathElt, submatches: [
%Sym{ symbol: :PathPrimary } = predicateElement ] } ] } ] } ] } ] } ] },
%Sym{ symbol: :ObjectListPath, submatches: [
%Sym{ symbol: :ObjectPath, submatches: [
%Sym{ symbol: :GraphNodePath, submatches: [ objectVarOrTerm ] } ] } ] } ] } ] }
| maybe_a_dot ] }
) do
case maybe_a_dot do
[] -> nil
[%Word{}] -> nil
_ -> raise "TriplesBlock is not a single triple"
end
{ subjectVarOrTerm,
predicateElement,
objectVarOrTerm }
end
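# For a single-triple pattern, this clause yields the three inner matches
# {subject :VarOrTerm, predicate :PathPrimary, object :VarOrTerm}; anything
# after the triple other than an optional trailing dot raises.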
@doc """
Assuming the supplied SymbolMatch contains a simple triple, extract
- subject: the VarOrTerm element
- predicate: the PathPrimary element
- object: the VarOrTerm element
"""
def first_triple!(
%Sym{ symbol: :TriplesBlock, submatches: [
%Sym{ symbol: :TriplesSameSubjectPath, submatches: [
subjectVarOrTerm,
%Sym{ symbol: :PropertyListPathNotEmpty, submatches: [
%Sym{ symbol: :VerbPath, submatches: [
%Sym{ symbol: :Path, submatches: [
%Sym{ symbol: :PathAlternative, submatches: [
%Sym{ symbol: :PathSequence, submatches: [
%Sym{ symbol: :PathEltOrInverse, submatches: [
%Sym{ symbol: :PathElt, submatches: [
%Sym{ symbol: :PathPrimary } = predicateElement ] } ] } ] } ] } ] } ] },
%Sym{ symbol: :ObjectListPath, submatches: [
%Sym{ symbol: :ObjectPath, submatches: [
%Sym{ symbol: :GraphNodePath, submatches: [ objectVarOrTerm ] } ] } ] } ] } ] }
| _maybe_other_content ] }
) do
{ subjectVarOrTerm,
predicateElement,
objectVarOrTerm }
end
@doc """
Overwrites the submatches of the parent_triples_block to contain only the supplied child_triples_block.
"""
def set_child(
%Sym{ symbol: :TriplesBlock, submatches: [ first_elt | _rest_elts ] } = parent_triples_block,
%Sym{ symbol: :TriplesBlock } = child_triples_block
) do
%{ parent_triples_block | submatches: [ first_elt, %Word{ word: "." }, child_triples_block ] }
end
@doc """
Easy updating of the predicate of a TriplesBlock
"""
def update_predicate( triples_block, new_predicate ) do
Manipulators.DeepUpdates.update_deep_submatch(
triples_block, new_predicate,
[ :TriplesBlock, {:TriplesSameSubjectPath,1}, :PropertyListPathNotEmpty, :VerbPath, :Path, :PathAlternative, :PathSequence, :PathEltOrInverse, :PathElt ])
end
def predicate( triples_block ) do
triples_block
|> first_triple!
|> elem(1)
end
def wrap_in_graph( triples_block, graph_uri ) do
# Convert the TriplesBlock into a GraphPatternNotTriples>GraphGraphPattern>GroupGraphPattern>GroupGraphPatternSub>TriplesBlock
# This last one can be inlined as a GroupGraphPattern>GroupGraphPatternSub may have many GraphPatternNotTriples subexpressions.
%Sym{ symbol: :GraphPatternNotTriples, submatches: [
%Sym{ symbol: :GraphGraphPattern, submatches: [
%Word{ word: "GRAPH" },
%Sym{ symbol: :VarOrIri, submatches: [
%Sym{ symbol: :iri, submatches: [
%Sym{ symbol: :IRIREF,
string: "<" <> graph_uri <> ">",
submatches: :none }
] } ] },
%Sym{ symbol: :GroupGraphPattern,
submatches: [
%Word{ word: "{" },
%Sym{ symbol: :GroupGraphPatternSub,
submatches: [
triples_block
] },
%Word{ word: "}" }
] } ] } ] }
end
end
# source: lib/graph_reasoner/query_matching/triples_block.ex
defmodule Braintree.Transaction do
@moduledoc """
Create a new sale.
To create a transaction, you must include an amount and either a
payment_method_nonce or a payment_method_token.
https://developers.braintreepayments.com/reference/response/transaction/ruby
"""
use Braintree.Construction
alias Braintree.{AddOn, HTTP}
alias Braintree.ErrorResponse, as: Error
@type t :: %__MODULE__{
add_ons: [AddOn.t()],
additional_processor_response: String.t(),
amount: number,
apple_pay_details: String.t(),
avs_error_response_code: String.t(),
avs_postal_code_response_code: String.t(),
avs_street_address_response_code: String.t(),
billing: map,
channel: String.t(),
coinbase_details: String.t(),
created_at: String.t(),
credit_card: map,
currency_iso_code: String.t(),
custom_fields: map,
customer_details: map,
cvv_response_code: String.t(),
descriptor: map,
disbursement_details: map,
discounts: [any],
disputes: [any],
escrow_status: String.t(),
gateway_rejection_reason: String.t(),
id: String.t(),
merchant_account_id: String.t(),
order_id: String.t(),
payment_instrument_type: String.t(),
paypal: map,
plan_id: String.t(),
processor_authorization_code: String.t(),
processor_response_code: String.t(),
processor_response_text: String.t(),
processor_settlement_response_code: String.t(),
processor_settlement_response_text: String.t(),
purchase_order_number: String.t(),
recurring: String.t(),
refund_ids: String.t(),
refunded_transaction_id: String.t(),
risk_data: String.t(),
service_fee_amount: number,
settlement_batch_id: String.t(),
shipping: map,
status: String.t(),
status_history: String.t(),
subscription_details: map,
subscription_id: String.t(),
tax_amount: number,
tax_exempt: boolean,
type: String.t(),
updated_at: String.t(),
voice_referral_number: String.t()
}
defstruct add_ons: [],
additional_processor_response: nil,
amount: 0,
apple_pay_details: nil,
avs_error_response_code: nil,
avs_postal_code_response_code: nil,
avs_street_address_response_code: nil,
billing: %{},
channel: nil,
coinbase_details: nil,
created_at: nil,
credit_card: %{},
currency_iso_code: nil,
custom_fields: %{},
customer_details: %{},
cvv_response_code: nil,
descriptor: %{},
disbursement_details: nil,
discounts: [],
disputes: [],
escrow_status: nil,
gateway_rejection_reason: nil,
id: nil,
merchant_account_id: nil,
order_id: nil,
payment_instrument_type: nil,
paypal: %{},
plan_id: nil,
processor_authorization_code: nil,
processor_response_code: nil,
processor_response_text: nil,
processor_settlement_response_code: nil,
processor_settlement_response_text: nil,
purchase_order_number: nil,
recurring: nil,
refund_ids: nil,
refunded_transaction_id: nil,
risk_data: nil,
service_fee_amount: 0,
settlement_batch_id: nil,
shipping: %{},
status: nil,
status_history: nil,
subscription_details: %{},
subscription_id: nil,
tax_amount: 0,
tax_exempt: false,
type: nil,
updated_at: nil,
voice_referral_number: nil
@doc """
Use a `payment_method_nonce` or `payment_method_token` to make a one time
charge against a payment method.
## Example
{:ok, transaction} = Transaction.sale(%{
amount: "100.00",
payment_method_nonce: @payment_method_nonce,
options: %{submit_for_settlement: true}
})
transaction.status # "settling"
"""
@spec sale(map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def sale(params, opts \\ []) do
sale_params = Map.merge(params, %{type: "sale"})
with {:ok, payload} <- HTTP.post("transactions", %{transaction: sale_params}, opts) do
{:ok, new(payload)}
end
end
@doc """
Use a `transaction_id` and optional `amount` to settle the transaction.
Use this if `submit_for_settlement` was false while creating the charge using sale.
## Example
{:ok, transaction} = Transaction.submit_for_settlement("123", %{amount: "100"})
transaction.status # "settling"
"""
@spec submit_for_settlement(String.t(), map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def submit_for_settlement(transaction_id, params, opts \\ []) do
path = "transactions/#{transaction_id}/submit_for_settlement"
with {:ok, payload} <- HTTP.put(path, %{transaction: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
Use a `transaction_id` and optional `amount` to issue a refund
for that transaction
## Example
{:ok, transaction} = Transaction.refund("123", %{amount: "100.00"})
transaction.status # "refunded"
"""
@spec refund(String.t(), map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def refund(transaction_id, params, opts \\ []) do
path = "transactions/#{transaction_id}/refund"
with {:ok, payload} <- HTTP.post(path, %{transaction: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
Use a `transaction_id` to issue a void for that transaction
## Example
{:ok, transaction} = Transaction.void("123")
transaction.status # "voided"
"""
@spec void(String.t(), Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def void(transaction_id, opts \\ []) do
path = "transactions/#{transaction_id}/void"
with {:ok, payload} <- HTTP.put(path, opts) do
{:ok, new(payload)}
end
end
@doc """
Find an existing transaction by `transaction_id`
## Example
{:ok, transaction} = Transaction.find("123")
"""
@spec find(String.t(), Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def find(transaction_id, opts \\ []) do
path = "transactions/#{transaction_id}"
with {:ok, payload} <- HTTP.get(path, opts) do
{:ok, new(payload)}
end
end
@doc """
Convert a map into a Transaction struct.
Add_ons are converted to a list of structs as well.
## Example
transaction =
Braintree.Transaction.new(%{
"subscription_id" => "subxid",
"status" => "submitted_for_settlement"
})
"""
def new(%{"transaction" => map}) do
new(map)
end
def new(map) when is_map(map) do
transaction = super(map)
%{transaction | add_ons: AddOn.new(transaction.add_ons)}
end
def new(list) when is_list(list) do
Enum.map(list, &new/1)
end
end
# source: lib/transaction.ex
defmodule FunLand.Mappable do
@moduledoc """
Something is Mappable if there is a way to map a function over it.
`mapping` means to apply a transformation to the contents, without changing the structure.
This module contains both the Mappable behaviour, which might be added
to your modules/structures by using `use Mappable` from within them,
and the `Mappable.map(mappable, fun)` function, which will dispatch to whatever structure is passed in as the first argument.
In Category Theory, something that is Mappable is called a *Functor*.
## Fruit Salad Example
Say we have an apple. There are many _operations_ we could do with an apple, such as peel it, slice it, juice it, etc.
However, right now we don't have an apple, but a bowl filled with apples. How can we make sure we can still use all the operations we could on single apples?
The answer: We need to take the apples one-by-one from the bowl, perform the desired operation, and then put them back in the bowl.
This 'take -> perform -> put back' is the implementation of `map` for a bowl. It works similar for other data structures:
Exactly _how_ to take something and put a result back, and _when_ to perform the desired operation (if we have an empty bowl, for instance, there's nothing to do)
is what you need to define in your implementation.
"""
@type mappable(a) :: FunLand.adt(a)
@callback map(mappable(a), (a -> b)) :: mappable(b) when a: any, b: any
defmacro __using__(_opts) do
quote do
@behaviour FunLand.Mappable
end
end
@doc """
Maps the function `function` over all things inside `mappable`.
Exactly what this means, depends on the structure of `mappable`.
For lists, for instance, this means that all of the elements will be transformed by `function`.
For `Maybe`, this will do nothing if `Maybe` is `Nothing`, while it will transform whatever is inside
if the `Maybe` is `Just something`.
"""
def map(mappable, function)
# Stdlib structs
for {stdlib_module, module} <- FunLand.Builtin.__stdlib_struct_modules__() do
def map(mappable = %unquote(stdlib_module){}, function) do
apply(unquote(module), :map, [mappable, function])
end
end
# Structs with user-defined specification.
def map(mappable = %mappable_module{}, function) when is_function(function, 1) do
mappable_module.map(mappable, function)
end
use FunLand.Helper.GuardMacros
for {guard, module} <- FunLand.Builtin.__builtin__() do
def map(mappable, function) when is_function(function, 1) and unquote(guard)(mappable) do
apply(unquote(module), :map, [mappable, function])
end
end
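# Example: Mappable.map([1, 2, 3], &(&1 * 2)) is expected to hit a guard
# clause generated above and return [2, 4, 6], assuming the builtin set
# includes a List implementation (as the moduledoc implies).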
end
# source: lib/fun_land/mappable.ex
defmodule Staxx.Proxy.Chain.State do
@moduledoc """
Default chain process state
Chain process has it's own statuses a bit different to ExTestchain
When new chain process is spawning it's status is set to `:initializing` then flow is this:
`:initializing` -> `:ready` -> `:terminating` -> `:terminated`
So chain is fully ready only when status is set to `:ready`
In case of failure status will be set to `:failed`
"""
alias Staxx.Proxy.Chain.Storage.Record
alias Staxx.Proxy.Chain.Notification
@type status :: :initializing | :ready | :terminating | :terminated | :locked | :failed
@type t :: %__MODULE__{
id: binary,
node: node(),
start_type: :new | :existing,
status: status,
notify_pid: pid() | nil,
chain_status: atom(),
deploy_tag: nil | binary,
deploy_step_id: 0..9
}
defstruct id: nil,
node: nil,
start_type: :new,
status: :initializing,
notify_pid: nil,
chain_status: :none,
deploy_tag: nil,
deploy_step_id: 0
@doc """
Update node for state and return updated state
"""
@spec node(t(), node()) :: t()
def node(%__MODULE__{} = state, node), do: %__MODULE__{state | node: node}
@doc """
Set status for state and return updated state
"""
@spec status(t(), status()) :: t()
def status(%__MODULE__{} = state, status),
do: %__MODULE__{state | status: status}
@doc """
Set chain status for state and return updated state
"""
@spec chain_status(t(), atom) :: t()
def chain_status(%__MODULE__{} = state, chain_status),
do: %__MODULE__{state | chain_status: chain_status}
@doc """
Send notification about chain to `notify_pid`.
Notification will be send to `notify_pid` if it's exist
And to global event bus
"""
@spec notify(t(), binary | atom, term()) :: t()
def notify(%__MODULE__{id: id, notify_pid: pid} = state, event, data \\ %{}) do
notification = %Notification{id: id, event: event, data: data}
if pid do
send(pid, notification)
end
Notification.send_to_event_bus(notification)
state
end
@doc """
Store state into DB. Will call Storage to store chain details
"""
@spec store(t()) :: t()
def store(%__MODULE__{} = state) do
state
|> Record.from_state()
|> Record.store()
state
end
end
# source: apps/proxy/lib/proxy/chain/state.ex
defmodule Ash.Filter.Runtime do
@moduledoc """
Checks a record to see if it matches a filter statement.
We can't always tell if a record matches a filter statement, and as such
this function may return `:unknown`
"""
alias Ash.Query.{Expression, Not, Ref}
@doc """
Checks if a record matches a filter, side loading any necessary relationships.
If it can't tell, this returns false.
"""
def matches?(api, record, filter, dirty_fields \\ []) do
case matches(record, filter, dirty_fields) do
{:ok, boolean} ->
boolean
{:side_load, side_loads} when not is_nil(api) ->
matches?(api, api.load!(record, side_loads), filter, dirty_fields)
{:side_load, _} ->
false
:unknown ->
false
end
end
def matches(record, filter, dirty_fields, side_loads \\ [])
def matches(record, expression, dirty_fields, side_loads) do
case expression do
%Ash.Filter{expression: expression} ->
matches(record, expression, dirty_fields, side_loads)
nil ->
{:ok, true}
boolean when is_boolean(boolean) ->
{:ok, boolean}
%op{__operator__?: true, left: left, right: right} = operator ->
with true <- :erlang.function_exported(op, :match?, 1),
{:dirty?, false} <- {:dirty?, dirty?([left, right], dirty_fields)},
{:side_load, []} <- {:side_load, need_to_load([left, right], record)} do
case right do
%Ref{} ->
{:ok,
right
|> resolve_ref(record)
|> List.wrap()
|> Enum.any?(fn right_resolved ->
left
|> resolve_ref(record)
|> List.wrap()
|> Enum.any?(fn left_resolved ->
op.evaluate(%{operator | left: left_resolved, right: right_resolved})
end)
end)}
_ ->
{:ok,
left
|> resolve_ref(record)
|> List.wrap()
|> Enum.any?(fn left_resolved ->
op.evaluate(%{operator | left: left_resolved, right: right})
end)}
end
else
false ->
:unknown
{:side_load, paths} ->
{:side_load, paths}
{:dirty?, true} ->
:unknown
end
%func{__function__?: true, arguments: arguments} = function ->
with true <- :erlang.function_exported(func, :match?, 1),
{:dirty?, false} <- {:dirty?, dirty?(arguments, dirty_fields)},
{:side_load, []} <- {:side_load, need_to_load(arguments, record)} do
{:ok,
arguments
|> Enum.map(&resolve_ref(&1, record))
|> unique_calls()
|> Enum.any?(fn args ->
func.evaluate(%{function | arguments: args})
end)}
else
false ->
:unknown
{:side_load, paths} ->
{:side_load, paths}
{:dirty?, true} ->
:unknown
end
%Not{expression: expression} ->
case matches(record, expression, dirty_fields, side_loads) do
:unknown ->
:unknown
{:ok, match?} ->
{:ok, !match?}
{:side_load, side_loads} ->
{:side_load, side_loads}
end
%Expression{op: op, left: left, right: right} ->
expression_matches(op, left, right, record, dirty_fields, side_loads)
end
end
defp unique_calls([arg_values | rest]) do
Enum.flat_map(arg_values, fn value ->
rest
|> unique_calls()
|> Enum.map(fn call ->
[value | call]
end)
end)
end
defp unique_calls([]), do: [[]]
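# With the flat_map/base-case form above, unique_calls/1 builds the cartesian
# product of the per-argument value lists, e.g.
# unique_calls([[1, 2], [:a]]) => [[1, :a], [2, :a]], so a function matches
# when any combination of resolved ref values satisfies it.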
defp resolve_ref(%Ref{relationship_path: [], attribute: %{name: name}}, record) do
[Map.get(record, name)]
end
defp resolve_ref(%Ref{attribute: %{name: name}, relationship_path: path}, record) do
record
|> get_related(path)
|> Enum.map(&Map.get(&1, name))
end
defp resolve_ref(value, _record), do: value
defp dirty?(fields, dirty) do
dirty = dirty || []
fields
|> Enum.filter(&ref?/1)
|> Enum.filter(&(&1.relationship_path == []))
|> Enum.any?(&(&1.attribute.name in dirty))
end
defp need_to_load(fields, record) do
fields
|> Enum.filter(&ref?/1)
|> Enum.filter(&(&1.relationship_path != []))
|> Enum.reject(&loaded?(record, &1.relationship_path))
|> Enum.map(& &1.relationship_path)
|> Enum.map(fn path ->
path_to_side_load(path)
end)
end
defp path_to_side_load([first]), do: first
defp path_to_side_load([first | rest]) do
{first, [path_to_side_load(rest)]}
end
defp ref?(%Ash.Query.Ref{}), do: true
defp ref?(_), do: false
defp expression_matches(:and, left, right, record, dirty_fields, side_loads) do
case matches(record, left, dirty_fields, side_loads) do
{:ok, true} ->
matches(record, right, dirty_fields, side_loads)
:unknown ->
:unknown
{:ok, false} ->
{:ok, false}
{:side_load, side_loads} ->
matches(record, right, dirty_fields, side_loads)
end
end
defp expression_matches(:or, left, right, record, dirty_fields, side_loads) do
case matches(record, left, dirty_fields, side_loads) do
{:ok, true} ->
{:ok, true}
:unknown ->
case matches(record, right, dirty_fields, side_loads) do
{:ok, false} -> {:ok, :unknown}
other -> other
end
{:ok, false} ->
matches(record, right, dirty_fields, side_loads)
{:side_load, side_loads} ->
matches(record, right, dirty_fields, side_loads)
end
end
defp get_related(record, path) when not is_list(record) do
get_related([record], path)
end
defp get_related(records, []) do
records
end
defp get_related(records, [key | rest]) when is_list(records) do
Enum.flat_map(records, fn record ->
case Map.get(record, key) do
%Ash.NotLoaded{type: :relationship} ->
[]
value ->
get_related(value, rest)
end
end)
end
defp loaded?(records, path) when is_list(records) do
Enum.all?(records, &loaded?(&1, path))
end
defp loaded?(%Ash.NotLoaded{}, _), do: false
defp loaded?(_, []), do: true
defp loaded?(record, [key | rest]) do
record
|> Map.get(key)
|> loaded?(rest)
end
end
# source: lib/ash/filter/runtime.ex
defmodule GCS do
@moduledoc """
A simple library to interact with Google Cloud Storage
"""
alias GCS.{Client, Auth}
require Logger
@make_public_body ~s({"role":"READER"})
@type headers :: [{String.t(), String.t()}]
@doc """
Uploads a file to GCS
Requires the bucket name, the desired gcs file location **with desired filename**, the path to the file to be uploaded, and the content type of the file.
## Examples
```
iex> File.write!("file.txt", "hello")
:ok
iex> GCS.upload_object("my-bucket", "myfile.txt", "file.txt", "Application/txt")
{:ok, %{...}} # GCS Response
```
"""
@spec upload_object(any, binary, any, any, headers, any) :: {:ok, any} | {:error, any}
def upload_object(
bucket,
gcs_file_path,
file_path,
content_type,
headers \\ [],
http_opts \\ []
) do
url = upload_url(bucket, gcs_file_path)
headers =
headers
|> add_auth_header(:read_write)
|> add_content_type_header(content_type)
Client.request(:post, url, {:file, file_path}, headers, http_opts)
|> decode_json_response()
end
@doc """
Downloads a file from GCS
Requires the bucket name and the gcs file location **with filename**.
**Example:** if the *bucket* is "my-bucket" and the *gcs file path* is "myfile.png",
the file would be retrieved from "my-bucket" at the location "myfile.png".
## More Examples
```
iex> File.write!("file.txt", "hello")
:ok
iex> GCS.upload_object("my-bucket", "myfile.txt", "file.txt", "Application/txt")
{:ok, %{...}} # GCS Response
iex> GCS.download_object("my-bucket", "myfile.txt")
"hello"
```
"""
@spec download_object(any, binary, headers, any) :: {:ok, any} | {:error, any}
def download_object(bucket, gcs_file_path, headers \\ [], http_opts \\ []) do
url = download_url(bucket, gcs_file_path)
headers = add_auth_header(headers, :read_only)
Client.request(:get, url, "", headers, http_opts)
end
@doc """
Makes a file in GCS publicly accessible
Requires the bucket name and the gcs file location **with filename**.
The file will be available at *https://storage.googleapis.com/bucket/file_path*
**Example:** if the *bucket* is "my-bucket" and the *gcs file path* is "myfile.png", the url would be
*https://storage.googleapis.com/my-bucket/myfile.png*.
## More Examples
```
iex> File.write!("file.txt", "hello")
:ok
iex> GCS.upload_object("my-bucket", "myfile.txt", "file.txt", "Application/txt")
{:ok, %{...}} # GCS Response
iex> GCS.make_public("my-bucket", "myfile.txt")
{:ok, %{...}} # GCS Response
iex> SomeHTTPClient.get("https://storage.googleapis.com/my-bucket/myfile.txt")
{:ok, %{body: "hello"}}
```
"""
@spec make_public(any, binary, headers, any) :: {:ok, any} | {:error, any}
def make_public(bucket, gcs_file_path, headers \\ [], http_opts \\ []) do
url = make_public_url(bucket, gcs_file_path)
headers =
headers
|> add_auth_header(:full_control)
|> add_content_type_header("application/json")
Client.request(:put, url, @make_public_body, headers, http_opts)
|> decode_json_response()
end
@doc """
Deletes a file from GCS
Requires the bucket name and the gcs file location **with filename**.
## Examples
```
iex> File.write!("file.txt", "hello")
:ok
iex> GCS.upload_object("my-bucket", "myfile.txt", "file.txt", "Application/txt")
{:ok, %{...}} # GCS Response
iex> GCS.make_public("my-bucket", "myfile.txt")
{:ok, %{...}} # GCS Response
iex> SomeHTTPClient.get("https://storage.googleapis.com/my-bucket/myfile.txt")
{:ok, %{body: "hello"}}
```
"""
@spec delete_object(any, binary, headers, any) :: {:ok, :deleted} | {:error, any}
def delete_object(bucket, gcs_file_path, headers \\ [], http_opts \\ []) do
url = delete_url(bucket, gcs_file_path)
headers = add_auth_header(headers, :read_write)
case Client.request(:delete, url, "", headers, http_opts) do
{:ok, _} ->
{:ok, :deleted}
{:error, reason} ->
format_errors(reason)
end
end
defp delete_url(bucket, path) do
"https://www.googleapis.com/storage/v1/b/#{bucket}/o/#{URI.encode_www_form(path)}"
end
defp download_url(bucket, path) do
"https://www.googleapis.com/storage/v1/b/#{bucket}/o/#{URI.encode_www_form(path)}?alt=media"
end
defp upload_url(bucket, path) do
"https://www.googleapis.com/upload/storage/v1/b/#{bucket}/o?uploadType=media&name=#{
URI.encode_www_form(path)
}"
end
defp make_public_url(bucket, path) do
"https://www.googleapis.com/storage/v1/b/#{bucket}/o/#{URI.encode_www_form(path)}/acl/allUsers"
end
defp add_auth_header(headers, token_type) when is_list(headers) do
[{"Authorization", "Bearer #{Auth.get_token(token_type)}"} | headers]
end
defp add_content_type_header(headers, content_type) when is_list(headers) do
[{"Content-Type", content_type} | headers]
end
defp decode_json_response({:ok, body}) do
case Jason.decode(body) do
{:ok, decoded_body} -> {:ok, decoded_body}
{:error, reason} -> {:error, reason}
end
end
defp decode_json_response({:error, reason}), do: format_errors(reason)
defp format_errors({:gcs_error, status, body}) do
case Jason.decode(body) do
{:ok, decoded_body} ->
{:error,
{:gcs_error, status,
decoded_body["error"]["message"] || "Malformed json error response body"}}
{:error, reason} ->
{:error, reason}
end
end
defp format_errors(error), do: {:error, error}
end
# source: lib/gcs.ex
defmodule GenMetricsBench.Cluster do
alias GenMetrics.GenServer.Cluster
alias GenMetricsBench.GenServer.Server
alias GenMetricsBench.Utils.Runtime
@moduledoc """
GenMetricsBench harness for GenServer Clusters.
This module provides a simple benchmark harness to
load a simple GenServer with flexible message sizes
and load volume.
Using `no_metrics/0`, a benchmark can be run with GenMetrics
data collection and reporting entirely disabled. This provides
a baseline benchmark reading against which you can compare
benchmarks run with GenMetrics activated.
The following benchmarks can be run with various flavours
of GenMetrics activated:
- `summary_metrics/0`
- `statistical_metrics/0`
- `statsd_metrics/0`
- `datadog_metrics/0`
"""
# GenServer operations: :call, :cast, :info
@benchmark_operation :info
@default_benchmark_load 10_000
@doc """
Run benchmark with all metrics gathering disabled.
"""
def no_metrics do
{simulator, sim_msg, sim_load} = build_benchmark()
{:ok, pid} = Server.start_link(sim_load)
do_run(pid, {simulator, sim_msg, sim_load})
end
@doc """
Run benchmark with in-memory summary metrics gathering enabled.
"""
def summary_metrics do
{simulator, sim_msg, sim_load} = build_benchmark()
{:ok, pid} = Server.start_link(sim_load)
cluster = %Cluster{name: "bench_summary_metrics", servers: [Server]}
{:ok, _mid} = GenMetrics.monitor_cluster(cluster)
do_run(pid, {simulator, sim_msg, sim_load})
end
@doc """
Run benchmark with in-memory statistical metrics gathering enabled.
"""
def statistical_metrics do
{simulator, sim_msg, sim_load} = build_benchmark()
{:ok, pid} = Server.start_link(sim_load)
cluster = %Cluster{name: "bench_statistical_metrics",
servers: [Server], opts: [statistics: true]}
{:ok, _mid} = GenMetrics.monitor_cluster(cluster)
do_run(pid, {simulator, sim_msg, sim_load})
end
@doc """
Run benchmark with `statsd` statistical metrics gathering enabled.
"""
def statsd_metrics do
{simulator, sim_msg, sim_load} = build_benchmark()
{:ok, pid} = Server.start_link(sim_load)
cluster = %Cluster{name: "bench_statsd_metrics",
servers: [Server], opts: [statistics: :statsd]}
{:ok, _mid} = GenMetrics.monitor_cluster(cluster)
do_run(pid, {simulator, sim_msg, sim_load})
end
@doc """
Run benchmark with `datadog` statistical metrics gathering enabled.
"""
def datadog_metrics do
{simulator, sim_msg, sim_load} = build_benchmark()
{:ok, pid} = Server.start_link(sim_load)
cluster = %Cluster{name: "bench_datadog_metrics",
servers: [Server], opts: [statistics: :datadog]}
{:ok, _mid} = GenMetrics.monitor_cluster(cluster)
do_run(pid, {simulator, sim_msg, sim_load})
end
defp build_benchmark do
simulator = Runtime.cluster_simulator
sim_msg = simulator.gen_msg
sim_load = Application.get_env(:gen_metrics_bench,
:benchmark_load, @default_benchmark_load)
{simulator, sim_msg, sim_load}
end
defp do_run(pid, {_, sim_msg, sim_load}) do
for _ <- 1..sim_load do
case @benchmark_operation do
:info -> Kernel.send(pid, sim_msg)
_ -> apply(GenServer, @benchmark_operation, [pid, sim_msg])
end
end
:ok
end
end
# source: lib/cluster.ex
defmodule Tanx.Game.Walls do
@moduledoc """
Computes force on tanks due to walls.
"""
@doc """
Given a wall, returns a "decomposed" form of the wall that is preprocessed to
make force computation efficient.
The decomposed form is a list of tuples representing, in order, concave corners,
convex corners, and segments, where
* each concave_corner is {point1, dir0, dir1, dir2, t_ratio, s_ratio} where
* point1 is the corner
* dir0 is the direction unit vector toward the previous point
* dir2 is the direction unit vector toward the next point
* dir1 is the direction unit vector halfway between them
* t_ratio is t/r where r is the radius and t is the dist along each side
* s_ratio is s/r where s is the dist along bisect_dir to the final point
* each convex_corner is {point0, point1, point2}
* each segment is {point0, point1}
"""
def decompose_wall(points = [p0, p1 | _]) do
{concave, convex, segments} =
(points ++ [p0, p1])
|> Enum.chunk_every(3, 1, :discard)
|> Enum.reduce({[], [], []}, &decompose_wall_triplet/2)
{concave ++ convex ++ segments, segments}
end
def decompose_wall(_points), do: {[], []}
@doc """
Given a decomposed wall, and an object represented by a point and radius,
returns the force applied by the wall against the object.
"""
def force_from_decomposed_wall({elements, _segments}, p, radius) do
force =
elements
|> Enum.map(&element_force(&1, p, radius))
|> Enum.max_by(fn
nil -> 0.0
{x, y} -> x * x + y * y
end)
if force == nil, do: {0.0, 0.0}, else: force
end
@doc """
Given a list of decomposed walls, and an object represented by a point and radius,
returns the total force applied by all walls against the object.
"""
def force_from_decomposed_walls(decomposed_walls, p, radius) do
decomposed_walls
|> Enum.reduce({0.0, 0.0}, fn wall, acc ->
force_from_decomposed_wall(wall, p, radius) |> vadd(acc)
end)
end
@doc """
Given a fixed point, and an object represented by a point and radius,
returns the force applied by the fixed point against the object.
"""
def force_from_point(from, p, radius) do
case force_from_point_internal(from, p, radius) do
nil -> {0.0, 0.0}
force -> force
end
end
@doc """
Given a single decomposed wall, and two points representing two locations of a
point object, returns either a tuple of {point of impact on the wall, normal to
the wall}, or nil for no impact.
"""
def collision_with_decomposed_wall(decomposed_wall, from, to) do
decomposed_wall
|> wall_collision_as_ratio_and_normal(from, to)
|> ratio_to_point(from, to)
end
@doc """
Given a list of decomposed walls, and two points representing two locations of a
point object, returns either a tuple of {the first point of impact on a wall, the
normal to the wall}, or nil for no impact.
"""
def collision_with_decomposed_walls(decomposed_walls, from, to) do
decomposed_walls
|> Enum.map(&wall_collision_as_ratio_and_normal(&1, from, to))
|> min_ratio_or_nil
|> ratio_to_point(from, to)
end
defp ratio_to_point(nil, _from, _to), do: nil
defp ratio_to_point({ratio, normal}, from, to) do
{vdiff(to, from) |> vscale(ratio) |> vadd(from), normal}
end
defp min_ratio_or_nil(values) do
values
|> Enum.min_by(fn
nil -> 2.0
{ratio, _normal} -> ratio
end)
end
defp wall_collision_as_ratio_and_normal({_elements, segments}, from, to) do
segments
|> Enum.map(&segment_intersection_as_ratio_and_normal(&1, from, to))
|> min_ratio_or_nil
end
defp segment_intersection_as_ratio_and_normal({p0, p1}, from, to) do
from_mag = cross_magnitude(p0, from, p1)
to_mag = cross_magnitude(p0, to, p1)
if from_mag < 0 and to_mag >= 0 and cross_magnitude(from, p0, to) >= 0 and
cross_magnitude(from, p1, to) <= 0 do
normal = vdiff(p1, p0) |> turn_left |> normalize
{from_mag / (from_mag - to_mag), normal}
else
nil
end
end
defp decompose_wall_triplet([p0, p1, p2], {concave, convex, segments}) do
segments = [{p0, p1} | segments]
if cross_magnitude(p0, p1, p2) <= 0 do
elem = {vdiff(p1, p0) |> turn_left |> vadd(p1), p1, vdiff(p1, p2) |> turn_right |> vadd(p1)}
convex = [elem | convex]
{concave, convex, segments}
else
dir0 = vdiff(p0, p1) |> normalize
dir2 = vdiff(p2, p1) |> normalize
dir1 = vadd(dir0, dir2) |> normalize
dist0 = vdist(p0, p1)
dist2 = vdist(p2, p1)
csquared = dist_squared(p2, p0)
denom = csquared - (dist2 - dist0) * (dist2 - dist0)
t_ratio = :math.sqrt(((dist0 + dist2) * (dist0 + dist2) - csquared) / denom)
s_ratio = :math.sqrt(4.0 * dist0 * dist2 / denom)
concave = [{p1, dir0, dir1, dir2, t_ratio, s_ratio} | concave]
{concave, convex, segments}
end
end
defp force_from_point_internal(from, p, radius) do
normal = vdiff(p, from)
dist = vnorm(normal)
if dist < radius do
if dist == 0 do
ang = :rand.uniform() * :math.pi() * 2
{radius * :math.cos(ang), radius * :math.sin(ang)}
else
normal |> vscale((radius - dist) / dist)
end
else
nil
end
end
# Force for a wall segment
defp element_force({p0, p1}, p, radius) do
if cross_magnitude(p0, p, p1) < 0 do
a = vdiff(p, p0)
b = vdiff(p1, p0)
factor = vdot(a, b) / norm_squared(b)
if factor >= 0.0 and factor <= 1.0 do
proj = vscale(b, factor) |> vadd(p0)
force_from_point_internal(proj, p, radius)
else
nil
end
else
nil
end
end
# Force for a convex corner
defp element_force({n0, p1, n2}, p, radius) do
if cross_magnitude(n0, p1, p) >= 0 and cross_magnitude(p, p1, n2) >= 0 do
force_from_point_internal(p1, p, radius)
else
nil
end
end
# Force for a concave corner
defp element_force({p1, dir0, dir1, dir2, t_ratio, s_ratio}, p, radius) do
p0 = vscale(dir0, radius * t_ratio) |> vadd(p1)
p2 = vscale(dir2, radius * t_ratio) |> vadd(p1)
p3 = vscale(dir1, radius * s_ratio) |> vadd(p1)
if cross_magnitude(p, p0, p1) >= 0 and cross_magnitude(p, p1, p2) >= 0 and
cross_magnitude(p, p2, p3) >= 0 and cross_magnitude(p, p3, p0) >= 0 do
vdiff(p3, p)
else
nil
end
end
defp vadd({x0, y0}, {x1, y1}), do: {x0 + x1, y0 + y1}
defp vdiff({x0, y0}, {x1, y1}), do: {x0 - x1, y0 - y1}
defp vdot({x0, y0}, {x1, y1}), do: x0 * x1 + y0 * y1
defp turn_right({x, y}), do: {y, -x}
defp turn_left({x, y}), do: {-y, x}
defp cross_magnitude({x0, y0}, {x1, y1}, {x2, y2}) do
(x1 - x0) * (y2 - y1) - (x2 - x1) * (y1 - y0)
end
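# cross_magnitude/3 is the z-component of (p1 - p0) × (p2 - p1): positive when
# p0 -> p1 -> p2 turns left (counterclockwise), negative for a right turn, and
# zero when the three points are collinear.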
defp vscale({x, y}, r), do: {x * r, y * r}
defp norm_squared({x, y}), do: x * x + y * y
defp vnorm(p), do: p |> norm_squared |> :math.sqrt()
defp dist_squared(p0, p1), do: vdiff(p0, p1) |> norm_squared
defp vdist(p0, p1), do: dist_squared(p0, p1) |> :math.sqrt()
defp normalize(p), do: vscale(p, 1 / vnorm(p))
end
# source: apps/tanx/lib/tanx/game/walls.ex
defmodule Grizzly.ZWave.Commands.WakeUpIntervalCapabilitiesReport do
@moduledoc """
This module implements the WAKE_UP_INTERVAL_CAPABILITIES_REPORT command of the COMMAND_CLASS_WAKE_UP command class.
Params:
* `:minimum_seconds` - the minimum Wake Up Interval supported by the sending node - v2
* `:maximum_seconds` - the maximum Wake Up Interval supported by the sending node - v2
* `:default_seconds` - the default Wake Up Interval value for the sending node. - v2
* `:step_seconds` - the resolution of valid Wake Up Intervals values for the sending node - v2
* `:on_demand` - whether the supporting node supports the Wake Up On Demand functionality - v3
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.Command
alias Grizzly.ZWave.CommandClasses.WakeUp
@type param ::
{:minimum_seconds, non_neg_integer}
| {:maximum_seconds, non_neg_integer}
| {:default_seconds, non_neg_integer}
| {:step_seconds, non_neg_integer}
| {:on_demand, boolean}
@impl true
def new(params) do
command = %Command{
name: :wake_up_interval_capabilities_report,
command_byte: 0x0A,
command_class: WakeUp,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
def encode_params(command) do
minimum_seconds = Command.param!(command, :minimum_seconds)
maximum_seconds = Command.param!(command, :maximum_seconds)
default_seconds = Command.param!(command, :default_seconds)
step_seconds = Command.param!(command, :step_seconds)
on_demand = Command.param(command, :on_demand)
if on_demand == nil do
# v2
<<
minimum_seconds::size(3)-unit(8),
maximum_seconds::size(3)-unit(8),
default_seconds::size(3)-unit(8),
step_seconds::size(3)-unit(8)
>>
else
# v3
<<
minimum_seconds::size(3)-unit(8),
maximum_seconds::size(3)-unit(8),
default_seconds::size(3)-unit(8),
step_seconds::size(3)-unit(8),
0x00::size(7),
encode_on_demand(on_demand)::size(1)
>>
end
end
@impl true
# v2
def decode_params(<<
minimum_seconds::size(3)-unit(8),
maximum_seconds::size(3)-unit(8),
default_seconds::size(3)-unit(8),
step_seconds::size(3)-unit(8)
>>) do
{:ok,
[
minimum_seconds: minimum_seconds,
maximum_seconds: maximum_seconds,
default_seconds: default_seconds,
step_seconds: step_seconds
]}
end
# v3
def decode_params(
<<minimum_seconds::size(3)-unit(8), maximum_seconds::size(3)-unit(8),
default_seconds::size(3)-unit(8), step_seconds::size(3)-unit(8), 0x00::size(7),
on_demand_byte::size(1)>>
) do
{:ok,
[
minimum_seconds: minimum_seconds,
maximum_seconds: maximum_seconds,
default_seconds: default_seconds,
step_seconds: step_seconds,
on_demand: on_demand_byte == 0x01
]}
end
defp encode_on_demand(false), do: 0x00
defp encode_on_demand(true), do: 0x01
end | lib/grizzly/zwave/commands/wake_up_interval_capabilities_report.ex | 0.883532 | 0.415551 | wake_up_interval_capabilities_report.ex | starcoder |
defmodule Annex.Learner do
@moduledoc """
The Learner module defines the types, callbacks, and helper functions for a Learner.
A Learner is a model that is capable of supervised learning.
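A sketch of supervised training (hypothetical `MyLearner` struct and `dataset` value):
{learner, training_output} =
Annex.Learner.train(%MyLearner{}, dataset,
halt_condition: {:loss_less_than, 0.05},
log_interval: 1_000,
name: :my_model
)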
"""
alias Annex.{
Data,
Dataset,
LayerConfig,
Optimizer,
Optimizer.SGD
}
require Logger
@type t() :: struct()
@type options :: Keyword.t()
@type training_output :: %{atom() => any()}
@type data :: Data.data()
@callback init_learner(t(), options()) :: t()
@callback train(t(), Dataset.t(), options()) :: {t(), training_output()}
@callback predict(t(), data()) :: data()
@optional_callbacks [
train: 3,
init_learner: 2
]
defmacro __using__(_) do
quote do
def __annex__(:learner?), do: true
@before_compile Annex.Learner
end
end
defmacro __before_compile__(_env) do
quote do
def __annex__(_), do: nil
end
end
@spec is_learner?(any) :: boolean()
def is_learner?(%module{}) do
is_learner?(module)
end
def is_learner?(module) do
is_atom(module) && function_exported?(module, :__annex__, 1) && module.__annex__(:learner?)
end
@spec predict(t(), data()) :: data()
def predict(%module{} = learner, data) do
module.predict(learner, data)
end
@spec train(t(), Dataset.t(), Keyword.t()) :: {t(), training_output()}
def train(learner, dataset, opts \\ [])
def train(%LayerConfig{} = cfg, dataset, opts) do
cfg
|> LayerConfig.init_layer()
|> train(dataset, opts)
end
def train(%module{} = learner, dataset, opts) do
learner
|> module.init_learner(opts)
|> do_train(dataset, opts)
end
defp debug_logger(_learner, training_output, epoch, opts) do
log_interval = Keyword.get(opts, :log_interval, 10_000)
if rem(epoch, log_interval) == 0 do
Logger.debug(fn ->
"""
Learner -
training: #{Keyword.get(opts, :name)}
epoch: #{epoch}
training_output: #{inspect(training_output, pretty: true)}
"""
end)
end
end
defp do_train(%learner_module{} = orig_learner, dataset, opts) do
{halt_opt, opts} = Keyword.pop(opts, :halt_condition, {:epochs, 1_000})
{log, opts} = Keyword.pop(opts, :log, &debug_logger/4)
{optimizer, opts} = Keyword.pop(opts, :optimizer, SGD)
halt_condition = parse_halt_condition(halt_opt)
1
|> Stream.iterate(fn epoch -> epoch + 1 end)
|> Enum.reduce_while(orig_learner, fn epoch, learner ->
{%_{} = learner2, training_output} =
if has_train?(learner_module) do
learner_module.train(learner, dataset, opts)
else
Optimizer.train(optimizer, learner, dataset, opts)
end
_ = log.(learner2, training_output, epoch, opts)
if halt_condition.(learner2, training_output, epoch, opts) do
{:halt, {learner2, training_output}}
else
{:cont, learner2}
end
end)
end
@spec init_learner(t(), options()) :: t()
def init_learner(%module{} = learner, options) do
module.init_learner(learner, options)
end
def has_train?(%module{}), do: has_train?(module)
def has_train?(module) do
is_atom(module) && function_exported?(module, :train, 3)
end
defp parse_halt_condition(func) when is_function(func, 4) do
func
end
defp parse_halt_condition({:epochs, num}) when is_number(num) do
fn _, _, epoch, _ ->
epoch >= num
end
end
defp parse_halt_condition({:loss_less_than, num}) when is_number(num) do
fn _, %{loss: loss}, _, _ ->
loss < num
end
end
end | lib/annex/learner.ex | 0.809803 | 0.485051 | learner.ex | starcoder |
defmodule Chunky.Sequence.OEIS do
@moduledoc """
Online Encyclopedia of Integer Sequences (OEIS) sequence iterators.
Supported sequences are broken down into modules based on OEIS Keyword, subject matter, or
related methods.
## Available Modules
- `Chunky.Sequence.OEIS.Combinatorics` - Permutations, Combinations, and general Combinatorics sequences
- `Chunky.Sequence.OEIS.Constants` - Constant values, digit expansions, and constant cycles
- `Chunky.Sequence.OEIS.Core` - OEIS `core` Keyword sequences
- `Chunky.Sequence.OEIS.Factors` - Factorizations and divisors
- `Chunky.Sequence.OEIS.Multiples` - Multiples and addition sequences
- `Chunky.Sequence.OEIS.Powers` - Powers and simple polynomials
- `Chunky.Sequence.OEIS.Primes` - Primes and related sequences
- `Chunky.Sequence.OEIS.Repr` - Number representations, digit contents, and digit patterns
- `Chunky.Sequence.OEIS.Sigma` - Sequences related to the Sigma (𝝈) function
## Utility Functions
- `coverage/0` - Show report of OEIS coverage of select sequence groups, and overall tallying of supported sequences
- `coverage/1` - Calculate coverage of sequences from a Sequence of OEIS identifiers
- `find_sequence/1` - Find an OEIS sequence by identifier
- `find_sequence!/1` - Find an OEIS sequence by identifier, or raise an error
- `has_sequence?/1` - Check if an OEIS sequence is supported, by atom or string
## Coverage
The Chunky sequence library will never have 100% coverage of OEIS sequences; there are just too many already
listed, and too many added every year. The goal of the sequence library is to cover the OEIS Core
set, as well as selected subsets of other keywords or topics.
Current coverage stats:
```
OEIS Coverage
745 total sequences
By Module
Elixir.Chunky.Sequence.OEIS - 1 sequences
Elixir.Chunky.Sequence.OEIS.Combinatorics - 7 sequences
Elixir.Chunky.Sequence.OEIS.Constants - 31 sequences
Elixir.Chunky.Sequence.OEIS.Core - 136 sequences
Elixir.Chunky.Sequence.OEIS.Factors - 122 sequences
Elixir.Chunky.Sequence.OEIS.Multiples - 75 sequences
Elixir.Chunky.Sequence.OEIS.Powers - 48 sequences
Elixir.Chunky.Sequence.OEIS.Primes - 100 sequences
Elixir.Chunky.Sequence.OEIS.Repr - 205 sequences
Elixir.Chunky.Sequence.OEIS.Sigma - 20 sequences
Sequence Groups
OEIS Core Sequences - 135 / 177 (76.27%)
OEIS Core::Easy Sequences - 121 / 146 (82.88%)
OEIS Core::Hard Sequences - 12 / 12 (100.0%)
OEIS Core::Multiplicative Sequences - 22 / 22 (100.0%)
OEIS Core::Eigen Sequences - 5 / 5 (100.0%)
```
Available/covered sequences, by OEIS ID:
```
A000001 A000002 A000004 A000005 A000007 A000009 A000010 A000012 A000027 A000029
A000031 A000032 A000035 A000037 A000040 A000041 A000043 A000045 A000048 A000051
A000069 A000079 A000081 A000085 A000105 A000108 A000109 A000110 A000111 A000112
A000120 A000123 A000124 A000129 A000142 A000161 A000166 A000169 A000203 A000204
A000217 A000219 A000225 A000244 A000262 A000272 A000290 A000292 A000302 A000311
A000312 A000326 A000330 A000351 A000364 A000396 A000400 A000420 A000521 A000578
A000583 A000593 A000594 A000609 A000670 A000688 A000720 A000788 A000796 A000798
A000959 A000961 A000977 A000984 A001003 A001006 A001018 A001019 A001020 A001021
A001022 A001023 A001024 A001025 A001026 A001027 A001029 A001045 A001055 A001057
A001065 A001101 A001147 A001157 A001158 A001159 A001160 A001190 A001221 A001222
A001227 A001333 A001358 A001405 A001414 A001477 A001478 A001481 A001489 A001511
A001519 A001567 A001597 A001615 A001694 A001699 A001700 A001764 A001826 A001842
A001906 A001969 A002033 A002093 A002106 A002110 A002113 A002182 A002275 A002378
A002473 A002487 A002530 A002531 A002620 A002654 A002808 A002975 A003094 A003418
A003484 A003586 A003601 A004144 A004176 A004177 A004178 A004179 A004180 A004181
A004182 A004183 A004184 A004526 A004709 A004719 A004720 A004721 A004722 A004723
A004724 A004725 A004726 A004727 A004728 A005100 A005101 A005117 A005153 A005188
A005349 A005361 A005408 A005470 A005588 A005811 A005823 A005835 A005836 A005843
A005934 A005935 A005936 A005937 A005938 A005939 A006036 A006037 A006318 A006530
A006753 A006881 A006882 A006886 A006894 A006966 A007018 A007088 A007089 A007090
A007091 A007092 A007093 A007094 A007095 A007304 A007318 A007395 A007412 A007434
A007602 A007770 A007774 A007947 A007953 A007954 A008277 A008279 A008292 A008585
A008586 A008587 A008588 A008589 A008590 A008591 A008592 A008593 A008594 A008595
A008596 A008597 A008598 A008599 A008600 A008601 A008602 A008603 A008604 A008605
A008606 A008607 A008683 A008966 A009003 A009964 A009965 A009966 A009967 A009968
A009969 A009970 A009971 A009972 A009973 A009974 A009975 A009976 A009977 A009978
A009979 A009980 A009981 A009982 A009983 A009984 A009985 A009986 A009987 A009988
A009989 A009990 A009991 A009992 A010344 A010346 A010348 A010350 A010353 A010354
A010692 A010701 A010709 A010716 A010722 A010727 A010731 A010734 A010850 A010851
A010852 A010853 A010854 A010855 A010856 A010857 A010858 A010859 A010860 A010861
A010862 A010863 A010864 A010865 A010866 A010867 A010868 A010869 A010870 A010871
A010872 A011531 A011532 A011533 A011534 A011535 A011536 A011537 A011538 A011539
A011540 A011557 A013929 A013954 A013955 A013956 A013957 A013958 A013959 A013960
A013961 A013962 A013963 A013964 A013965 A013966 A013967 A013968 A014263 A014576
A014612 A014613 A014614 A016825 A018252 A018253 A018256 A018261 A018266 A018293
A018321 A018350 A018412 A018609 A018676 A019506 A020136 A020137 A020138 A020139
A020140 A020141 A020142 A020143 A020144 A020145 A020146 A020147 A020148 A020149
A020150 A020151 A020152 A020153 A020154 A020155 A020156 A020157 A020158 A020159
A020160 A020161 A020162 A020163 A020164 A020165 A020166 A020167 A020168 A020169
A020170 A020171 A020172 A020173 A020174 A020175 A020176 A020177 A020178 A020179
A020180 A020181 A020182 A020183 A020184 A020185 A020186 A020187 A020188 A020189
A020190 A020191 A020192 A020193 A020194 A020195 A020196 A020197 A020198 A020199
A020200 A020201 A020202 A020203 A020204 A020205 A020206 A020207 A020208 A020209
A020210 A020211 A020212 A020213 A020214 A020215 A020216 A020217 A020218 A020219
A020220 A020221 A020222 A020223 A020224 A020225 A020226 A020227 A020228 A020639
A023416 A023692 A023693 A023694 A023695 A023696 A023697 A023698 A023699 A023700
A023701 A023702 A023703 A023704 A023705 A023706 A023707 A023708 A023709 A023710
A023711 A023712 A023713 A023714 A023715 A023716 A023717 A023718 A023719 A023720
A023721 A023722 A023723 A023724 A023725 A023726 A023727 A023728 A023729 A023730
A023731 A023732 A023733 A023734 A023735 A023736 A023738 A023739 A023740 A023745
A023746 A023747 A023748 A023749 A023750 A023751 A023752 A023753 A023754 A023755
A023756 A023757 A027642 A030513 A030515 A031177 A032924 A033273 A033942 A033987
A033992 A033993 A036537 A037143 A038109 A039956 A043321 A044102 A046099 A046253
A046306 A046308 A046310 A046312 A046314 A046321 A046386 A046387 A046660 A046758
A046759 A046760 A048272 A049310 A051003 A051037 A051038 A051270 A052040 A052382
A052383 A052404 A052405 A052406 A052413 A052414 A052419 A052421 A052486 A053816
A054377 A055512 A055640 A055641 A055642 A056911 A057716 A059015 A059269 A059376
A059377 A059378 A062756 A062768 A063416 A063997 A065958 A065959 A065960 A067251
A067259 A067885 A069091 A069092 A069093 A069094 A069095 A069272 A069273 A069274
A069275 A069276 A069277 A069278 A069279 A069280 A069281 A069537 A069540 A069543
A070939 A071858 A074206 A074940 A074969 A076479 A077267 A080197 A080681 A080682
A080683 A081603 A081605 A081606 A085959 A087752 A097251 A097252 A097253 A097254
A097255 A097256 A097257 A097258 A097259 A097260 A097261 A097262 A099542 A100968
A100969 A100970 A100971 A100972 A100973 A100974 A100975 A100987 A100988 A102669
A102670 A102671 A102672 A102673 A102674 A102675 A102676 A102677 A102678 A102679
A102680 A102681 A102682 A102683 A102684 A114904 A115983 A117805 A121022 A121023
A121024 A121025 A121026 A121027 A121028 A121029 A121030 A121031 A121032 A121033
A121034 A121035 A121036 A121037 A121038 A121039 A121040 A122840 A123321 A123322
A130897 A135628 A135631 A138591 A152691 A159981 A159984 A159986 A159987 A159988
A159989 A159991 A160093 A160889 A160891 A160893 A160895 A160897 A160908 A160953
A160957 A160960 A161948 A161949 A161950 A161951 A161952 A161953 A162511 A162643
A165412 A169823 A169825 A169827 A174312 A178858 A178859 A178860 A178861 A178862
A178863 A178864 A178877 A178878 A179868 A193238 A194472 A195819 A196563 A209061
A211337 A211338 A248910 A249674 A252994 A255731 A255732 A255735 A255736 A255805
A255808 A289682 A305548
```
"""
import Chunky.Sequence, only: [sequence_for_list: 1]
alias Chunky.Sequence
@data_keyword_core_hard [
:a000001,
:a000043,
:a000105,
:a000109,
:a000112,
:a000609,
:a000798,
:a002106,
:a003094,
:a005470,
:a006966,
:a055512
]
@data_keyword_core_easy [
:a000040,
:a000045,
:a000217,
:a000203,
:a000108,
:a000005,
:a000010,
:a000041,
:a000079,
:a000290,
:a000142,
:a000012,
:a007318,
:a000027,
:a001222,
:a001358,
:a001221,
:a000720,
:a002110,
:a000120,
:a000032,
:a005117,
:a008683,
:a000225,
:a000110,
:a002275,
:a005408,
:a000009,
:a002808,
:a000007,
:a000984,
:a000796,
:a006530,
:a000578,
:a020639,
:a000961,
:a001477,
:a000292,
:a000244,
:a005843,
:a002378,
:a000129,
:a001045,
:a000081,
:a008277,
:a002113,
:a010060,
:a001147,
:a000035,
:a000326,
:a000312,
:a001006,
:a049310,
:a000166,
:a000330,
:a001065,
:a000302,
:a001055,
:a025487,
:a000085,
:a002620,
:a001405,
:a000124,
:a001906,
:a004526,
:a001764,
:a070939,
:a001333,
:a001511,
:a000521,
:a000670,
:a001157,
:a001519,
:a002487,
:a003418,
:a001700,
:a001227,
:a000204,
:a027642,
:a000169,
:a000959,
:a018252,
:a000583,
:a000069,
:a000004,
:a000002,
:a002426,
:a000111,
:a001969,
:a000219,
:a002322,
:a006882,
:a000364,
:a005101,
:a006318,
:a000272,
:a000262,
:a000593,
:a001003,
:a001615,
:a000594,
:a000055,
:a001037,
:a001481,
:a002033,
:a005100,
:a000182,
:a000031,
:a001462,
:a246655,
:a008279,
:a001190,
:a005811,
:a004018,
:a001057,
:a000123,
:a003136,
:a000311,
:a000688,
:a000602,
:a000793,
:a074206,
:a000048,
:a002530,
:a001285,
:a000161,
:a002654,
:a005130,
:a002572,
:a000029,
:a038567,
:a002531,
:a000014,
:a002083,
:a004011,
:a001699,
:a001489,
:a006894,
:a000140,
:a003484,
:a002658,
:a005230,
:a035099,
:a038568,
:a001478,
:a005588
]
@data_keyword_core_mult [
:a000004,
:a000005,
:a000007,
:a000010,
:a000012,
:a000027,
:a000035,
:a000203,
:a000290,
:a000578,
:a000583,
:a000593,
:a000594,
:a000688,
:a001157,
:a001227,
:a001477,
:a001511,
:a001615,
:a002654,
:a003484,
:a008683
]
@data_keyword_core_eigen [:a000081, :a000108, :a000111, :a001190, :a008292]
@doc sequence: "OEIS Core::Hard Sequences"
def create_sequence_keyword_core_hard(_opts) do
sequence_for_list(@data_keyword_core_hard)
end
@doc sequence: "OEIS Core::Easy Sequences"
def create_sequence_keyword_core_easy(_opts) do
sequence_for_list(@data_keyword_core_easy)
end
@doc sequence: "OEIS Core::Multiplicative Sequences"
def create_sequence_keyword_core_mult(_opts) do
sequence_for_list(@data_keyword_core_mult)
end
@doc sequence: "OEIS Core::Eigen Sequences"
def create_sequence_keyword_core_eigen(_opts) do
sequence_for_list(@data_keyword_core_eigen)
end
@doc """
The Online Encyclopedia of Integer Sequences list of Core Sequences.
This list is primarily for reference lookup and Sequence introspection, in particular
calculating the coverage of Core sequences in Chunky.Sequences.
"""
@doc sequence: "OEIS Core Sequences",
references: [{:oeis, :core_sequences, "http://oeis.org/search?q=keyword%3Acore"}]
def create_sequence_keyword_core(_opts) do
sequence_for_list([
:a000001,
:a000002,
:a000004,
:a000005,
:a000007,
:a000009,
:a000010,
:a000012,
:a000014,
:a000019,
:a000027,
:a000029,
:a000031,
:a000032,
:a000035,
:a000040,
:a000041,
:a000043,
:a000045,
:a000048,
:a000055,
:a000058,
:a000069,
:a000079,
:a000081,
:a000085,
:a000088,
:a000105,
:a000108,
:a000109,
:a000110,
:a000111,
:a000112,
:a000120,
:a000123,
:a000124,
:a000129,
:a000140,
:a000142,
:a000161,
:a000166,
:a000169,
:a000182,
:a000203,
:a000204,
:a000217,
:a000219,
:a000225,
:a000244,
:a000262,
:a000272,
:a000273,
:a000290,
:a000292,
:a000302,
:a000311,
:a000312,
:a000326,
:a000330,
:a000364,
:a000396,
:a000521,
:a000578,
:a000583,
:a000593,
:a000594,
:a000602,
:a000609,
:a000670,
:a000688,
:a000720,
:a000793,
:a000796,
:a000798,
:a000959,
:a000961,
:a000984,
:a001003,
:a001006,
:a001034,
:a001037,
:a001045,
:a001055,
:a001065,
:a001057,
:a001097,
:a001113,
:a001147,
:a001157,
:a001190,
:a001221,
:a001222,
:a001227,
:a001285,
:a001333,
:a001349,
:a001358,
:a001405,
:a001462,
:a001477,
:a001478,
:a001481,
:a001489,
:a001511,
:a001615,
:a001699,
:a001700,
:a001519,
:a001764,
:a001906,
:a001969,
:a002033,
:a002083,
:a002106,
:a002110,
:a002113,
:a002275,
:a002322,
:a002378,
:a002426,
:a002487,
:a002530,
:a002531,
:a002572,
:a002620,
:a002654,
:a002658,
:a002808,
:a003094,
:a003136,
:a003418,
:a003484,
:a004011,
:a004018,
:a004526,
:a005036,
:a005100,
:a005101,
:a005117,
:a005130,
:a005230,
:a005408,
:a005470,
:a005588,
:a005811,
:a005843,
:a006318,
:a006530,
:a006882,
:a006894,
:a006966,
:a007318,
:a008275,
:a008277,
:a008279,
:a008292,
:a008683,
:a010060,
:a018252,
:a020639,
:a020652,
:a020653,
:a025487,
:a027641,
:a027642,
:a035099,
:a038566,
:a038567,
:a038568,
:a038569,
:a049310,
:a055512,
:a070939,
:a074206,
:a104725,
:a226898,
:a246655
])
end
@doc """
Calculate the OEIS Sequence coverage for a particular sequence group.
This is primarily for reference and testing, and is used to determine the
completeness of sequence coverage in the Chunky.Sequence.OEIS modules.
## Example
iex> (Sequence.create(Sequence.OEIS, :keyword_core) |> Sequence.OEIS.coverage()).percent < 1.0
true
iex> (Sequence.create(Sequence.OEIS, :keyword_core) |> Sequence.OEIS.coverage()).percent > 0.0
true
"""
def coverage(%Sequence{} = sequence_list) do
# get full coverage list
all_sequences =
Sequence.available()
|> Enum.map(fn seq_map -> seq_map.sequence end)
# list of true/false
mapped_seq =
sequence_list
|> Sequence.map(fn sequence ->
Enum.member?(all_sequences, sequence)
end)
covered =
mapped_seq
|> Enum.filter(fn cov -> cov end)
# build the list of atoms of uncovered/missing sequences
missing_seqs =
sequence_list
|> Sequence.take!(1000)
|> Enum.filter(fn sequence -> !Enum.member?(all_sequences, sequence) end)
%{
covered: length(covered),
total: length(mapped_seq),
percent: length(covered) / length(mapped_seq),
missing_sequences: missing_seqs
}
end
@doc """
Find a sequence definition by sequence identifier.
## Example
iex> Sequence.OEIS.find_sequence("A159986")
{:ok, %{description: "Catalan numbers read modulo 7 .", module: Chunky.Sequence.OEIS.Combinatorics, name: "A159986", sequence: :a159986}}
iex> Sequence.OEIS.find_sequence("A999999999")
{:error, :not_found}
iex> Sequence.OEIS.find_sequence(:a010853)
{:ok, %{description: "Constant sequence: a(n) = 14.", module: Chunky.Sequence.OEIS.Constants, name: "A010853", sequence: :a010853}}
"""
def find_sequence(name) when is_binary(name) do
case Sequence.available()
|> Enum.filter(fn %{name: seq_name} ->
String.downcase(name) == String.downcase(seq_name)
end) do
[] -> {:error, :not_found}
[seq] -> {:ok, seq}
_seqs -> {:error, :duplicate_sequences}
end
end
def find_sequence(name) when is_atom(name), do: find_sequence(name |> Atom.to_string())
@doc """
Like `find_sequence/1`, but directly return the sequence bundle, or raise an error.
The sequence bundle can be handed directly to `Chunky.Sequence.create/1`.
## Example
iex> seq = Sequence.OEIS.find_sequence!(:a159986) |> Sequence.create() |> Sequence.start()
iex> seq.value
1
"""
def find_sequence!(name) when is_binary(name) do
case find_sequence(name) do
{:ok, seq} -> seq
_ -> raise ArgumentError, message: "no such sequence"
end
end
def find_sequence!(name) when is_atom(name), do: find_sequence!(name |> Atom.to_string())
@doc """
Determine if a specific OEIS sequence is available.
## Examples
iex> Sequence.OEIS.has_sequence?(:a159986)
true
iex> Sequence.OEIS.has_sequence?("A008598")
true
iex> Sequence.OEIS.has_sequence?("A99999999")
false
"""
def has_sequence?(name) when is_binary(name) do
case find_sequence(name) do
{:ok, _} -> true
_ -> false
end
end
def has_sequence?(name) when is_atom(name), do: has_sequence?(name |> Atom.to_string())
@doc """
Find the missing sequences from a coverage set (checks up to the first 1000 entries of the coverage list).
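## Example
The result depends on which sequences are currently implemented:
Sequence.OEIS.missing_sequences(Sequence.OEIS, :keyword_core)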
"""
def missing_sequences(mod, seq) do
Sequence.create(mod, seq) |> missing_sequences()
end
def missing_sequences(%Sequence{} = sequence_list) do
(sequence_list |> coverage()).missing_sequences
end
@doc """
Print out a coverage report for named sequence groups (like CORE) in the OEIS
sequence support modules.
"""
def coverage() do
# total OEIS sequences
oeis_sequences = Sequence.available() |> Enum.filter(&has_oeis_reference?/1)
# total report
IO.puts("OEIS Coverage")
IO.puts("\t#{length(oeis_sequences)} total sequences")
IO.puts("By Module")
# group by module
oeis_sequences
|> Enum.group_by(fn %{module: mod} -> mod end)
|> Enum.each(fn {mod, seqs} ->
IO.puts("\t#{mod} - #{length(seqs)} sequences")
end)
IO.puts("Sequence Groups")
# build and report specific sequence group coverage
[
{Sequence.OEIS, :keyword_core},
{Sequence.OEIS, :keyword_core_easy},
{Sequence.OEIS, :keyword_core_hard},
{Sequence.OEIS, :keyword_core_mult},
{Sequence.OEIS, :keyword_core_eigen}
]
|> Enum.map(fn {s_mod, s_key} ->
# get coverage
seq = Sequence.create(s_mod, s_key)
cov = seq |> coverage()
# get name
nom = seq |> Sequence.readable_name()
{nom, cov}
end)
|> Enum.each(fn {nom, cov} ->
IO.puts(
"\t#{nom} - #{cov.covered} / #{cov.total} (#{(cov.percent * 100.0) |> Float.round(2)}%)"
)
end)
IO.puts("Sequences")
# sequence, ordered list
oeis_sequences
|> Enum.map(fn %{sequence: sequence} -> sequence end)
|> Enum.filter(fn seq -> Atom.to_string(seq) |> String.starts_with?("a") end)
|> Enum.sort()
|> Enum.map(fn s -> Atom.to_string(s) |> String.upcase() end)
|> Enum.chunk_every(10)
|> Enum.map(fn seq_row -> "\t#{seq_row |> Enum.join(" ")}" end)
|> Enum.join("\n")
|> IO.puts()
end
defp has_oeis_reference?(seq_def) do
seq_def
|> Sequence.get_references()
|> Enum.filter(fn {src, _, _} -> src == :oeis end)
|> length() > 0
end
end | lib/sequence/oeis.ex | 0.894832 | 0.943712 | oeis.ex | starcoder |
defmodule Flume do
@moduledoc """
A convenient way to handle control flow in pipelines. This makes for easier reading and composability.
"""
@type t :: %__MODULE__{}
@type tag :: atom()
@type process_fun :: (map() -> {:ok, tag()} | {:error, atom()})
defstruct [
:halt_on_errors,
results: %{},
errors: %{},
halted: false,
tasks: %{},
global_funs: %{}
]
defmodule FlumeError do
defexception [:message]
end
@doc """
Returns an empty Flume struct.
Options:
- `:halt_on_errors`: if `false`, the steps won't stop if a `Flume.run` step returns an error
- `:on_error`: callback which is invoked every time an error occurs. If it is 1-arity, it's given
the error reason, if 2-arity, it's given the tag and the reason
## Examples
iex> %Flume{} = Flume.new()
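With options (a 2-arity global error callback):
iex> %Flume{} = Flume.new(halt_on_errors: false, on_error: fn tag, reason -> {tag, reason} end)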
"""
@spec new(list()) :: t()
def new(opts \\ []) do
halt_on_errors = Keyword.get(opts, :halt_on_errors, true)
global_funs = %{on_error: Keyword.get(opts, :on_error)}
%__MODULE__{halt_on_errors: halt_on_errors, global_funs: global_funs}
end
@doc """
Executes the passed-in callback synchronously and stores the returned result.
Callback has to be a 0- or 1-arity function, and if it accepts an argument it is passed
the current accumulated results from previous steps.
It must return an `{:ok, result}` or an `{:error, reason}` tuple. This is so `Flume`
knows if the caller intends for the operation to be considered a success or failure.
In the first case, the result will be added to the accumulated results, and in the second case
the error will be stored with other accumulated errors (if any).
A tag uniquely annotates the operation - duplicate tags will cause the second tag to overwrite the first.
Several options can be passed in:
- `on_success`: 1 or 2 arity callback which is given the result of the operation if successful,
or the tag and the result. The return value is stored in the results
- `on_error`: 1 or 2 arity callback which is given the error reason of the operation if it failed,
or the tag and the error
- `wait_for`: by default async operations are resolved in `Flume.result`. If you want them resolved before
so that they are accessible in earlier callbacks, specify the async operation tag here
## Examples
iex> Flume.new() |>
iex> Flume.run(:a, fn -> {:ok, 2} end) |>
iex> Flume.run(:b, fn data -> {:ok, 2 * data.a} end, on_success: & &1 * 100) |>
iex> Flume.run(:this_fails, fn -> {:error, :for_some_reason} end) |>
iex> Flume.run(:this_wont_run, fn -> raise "boom" end)
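Using `wait_for` to make an async result available to a later synchronous step:
iex> Flume.new() |>
iex> Flume.run_async(:a, fn -> {:ok, 1} end) |>
iex> Flume.run(:b, fn data -> {:ok, data.a + 1} end, wait_for: [:a]) |>
iex> Flume.result()
{:ok, %{a: 1, b: 2}}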
"""
@spec run(t(), tag(), process_fun(), list()) :: t()
def run(flume, tag, process_fun, opts \\ [])
def run(%Flume{halted: true, halt_on_errors: true} = flume, _tag, _process_fun, _opts) do
flume
end
def run(%Flume{} = flume, tag, process_fun, opts)
when is_atom(tag) and (is_function(process_fun, 1) or is_function(process_fun, 0)) do
on_success = Keyword.get(opts, :on_success)
on_error = Keyword.get(opts, :on_error)
wait_for = Keyword.get(opts, :wait_for, [])
# Synchronise tasks that need awaiting, and refresh results + errors
%Flume{results: results, halted: halted} =
flume = flume |> resolve_tasks(wait_for) |> Map.update!(:tasks, &Map.drop(&1, wait_for))
# If some of the synced tasks have errored and halted pipeline, do not proceed
if halted do
flume
else
process_fun
|> apply_process_callback(results)
|> case do
{:ok, result} -> handle_process_callback_success(flume, tag, result, on_success)
{:error, reason} -> handle_process_callback_error(flume, tag, reason, on_error)
bad_match -> raise_match_error!(tag, bad_match)
end
end
end
@doc """
Executes the passed-in callback asynchronously and stores the returned result. All asynchronous
operations are resolved when `Flume.result/1` is called.
Apart from the asynchronous nature of this function, it behaves largely the same as `Flume.run`.
Obviously using this in combination with `Flume.run` is less safe (unless you use the `wait_for` option),
because it won't necessarily stop at the first error. Also the results of the asynchronous operations
will not be available until the end.
## Examples
iex> Flume.new() |>
iex> Flume.run(:a, fn -> {:ok, 2} end) |>
iex> Flume.run_async(:b, fn data -> {:ok, data.a * 2} end) |>
iex> Flume.run_async(:c, fn -> {:ok, 4} end, on_success: & &1 * 2) |>
iex> Flume.result()
"""
@spec run_async(t(), tag(), process_fun(), list()) :: t()
def run_async(flume, tag, process_fun, opts \\ [])
def run_async(
%Flume{halted: true, halt_on_errors: true} = flume,
_tag,
_process_fun,
_opts
),
do: flume
def run_async(%Flume{tasks: tasks} = flume, tag, process_fun, opts)
when is_atom(tag) and is_function(process_fun, 0) do
tasks = Map.put(tasks, tag, %{task: Task.async(process_fun), opts: opts})
%Flume{flume | tasks: tasks}
end
def run_async(%Flume{tasks: tasks, results: results} = flume, tag, process_fun, opts)
when is_atom(tag) and is_function(process_fun, 1) do
task_fun = fn -> process_fun.(results) end
tasks = Map.put(tasks, tag, %{task: Task.async(task_fun), opts: opts})
%Flume{flume | tasks: tasks}
end
@doc """
Returns result of pipeline.
## Examples
iex> Flume.new() |> Flume.run(:a, fn -> {:ok, 2} end) |> Flume.result()
{:ok, %{a: 2}}
iex> Flume.new() |> Flume.run(:a, fn -> {:error, :idk} end) |> Flume.result()
{:error, %{a: :idk}, %{}}
"""
@spec result(Flume.t()) :: {:ok, map()} | {:error, map(), map()}
def result(%Flume{tasks: tasks} = flume) when map_size(tasks) > 0 do
flume
|> resolve_tasks()
|> Map.put(:tasks, %{})
|> result()
end
def result(%Flume{results: results, errors: errors}) when map_size(errors) > 0 do
{:error, errors, results}
end
def result(%Flume{results: results}) do
{:ok, results}
end
defp maybe_apply_on_success(fun, result, _tag) when is_function(fun, 1), do: fun.(result)
defp maybe_apply_on_success(fun, result, tag) when is_function(fun, 2), do: fun.(tag, result)
defp maybe_apply_on_success(_fun, result, _tag), do: result
defp maybe_apply_on_error(fun, error, _tag) when is_function(fun, 1) do
fun.(error)
error
end
defp maybe_apply_on_error(fun, error, tag) when is_function(fun, 2) do
fun.(tag, error)
error
end
defp maybe_apply_on_error(_fun, error, _tag), do: error
defp resolve_tasks(%Flume{tasks: tasks} = flume, only) do
tasks
|> Map.take(only)
|> Enum.reduce(flume, &resolve_task/2)
end
defp resolve_tasks(%Flume{tasks: tasks} = flume) do
Enum.reduce(tasks, flume, &resolve_task/2)
end
defp resolve_task(
{tag, %{task: task, opts: opts}},
%Flume{} = flume
) do
on_success = Keyword.get(opts, :on_success)
on_error = Keyword.get(opts, :on_error)
task
|> Task.await()
|> case do
{:ok, result} -> handle_process_callback_success(flume, tag, result, on_success)
{:error, reason} -> handle_process_callback_error(flume, tag, reason, on_error)
bad_match -> raise_match_error!(tag, bad_match)
end
end
defp apply_process_callback(callback, results) when is_function(callback, 1) do
callback.(results)
end
defp apply_process_callback(callback, _results) do
callback.()
end
defp handle_process_callback_success(%Flume{results: results} = flume, tag, result, on_success) do
result = maybe_apply_on_success(on_success, result, tag)
results = Map.put(results, tag, result)
%Flume{flume | results: results}
end
defp handle_process_callback_error(%Flume{} = flume, tag, error, on_error) do
flume
|> maybe_apply_error_callbacks(tag, error, on_error)
|> Map.update!(:errors, &Map.put(&1, tag, error))
|> maybe_halt()
end
defp maybe_apply_error_callbacks(%Flume{global_funs: global_funs} = flume, tag, error, on_error) do
maybe_apply_on_error(global_funs.on_error, error, tag)
maybe_apply_on_error(on_error, error, tag)
flume
end
defp maybe_halt(%Flume{halt_on_errors: false} = flume), do: flume
defp maybe_halt(%Flume{halted: true} = flume), do: flume
defp maybe_halt(%Flume{} = flume), do: %Flume{flume | halted: true}
defp raise_match_error!(tag, bad_match) do
raise __MODULE__.FlumeError,
"#{tag}: Expected either an `{:ok, result}` or `{:error, reason}` tuple " <>
"from the process callback but got #{inspect(bad_match)}"
end
end | lib/flume.ex | 0.900351 | 0.480662 | flume.ex | starcoder |
defmodule Web.BulletinController do
use Web, :controller
alias ChallengeGov.Challenges
alias ChallengeGov.Challenges.Bulletin
alias ChallengeGov.GovDelivery
def new(conn, %{"challenge_id" => id}) do
%{current_user: user} = conn.assigns
with {:ok, challenge} <- Challenges.get(id),
{:ok, challenge} <- Challenges.can_send_bulletin(user, challenge) do
conn
|> assign(:changeset, Bulletin.create_changeset(%Bulletin{}, %{}))
|> assign(:path, Routes.challenge_bulletin_path(conn, :create, challenge.id))
|> assign(:challenge, challenge)
|> render("new.html")
else
{:error, :not_permitted} ->
conn
|> put_flash(:error, "You are not allowed to send a bulletin for this challenge")
|> redirect(to: Routes.challenge_path(conn, :index))
{:error, :not_found} ->
conn
|> put_flash(:error, "Challenge not found")
|> redirect(to: Routes.challenge_path(conn, :index))
end
end
def create(conn, %{"challenge_id" => id, "bulletin" => bulletin_params}) do
%{current_user: user} = conn.assigns
subject = "Challenge.Gov Bulletin: #{bulletin_params["subject"]}"
with {:ok, challenge} <- Challenges.get(id),
{:ok, challenge} <- Challenges.can_send_bulletin(user, challenge),
{:ok, :sent} <- GovDelivery.send_bulletin(challenge, subject, bulletin_params["body"]) do
conn
|> put_flash(:info, "Bulletin scheduled to send")
|> redirect(to: Routes.challenge_path(conn, :index))
else
{:send_error, _e} ->
conn
|> put_flash(:error, "Error sending bulletin")
|> redirect(to: Routes.challenge_path(conn, :index))
{:error, :not_permitted} ->
conn
|> put_flash(:error, "You are not allowed to send a bulletin for this challenge")
|> redirect(to: Routes.challenge_path(conn, :index))
{:error, :not_found} ->
conn
|> put_flash(:error, "Challenge not found")
|> redirect(to: Routes.challenge_path(conn, :index))
end
end
end | lib/web/controllers/bulletin_controller.ex | 0.517083 | 0.408483 | bulletin_controller.ex | starcoder |
defmodule Collections.Heap do
defstruct data: nil, size: 0, comparator: nil
@moduledoc """
Leftist heap implementation in Elixir
See also: [Leftist Tree](https://en.wikipedia.org/wiki/Leftist_tree)
Time complexity
* `&peek/2` : O(1)
* `&push/2` : O(logn)
* `&pop/2` : O(logn)
* `&size/1` : O(1)
* `&member?/2` : O(n)
* `&empty?/1` : O(1)
"""
alias Collections.Heap
@type data :: {non_neg_integer(), any(), data(), data()} | nil
@type t :: %__MODULE__{
data: data(),
size: non_neg_integer(),
comparator: (any(), any() -> boolean())
}
@leaf nil
@compile {:inline, min: 0, max: 0, new: 0, new: 1, size: 1, peek: 2}
@doc """
Create an empty min `heap` with default comparator `&</2`.
A min heap is a heap tree which always has the smallest value at the top.
## Examples
iex> 1..10
...> |> Enum.shuffle()
...> |> Enum.into(Collections.Heap.min())
...> |> Collections.Heap.peek()
1
"""
@spec min() :: t
def min, do: Heap.new(&</2)
@doc """
Create an empty max `heap` with default comparator `&>/2`.
A max heap is a heap tree which always has the largest value at the top.
## Examples
iex> 1..10
...> |> Enum.shuffle()
...> |> Enum.into(Collections.Heap.max())
...> |> Collections.Heap.peek()
10
"""
@spec max() :: t
def max, do: Heap.new(&>/2)
@doc """
Create an empty heap with the default comparator `&</2`.
Behaves the same as `Heap.min/0`.
"""
@spec new() :: t
def new(), do: %Heap{comparator: &</2}
@doc """
Create an empty heap with a specific comparator.
## Examples
iex> 1..10
...> |> Enum.shuffle()
...> |> Enum.into(Collections.Heap.new(&(&1 > &2)))
...> |> Enum.to_list()
[10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
The given function should compare two arguments, and return true if the first argument precedes the second one.
"""
@spec new((any, any -> boolean)) :: t
def new(comparator) when is_function(comparator, 2), do: %Heap{comparator: comparator}
@doc """
Test if the `heap` is empty
## Examples
iex> Collections.Heap.new() |> Collections.Heap.empty?()
true
iex> Collections.Heap.new() |> Collections.Heap.push(10) |> Collections.Heap.empty?()
false
"""
@spec empty?(t) :: boolean
def empty?(t), do: Heap.size(t) == 0
@doc """
Returns the number of elements in `heap`.
## Examples
iex> 1..10
...> |> Enum.into(Collections.Heap.new())
...> |> Collections.Heap.size()
10
"""
@spec size(t) :: non_neg_integer()
def size(%Heap{size: size}), do: size
@doc """
Push a new element into `heap`.
## Examples
iex> Collections.Heap.new()
...> |> Collections.Heap.push(10)
...> |> Collections.Heap.peek()
10
"""
@spec push(t, any()) :: t
def push(%Heap{data: data, size: size, comparator: cmp}, value) do
%Heap{data: merge(data, {1, value, @leaf, @leaf}, cmp), size: size + 1, comparator: cmp}
end
@doc """
Returns the element at the top of `heap`.
If the `heap` is empty, `default` is returned.
If `default` is not provided, `nil` is used.
## Examples
iex> Collections.Heap.new()
...> |> Collections.Heap.peek()
nil
iex> Collections.Heap.new()
...> |> Collections.Heap.peek(10)
10
iex> 1..10
...> |> Enum.shuffle()
...> |> Enum.into(Collections.Heap.new())
...> |> Collections.Heap.peek()
1
"""
@spec peek(t, default) :: any() | default when default: any()
def peek(heap, default \\ nil)
def peek(%Heap{data: nil}, default), do: default
def peek(%Heap{data: {_, v, _, _}}, _default), do: v
@doc """
Removes the element at the top of the `heap` and returns the element and the updated heap.
If the `heap` is empty, `default` is returned.
If `default` is not provided, `nil` is used.
## Examples
iex> {nil, _} = Collections.Heap.new()
...> |> Collections.Heap.pop()
iex> {10, _} = Collections.Heap.new()
...> |> Collections.Heap.pop(10)
iex> {1, rest_heap} = 1..10
...> |> Enum.shuffle()
...> |> Enum.into(Collections.Heap.new())
...> |> Collections.Heap.pop()
...> {2, _} = Collections.Heap.pop(rest_heap)
...> Collections.Heap.size(rest_heap)
9
"""
@spec pop(t, default) :: {any(), updated_heap :: t} | {default, t} when default: any()
def pop(heap, default \\ nil)
def pop(%Heap{data: nil, size: 0} = heap, default), do: {default, heap}
def pop(%Heap{data: {_, v, l, r}, size: size, comparator: cmp}, _default),
do: {v, %Heap{data: merge(l, r, cmp), size: size - 1, comparator: cmp}}
@doc """
Test if the `heap` contains the `value`.
## Examples
iex> heap = 1..10
...> |> Enum.into(Collections.Heap.new())
iex> Collections.Heap.member?(heap, 5)
true
iex> Collections.Heap.member?(heap, 20)
false
"""
@spec member?(t, any()) :: boolean()
def member?(%Heap{data: data}, value), do: has_member?(data, value)
@spec rank(data()) :: non_neg_integer()
defp rank(@leaf), do: 0
defp rank({r, _, _, _}), do: r
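# Merge two leftist heaps along their right spines, keeping the
# comparator-preferred root on top. `swipe/3` rebuilds each node so the
# leftist invariant (rank(left) >= rank(right)) is preserved.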
@spec merge(data(), data(), (any(), any() -> boolean())) :: data()
defp merge(@leaf, @leaf, _cmp), do: nil
defp merge(@leaf, t, _cmp), do: t
defp merge(t, @leaf, _cmp), do: t
defp merge({_, lv, ll, lr} = t1, {_, rv, rl, rr} = t2, cmp) do
case cmp.(lv, rv) do
true -> swipe(lv, ll, merge(lr, t2, cmp))
false -> swipe(rv, rl, merge(t1, rr, cmp))
err -> raise("Comparator should return boolean, but returned '#{err}'.")
end
end
@spec swipe(any(), data(), data()) :: data()
defp swipe(v, left, right) do
if rank(left) >= rank(right) do
{rank(right) + 1, v, left, right}
else
{rank(left) + 1, v, right, left}
end
end
@spec has_member?(data(), any()) :: boolean()
defp has_member?(nil, _value), do: false
defp has_member?({_, v, l, r}, value) do
if v == value do
true
else
has_member?(l, value) || has_member?(r, value)
end
end
end | lib/heap/heap.ex | 0.92964 | 0.702677 | heap.ex | starcoder |
defmodule ExTorch.Native.Macros do
@moduledoc """
General purpose macros to automatically generate binding declarations and calls
for both ExTorch callable functions and Rustler signature calls to the NIF library.
"""
@doc """
Automatic binding generation.
This macro allows defining a bindings block under a given `doc_section`
for a given set of function bindings. All binding declarations should be
signaled using the `defbinding` function, which receives the function
signature, alongside an optional keyword list of parameter transformations
that must be done before calling the native function
(defined in `ExTorch.Native`).
Each `defbinding` declaration must declare its `@spec` and optionally its
docstring `@doc` before the call. Additionally, the function binding
signature can declare optional arguments. For example:
# All function docstrings will be collected under :doc_section_name
defbindings(:doc_section_name) do
@doc \"\"\"
The docstring for func goes here
\"\"\"
@spec func(type_1(), type_2(), type_3()) :: ret_type()
defbinding(
func(
arg1, # Positional argument
arg2 \\\\ optional_value, # Optional argument
arg3 \\\\ optional_value # Optional argument
)
)
@doc \"\"\"
The docstring for func2 goes here
\"\"\"
@spec func2(type_1(), type_2(), type_3(), type_4()) :: ret_type()
defbinding(
func2(
arg1 \\\\ optional_value, # Positional argument with optional value
arg2, # Positional argument
arg3 \\\\ optional_value, # Optional argument
arg4 \\\\ optional_value # Optional argument
)
)
@doc \"\"\"
The docstring for func3 goes here
\"\"\"
@spec func3(type_1(), type_2(), type_3(), type_4()) :: ret_type()
defbinding(
func3(
arg1, # Positional argument
arg2, # Positional argument
arg3 \\\\ optional_value, # Optional argument
arg4 \\\\ optional_value # Optional argument
),
arg1: arg1[:value],
arg3: call_to_some_transform(arg3, arg2),
)
end
In case optional arguments are defined, the macro will expand the declaration
to allow optional arguments to be passed as a keyword list. For example, the
function `func` will be expanded to the following function calls: `func(arg1)`,
`func(arg1, kwargs)`, `func(arg1, arg2)`, `func(arg1, arg2, kwargs)` and
`func(arg1, arg2, arg3)`, where kwargs correspond to `arg2: value,
arg3: value2` and `arg3: value`, respectively.
When the first argument is declared as optional, the macro will
generate function calls that begin with the first argument as well as the
second argument. In case there are multiple calls with the same arity, the
macro will try to disambiguate them by computing the corresponding guards
that distinguish each call from the others. In the case of `func2`, the
expanded definitions would correspond to `func2(arg2)`, `func2(arg2, kwargs)`,
`func2(arg2, arg3, kwargs)`, `func2(arg2, arg3, arg4)`,
`func2(arg1, arg2)`, `func2(arg1, arg2, kwargs)`,
`func2(arg1, arg2, arg3)`, etc.
Finally, if transforms are defined (like `func3`), they will be assigned to
the specified arguments before calling the native function.
"""
defmacro defbindings(doc_section, [{:do, {:__block__, [], args}}]) do
block = compose_block(args, [], [], doc_section)
{:__block__, [], block}
end
defp compose_block([], block, attrs, _) do
attrs = Enum.reverse(attrs)
block = Enum.reverse(block)
block ++ attrs
end
defp compose_block([{:@, _, _} = attr | rest], block, attrs, doc_section) do
compose_block(rest, block, [attr | attrs], doc_section)
end
defp compose_block([{:defbinding, _, [call]} | rest], block, attrs, doc_section) do
expanded_definition = expand_binding(call, attrs, doc_section, [])
compose_block(rest, [expanded_definition | block], [], doc_section)
end
defp compose_block([{:defbinding, _, [call | transforms]} | rest], block, attrs, doc_section) do
expanded_definition = expand_binding(call, attrs, doc_section, Enum.at(transforms, 0))
compose_block(rest, [expanded_definition | block], [], doc_section)
end
defp compose_block([head | rest], block, attrs, doc_section) do
block = attrs ++ block
compose_block(rest, [head | block], [], doc_section)
end
defp expand_binding({func_name, _, args}, attrs, doc_section, transforms) do
func_info = collect_function_info(attrs, %{:doc => nil, :spec => nil})
%{:spec => spec, :doc => func_docstring} = func_info
case spec do
nil -> raise "@spec declaration is missing for #{func_name}"
_ -> nil
end
{arg_names, arg_info} = collect_arg_info(args)
{ret_type, arg_types} = collect_arg_types(func_name, spec, arg_names)
transforms = assemble_transforms(transforms)
[{_, first_arg_optional, _} | _] = arg_info
{args, kwargs, defaults, first_optional_signatures} =
case first_arg_optional do
true ->
[{first_arg, _, default_value} | other_arg_info] = arg_info
{args, kwargs, defaults} = split_args_kwargs(other_arg_info)
signatures =
compute_signatures(
func_name,
arg_types,
args,
kwargs,
defaults,
transforms,
[default_value],
[]
)
args = [first_arg | args]
{args, kwargs, defaults, signatures}
false ->
{args, kwargs, defaults} = split_args_kwargs(arg_info)
{args, kwargs, defaults, []}
end
full_positional_signatures =
compute_signatures(func_name, arg_types, args, kwargs, defaults, transforms, [])
all_signatures = first_optional_signatures ++ full_positional_signatures
signature_map = Enum.map(all_signatures, fn %{:signature => sig} = x -> {sig, x} end)
signature_map = Enum.into(signature_map, %{})
arity_map =
Enum.reduce(signature_map, %{}, fn {k, %{:arity => arity}}, acc ->
arity_funcs = Map.get(acc, arity, [])
Map.put(acc, arity, [k | arity_funcs])
end)
max_arity = Enum.max(Map.keys(arity_map))
valid_signatures =
Enum.reduce(arity_map, [], fn {_, signatures}, to_generate ->
# Enum.reduce(signatures, {[], sig_map}, fn sig, {})
valid_arity_signatures = compare_and_reduce_signatures(signatures, arg_types)
to_generate ++ valid_arity_signatures
end)
arity_docs =
Enum.reduce(valid_signatures, %{}, fn {sig, _}, acc ->
arity = length(sig)
arity_funcs = Map.get(acc, arity, [])
sig_str = Enum.map_join(sig, ", ", fn arg -> Atom.to_string(arg) end)
sig_str = "* `#{func_name}(#{sig_str})`"
Map.put(acc, arity, [sig_str | arity_funcs])
end)
arity_docs =
Enum.map(arity_docs, fn {k, v} ->
doc = """
Available signature calls:
#{Enum.join(v, "\n")}
"""
{k, doc}
end)
arity_docs = Enum.into(arity_docs, %{})
compose_binding_call(
func_name,
doc_section,
func_docstring,
ret_type,
max_arity,
arity_docs,
valid_signatures,
signature_map
)
end
defp compose_binding_call(
func_name,
doc_section,
func_docstring,
ret_type,
max_arity,
arity_docs,
signatures,
signature_map
) do
Enum.map(signatures, fn {signature, guards} ->
guards =
guards
|> MapSet.new()
|> Enum.into([])
|> compose_guards()
%{^signature => %{:arity => arity, :body => sig_body, :spec => sig_spec}} = signature_map
spec =
quote do
@spec unquote(func_name)(unquote_splicing(sig_spec)) :: unquote(ret_type)
end
doc_headers =
case {func_docstring, guards} do
{_docstring, []} when arity == max_arity ->
quote do
@doc kind: unquote(doc_section)
unquote(func_docstring)
end
_ ->
arity_doc = Map.get(arity_docs, arity)
# sig_string =
# signature
# |> Enum.map_join(", ", fn arg -> Atom.to_string(arg) end)
# sig_string = "`#{func_name}(#{sig_string})`"
quote do
@doc unquote(arity_doc)
@doc kind: unquote(doc_section)
end
end
signature = Enum.map(signature, fn arg -> Macro.var(arg, nil) end)
body =
case guards do
[] ->
quote do
unquote(doc_headers)
unquote(spec)
def unquote(func_name)(unquote_splicing(signature)) do
unquote(sig_body)
end
end
_ ->
quote do
unquote(doc_headers)
unquote(spec)
def unquote(func_name)(unquote_splicing(signature)) when unquote(guards) do
unquote(sig_body)
end
end
end
body
end)
end
defp compose_guards(guards) do
chunk_fun = fn element, acc ->
if length(acc) == 1 do
{:cont, {:and, [{:context, Elixir}, {:import, Kernel}], Enum.reverse([element | acc])},
[]}
else
{:cont, [element | acc]}
end
end
after_fun = fn
[] -> {:cont, []}
acc -> {:cont, Enum.reverse(acc), []}
end
composed_guards =
guards
|> Enum.map(fn {variable, guard} when is_atom(guard) ->
guard_call = String.to_atom("is_#{Atom.to_string(guard)}")
quote do
unquote(guard_call)(unquote(Macro.var(variable, nil)))
end
end)
|> Enum.chunk_while([], chunk_fun, after_fun)
case composed_guards do
[h | _] when is_list(h) ->
[guard | _] = h
guard
[h | _] when is_tuple(h) ->
h
[] ->
composed_guards
end
end
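# Keep only signatures that can be distinguished from previously accepted
# signatures of the same arity, collecting the type guards (e.g. is_list/1)
# needed to tell them apart at runtime.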
defp compare_and_reduce_signatures(signatures, arg_types) do
Enum.reduce(signatures, [], fn sig, valid_signatures ->
{valid, guards} = compute_guards_for_signature(valid_signatures, sig, arg_types)
case valid do
true -> [{sig, guards} | valid_signatures]
false -> valid_signatures
end
end)
end
defp compute_guards_for_signature(valid_signatures, sig, arg_types) do
Enum.reduce(valid_signatures, {true, []}, fn {valid_sig, _}, {is_valid, guards} ->
case is_valid do
true ->
{valid, diff_sig_guard} = compare_signature_types(sig, valid_sig, arg_types)
is_valid = valid and is_valid
guards = guards ++ diff_sig_guard
{is_valid, guards}
false ->
{is_valid, guards}
end
end)
end
defp compare_signature_types(signature, to_compare_sig, arg_types) do
sig_arg_types = gather_signature_types(signature, arg_types)
compare_arg_types = gather_signature_types(to_compare_sig, arg_types)
sig_arg_types
|> Enum.zip(Enum.reverse(signature))
|> Enum.zip(compare_arg_types)
|> Enum.reduce({false, []}, fn
{{that_arg, _}, that_arg}, {false, guards} ->
{false, guards}
{{this_arg, arg_name}, _that_arg}, {false, guards} ->
{true, [{arg_name, this_arg} | guards]}
{_, _}, {true, guards} ->
{true, guards}
end)
end
defp gather_signature_types(signature, arg_types) do
signature
|> Enum.map(fn
:kwargs ->
:list
arg ->
{type_alias, _} = Map.get(arg_types, arg)
type_alias
end)
|> Enum.reverse()
end
defp compute_signatures(
func_name,
arg_types,
args,
kwargs,
defaults,
transforms,
left_args,
signatures
) do
args = left_args ++ args
compute_signatures(func_name, arg_types, args, kwargs, defaults, transforms, signatures)
end
defp compute_signatures(
func_name,
arg_types,
args,
[],
_,
{transforms, output_transform},
signatures
) do
valid_args = Enum.filter(args, fn x -> is_atom(x) and Map.has_key?(arg_types, x) end)
arity = length(valid_args)
native_module = {:__aliases__, [alias: false], [:ExTorch, :Native]}
call_unquote = {:., [], [native_module, func_name]}
call_parameters =
Enum.map(
args,
fn
x when is_atom(x) -> Macro.var(x, nil)
x -> x
end
)
fn_spec =
args
|> Enum.filter(fn x -> is_atom(x) and Map.has_key?(arg_types, x) end)
|> Enum.map(fn
arg ->
{_, spec_type} = Map.get(arg_types, arg)
spec_type
end)
call =
quote do
unquote(call_unquote)(unquote_splicing(call_parameters))
end
body =
case output_transform do
nil ->
quote do
unquote(transforms)
unquote(call)
end
_ ->
quote do
unquote(transforms)
unquote(output_transform)(unquote(call))
end
end
sig_info = %{:signature => valid_args, :arity => arity, :body => body, :spec => fn_spec}
[sig_info | signatures]
end
defp compute_signatures(
func_name,
arg_types,
args,
[kwarg | rest] = kwargs,
defaults,
{transforms, output_transform},
signatures
) do
valid_args = Enum.filter(args, fn x -> is_atom(x) and Map.has_key?(arg_types, x) end)
# defaults_macro = Macro.escape(defaults)
defaults_macro =
Enum.map(defaults, fn {k, v} ->
{k, v}
end)
defaults_macro = {:%{}, [], defaults_macro}
kwargs_assignment =
kwargs
|> Enum.map(fn kwarg -> {kwarg, Macro.var(kwarg, nil)} end)
kwargs_assignment = {:%{}, [], kwargs_assignment}
native_module = {:__aliases__, [alias: false], [:ExTorch, :Native]}
call_unquote = {:., [], [native_module, func_name]}
args_spec =
valid_args
|> Enum.map(fn
arg ->
{_, spec_type} = Map.get(arg_types, arg)
spec_type
end)
kwargs_spec =
Enum.map(kwargs, fn kwarg ->
{_, kwarg_type} = Map.get(arg_types, kwarg)
{kwarg, kwarg_type}
end)
fn_spec = args_spec ++ [kwargs_spec]
call_parameters =
Enum.map(
args ++ kwargs,
fn
x when is_atom(x) -> Macro.var(x, nil)
x -> x
end
)
call =
quote do
unquote(call_unquote)(unquote_splicing(call_parameters))
end
call =
case output_transform do
nil ->
call
_ ->
quote do
unquote(output_transform)(unquote(call))
end
end
kwarg_body =
quote do
unquote(Macro.var(:kwargs, nil)) =
Enum.into(
unquote(Macro.var(:kwargs, nil)),
%{}
)
unquote(Macro.var(:kwargs, nil)) =
Map.merge(unquote(defaults_macro), unquote(Macro.var(:kwargs, nil)))
unquote(kwargs_assignment) = unquote(Macro.var(:kwargs, nil))
unquote(transforms)
unquote(call)
end
body =
quote do
unquote(kwargs_assignment) = unquote(defaults_macro)
unquote(transforms)
unquote(call)
end
kwarg_signature = valid_args ++ [:kwargs]
kwarg_arity = length(kwarg_signature)
kwarg_sig_info = %{
:signature => kwarg_signature,
:arity => kwarg_arity,
:body => kwarg_body,
:spec => fn_spec
}
signature = valid_args
arity = length(signature)
sig_info = %{:signature => signature, :arity => arity, :body => body, :spec => args_spec}
signatures = [sig_info | [kwarg_sig_info | signatures]]
args = args ++ [kwarg]
{_, defaults} = Map.pop(defaults, kwarg)
compute_signatures(
func_name,
arg_types,
args,
rest,
defaults,
{transforms, output_transform},
signatures
)
end
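# Split the parsed argument info into required positional arguments, optional
# kwargs (in declaration order), and a map of their default values.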
defp split_args_kwargs(arg_info) do
{args, kwargs, defaults} =
arg_info
|> Enum.reduce(
{[], [], %{}},
fn
{arg, true, default}, {args, kwargs, default_values} ->
{args, [arg | kwargs], Map.put(default_values, arg, default)}
{arg, false, _}, {args, kwargs, default_values} ->
{[arg | args], kwargs, default_values}
end
)
args = Enum.reverse(args)
kwargs = Enum.reverse(kwargs)
{args, kwargs, defaults}
end
defp collect_function_info([], acc) do
acc
end
defp collect_function_info([{:@, _, [{:doc, _, _}]} = attr | attrs], acc) do
acc = Map.put(acc, :doc, attr)
collect_function_info(attrs, acc)
end
defp collect_function_info([{:@, _, [{:spec, _, specs}]} | attrs], acc) do
acc = Map.put(acc, :spec, specs)
collect_function_info(attrs, acc)
end
defp collect_arg_info(args) do
args
|> Enum.map(fn
{:\\, _, [{arg_name, _, _}, default_value]} ->
{arg_name, {arg_name, true, default_value}}
{arg_name, _, _} ->
{arg_name, {arg_name, false, nil}}
end)
|> Enum.unzip()
end
defp collect_arg_types(
func_name,
[{:"::", _, [{func_name, _, arg_specs}, ret_type]}],
arg_names
) do
arg_types =
arg_names
|> Enum.zip(arg_specs)
|> Enum.map(&extract_arg_type/1)
|> Enum.into(%{})
{ret_type, arg_types}
end
defp extract_arg_type({arg_name, arg_spec}) do
case arg_spec do
{:|, _, type_union} ->
type_union = parse_type_union(type_union)
{arg_name, {type_union, arg_spec}}
_ ->
type = parse_type(arg_spec)
{arg_name, type}
end
end
defp parse_type_union(type_union) do
Enum.map(type_union, &parse_type/1)
end
defp parse_type(type) do
type_alias =
case type do
{{:., _, [{:__aliases__, _, [:ExTorch, :Tensor]}, :t]}, _, []} -> :tensor
{{:., _, [{:__aliases__, _, [:ExTorch, _]}, extorch_type]}, _, []} -> extorch_type
{type, _, _} -> type
[_] -> :list
end
{type_alias, type}
end
defp assemble_transforms(transforms) do
{transforms, output_transform} =
Enum.reduce(transforms, {[], nil}, fn
{:output, {transform, [no_parens: true, line: _], _}}, {transforms, _} ->
{transforms, transform}
{:output, transform}, {transforms, _} ->
{transforms, transform}
{variable, transform}, {transforms, output_transform} ->
transform =
quote do
unquote(Macro.var(variable, nil)) = unquote(transform)
end
{[transform | transforms], output_transform}
end)
{{:__block__, [], Enum.reverse(transforms)}, output_transform}
end
end | lib/extorch/native/macros.ex | 0.925911 | 0.63799 | macros.ex | starcoder |
defmodule Site.TripPlan.Map do
alias Leaflet.{MapData, MapData.Marker}
alias Leaflet.MapData.Polyline, as: LeafletPolyline
alias GoogleMaps
alias Routes.Route
alias TripPlan.{Leg, NamedPosition, TransitDetail}
alias Util.Position
@type static_map :: String.t()
@type t :: {MapData.t(), static_map}
@type route_mapper :: (String.t() -> Route.t() | nil)
@type stop_mapper :: (String.t() -> Stops.Stop.t() | nil)
@default_opts [
route_mapper: &Routes.Repo.get/1,
stop_mapper: &Stops.Repo.get_parent/1
]
@moduledoc """
Handles generating the maps displayed within the TripPlan Controller
"""
@doc """
Returns the url for the initial map for the Trip Planner
"""
@spec initial_map_src() :: static_map
def initial_map_src do
{630, 400}
|> MapData.new(14)
|> MapData.to_google_map_data()
|> GoogleMaps.static_map_url()
end
def initial_map_data do
{630, 400}
|> MapData.new(14)
end
# Maps for results
@doc """
Returns the static map data and source URL
Accepts a function that will return either a
Route or nil when given a route_id
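Example (assuming `legs` is a list of `%TripPlan.Leg{}` structs):
{map_data, static_map_url} = Site.TripPlan.Map.itinerary_map(legs)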
"""
@spec itinerary_map([Leg.t()], Keyword.t()) :: t
def itinerary_map(itinerary, opts \\ []) do
map_data = itinerary_map_data(itinerary, Keyword.merge(@default_opts, opts))
{map_data, map_data |> MapData.to_google_map_data() |> GoogleMaps.static_map_url()}
end
@spec itinerary_map_data([Leg.t()], Keyword.t()) :: MapData.t()
defp itinerary_map_data(itinerary, opts) do
markers =
itinerary
|> markers_for_legs(opts)
|> Enum.with_index()
|> Enum.map(fn {marker, idx} -> %{marker | id: "marker-#{idx}"} end)
paths = Enum.map(itinerary, &build_leg_path(&1, opts[:route_mapper]))
{600, 600}
|> MapData.new()
|> MapData.add_markers(markers)
|> MapData.add_polylines(paths)
end
@spec build_leg_path(Leg.t(), route_mapper) :: LeafletPolyline.t()
defp build_leg_path(leg, route_mapper) do
color = leg_color(leg, route_mapper)
path_weight = if Leg.transit?(leg), do: 5, else: 1
leg.polyline
|> extend_to_endpoints(leg)
|> LeafletPolyline.new(color: color, weight: path_weight)
end
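# Decode the polyline, prepend/append the leg's exact endpoints, and re-encode
# so the drawn path always reaches the leg's from/to positions.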
@spec extend_to_endpoints(String.t(), Leg.t()) :: String.t()
defp extend_to_endpoints(polyline, %{from: from, to: to})
when is_map(from) and is_map(to) do
from = {Position.longitude(from), Position.latitude(from)}
to = {Position.longitude(to), Position.latitude(to)}
polyline
|> Polyline.decode()
|> (fn line -> Enum.concat([[from], line, [to]]) end).()
|> Polyline.encode()
end
defp extend_to_endpoints(_polyline, _leg) do
""
end
@spec markers_for_legs([Leg.t()], Keyword.t()) :: [Marker.t()]
defp markers_for_legs(legs, opts) do
leg_count = Enum.count(legs)
legs
|> Enum.zip(Stream.iterate(0, &(&1 + 2)))
|> Enum.flat_map(&build_marker_for_leg(&1, opts, leg_count))
end
@spec build_marker_for_leg({Leg.t(), non_neg_integer}, Keyword.t(), non_neg_integer) :: [
Marker.t()
]
defp build_marker_for_leg({leg, idx}, opts, leg_count) do
leg_positions = [{leg.from, idx}, {leg.to, idx + 1}]
leg_positions
|> Enum.reject(fn {position, _n} -> is_nil(position) end)
|> build_markers_for_leg_positions(opts[:stop_mapper], leg_count)
end
  defp build_markers_for_leg_positions(positions_with_indices, stop_mapper, leg_count) do
    for {position, index} <- positions_with_indices do
build_marker_for_leg_position(position, stop_mapper, %{
start: 0,
current: index,
end: 2 * leg_count - 1
})
end
end
@spec build_marker_for_leg_position(NamedPosition.t(), stop_mapper, map) :: Marker.t()
defp build_marker_for_leg_position(leg_position, stop_mapper, indexes) do
icon_name = stop_icon_name(indexes)
opts = [
icon: icon_name,
icon_opts: stop_icon_size(icon_name),
tooltip: tooltip_for_position(leg_position, stop_mapper),
z_index: z_index(indexes)
]
leg_position
|> Position.latitude()
|> Marker.new(Position.longitude(leg_position), opts)
end
@type index_map :: %{
required(:current) => integer,
required(:start) => integer,
required(:end) => integer
}
@doc """
Simplified name for the icon type; used by javascript to fetch the full SVG.
"""
@spec stop_icon_name(index_map) :: String.t()
def stop_icon_name(%{current: idx, start: idx}), do: "map-pin-a"
def stop_icon_name(%{current: idx, end: idx}), do: "map-pin-b"
def stop_icon_name(%{}), do: "dot-mid"
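  # For example (index values chosen for illustration): the very first
  # position gets the "A" pin, the very last gets the "B" pin, and every
  # intermediate stop gets the mid-route dot:
  #
  #     stop_icon_name(%{start: 0, current: 0, end: 5}) #=> "map-pin-a"
  #     stop_icon_name(%{start: 0, current: 5, end: 5}) #=> "map-pin-b"
  #     stop_icon_name(%{start: 0, current: 3, end: 5}) #=> "dot-mid"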
@doc """
Atom representing the size to use for the icon.
Used by javascript to generate the full SVG.
"""
@spec stop_icon_size(String.t()) :: map | nil
def stop_icon_size("map-pin-a"), do: nil
def stop_icon_size("map-pin-b"), do: nil
def stop_icon_size(_), do: %{icon_size: [22, 22], icon_anchor: [0, 0]}
  @spec leg_color(Leg.t(), route_mapper) :: String.t()
  defp leg_color(%Leg{mode: %TransitDetail{route_id: route_id}}, route_mapper) do
    # The route mapper may return nil (see the route_mapper type); fall back
    # to black instead of crashing on a missing route.
    case route_mapper.(route_id) do
      %Route{color: color} -> "#" <> color
      nil -> "#000000"
    end
  end
defp leg_color(_leg, _route_mapper) do
"#000000"
end
@spec tooltip_for_position(NamedPosition.t(), stop_mapper) :: String.t()
defp tooltip_for_position(%NamedPosition{stop_id: nil, name: name}, _stop_mapper) do
name
end
defp tooltip_for_position(%NamedPosition{stop_id: stop_id} = position, stop_mapper) do
case stop_mapper.(stop_id) do
nil -> position.name
stop -> stop.name
end
end
  @spec z_index(index_map) :: 0 | 100
def z_index(%{current: idx, start: idx}), do: 100
def z_index(%{current: idx, end: idx}), do: 100
def z_index(%{}), do: 0
end
# source file: apps/site/lib/site/trip_plan/map.ex
defmodule Freddy.Consumer do
@moduledoc """
This module allows to consume messages from specified queue bound to specified exchange.
## Configuration
* `:exchange` - specifies an exchange to declare. See `Freddy.Core.Exchange` for available
options. Optional.
* `:queue` - specifies a queue to declare. See `Freddy.Core.Queue` for available options.
Mandatory.
* `:qos` - configures channel QoS. See `Freddy.Core.QoS` for available options.
* `:binds` - specifies bindings to create from the declared queue to the declared
exchange. Must be a list of keywords or `%Freddy.Core.Bind{}` structs. See `Freddy.Core.Bind`
for available options.
* `:routing_keys` - a short way to declare bindings, for example providing a list
`["key1", "key2"]` is an equivalent of specifying option
`[binds: [[routing_key: "key1"], [routing_key: "key2"]]]`.
* `:consumer` - arguments to provide to `basic.consume` method, see below.
## Consumer options
* `:consumer_tag` - Specifies the identifier for the consumer. The consumer tag is
local to a channel, so two clients can use the same consumer tags. If this field
is empty the server will generate a unique tag. Default is empty.
* `:no_local` - If the `:no_local` field is set the server will not send messages
to the connection that published them. Default is `false`.
* `:no_ack` - If this field is set the server does not expect acknowledgements for
messages. That is, when a message is delivered to the client the server assumes
the delivery will succeed and immediately dequeues it. This functionality may
increase performance but at the cost of reliability. Messages can get lost if a
client dies before they are delivered to the application. Defaults to `false`.
* `:exclusive` - Request exclusive consumer access, meaning only this consumer can
access the queue. Default is `false`.
* `:nowait` - If set, the server will not respond to the method and client
will not wait for a reply. Default is `false`.
* `:arguments` - A set of arguments for the consume. The syntax and semantics
of these arguments depends on the server implementation.
## Example
defmodule Notifications.Listener do
use Freddy.Consumer
def start_link(conn, initial \\ nil, opts \\ []) do
config = [
exchange: [name: "freddy-topic", type: :topic],
queue: [name: "notifications-queue", opts: [auto_delete: true]],
qos: [prefetch_count: 10], # optional
routing_keys: ["routing_key1", "routing_key2"], # short way to declare binds
binds: [ # fully customizable bindings
[routing_key: "routing_key3", no_wait: true]
],
consumer: [exclusive: true] # optional
]
Freddy.Consumer.start_link(__MODULE__, conn, config, initial, opts)
end
def init(initial) do
# do something on init
{:ok, initial}
end
def handle_message(payload, %{routing_key: "visitor.status.disconnect"}, state) do
{:reply, :ack, state}
end
        def handle_error(_error, _message, state) do
          # log error?
          {:reply, :nack, state}
        end
end
"""
use Freddy.Core.Actor, queue: nil, exchange: nil
@type routing_key :: String.t()
@type action :: :ack | :nack | :reject
@type error :: term
@type connection_info :: %{
channel: Freddy.Core.Channel.t(),
queue: Freddy.Core.Queue.t(),
exchange: Freddy.Core.Exchange.t()
}
@doc """
  Called when the `Freddy.Consumer` process has opened an AMQP channel and declared an exchange and a queue.
First argument is a map, containing `:channel`, `:exchange` and `:queue` structures.
Returning `{:noreply, state}` will cause the process to enter the main loop
with the given state.
Returning `{:error, state}` will indicate that process failed to perform some critical actions
and must reconnect.
Returning `{:stop, reason, state}` will terminate the main loop and call
`c:terminate/2` before the process exits with reason `reason`.
"""
@callback handle_connected(meta :: connection_info, state) ::
{:noreply, state}
| {:noreply, state, timeout | :hibernate}
| {:error, state}
| {:stop, reason :: term, state}
@doc """
Called when the AMQP server has registered the process as a consumer and it
will start to receive messages.
  Returning `{:noreply, state}` will cause the process to enter the main loop
with the given state.
Returning `{:stop, reason, state}` will terminate the main loop and call
`terminate(reason, state)` before the process exits with reason `reason`.
"""
@callback handle_ready(meta, state) ::
{:noreply, state}
| {:noreply, state, timeout | :hibernate}
| {:stop, reason :: term, state}
@doc """
Called when a message is delivered from the queue before passing it into a
`handle_message` function.
  The arguments are the message's raw payload, some metadata and the internal state.
The metadata is a map containing all metadata given by the AMQP client when receiving
the message plus the `:exchange` and `:queue` values.
Returning `{:ok, payload, state}` or `{:ok, payload, meta, state}` will pass the decoded
payload and meta into `handle_message/3` function.
Returning `{:reply, action, opts, state}` or `{:reply, action, state}` will immediately ack,
nack or reject the message.
Returning `{:noreply, state}` will do nothing, and therefore the message should
be acknowledged by using `Freddy.Consumer.ack/2`, `Freddy.Consumer.nack/2` or
`Freddy.Consumer.reject/2`.
Returning `{:stop, reason, state}` will terminate the main loop and call
`terminate(reason, state)` before the process exits with reason `reason`.
"""
@callback decode_message(payload :: String.t(), meta, state) ::
{:ok, payload, state}
| {:ok, payload, meta, state}
| {:reply, action, opts :: Keyword.t(), state}
| {:reply, action, state}
| {:noreply, state}
| {:stop, reason :: term, state}
@doc """
Called when a message is delivered from the queue.
The arguments are the message's decoded payload, some metadata and the internal state.
The metadata is a map containing all metadata given by the adapter when receiving
  the message plus the `:exchange` and `:queue` values received at the `handle_connected/2`
callback.
Returning `{:reply, :ack | :nack | :reject, state}` will ack, nack or reject
the message.
Returning `{:reply, :ack | :nack | :reject, opts, state}` will ack, nack or reject
the message with the given opts.
Returning `{:noreply, state}` will do nothing, and therefore the message should
be acknowledged by using `Freddy.Consumer.ack/2`, `Freddy.Consumer.nack/2` or
`Freddy.Consumer.reject/2`.
Returning `{:stop, reason, state}` will terminate the main loop and call
`terminate(reason, state)` before the process exits with reason `reason`.
"""
@callback handle_message(payload, meta, state) ::
{:reply, action, state}
| {:reply, action, opts :: Keyword.t(), state}
| {:noreply, state}
| {:noreply, state, timeout | :hibernate}
| {:stop, reason :: term, state}
defmacro __using__(_) do
quote location: :keep do
@behaviour Freddy.Consumer
# Default callback implementation
@impl true
def init(initial) do
{:ok, initial}
end
@impl true
def handle_connected(_meta, state) do
{:noreply, state}
end
@impl true
def handle_ready(_meta, state) do
{:noreply, state}
end
@impl true
def handle_disconnected(_reason, state) do
{:noreply, state}
end
@impl true
def decode_message(payload, _meta, state) do
case Jason.decode(payload) do
{:ok, new_payload} -> {:ok, new_payload, state}
          {:error, _reason} -> {:reply, :reject, [requeue: false], state}
end
end
@impl true
def handle_message(_message, _meta, state) do
{:reply, :ack, state}
end
@impl true
def handle_call(message, _from, state) do
{:stop, {:bad_call, message}, state}
end
@impl true
def handle_cast(message, state) do
{:stop, {:bad_cast, message}, state}
end
@impl true
def handle_info(_message, state) do
{:noreply, state}
end
@impl true
def terminate(_reason, _state),
do: :ok
defoverridable Freddy.Consumer
end
end
alias Freddy.Core.Exchange
alias Freddy.Core.Queue
alias Freddy.Core.QoS
alias Freddy.Core.Bind
@doc "Ack's a message given its meta"
@spec ack(meta :: map, opts :: Keyword.t()) :: :ok
def ack(
%{channel: %{adapter: adapter, chan: chan}, delivery_tag: delivery_tag} = _meta,
opts \\ []
) do
adapter.ack(chan, delivery_tag, opts)
end
@doc "Nack's a message given its meta"
@spec nack(meta :: map, opts :: Keyword.t()) :: :ok
def nack(
%{channel: %{adapter: adapter, chan: chan}, delivery_tag: delivery_tag} = _meta,
opts \\ []
) do
adapter.nack(chan, delivery_tag, opts)
end
@doc "Rejects a message given its meta"
@spec reject(meta :: map, opts :: Keyword.t()) :: :ok
def reject(
%{channel: %{adapter: adapter, chan: chan}, delivery_tag: delivery_tag} = _meta,
opts \\ []
) do
adapter.reject(chan, delivery_tag, opts)
end
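  # Manual acknowledgement sketch (hypothetical consumer module): returning
  # `{:noreply, state}` from `handle_message/3` defers the ack, which can
  # then be issued later from the meta map:
  #
  #     def handle_message(_payload, meta, state) do
  #       send(self(), {:ack_later, meta})
  #       {:noreply, state}
  #     end
  #
  #     def handle_info({:ack_later, meta}, state) do
  #       :ok = Freddy.Consumer.ack(meta)
  #       {:noreply, state}
  #     end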
@impl true
def handle_connected(meta, state(config: config) = state) do
case declare_subscription(meta, config) do
{:ok, %{channel: channel, queue: queue, exchange: exchange} = new_meta} ->
handle_mod_connected(
new_meta,
state(state, channel: channel, queue: queue, exchange: exchange)
)
{:error, :closed} ->
{:error, state}
{:error, reason} ->
{:stop, reason, state}
end
end
defp declare_subscription(%{channel: channel} = meta, config) do
exchange =
config
|> Keyword.get(:exchange, Exchange.default())
|> Exchange.new()
queue =
config
|> Keyword.fetch!(:queue)
|> Queue.new()
qos =
config
|> Keyword.get(:qos, QoS.default())
|> QoS.new()
routing_keys =
config
|> Keyword.get(:routing_keys, [])
|> Enum.map(&Bind.new(routing_key: &1))
custom_binds =
config
|> Keyword.get(:binds, [])
|> Enum.map(&Bind.new/1)
binds = routing_keys ++ custom_binds
consumer_opts = Keyword.get(config, :consumer, [])
with :ok <- Exchange.declare(exchange, channel),
{:ok, queue} <- Queue.declare(queue, channel),
:ok <- QoS.declare(qos, channel),
:ok <- Bind.declare_multiple(binds, exchange, queue, channel),
{:ok, _consumer_tag} <- Queue.consume(queue, self(), channel, consumer_opts) do
new_meta =
meta
|> Map.put(:queue, queue)
|> Map.put(:exchange, exchange)
{:ok, new_meta}
end
end
@impl true
def handle_info(message, state(channel: %{adapter: adapter}) = state) do
case adapter.handle_message(message) do
{:consume_ok, meta} ->
handle_mod_ready(meta, state)
{:deliver, payload, meta} ->
handle_delivery(payload, meta, state)
{:cancel, _meta} ->
{:stop, :canceled, state}
{:cancel_ok, _meta} ->
{:stop, {:shutdown, :canceled}, state}
{:return, _payload, _meta} = message ->
super(message, state)
:unknown ->
super(message, state)
end
end
def handle_info(message, state) do
super(message, state)
end
defp handle_mod_ready(meta, state(mod: mod, given: given) = state) do
case mod.handle_ready(complete(meta, state), given) do
{:noreply, new_given} ->
{:noreply, state(state, given: new_given)}
{:noreply, new_given, timeout} ->
{:noreply, state(state, given: new_given), timeout}
{:stop, reason, new_given} ->
{:stop, reason, state(state, given: new_given)}
end
end
@reply_actions [:ack, :nack, :reject]
defp handle_delivery(payload, meta, state(mod: mod, given: given, exchange: exchange) = state) do
Freddy.Tracer.with_process_span(meta, exchange, mod, fn ->
meta = complete(meta, state)
result =
case mod.decode_message(payload, meta, given) do
{:ok, new_payload, new_given} ->
mod.handle_message(new_payload, meta, new_given)
{:ok, new_payload, new_meta, new_given} ->
mod.handle_message(new_payload, new_meta, new_given)
other ->
other
end
case result do
{:reply, action, new_given} when action in @reply_actions ->
apply(__MODULE__, action, [meta])
{:noreply, state(state, given: new_given)}
{:reply, action, opts, new_given} when action in @reply_actions ->
apply(__MODULE__, action, [meta, opts])
{:noreply, state(state, given: new_given)}
{:noreply, new_given} ->
{:noreply, state(state, given: new_given)}
{:noreply, new_given, timeout} ->
{:noreply, state(state, given: new_given), timeout}
{:stop, reason, new_given} ->
{:stop, reason, state(state, given: new_given)}
end
end)
end
defp complete(meta, state(channel: channel, queue: queue, exchange: exchange)) do
meta
|> Map.put(:exchange, exchange)
|> Map.put(:queue, queue)
|> Map.put(:channel, channel)
end
end
# source file: lib/freddy/consumer.ex
defmodule Zippex do
@moduledoc """
A Zipper is a representation of an aggregate data structure which
allows it to be traversed and updated arbitrarily. This module
implements tree-like semantics for traversing a data structure.
## Focus
The current node of the zipper, also known as the focus node, can be
retrieved by calling the `focus/1` function. The following functions
provide other information relating to the focus node:
* `lefts/1` - returns the left siblings of the focus node
* `rights/1` - returns the rights siblings of the focus node
* `path/1` - returns the path to the focus node from the root
## Traversal
The focus can be moved using the following functions:
* `head/1` - moves to the root node
* `down/1` - moves to the first child of the focus node
* `up/1` - moves to the parent of the focus node
* `left/1` - moves to the left sibling of the focus node
* `leftmost/1` - moves to the leftmost sibling of the focus node
* `right/1` - moves to the right sibling of the focus node
* `rightmost/1` - moves to the rightmost sibling of the focus node
* `next/1` - moves to the next node in a depth-first traversal
* `prev/1` - moves to the previous node in a depth-first traversal
## Enumeration
  `Zippex` implements the `Enumerable` protocol, which allows its values
to be enumerated in a depth-first traversal.
## Updates
The focus node can be modified using the functions `edit/2` or `edit/3`.
  It can be removed, along with its children, using the `remove/1` function,
after which the focus is moved to the previous node in a depth-first
traversal.
"""
import Kernel, except: [node: 1]
alias Zippex.Context
alias Zippex.Meta
defstruct [:spec, :node, :ctx]
@type t :: %__MODULE__{spec: Meta.t(), node: element, ctx: Context.t() | :end}
@type edit_fun :: (element -> element)
@type edit_with_args_fun :: (element, args -> element)
@type element :: any
@type args :: any
@doc """
Returns a new Zipper for a given `node` element.
`is_branch_fun` receives a node and returns `true` if it is a branch, or
`false` otherwise.
`children_fun` receives a node (which is a branch) and returns a list of
  its child nodes.
`make_node_fun` receives a parent node and a list of child nodes and
returns a new node.
"""
@spec new(Meta.is_branch_fun(), Meta.children_fun(), Meta.make_node_fun(), element) :: t
def new(is_branch, children, make_node, root) do
spec = Meta.new(is_branch, children, make_node)
%Zippex{
node: root,
spec: spec,
ctx: %Context{}
}
end
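  # Minimal usage sketch (assumed example over plain nested lists): lists are
  # branches, their elements are children, and a branch is rebuilt by simply
  # taking the new children.
  #
  #     zipper =
  #       Zippex.new(&is_list/1, & &1, fn _node, children -> children end, [1, [2, 3], 4])
  #
  #     zipper |> Zippex.down() |> Zippex.focus() #=> 1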
@doc """
Returns the focus node of a zipper.
"""
@spec focus(t) :: element
def focus(zipper)
def focus(%Zippex{node: n}), do: n
@doc """
Returns the root node of the zipper.
"""
@spec root(t) :: element
def root(%Zippex{} = zipper) do
zipper |> head() |> focus()
end
@doc """
Returns the path to the focus node.
"""
@spec path(t) :: list(element)
def path(%Zippex{ctx: ctx}) do
Context.path(ctx, [])
end
@doc """
Returns the left siblings of the focus node.
"""
@spec lefts(t) :: list(element)
def lefts(%Zippex{ctx: %{left: ls}}), do: ls
@doc """
Returns the right siblings of the focus node.
"""
@spec rights(t) :: list(element)
def rights(%Zippex{ctx: %{right: rs}}), do: rs
@doc """
Moves to the head of the zipper.
"""
  @spec head(t) :: t
def head(%Zippex{} = zipper) do
case up(zipper) do
nil -> leftmost(zipper)
z -> head(z)
end
end
@doc """
Moves to the left sibling of the focus node.
Returns the updated zipper, or `nil` if the focus node has no left sibling.
"""
@spec left(t) :: t | nil
def left(%Zippex{node: n, ctx: ctx} = zipper) do
case ctx do
%{left: []} ->
nil
%{left: [prev | left], right: right} ->
ctx = %{ctx | left: left, right: [n | right]}
%{zipper | node: prev, ctx: ctx}
end
end
@doc """
Moves to the leftmost sibling of the focus node.
Returns the updated zipper.
"""
@spec leftmost(t) :: t
def leftmost(%Zippex{node: n, ctx: ctx} = zipper) do
case ctx do
%{left: []} ->
zipper
%{left: ls, right: rs} ->
[leftmost | right] = Enum.reduce(ls, [n | rs], &[&1 | &2])
ctx = %{ctx | left: [], right: right}
%{zipper | node: leftmost, ctx: ctx}
end
end
@doc """
Moves to the right sibling of the focus node.
Returns the updated zipper, or `nil` if the focus node has no right sibling.
"""
@spec right(t) :: t | nil
def right(%Zippex{node: n, ctx: ctx} = zipper) do
case ctx do
%{right: []} ->
nil
%{left: left, right: [next | right]} ->
ctx = %{ctx | left: [n | left], right: right}
%{zipper | node: next, ctx: ctx}
end
end
@doc """
Moves to the rightmost sibling of the focus node.
Returns the updated zipper.
"""
@spec rightmost(t) :: t
def rightmost(%Zippex{node: n, ctx: ctx} = zipper) do
case ctx do
%{right: []} ->
zipper
%{left: ls, right: rs} ->
[rightmost | left] = Enum.reduce(rs, [n | ls], &[&1 | &2])
ctx = %{ctx | left: left, right: []}
%{zipper | node: rightmost, ctx: ctx}
end
end
@doc """
Moves to the parent of the focus node.
Returns the updated zipper, or `nil` if the focus node has no parent.
"""
@spec up(t) :: t | nil
def up(%Zippex{node: n, ctx: ctx, spec: spec} = zipper) do
case ctx do
%{parent: nil} ->
nil
%{parent: parent, ctx: parent_ctx, dirty: false} ->
%{zipper | node: parent, ctx: parent_ctx}
%{left: left, right: right, parent: parent, ctx: parent_ctx, dirty: true} ->
children = Enum.reverse(left) ++ [n | right]
parent = Meta.make_node(spec, parent, children)
%{zipper | node: parent, ctx: %{parent_ctx | dirty: true}}
end
end
@doc """
Moves to the first child of the focus node.
Returns the updated zipper, or `nil` if the focus node has no children.
"""
@spec down(t) :: t | nil
def down(%Zippex{ctx: parent_ctx, node: parent, spec: spec} = zipper) do
if Meta.is_branch(spec, parent) do
case Meta.children(spec, parent) do
[child | right] ->
ctx = %Context{left: [], right: right, parent: parent, ctx: parent_ctx}
%{zipper | node: child, ctx: ctx}
_ ->
nil
end
end
end
@doc """
Moves to the next node of the focus node in a depth-first traversal.
"""
@spec next(t) :: t
def next(%Zippex{spec: spec, node: n} = zipper) do
if Meta.is_branch(spec, n) do
down(zipper)
else
case right(zipper) do
nil -> next_recur(zipper)
right -> right
end
end
end
@spec next_recur(t) :: t
defp next_recur(%Zippex{} = zipper) do
case up(zipper) do
nil ->
%{zipper | ctx: :end}
z ->
case right(z) do
nil -> next_recur(z)
right -> right
end
end
end
@doc """
Moves to the previous node of the focus node in a depth-first traversal.
"""
  @spec prev(t) :: t | nil
def prev(%Zippex{ctx: ctx} = zipper) do
case ctx do
%{left: []} -> up(zipper)
_ -> prev_recur(zipper)
end
end
@spec prev_recur(t) :: t
defp prev_recur(%Zippex{} = zipper) do
case down(zipper) do
nil ->
zipper
z ->
z |> rightmost() |> prev_recur()
end
end
@doc """
Removes the focus node, moving the focus to the node that would have
preceded it in a depth-first traversal.
"""
@spec remove(t) :: t
def remove(%Zippex{ctx: ctx, spec: spec} = zipper) do
case ctx do
%{ctx: nil} ->
raise(ArgumentError, "can't remove root")
%{left: [], right: right, parent: parent, ctx: parent_ctx} ->
parent_ctx = %{parent_ctx | dirty: true}
%{zipper | node: Meta.make_node(spec, parent, right), ctx: parent_ctx}
%{left: [l | ls]} ->
ctx = %{ctx | left: ls, dirty: true}
%{zipper | node: l, ctx: ctx} |> remove_prev()
end
end
@spec remove_prev(t) :: t
defp remove_prev(%Zippex{} = zipper) do
case down(zipper) do
nil -> zipper
z -> z |> rightmost() |> remove_prev()
end
end
@doc """
Modifies the focus node by applying a function to it.
"""
@spec edit(t, edit_fun) :: t
def edit(%Zippex{node: n, ctx: ctx} = zipper, fun) do
%{zipper | node: fun.(n), ctx: %{ctx | dirty: true}}
end
@doc """
Modifies the focus node by applying a function to it.
"""
@spec edit(t, edit_with_args_fun, args) :: t
def edit(%Zippex{node: n, ctx: ctx} = zipper, fun, args) do
%{zipper | node: fun.(n, args), ctx: %{ctx | dirty: true}}
end
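  # Editing sketch (continuing the nested-list example above): increment the
  # focused leaf, then rebuild the whole tree from the root.
  #
  #     zipper
  #     |> Zippex.down()
  #     |> Zippex.edit(&(&1 + 1))
  #     |> Zippex.root()
  #     #=> [2, [2, 3], 4]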
defimpl Enumerable do
@impl true
def reduce(zipper, acc, fun)
def reduce(_zipper, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(zipper, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(zipper, &1, fun)}
def reduce(%Zippex{ctx: ctx, node: n} = zipper, {:cont, acc}, fun) do
case ctx do
:end ->
{:done, acc}
_ ->
zipper
|> Zippex.next()
|> reduce(fun.(n, acc), fun)
end
end
@impl true
def count(_zipper), do: {:error, __MODULE__}
@impl true
def member?(_zipper, _element), do: {:error, __MODULE__}
@impl true
def slice(_zipper), do: {:error, __MODULE__}
end
end
# source file: lib/zippex.ex
defmodule Machinery.Plug do
@moduledoc """
This Plug module is the entry point for the Machinery Dashboard.
It's supposed to be used on the Endpoint of a Phoenix application,
and it's responsible to call the Machinery.Endpoint.
You're expected to use this as a plug on the main application, and it also
accepts an optional parameter that is the path you want to mount the
Machinery dashboard if it's other than `/machinery`.
## Parameters
- `path`: A string with the path you want to mount the dashboard
if other than `/machinery`.
## Example
```
defmodule YourProject.Endpoint do
plug Machinery.Plug
end
```
"""
import Plug.Conn
@default_path "/machinery"
@doc false
def init(default), do: default
@doc """
call/2 Intercepts the request as a plug and check if it matches with the
defined path passed as argument, if it does it moves on calling the
process/2 that will prepare the request and pass it through the
Machinery.Endpoint.
"""
def call(conn, [] = _path), do: call(conn, @default_path, matches?(conn, @default_path))
def call(conn, path), do: call(conn, path, matches?(conn, path))
def call(conn, path, true), do: process(conn, path)
def call(conn, _path, false), do: conn
@doc """
Function responsible for redirect the request to Machinery.Endpoint.
"""
def process(conn, path) do
module = Application.get_env(:machinery, :module)
model = Application.get_env(:machinery, :model)
repo = Application.get_env(:machinery, :repo)
conn
|> assign(:mount_path, path)
|> assign(:module, module)
|> assign(:model, model)
|> assign(:repo, repo)
|> forward(path)
|> halt
end
defp path_segments(path) do
path
|> String.split("/")
|> Enum.reject(fn(x) -> x == "" end)
end
defp matches?(conn, path) do
String.starts_with?(conn.request_path, path)
end
defp forward(conn, path) do
Phoenix.Router.Route.forward(conn, path_segments(path), Machinery.Endpoint, [])
end
end
# source file: lib/machinery/plug.ex
defmodule StrawHat.Map.States do
@moduledoc """
States management use cases.
"""
import Ecto.Query
alias StrawHat.{Error, Response}
alias StrawHat.Map.{City, County, State}
  @spec get_states(Ecto.Repo.t(), keyword) :: Scrivener.Page.t()
def get_states(repo, pagination \\ []) do
Scrivener.paginate(State, Scrivener.Config.new(repo, [], pagination))
end
@spec create_state(Ecto.Repo.t(), State.state_attrs()) ::
Response.t(State.t(), Ecto.Changeset.t())
def create_state(repo, state_attrs) do
%State{}
|> State.changeset(state_attrs)
|> repo.insert()
|> Response.from_value()
end
@spec update_state(Ecto.Repo.t(), State.t(), State.state_attrs()) ::
Response.t(State.t(), Ecto.Changeset.t())
def update_state(repo, %State{} = state, state_attrs) do
state
|> State.changeset(state_attrs)
|> repo.update()
|> Response.from_value()
end
@spec destroy_state(Ecto.Repo.t(), State.t()) :: Response.t(State.t(), Ecto.Changeset.t())
def destroy_state(repo, %State{} = state) do
state
|> repo.delete()
|> Response.from_value()
end
@spec find_state(Ecto.Repo.t(), String.t()) :: Response.t(State.t(), Error.t())
def find_state(repo, state_id) do
repo
|> get_state(state_id)
|> Response.from_value(
Error.new("straw_hat_map.state.not_found", metadata: [state_id: state_id])
)
end
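  # Usage sketch (`MyApp.Repo` is a hypothetical Ecto repo):
  #
  #     case StrawHat.Map.States.find_state(MyApp.Repo, state_id) do
  #       {:ok, state} -> state
  #       {:error, %StrawHat.Error{}} -> nil
  #     end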
@spec get_state(Ecto.Repo.t(), String.t()) :: State.t() | nil | no_return
def get_state(repo, state_id) do
repo.get(State, state_id)
end
@spec get_states_by_ids(Ecto.Repo.t(), [integer()]) :: [State.t()] | no_return()
def get_states_by_ids(repo, state_ids) do
State
|> select([state], state)
|> where([state], state.id in ^state_ids)
|> repo.all()
end
@spec get_cities(Ecto.Repo.t(), State.t()) :: [City.t()] | no_return()
def get_cities(repo, %State{} = state) do
City
|> select([city], city)
|> where([city], state_id: ^state.id)
|> repo.all()
end
@spec get_cities(Ecto.Repo.t(), [integer()]) :: [City.t()] | no_return()
def get_cities(repo, state_ids) when is_list(state_ids) do
City
|> select([city], city)
|> where([city], city.state_id in ^state_ids)
|> repo.all()
end
@spec get_counties(Ecto.Repo.t(), [integer()]) :: [County.t()] | no_return()
def get_counties(repo, state_ids) when is_list(state_ids) do
County
|> select([county], county)
|> where([county], county.state_id in ^state_ids)
|> repo.all()
end
end
# source file: lib/straw_hat_map/world/states/states_use_cases.ex
defmodule Runlet.CLI do
@moduledoc "Compile runlet expresions"
@type t :: {[atom], atom} | {{[atom], atom}, [String.t() | integer]}
@type e :: String.t() | [t]
@spec aliases() :: [t]
def aliases(), do: Runlet.Config.get(:runlet, :aliases, [])
def exec(ast), do: exec!(ast, [])
def exec(ast, bind), do: exec!(ast, bind)
def exec!(ast), do: exec!(ast, [])
def exec!(ast, bind) do
{code, _} = Code.eval_quoted(ast, bind, __ENV__)
code
end
@doc """
Compile a runlet expression to AST.
Commands are looked up in the application environment:
Application.get_env(:runlet, :aliases, [])
"""
@spec compile!(e) :: [t]
def compile!(pipeline) do
compile!(pipeline, aliases())
end
@doc """
Compile a runlet expression to AST.
## Examples
iex> Runlet.CLI.compile!(
...> ~s(test "foo" | bar 123),
...> [{"test", [{[:Fake, :Cmd, :AddArg], :exec},
...> {{:Fake, :Cmd, :StaticArg}, ["static arg"]}]},
...> {"bar", {[:Fake, :Cmd, :IntArg], :exec}}])
{:|>, [context: Elixir, import: Kernel],
[{:|>, [context: Elixir, import: Kernel],
[{{:., [], [{:__aliases__, [alias: false], [:Fake, :Cmd, :AddArg]}, :exec]},
[], ["foo"]},
{{:., [],
[{:__aliases__, [alias: false], {:Fake, :Cmd, :StaticArg}},
["static arg"]]}, [], ["foo"]}]},
{{:., [], [{:__aliases__, [alias: false], [:Fake, :Cmd, :IntArg]}, :exec]},
[], '{'}]}
"""
@spec compile!(e, [t]) :: [t]
def compile!(pipeline, commands) do
case compile(pipeline, commands) do
{:error, error} -> throw(error)
{:ok, insn} -> insn
end
end
@doc """
Compile a runlet expression to AST
Commands are looked up in the application environment:
Application.get_env(:runlet, :aliases, [])
"""
@spec compile(e) :: {:ok, [t]} | {:error, String.t()}
def compile(pipeline) do
compile(pipeline, aliases())
end
@doc """
Compile a runlet expression to AST
## Examples
iex> Runlet.CLI.compile(
...> ~s(test "foo" | bar 123),
...> [{"test", [{[:Fake, :Cmd, :AddArg], :exec},
...> {{:Fake, :Cmd, :StaticArg}, ["static arg"]}]},
...> {"bar", {[:Fake, :Cmd, :IntArg], :exec}}])
{:ok,
{:|>, [context: Elixir, import: Kernel],
[{:|>, [context: Elixir, import: Kernel],
[{{:., [], [{:__aliases__, [alias: false], [:Fake, :Cmd, :AddArg]}, :exec]},
[], ["foo"]},
{{:., [],
[{:__aliases__, [alias: false], {:Fake, :Cmd, :StaticArg}},
["static arg"]]}, [], ["foo"]}]},
{{:., [], [{:__aliases__, [alias: false], [:Fake, :Cmd, :IntArg]}, :exec]},
[], '{'}]}}
"""
@spec compile(e, [t]) :: {:ok, [t]} | {:error, String.t()}
def compile(pipeline, commands) do
with {:ok, code} <- ast(pipeline, commands) do
{:ok, pipe(code)}
end
end
def ast(pipeline, commands) do
fun = fn {cmd, arg} ->
maybe_argv = fn
{{_mod, _fun}, _argv} = t -> t
{mod, fun} -> {{mod, fun}, arg}
end
case List.keyfind(commands, cmd, 0) do
nil ->
{:error, "#{cmd}: not found"}
{^cmd, {{_mod, _fun}, _argv} = t} ->
{:ok, [t]}
{^cmd, {mod, fun}} ->
{:ok, [{{mod, fun}, arg}]}
{^cmd, form} when is_list(form) ->
{:ok, form |> Enum.map(maybe_argv) |> Enum.reverse()}
end
end
with {:ok, command} <- parse(pipeline) do
expand(command, fun)
end
end
def pipe(code) do
Enum.reduce(code, fn term, acc ->
{:|>, [context: Elixir, import: Kernel], [acc, term]}
end)
end
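  # For illustration, `pipe/1` folds a list of AST nodes into a nested `|>`
  # chain, so `[a, b, c]` becomes the AST for `a |> b |> c`:
  #
  #     iex> [1, 2, 3] |> Runlet.CLI.pipe() |> Macro.to_string()
  #     "1 |> 2 |> 3"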
@spec expand([t], fun) :: {:ok, [t]} | {:error, String.t()}
def expand(pipeline, fun) do
with {:ok, cmds} <- substitute(pipeline, fun) do
{:ok, Enum.map(cmds, fn cmd -> to_ast(cmd) end)}
end
end
@spec substitute([t], fun) :: {:ok, [t]} | {:error, String.t()}
def substitute(cmds, fun), do: substitute(cmds, fun, [])
def substitute([], _fun, acc) do
{:ok, Enum.reverse(List.flatten(acc))}
end
def substitute([cmd | cmds], fun, acc) do
case fun.(cmd) do
{:error, _} = error ->
error
{:ok, form} ->
substitute(cmds, fun, [form | acc])
end
end
def to_ast({{mod, fun}, arg}) do
{{:., [], [{:__aliases__, [alias: false], mod}, fun]}, [], arg}
end
@doc """
Tokenize a runlet expression.
## Examples
iex> Runlet.CLI.lex(~s(test "foo" | bar 123 | out > 456))
{:ok,
[
{:command, 1, "test"},
{:string, 1, 'foo'},
{:|, 1},
{:command, 1, "bar"},
{:integer, 1, 123},
{:|, 1},
{:command, 1, "out"},
{:>, 1},
{:integer, 1, 456}
], 1}
"""
def lex(command) do
command
|> String.to_charlist()
|> :runlet_lexer.string()
end
@doc """
Parse a runlet expression.
## Examples
iex> Runlet.CLI.parse(~s(test "foo" | bar 123 | out > 456))
{:ok, [{"test", ["foo"]}, {"bar", '{'}, {"out", []}, {">", [456]}]}
"""
@spec parse(e) ::
{:ok, [{String.t(), [Runlet.PID.t() | String.t()]}]}
| {:error, String.t()}
def parse(command) when is_binary(command) do
result =
with {:ok, tokens, _} <- lex(command) do
:runlet_parser.parse(tokens)
end
case result do
{:error, {_line, :runlet_lexer, error}, _n} ->
{:error, "#{:runlet_lexer.format_error(error)}"}
{:error, {_line, :runlet_parser, error}} ->
{:error, "#{:runlet_parser.format_error(error)}"}
{:ok, pipeline} ->
{:ok, pipeline}
end
end
def parse(command) when is_list(command), do: {:ok, command}
@doc """
Insert a runlet pipeline into another pipeline.
## Examples
iex> Runlet.CLI.insert(~s(test "foo" | bar 123 | another),
...> ~s(insert | here), 2)
{:ok,
[{"test", ["foo"]}, {"bar", '{'}, {"insert", []}, {"here", []},
{"another", []}]}
"""
@spec insert(e, String.t() | [t], integer) ::
{:ok, [t]} | {:error, String.t()}
def insert(pipeline, command, position) do
with {:ok, code} <- parse(pipeline),
{:ok, insn} <- parse(command) do
{:ok,
code
|> List.insert_at(position, insn)
|> List.flatten()}
end
end
@doc """
Add a runlet expression at the start of a pipeline.
## Examples
iex> Runlet.CLI.prepend(~s(test "foo" | bar 123 | another), ~s(insert | here))
{:ok,
[{"insert", []}, {"here", []}, {"test", ["foo"]}, {"bar", '{'},
{"another", []}]}
"""
@spec prepend(e, e) :: {:ok, [t]} | {:error, String.t()}
def prepend(code, command), do: insert(code, command, 0)
@doc """
Add a runlet expression to the end of a pipeline.
## Examples
iex> Runlet.CLI.append(~s(test "foo" | bar 123 | another), ~s(insert | here))
{:ok,
[{"test", ["foo"]}, {"bar", '{'}, {"another", []}, {"insert", []},
{"here", []}]}
"""
@spec append(e, e) :: {:ok, [t]} | {:error, String.t()}
def append(code, command), do: insert(code, command, -1)
end
# source file: lib/runlet/cli.ex
defmodule Bunt.ANSI.Sequence do
@moduledoc false
defmacro defalias(alias_name, original_name) do
quote bind_quoted: [alias_name: alias_name, original_name: original_name] do
def unquote(alias_name)() do
unquote(original_name)()
end
defp format_sequence(unquote(alias_name)) do
unquote(original_name)()
end
end
end
defmacro defsequence(name, code, prefix \\ "", terminator \\ "m") do
quote bind_quoted: [name: name, code: code, prefix: prefix, terminator: terminator] do
def unquote(name)() do
"\e[#{unquote(prefix)}#{unquote(code)}#{unquote(terminator)}"
end
defp format_sequence(unquote(name)) do
unquote(name)()
end
end
end
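    # For illustration, `defsequence :red, 31` expands (roughly) to:
    #
    #     def red(), do: "\e[31m"
    #     defp format_sequence(:red), do: red()
    #
    # Each sequence thus gets a public function returning its escape code and
    # a private `format_sequence/1` clause used by `format/2`.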
end
defmodule Bunt.ANSI do
@moduledoc """
Functionality to render ANSI escape sequences.
[ANSI escape sequences](https://en.wikipedia.org/wiki/ANSI_escape_code)
are characters embedded in text used to control formatting, color, and
other output options on video text terminals.
"""
import Bunt.ANSI.Sequence
@color_tuples [
{nil, :color16, 16, {0, 0, 0}},
{nil, :color17, 17, {0, 0, 95}},
{"darkblue", :color18, 18, {0, 0, 135}},
{nil, :color19, 19, {0, 0, 175}},
{"mediumblue", :color20, 20, {0, 0, 215}},
{nil, :color21, 21, {0, 0, 255}},
{"darkgreen", :color22, 22, {0, 95, 0}},
{"darkslategray", :color23, 23, {0, 95, 95}},
{nil, :color24, 24, {0, 95, 135}},
{nil, :color25, 25, {0, 95, 175}},
{nil, :color26, 26, {0, 95, 215}},
{nil, :color27, 27, {0, 95, 255}},
{nil, :color28, 28, {0, 135, 0}},
{nil, :color29, 29, {0, 135, 95}},
{"darkcyan", :color30, 30, {0, 135, 135}},
{nil, :color31, 31, {0, 135, 175}},
{nil, :color32, 32, {0, 135, 215}},
{nil, :color33, 33, {0, 135, 255}},
{nil, :color34, 34, {0, 175, 0}},
{nil, :color35, 35, {0, 175, 95}},
{nil, :color36, 36, {0, 175, 135}},
{nil, :color37, 37, {0, 175, 175}},
{nil, :color38, 38, {0, 175, 215}},
{"deepskyblue", :color39, 39, {0, 175, 255}},
{nil, :color40, 40, {0, 215, 0}},
{nil, :color41, 41, {0, 215, 95}},
{nil, :color42, 42, {0, 215, 135}},
{nil, :color43, 43, {0, 215, 175}},
{nil, :color44, 44, {0, 215, 215}},
{nil, :color45, 45, {0, 215, 255}},
{nil, :color46, 46, {0, 255, 0}},
{nil, :color47, 47, {0, 255, 95}},
{"springgreen", :color48, 48, {0, 255, 135}},
{nil, :color49, 49, {0, 255, 175}},
{nil, :color50, 50, {0, 255, 215}},
{"aqua", :color51, 51, {0, 255, 255}},
{nil, :color52, 52, {95, 0, 0}},
{nil, :color53, 53, {95, 0, 95}},
{nil, :color54, 54, {95, 0, 135}},
{nil, :color55, 55, {95, 0, 175}},
{nil, :color56, 56, {95, 0, 215}},
{nil, :color57, 57, {95, 0, 255}},
{nil, :color58, 58, {95, 95, 0}},
{"dimgray", :color59, 59, {95, 95, 95}},
{nil, :color60, 60, {95, 95, 135}},
{nil, :color61, 61, {95, 95, 175}},
{nil, :color62, 62, {95, 95, 215}},
{nil, :color63, 63, {95, 95, 255}},
{nil, :color64, 64, {95, 135, 0}},
{nil, :color65, 65, {95, 135, 95}},
{nil, :color66, 66, {95, 135, 135}},
{"steelblue", :color67, 67, {95, 135, 175}},
{nil, :color68, 68, {95, 135, 215}},
{nil, :color69, 69, {95, 135, 255}},
{nil, :color70, 70, {95, 175, 0}},
{nil, :color71, 71, {95, 175, 95}},
{nil, :color72, 72, {95, 175, 135}},
{nil, :color73, 73, {95, 175, 175}},
{nil, :color74, 74, {95, 175, 215}},
{nil, :color75, 75, {95, 175, 255}},
{nil, :color76, 76, {95, 215, 0}},
{nil, :color77, 77, {95, 215, 95}},
{nil, :color78, 78, {95, 215, 135}},
{nil, :color79, 79, {95, 215, 175}},
{nil, :color80, 80, {95, 215, 215}},
{nil, :color81, 81, {95, 215, 255}},
{nil, :color82, 82, {95, 255, 0}},
{nil, :color83, 83, {95, 255, 95}},
{nil, :color84, 84, {95, 255, 135}},
{nil, :color85, 85, {95, 255, 175}},
{nil, :color86, 86, {95, 255, 215}},
{nil, :color87, 87, {95, 255, 255}},
{"darkred", :color88, 88, {135, 0, 0}},
{nil, :color89, 89, {135, 0, 95}},
{"darkmagenta", :color90, 90, {135, 0, 135}},
{nil, :color91, 91, {135, 0, 175}},
{nil, :color92, 92, {135, 0, 215}},
{nil, :color93, 93, {135, 0, 255}},
{nil, :color94, 94, {135, 95, 0}},
{nil, :color95, 95, {135, 95, 95}},
{nil, :color96, 96, {135, 95, 135}},
{nil, :color97, 97, {135, 95, 175}},
{nil, :color98, 98, {135, 95, 215}},
{nil, :color99, 99, {135, 95, 255}},
{"olive", :color100, 100, {135, 135, 0}},
{nil, :color101, 101, {135, 135, 95}},
{nil, :color102, 102, {135, 135, 135}},
{nil, :color103, 103, {135, 135, 175}},
{nil, :color104, 104, {135, 135, 215}},
{nil, :color105, 105, {135, 135, 255}},
{nil, :color106, 106, {135, 175, 0}},
{nil, :color107, 107, {135, 175, 95}},
{nil, :color108, 108, {135, 175, 135}},
{nil, :color109, 109, {135, 175, 175}},
{nil, :color110, 110, {135, 175, 215}},
{nil, :color111, 111, {135, 175, 255}},
{nil, :color112, 112, {135, 215, 0}},
{nil, :color113, 113, {135, 215, 95}},
{nil, :color114, 114, {135, 215, 135}},
{nil, :color115, 115, {135, 215, 175}},
{nil, :color116, 116, {135, 215, 215}},
{nil, :color117, 117, {135, 215, 255}},
{"chartreuse", :color118, 118, {135, 255, 0}},
{nil, :color119, 119, {135, 255, 95}},
{nil, :color120, 120, {135, 255, 135}},
{nil, :color121, 121, {135, 255, 175}},
{"aquamarine", :color122, 122, {135, 255, 215}},
{nil, :color123, 123, {135, 255, 255}},
{nil, :color124, 124, {175, 0, 0}},
{nil, :color125, 125, {175, 0, 95}},
{nil, :color126, 126, {175, 0, 135}},
{nil, :color127, 127, {175, 0, 175}},
{nil, :color128, 128, {175, 0, 215}},
{nil, :color129, 129, {175, 0, 255}},
{nil, :color130, 130, {175, 95, 0}},
{nil, :color131, 131, {175, 95, 95}},
{nil, :color132, 132, {175, 95, 135}},
{nil, :color133, 133, {175, 95, 175}},
{nil, :color134, 134, {175, 95, 215}},
{nil, :color135, 135, {175, 95, 255}},
{nil, :color136, 136, {175, 135, 0}},
{nil, :color137, 137, {175, 135, 95}},
{nil, :color138, 138, {175, 135, 135}},
{nil, :color139, 139, {175, 135, 175}},
{nil, :color140, 140, {175, 135, 215}},
{nil, :color141, 141, {175, 135, 255}},
{nil, :color142, 142, {175, 175, 0}},
{nil, :color143, 143, {175, 175, 95}},
{nil, :color144, 144, {175, 175, 135}},
{nil, :color145, 145, {175, 175, 175}},
{nil, :color146, 146, {175, 175, 215}},
{nil, :color147, 147, {175, 175, 255}},
{nil, :color148, 148, {175, 215, 0}},
{nil, :color149, 149, {175, 215, 95}},
{nil, :color150, 150, {175, 215, 135}},
{nil, :color151, 151, {175, 215, 175}},
{nil, :color152, 152, {175, 215, 215}},
{nil, :color153, 153, {175, 215, 255}},
{"greenyellow", :color154, 154, {175, 255, 0}},
{nil, :color155, 155, {175, 255, 95}},
{nil, :color156, 156, {175, 255, 135}},
{nil, :color157, 157, {175, 255, 175}},
{nil, :color158, 158, {175, 255, 215}},
{nil, :color159, 159, {175, 255, 255}},
{nil, :color160, 160, {215, 0, 0}},
{nil, :color161, 161, {215, 0, 95}},
{nil, :color162, 162, {215, 0, 135}},
{nil, :color163, 163, {215, 0, 175}},
{nil, :color164, 164, {215, 0, 215}},
{nil, :color165, 165, {215, 0, 255}},
{nil, :color166, 166, {215, 95, 0}},
{nil, :color167, 167, {215, 95, 95}},
{nil, :color168, 168, {215, 95, 135}},
{nil, :color169, 169, {215, 95, 175}},
{nil, :color170, 170, {215, 95, 215}},
{nil, :color171, 171, {215, 95, 255}},
{"chocolate", :color172, 172, {215, 135, 0}},
{nil, :color173, 173, {215, 135, 95}},
{nil, :color174, 174, {215, 135, 135}},
{nil, :color175, 175, {215, 135, 175}},
{nil, :color176, 176, {215, 135, 215}},
{nil, :color177, 177, {215, 135, 255}},
{"goldenrod", :color178, 178, {215, 175, 0}},
{nil, :color179, 179, {215, 175, 95}},
{nil, :color180, 180, {215, 175, 135}},
{nil, :color181, 181, {215, 175, 175}},
{nil, :color182, 182, {215, 175, 215}},
{nil, :color183, 183, {215, 175, 255}},
{nil, :color184, 184, {215, 215, 0}},
{nil, :color185, 185, {215, 215, 95}},
{nil, :color186, 186, {215, 215, 135}},
{nil, :color187, 187, {215, 215, 175}},
{"lightgray", :color188, 188, {215, 215, 215}},
{nil, :color189, 189, {215, 215, 255}},
{nil, :color190, 190, {215, 255, 0}},
{nil, :color191, 191, {215, 255, 95}},
{nil, :color192, 192, {215, 255, 135}},
{nil, :color193, 193, {215, 255, 175}},
{"beige", :color194, 194, {215, 255, 215}},
{"lightcyan", :color195, 195, {215, 255, 255}},
{nil, :color196, 196, {255, 0, 0}},
{nil, :color197, 197, {255, 0, 95}},
{nil, :color198, 198, {255, 0, 135}},
{nil, :color199, 199, {255, 0, 175}},
{nil, :color200, 200, {255, 0, 215}},
{"fuchsia", :color201, 201, {255, 0, 255}},
{"orangered", :color202, 202, {255, 95, 0}},
{nil, :color203, 203, {255, 95, 95}},
{nil, :color204, 204, {255, 95, 135}},
{"hotpink", :color205, 205, {255, 95, 175}},
{nil, :color206, 206, {255, 95, 215}},
{nil, :color207, 207, {255, 95, 255}},
{"darkorange", :color208, 208, {255, 135, 0}},
{"coral", :color209, 209, {255, 135, 95}},
{nil, :color210, 210, {255, 135, 135}},
{nil, :color211, 211, {255, 135, 175}},
{nil, :color212, 212, {255, 135, 215}},
{nil, :color213, 213, {255, 135, 255}},
{"orange", :color214, 214, {255, 175, 0}},
{nil, :color215, 215, {255, 175, 95}},
{nil, :color216, 216, {255, 175, 135}},
{nil, :color217, 217, {255, 175, 175}},
{nil, :color218, 218, {255, 175, 215}},
{nil, :color219, 219, {255, 175, 255}},
{"gold", :color220, 220, {255, 215, 0}},
{nil, :color221, 221, {255, 215, 95}},
{"khaki", :color222, 222, {255, 215, 135}},
{"moccasin", :color223, 223, {255, 215, 175}},
{"mistyrose", :color224, 224, {255, 215, 215}},
{nil, :color225, 225, {255, 215, 255}},
{nil, :color226, 226, {255, 255, 0}},
{nil, :color227, 227, {255, 255, 95}},
{nil, :color228, 228, {255, 255, 135}},
{nil, :color229, 229, {255, 255, 175}},
{"lightyellow", :color230, 230, {255, 255, 215}},
{nil, :color231, 231, {255, 255, 255}},
    # xterm-256 grayscale ramp (8..238 in steps of 10)
    {nil, :color232, 232, {8, 8, 8}},
    {nil, :color233, 233, {18, 18, 18}},
    {nil, :color234, 234, {28, 28, 28}},
    {nil, :color235, 235, {38, 38, 38}},
    {nil, :color236, 236, {48, 48, 48}},
    {nil, :color237, 237, {58, 58, 58}},
    {nil, :color238, 238, {68, 68, 68}},
    {nil, :color239, 239, {78, 78, 78}},
    {nil, :color240, 240, {88, 88, 88}},
    {nil, :color241, 241, {98, 98, 98}},
    {nil, :color242, 242, {108, 108, 108}},
    {nil, :color243, 243, {118, 118, 118}},
    {nil, :color244, 244, {128, 128, 128}},
    {nil, :color245, 245, {138, 138, 138}},
    {nil, :color246, 246, {148, 148, 148}},
    {nil, :color247, 247, {158, 158, 158}},
    {nil, :color248, 248, {168, 168, 168}},
    {nil, :color249, 249, {178, 178, 178}},
    {nil, :color250, 250, {188, 188, 188}},
    {nil, :color251, 251, {198, 198, 198}},
    {nil, :color252, 252, {208, 208, 208}},
    {nil, :color253, 253, {218, 218, 218}},
    {nil, :color254, 254, {228, 228, 228}},
    {nil, :color255, 255, {238, 238, 238}}
]
def color_tuples, do: @color_tuples
for {name, color, code, _} <- @color_tuples do
@doc "Sets foreground color to #{color}"
defsequence color, code, "38;5;"
@doc "Sets background color to #{color}"
defsequence :"#{color}_background", code, "48;5;"
if name do
@doc "Sets foreground color to #{name}"
defsequence :"#{name}", code, "38;5;"
@doc "Sets background color to #{name}"
defsequence :"#{name}_background", code, "48;5;"
end
end
@color_aliases Application.get_env(:bunt, :color_aliases, [])
def color_aliases, do: @color_aliases
for {alias_name, original_name} <- @color_aliases do
defalias alias_name, original_name
defalias :"#{alias_name}_background", :"#{original_name}_background"
end
@typep ansicode :: atom()
@typep ansilist :: maybe_improper_list(char() | ansicode() | binary() | ansilist(), binary() | ansicode() | [])
@type ansidata :: ansilist() | ansicode() | binary()
@doc """
Checks if ANSI coloring is supported and enabled on this machine.
This function simply reads the configuration value for
`:ansi_enabled` in the `:elixir` application. The value is by
default `false` unless Elixir can detect during startup that
both `stdout` and `stderr` are terminals.
"""
@spec enabled? :: boolean
def enabled? do
Application.get_env(:elixir, :ansi_enabled, false)
end
@doc "Resets all attributes"
defsequence :reset, 0
@doc "Bright (increased intensity) or Bold"
defsequence :bright, 1
@doc "Faint (decreased intensity), not widely supported"
defsequence :faint, 2
@doc "Italic: on. Not widely supported. Sometimes treated as inverse"
defsequence :italic, 3
@doc "Underline: Single"
defsequence :underline, 4
@doc "Blink: Slow. Less than 150 per minute"
defsequence :blink_slow, 5
@doc "Blink: Rapid. MS-DOS ANSI.SYS; 150 per minute or more; not widely supported"
defsequence :blink_rapid, 6
@doc "Image: Negative. Swap foreground and background"
defsequence :inverse, 7
@doc "Image: Negative. Swap foreground and background"
defsequence :reverse, 7
@doc "Conceal. Not widely supported"
defsequence :conceal, 8
@doc "Crossed-out. Characters legible, but marked for deletion. Not widely supported"
defsequence :crossed_out, 9
@doc "Sets primary (default) font"
defsequence :primary_font, 10
  for font_n <- 1..9 do
@doc "Sets alternative font #{font_n}"
defsequence :"font_#{font_n}", font_n + 10
end
@doc "Normal color or intensity"
defsequence :normal, 22
@doc "Not italic"
defsequence :not_italic, 23
@doc "Underline: None"
defsequence :no_underline, 24
@doc "Blink: off"
defsequence :blink_off, 25
colors = [:black, :red, :green, :yellow, :blue, :magenta, :cyan, :white]
for {color, code} <- Enum.with_index(colors) do
@doc "Sets foreground color to #{color}"
defsequence color, code + 30
@doc "Sets background color to #{color}"
defsequence :"#{color}_background", code + 40
end
@doc "Default text color"
defsequence :default_color, 39
@doc "Default background color"
defsequence :default_background, 49
@doc "Framed"
defsequence :framed, 51
@doc "Encircled"
defsequence :encircled, 52
@doc "Overlined"
defsequence :overlined, 53
@doc "Not framed or encircled"
defsequence :not_framed_encircled, 54
@doc "Not overlined"
defsequence :not_overlined, 55
@doc "Sends cursor home"
defsequence :home, "", "H"
@doc "Clears screen"
defsequence :clear, "2", "J"
@doc "Clears line"
defsequence :clear_line, "2", "K"
defp format_sequence(other) do
raise ArgumentError, "invalid ANSI sequence specification: #{other}"
end
@doc ~S"""
Formats a chardata-like argument by converting named ANSI sequences into actual
ANSI codes.
The named sequences are represented by atoms.
It will also append an `IO.ANSI.reset/0` to the chardata when a conversion is
performed. If you don't want this behaviour, use `format_fragment/2`.
An optional boolean parameter can be passed to enable or disable
  emitting actual ANSI codes. When `false`, no ANSI codes will be emitted.
By default checks if ANSI is enabled using the `enabled?/0` function.
## Examples
iex> IO.ANSI.format(["Hello, ", :red, :bright, "world!"], true)
[[[[[[], "Hello, "] | "\e[31m"] | "\e[1m"], "world!"] | "\e[0m"]
"""
def format(chardata, emit \\ enabled?()) when is_boolean(emit) do
do_format(chardata, [], [], emit, :maybe)
end
@doc ~S"""
Formats a chardata-like argument by converting named ANSI sequences into actual
ANSI codes.
The named sequences are represented by atoms.
An optional boolean parameter can be passed to enable or disable
  emitting actual ANSI codes. When `false`, no ANSI codes will be emitted.
By default checks if ANSI is enabled using the `enabled?/0` function.
## Examples
iex> IO.ANSI.format_fragment([:bright, 'Word'], true)
[[[[[[] | "\e[1m"], 87], 111], 114], 100]
"""
def format_fragment(chardata, emit \\ enabled?()) when is_boolean(emit) do
do_format(chardata, [], [], emit, false)
end
defp do_format([term | rest], rem, acc, emit, append_reset) do
do_format(term, [rest | rem], acc, emit, append_reset)
end
defp do_format(term, rem, acc, true, append_reset) when is_atom(term) do
do_format([], rem, [acc | format_sequence(term)], true, !!append_reset)
end
defp do_format(term, rem, acc, false, append_reset) when is_atom(term) do
do_format([], rem, acc, false, append_reset)
end
defp do_format(term, rem, acc, emit, append_reset) when not is_list(term) do
do_format([], rem, [acc | [term]], emit, append_reset)
end
defp do_format([], [next | rest], acc, emit, append_reset) do
do_format(next, rest, acc, emit, append_reset)
end
defp do_format([], [], acc, true, true) do
[acc | IO.ANSI.reset]
end
defp do_format([], [], acc, _emit, _append_reset) do
acc
end
end
# source file: deps/bunt/lib/bunt_ansi.ex
defmodule VSCodeExUnitFormatter do
@moduledoc false
use GenServer
alias VSCodeExUnitFormatter.VsSuite
alias VSCodeExUnitFormatter.VsTestCase
@impl GenServer
def init(_opts) do
root_test_suite = %VsSuite{id: "root", label: "ExUnit"}
{:ok, root_test_suite}
end
@impl GenServer
def handle_cast({:suite_started, _opts}, root_test_suite) do
{:noreply, root_test_suite}
end
def handle_cast({:suite_finished, _run_us, _load_us}, root_test_suite) do
root_test_suite
|> Jason.encode!(pretty: true)
|> IO.puts()
{:noreply, root_test_suite}
end
def handle_cast({:module_started, %ExUnit.TestModule{} = test_module}, root_test_suite) do
vscode_suite = VsSuite.new(test_module)
root_test_suite = VsSuite.append_child_suite(root_test_suite, vscode_suite)
{:noreply, root_test_suite}
end
def handle_cast({:module_finished, _test_module}, root_test_suite) do
{:noreply, root_test_suite}
end
def handle_cast({:test_started, _test}, root_test_suite) do
{:noreply, root_test_suite}
end
def handle_cast({:test_finished, %ExUnit.Test{} = test}, root_test_suite) do
test_id = Base.encode16(Atom.to_string(test.name))
root_suite_children =
Enum.map(root_test_suite.children, fn %{children: testcases} = suite ->
%{suite | children: update_test_state(testcases, test, test_id)}
end)
root_test_suite = %{root_test_suite | children: root_suite_children}
{:noreply, root_test_suite}
end
def handle_cast({:case_started, _test}, root_test_suite) do
{:noreply, root_test_suite}
end
def handle_cast({:case_finished, _test}, root_test_suite) do
{:noreply, root_test_suite}
end
defp update_test_state(testcases, %ExUnit.Test{} = exunit_test, test_id)
when is_list(testcases) do
Enum.map(testcases, fn
%{id: id} = vs_test when id == test_id ->
VsTestCase.update_state_from_exunit(vs_test, exunit_test)
vs_test ->
vs_test
end)
end
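  # Usage sketch: register the formatter when starting the suite (typically
  # in test_helper.exs); the JSON test tree is printed on :suite_finished.
  #
  #     ExUnit.start(formatters: [VSCodeExUnitFormatter])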
end
# source file: lib/vs_code_ex_unit_formatter.ex
defmodule Numy.Enumy do
@moduledoc """
Extend Enum for homogeneous enumerables.
"""
@doc """
Check if all elements of a list are integers.
## Examples
iex(1)> import Numy.Enumy
Numy.Enumy
iex(2)> all_integers?([1, 2, 3])
true
iex(3)> all_integers?([1.1, 2, 3])
false
"""
@spec all_integers?(Enumerable.t()) :: boolean
def all_integers?(enumerable) do
Enum.all?(enumerable, fn item -> is_integer(item) end)
end
@doc """
Check if all elements of a list are floats.
## Examples
iex(10)> import Numy.Enumy
Numy.Enumy
iex(11)> all_floats?([1.1, 2.2, 3.3])
true
iex(12)> all_floats?([1.1, 2.2, 3])
false
"""
@spec all_floats?(Enumerable.t()) :: boolean
def all_floats?(enumerable) do
Enum.all?(enumerable, fn item -> is_float(item) end)
end
@spec all_numbers?(Enumerable.t()) :: boolean
def all_numbers?(enumerable) do
Enum.all?(enumerable, fn item -> is_number(item) end)
end
@doc """
Convert all numerical elements of a list to `float` type.
## Examples
iex(13)> all_to_float([1.1, 2.2, 3])
[1.1, 2.2, 3.0]
"""
@spec all_to_float(Enumerable.t()) :: [float]
def all_to_float(enumerable) do
Enum.map(enumerable, fn item ->
cond do
is_float(item) ->
item
is_integer(item) ->
item / 1 # idiomatic way to convert integer to float
true ->
raise "non numerical item"
end
end)
end
@doc """
The dot product is the sum of the products of the corresponding entries
of the two sequences of numbers.
## Examples
iex> dot_product([1,2,3],[2,3,0])
8
"""
@spec dot_product([number], [number]) :: number
  def dot_product([], _vec2), do: 0
def dot_product(vec1, vec2) when is_list(vec1) and is_list(vec2) do
[h1|t1] = vec1
[h2|t2] = vec2
(h1*h2) + dot_product(t1, t2)
end
@doc """
Get mean (average) of a sequence of numbers.
## Examples
iex(14)> mean([1,2,3,4,5,6,7,8,9])
5.0
"""
@spec mean(Enumerable.t()) :: float
def mean(enumerable) do
Enum.sum(enumerable) / Enum.count(enumerable)
end
@doc "Sort elements with Quicksort"
def sort([]), do: []
def sort([pivot | tail]) do
{left, right} = Enum.split_with(tail, fn(x) -> x < pivot end)
sort(left) ++ [pivot] ++ sort(right)
end
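  # For example (illustrative only):
  #
  #     Numy.Enumy.sort([3, 1, 2]) #=> [1, 2, 3]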
end
# source file: lib/enumy.ex
defmodule CRC.Legacy do
@moduledoc false
  # Legacy CRC functions, these may be deprecated in a future release and removed in v1.0 - RN
defmacro __using__(_) do
quote do
@doc """
Calculates a 8-bit CRC with polynomial x^8+x^6+x^3+x^2+1, 0x14D.
Chosen based on Koopman, et al. (0xA6 in his notation = 0x14D >> 1):
http://www.ece.cmu.edu/~koopman/roses/dsn04/koopman04_crc_poly_embedded.pdf
seed defaults to 0xFF if one is not given
"""
@spec crc_8(binary, number) :: number
defdelegate crc_8(input, seed \\ 0xFF), to: :crc
@doc """
Calculates a 16-bit ANSI CRC checksum for the provided binary
"""
@spec crc_16(binary) :: number
def crc_16(input), do: :crc_fast.calc(:crc_16, input)
@doc """
Calculates a 16-bit CCITT CRC with the given seed,
seed defaults to 0xFFFF if one is not given.
This CCIT method uses a 0x1021 polynomial.
"""
@spec ccitt_16(binary) :: number
def ccitt_16(input), do: :crc_fast.calc(:crc_16_ccitt_false, input)
@spec ccitt_16(binary, number) :: number
def ccitt_16(input, seed) do
extend_model_seed(:crc_16_ccitt_false, seed)
|> :crc_fast.calc(input)
end
@doc """
Calculates a 16-bit CCITT Kermit CRC
This CCIT method uses a 0x8408 polynomial.
"""
@spec ccitt_16_kermit(binary) :: number
def ccitt_16_kermit(input), do: :crc_fast.calc(:crc_16_kermit, input)
@spec ccitt_16_kermit(binary, number) :: number
def ccitt_16_kermit(input, seed) do
extend_model_seed(:crc_16_kermit, seed)
|> :crc_fast.calc(input)
end
@doc """
Calculates a 16-bit CCITT XMODEM CRC
This CCIT method uses a 0x1021 polynomial.
"""
@spec ccitt_16_xmodem(binary) :: number
def ccitt_16_xmodem(input), do: :crc_fast.calc(:xmodem, input)
@doc """
Calculates a 16-bit CCITT 0x1D0F CRC
This CCIT method uses a 0x1021 polynomial.
"""
@spec ccitt_16_1D0F(binary) :: number
def ccitt_16_1D0F(input) do
extend_model_seed(:crc_16_ccitt_false, 0x1D0F)
|> :crc_fast.calc(input)
end
@doc """
Calculates a 16-bit modbus CRC
"""
@spec crc_16_modbus(binary) :: number
def crc_16_modbus(input), do: :crc_fast.calc(:crc_16_modbus, input)
@doc """
Calculates a 16-bit Sick CRC
"""
@spec crc_16_sick(binary) :: number
def crc_16_sick(input), do: :crc_fast.calc(:crc_16_sick, input)
@doc """
Calculates a 16-bit DNP CRC
"""
@spec crc_16_dnp(binary) :: number
def crc_16_dnp(input), do: :crc_fast.calc(:crc_16_dnp, input)
@doc """
Calculates a 32-bit CRC
"""
@spec crc_32(binary) :: number
def crc_32(input), do: :crc_fast.calc(:crc_32, input)
@doc """
Calculates an XOR checksum for the given binary
"""
@spec checksum_xor(binary) :: number
defdelegate checksum_xor(input), to: :crc
defp extend_model_seed(model, seed), do: %{extend: model, init: seed}
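      # Usage sketch for a module that does `use CRC.Legacy` (hypothetical
      # MyCRC module assumed):
      #
      #     MyCRC.ccitt_16("123456789")          # default 0xFFFF seed
      #     MyCRC.ccitt_16("123456789", 0x1D0F)  # explicit seed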
end
end
end
# source file: lib/crc/legacy.ex
defmodule Episode do
defstruct [:show, :title, :season, :episode, :download_url]
@type t :: %Episode {
show: Show.t,
title: String.t,
season: pos_integer,
episode: pos_integer,
download_url: String.t
}
@doc """
iex> parse_title("S01E02.HR-HDTV.AC3.1024X576.x264.mkv")
[season: 1, episode: 2]
iex> parse_title("E03.HR-HDTV.AC3.1024X576.x264.mkv")
[episode: 3]
iex> parse_title("HR-HDTV.AC3.1024X576.x264.mkv")
[]
iex> parse_title("S03.HR-HDTV.AC3.1024X576.x264.mkv")
[]
"""
@spec parse_title(String.t) :: keyword
def parse_title(title) do
matched = Regex.named_captures(~r/(s(?<season>\d+))?e(?<episode>\d+)/ui, title)
case matched do
%{"episode" => episode, "season" => ""} ->
[episode: String.to_integer(episode)]
%{"episode" => episode, "season" => season} ->
[season: String.to_integer(season), episode: String.to_integer(episode)]
_ ->
[]
end
end
@doc """
iex> parse_episode_title(%Episode{title: "S01E02.HR-HDTV.AC3.1024X576.x264.mkv"})
%Episode{title: "S01E02.HR-HDTV.AC3.1024X576.x264.mkv", season: 1, episode: 2}
iex> parse_episode_title(%Episode{title: "E03.HR-HDTV.AC3.1024X576.x264.mkv"})
%Episode{title: "E03.HR-HDTV.AC3.1024X576.x264.mkv", episode: 3}
iex> parse_episode_title(%Episode{title: "HR-HDTV.AC3.1024X576.x264.mkv"})
%Episode{title: "HR-HDTV.AC3.1024X576.x264.mkv"}
"""
@spec parse_episode_title(t) :: t
def parse_episode_title(%Episode{title: title} = episode) do
struct(episode, parse_title(title))
end
@doc """
iex> episode_key(%Episode{show: %Show{name: "Flash"}, season: 2, episode: 1})
"Episode:Flash:2:1"
iex> episode_key(%Episode{show: %Show{name: "Flash"}, season: nil, episode: 1})
"Episode:Flash:1"
"""
@spec episode_key(t) :: ConfigManager.key
def episode_key(episode)
  def episode_key(%Episode{season: nil} = episode) do
    [Episode, episode.show.name, episode.episode]
|> ConfigManager.normalize_key()
end
def episode_key(episode) do
[Episode, episode.show.name, episode.season, episode.episode]
|> ConfigManager.normalize_key()
end
@spec new?(t) :: boolean
def new?(episode) do
episode
|> episode_key()
|> ConfigManager.exists?()
|> Kernel.not()
end
@spec visit(t) :: non_neg_integer | :noop
def visit(episode) do
if new?(episode) do
store_episode(episode)
enqueue_daily_task(episode, Timex.now(Application.get_env(:harvester, :time_zone)), [:episode, :new], [days: 7])
else
:noop
end
end
@spec store_episode(t, list | map) :: :ok
def store_episode(episode, value) do
episode
|> episode_key()
|> ConfigManager.put_hash(value)
end
@spec store_episode(t) :: :ok
def store_episode(episode) do
store_episode(episode,
show: episode.show.name,
title: episode.title,
season: episode.season,
episode: episode.episode,
page: episode.show.url,
download_url: episode.download_url
)
end
  @spec enqueue_daily_task(t, Timex.datetime, ConfigManager.key, Timex.shift_options) :: non_neg_integer
def enqueue_daily_task(episode, datetime, prefix, shift) do
end_of_day = datetime |> Timex.end_of_day()
key = List.wrap(prefix) ++ [end_of_day |> Timex.to_date() |> Date.to_string()]
count = ConfigManager.enqueue(key, episode_key(episode))
expire_at = end_of_day |> Timex.shift(shift)
ConfigManager.expire_at(key, expire_at)
store_episode(episode, [enqueued_at: datetime |> Timex.format!("{ISO:Extended}")])
count
end
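
  # Illustrative flow (assumes ConfigManager wraps a key/value store with
  # queues and expiry, per the calls above):
  #
  #     %Episode{show: %Show{name: "Flash"}, title: "S02E01.mkv"}
  #     |> Episode.parse_episode_title()
  #     |> Episode.visit()
  #     # stores a hash under "Episode:Flash:2:1" and enqueues that key under
  #     # [:episode, :new, <date>], expiring 7 days after the end of the day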
end
# (end of apps/harvester/lib/episode.ex)

defmodule Annex.Data do
@moduledoc """
Annex.Data defines the callbacks and helpers for data structures used
by Annex.
An implementer of the Annex.Layer behaviour must return an Annex.Data
implementer from the `c:data_type/0` callback.
"""
alias Annex.{
AnnexError,
Data,
Data.List1D,
Data.List2D,
Shape
}
require Shape
@typedoc """
A module that implements the Annex.Data Behaviour.
"""
@type type :: module()
@type flat_data :: [float(), ...]
@type data :: struct() | flat_data() | [flat_data()]
@type op :: any()
@type args :: list(any())
defguard is_flat_data(data) when is_list(data) and is_float(hd(data))
@callback cast(data, Shape.t()) :: data()
@callback to_flat_list(data) :: list(float())
@callback shape(data) :: Shape.t()
@callback is_type?(any) :: boolean
@callback apply_op(data(), op(), args()) :: data()
defmacro __using__(_) do
quote do
require Annex.Data
require Annex.Shape
alias Annex.Data
alias Annex.Shape
@behaviour Annex.Data
end
end
@doc """
Annex.Data.cast/4 calls cast/3 for an Annex.Data behaviour implementing module.
Valid shapes are a non-empty tuple of positive integers or any the atom :any.
e.g. `{2, 3}` or `{3, :any}`
"""
def cast(type, data, []) when is_list(data) do
message = "Annex.Data.cast/3 got an empty list for shape"
raise AnnexError.build(message, type: type, data: data)
end
def cast(type, data, shape) when Shape.is_shape(shape) and is_atom(type) do
type.cast(data, shape)
end
@spec cast(Data.data(), Shape.t()) :: Data.data()
def cast(data, shape) do
data
|> infer_type()
|> cast(data, shape)
end
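
  # Illustrative cast (assumes the List1D implementation is inferred for flat
  # float lists, per infer_type/1 below):
  #
  #     Data.cast([1.0, 2.0, 3.0, 4.0], {2, 2})
  #     # infers List1D and delegates to its cast/2 with the {2, 2} shape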
@doc """
Flattens an Annex.Data into a list of floats via the type's callback.
"""
@spec to_flat_list(type(), data()) :: Data.flat_data()
def to_flat_list(type, data), do: type.to_flat_list(data)
@doc """
  Flattens an Annex.Data into a list of floats by first inferring the data's type.
"""
@spec to_flat_list(Data.data()) :: Data.flat_data()
def to_flat_list(data) do
data
|> infer_type()
|> to_flat_list(data)
end
@doc """
Given an Annex.Data `type` and the `data` returns the shape of the data.
The shape of data is used to cast between the expected shapes from one Annex.Layer
to the next or from one Annex.Sequence to the next.
"""
@spec shape(type(), data()) :: Shape.t()
def shape(type, data), do: type.shape(data)
@spec shape(data()) :: Shape.t()
def shape(data), do: data |> infer_type() |> shape(data)
@doc """
Given a type (Data implementing module) and some `data` returns true or false if the
data is of the correct type.
Calls `c:is_type?/1` of the `type`.
"""
def is_type?(nil, _), do: false
def is_type?(type, data), do: type.is_type?(data)
@doc """
Given a `type`, `data`, and a `target_shape` converts the data to the `type` and `target_shape`
If the `data` matches the `type` and the `data_shape` matches the `target_shape` the
data is returned unaltered.
If either the `type` or `target_shape` do not match the `data` the data is casted using
`Data.cast/3`.
"""
def convert(type, data, target_shape) do
if is_type?(type, data) do
data_shape = shape(type, data)
do_convert(type, data, data_shape, target_shape)
else
flat = Data.to_flat_list(data)
data_shape = List1D.shape(flat)
do_convert(type, flat, data_shape, target_shape)
end
end
defp do_convert(type, data, data_shape, target_shape) do
new_shape = Shape.convert_abstract_to_concrete(target_shape, data_shape)
cast(type, data, new_shape)
end
def flat_data_to_tensor(flat_data, shape) when Shape.is_shape(shape) do
shape
|> Enum.reverse()
|> Enum.reduce(flat_data, fn chunk_size, acc ->
Enum.chunk_every(acc, chunk_size)
end)
|> unwrap()
end
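
  # e.g. flat_data_to_tensor([1.0, 2.0, 3.0, 4.0], [2, 2])
  # chunks into [[1.0, 2.0], [3.0, 4.0]] after unwrapping the outermost chunk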
defp unwrap([unwrapped]), do: unwrapped
@spec infer_type(data()) :: any
def infer_type(%module{} = item) do
if function_exported?(module, :data_type, 1) do
module.data_type(item)
else
module
end
end
def infer_type([]) do
raise %AnnexError{
message: """
#{inspect(__MODULE__)}.infer_type/1 was given an empty list.
An empty list is not valid Data.
""",
details: []
}
end
def infer_type(data) when is_flat_data(data) do
List1D
end
def infer_type([row | _]) when is_flat_data(row) do
List2D
end
@spec apply_op(data(), any, list(any)) :: data
def apply_op(data, name, args) do
data
|> infer_type()
|> apply_op(data, name, args)
end
@spec apply_op(module, data(), any, list(any)) :: data()
def apply_op(type, data, name, args) when is_atom(type) do
type.apply_op(data, name, args)
end
@spec error(Data.data(), Data.data()) :: Data.flat_data()
def error(outputs, labels) do
labels = Data.to_flat_list(labels)
outputs
|> Data.to_flat_list()
|> Data.apply_op(:subtract, [labels])
end
end
# (end of lib/annex/data.ex)

defmodule Expected.Plugs do
@moduledoc """
Plugs for registering logins and authenticating persistent cookies.
## Requirements
For the plugs in this module to work, you must plug `Expected` in your
endpoint:
plug Expected
As `Expected` calls `Plug.Session` itself, you must not plug it in your
endpoint. You must however configure the session in the `:expected`
configuration:
config :expected,
store: :mnesia,
table: :logins,
auth_cookie: "_my_app_auth",
session_store: PlugSessionMnesia.Store, # For instance.
session_cookie: "_my_app_key" # The Plug.Session `:key` option.
For the login registration to work, Expected needs to get the session ID from
the session cookie. **You must use a session store that stores the session
server-side and uses the cookie to store the session ID.**
"""
import Plug.Conn,
only: [
assign: 3,
configure_session: 2,
delete_resp_cookie: 2,
get_session: 2,
put_private: 3,
put_session: 3
]
alias Expected.NotLoadedUser
alias Expected.CurrentUserError
alias Expected.InvalidUserError
alias Expected.PlugError
@cookie_max_age 7_776_000
@doc """
Registers a login.
## Requirements
This plug expects that the session contains a `:current_user` key featuring a
`:username` field:
conn
|> put_session(:current_user, %User{username: "user", name: "A User"})
|> register_login()
The names of these fields can be changed by setting the corresponding options:
conn
|> put_session(:logged_in_user, %User{user_id: "user", name: "A User"})
|> register_login(current_user: :logged_in_user, username: :user_id)
They can also be set application-wide in the configuration:
config :expected,
...
plug_config: [current_user: :logged_in_user, username: :user_id]
## Authentication cookie
Authentication information is stored in a cookie. By default, it is valid for
90 days after the last successful authentication. You can change this in the
application configuration:
config :expected,
...
cookie_max_age: 86_400 # Set to one day, for example.
Alternatively, you can set it locally:
conn
|> put_session(:current_user, %User{username: "user", name: "A User"})
|> assign(:persistent_login, true)
|> register_login(cookie_max_age: 86_400)
"""
@spec register_login(Plug.Conn.t()) :: Plug.Conn.t()
@spec register_login(Plug.Conn.t(), keyword()) :: Plug.Conn.t()
def register_login(conn, opts \\ []) do
expected =
conn
|> fetch_expected!()
|> put_cookie_max_age(opts)
|> Map.put(:username, fetch_username!(conn, opts))
|> Map.put(:action, :register_login)
put_private(conn, :expected, expected)
end
@doc """
Logs a user out.
This plug deletes the login and its associated session from the stores and
their cookies. If there is no authentication cookie, it does nothing.
"""
@spec logout(Plug.Conn.t()) :: Plug.Conn.t()
@spec logout(Plug.Conn.t(), keyword()) :: Plug.Conn.t()
def logout(conn, _opts \\ []) do
expected = fetch_expected!(conn)
auth_cookie = conn.cookies[expected.auth_cookie]
case parse_auth_cookie(auth_cookie) do
{:ok, user, serial, _token} ->
Expected.delete_login(user, serial)
conn
|> delete_resp_cookie(expected.session_cookie)
|> delete_resp_cookie(expected.auth_cookie)
_ ->
delete_resp_cookie(conn, expected.auth_cookie)
end
end
@doc """
Authenticates a connection.
## Session authentication
This plug first checks if the session is already authenticated. It does so by
reading the `:authenticated` field in the session. If it is `true`, it assigns
`:authenticated` and `:current_user` in the `conn` according to their value
in the session.
The names of these fields can be changed by setting the corresponding options:
conn
|> authenticate(authenticated: :logged_in, current_user: :user_id)
They can also be set application-wide in the configuration:
config :expected,
...
plug_config: [authenticated: :logged_in, current_user: :user_id]
## Cookie authentication
If the session is not yet authenticated, this plug checks for an
authentication cookie. By default, it is valid for 90 days after the last
successful authentication. You can change this in the application
configuration:
config :expected,
...
cookie_max_age: 86_400 # Set to one day, for example.
Alternatively, you can set it locally:
conn
|> authenticate(cookie_max_age: 86_400)
## Alerts
For security purpose, an authentication cookie can be used only once. If an
authentication cookie is re-used, `conn.assigns.unexpected_token` is set to
`true` and the session is not authenticated. You can check this value using
`Expected.unexpected_token?/1` and accordingly inform the user of a possible
malicious access.
## User loading
After a successful cookie authentication, the `:current_user` field in both
the session and the `conn` assigns is set to an `Expected.NotLoadedUser`,
featuring the user’s username:
%Expected.NotLoadedUser{username: "user"}
You should load this user from the database in another plug following this one
if the session has been authenticated.
"""
@spec authenticate(Plug.Conn.t()) :: Plug.Conn.t()
@spec authenticate(Plug.Conn.t(), keyword()) :: Plug.Conn.t()
def authenticate(conn, opts \\ []) do
expected = fetch_expected!(conn)
plug_config = Application.get_env(:expected, :plug_config, [])
authenticated_field =
get_option(opts, plug_config, :authenticated, :authenticated)
current_user_field =
get_option(opts, plug_config, :current_user, :current_user)
auth_cookie = conn.cookies[expected.auth_cookie]
    with auth when auth != true <- get_session(conn, authenticated_field),
{:ok, user, serial, token} <- parse_auth_cookie(auth_cookie),
{:ok, login} <- expected.store.get(user, serial, expected.store_opts),
%{username: ^user, token: ^token} <- login do
session_store = expected.session_opts.store
session_store.delete(nil, login.sid, expected.session_opts.store_config)
not_loaded_user = %NotLoadedUser{username: user}
expected =
expected
|> put_cookie_max_age(opts)
|> Map.put(:login, login)
|> Map.put(:action, :update_login)
conn
|> configure_session(renew: true)
|> put_session(authenticated_field, true)
|> put_session(current_user_field, not_loaded_user)
|> assign(authenticated_field, true)
|> assign(current_user_field, not_loaded_user)
|> put_private(:expected, expected)
else
true ->
put_auth(conn, authenticated_field, current_user_field)
{:error, :no_cookie} ->
conn
{:error, :invalid} ->
delete_resp_cookie(conn, expected.auth_cookie)
{:error, :no_login} ->
delete_resp_cookie(conn, expected.auth_cookie)
%{username: username, token: _token} ->
Expected.delete_all_user_logins(username)
conn
|> put_private(:unexpected_token, true)
|> delete_resp_cookie(expected.auth_cookie)
end
end
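
  # Illustrative router usage (assumes `import Expected.Plugs` is in scope so
  # the function plugs resolve; the pipeline name is only an example):
  #
  #     pipeline :browser do
  #       plug :fetch_session
  #       plug :authenticate
  #     end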
@spec fetch_expected!(Plug.Conn.t()) :: map()
defp fetch_expected!(%{private: %{expected: expected}}), do: expected
defp fetch_expected!(_), do: raise(PlugError)
  @spec parse_auth_cookie(String.t() | nil) ::
          {:ok, String.t(), String.t(), String.t()}
          | {:error, :invalid}
          | {:error, :no_cookie}
defp parse_auth_cookie(auth_cookie) when is_binary(auth_cookie) do
with [encoded_user, serial, token] <- String.split(auth_cookie, "."),
{:ok, user} <- Base.decode64(encoded_user) do
{:ok, user, serial, token}
else
_ -> {:error, :invalid}
end
end
defp parse_auth_cookie(nil), do: {:error, :no_cookie}
defp parse_auth_cookie(_), do: {:error, :invalid}
@spec put_auth(Plug.Conn.t(), atom(), atom()) :: Plug.Conn.t()
defp put_auth(conn, authenticated_field, current_user_field) do
conn
|> assign(authenticated_field, true)
|> assign(current_user_field, get_session(conn, current_user_field))
end
@spec put_cookie_max_age(map(), keyword()) :: map()
defp put_cookie_max_age(expected, opts) do
env = Application.get_all_env(:expected)
cookie_max_age = get_option(opts, env, :cookie_max_age, @cookie_max_age)
Map.put(expected, :cookie_max_age, cookie_max_age)
end
@spec get_option(keyword(), keyword(), atom(), term()) :: term()
defp get_option(opts, config, key, default) do
opts[key] || config[key] || default
end
@spec fetch_username!(Plug.Conn.t(), keyword()) :: String.t()
defp fetch_username!(conn, opts) do
plug_config = Application.get_env(:expected, :plug_config, [])
current_user = get_option(opts, plug_config, :current_user, :current_user)
username = get_option(opts, plug_config, :username, :username)
case get_session(conn, current_user) do
%{^username => current_username} -> current_username
nil -> raise CurrentUserError
_ -> raise InvalidUserError
end
end
end
# (end of lib/expected/plugs.ex)

defmodule Timber.Context do
@moduledoc """
The ContextEntry module formalizes the structure of context stack entries
Most users will not interact directly with this module and will instead use
the helper functions provided by the main `Timber` module. See the `Timber`
module for more information.
The functions in this module work by modifying the Logger metadata store which
is unique to every BEAM process. This is convenient in many ways. First and
foremost, it does not require you to manually manage the metadata. Second,
because we conform to the standard Logger principles, you can utilize Timber
alongside other Logger backends without issue. Timber prefixes its contextual
metadata keys so as not to interfere with other systems.
"""
alias Timber.Contextable
alias Timber.Contexts
alias Timber.Utils.Map, as: UtilsMap
@typedoc """
Deprecated; please use `element` instead
"""
@type context_element :: element
@type element ::
map
| Keyword.t()
| Contexts.CustomContext.t()
| Contexts.HTTPContext.t()
| Contexts.JobContext.t()
| Contexts.OrganizationContext.t()
| Contexts.RuntimeContext.t()
| Contexts.SessionContext.t()
| Contexts.SystemContext.t()
| Contexts.UserContext.t()
@type t :: %{
optional(:custom) => Contexts.CustomContext.m(),
optional(:http) => Contexts.HTTPContext.m(),
optional(:job) => Contexts.JobContext.m(),
optional(:organization) => Contexts.OrganizationContext.m(),
optional(:runtime) => Contexts.RuntimeContext.m(),
optional(:session) => Contexts.SessionContext.m(),
optional(:system) => Contexts.SystemContext.m(),
optional(:user) => Contexts.UserContext.m()
}
@doc false
def new(), do: %{}
@doc """
Takes an existing context element and inserts it into the provided context.
"""
@spec add(t, element) :: t
def add(context, %Contexts.CustomContext{type: type} = context_element) when is_binary(type) do
new_context_element = %{context_element | type: String.to_atom(type)}
add(context, new_context_element)
end
def add(context, %Contexts.CustomContext{} = context_element) do
key = type(context_element)
api_map = to_api_map(context_element)
insert(context, key, api_map)
end
def add(context, data) when is_list(data) do
Enum.reduce(data, context, fn item, context ->
add(context, item)
end)
end
def add(context, {key, val}) do
add(context, %{key => val})
end
def add(context, data) do
context_element = Contextable.to_context(data)
key = type(context_element)
context_element_map = to_api_map(context_element)
insert(context, key, context_element_map)
end
# Inserts the context_element into the main context map
@spec insert(t, atom, t) :: map
defp insert(context, _key, new_context) when map_size(new_context) == 0 do
context
end
defp insert(%{custom: custom_context} = context, :custom, new_context) do
merged_custom_context = Map.merge(custom_context, new_context)
Map.put(context, :custom, merged_custom_context)
end
defp insert(context, key, new_context) do
Map.put(context, key, new_context)
end
@doc """
Merges two Context structs
Entries in the second Context will override entries in the first.
The caveat to this is custom context, which will descend into the
custom context and merge it there. Even then, custom context entries
in the second will override custom context entries in the first.
"""
@spec merge(t, t) :: t
def merge(context, nil), do: context
def merge(first_context, second_context) do
Map.merge(first_context, second_context, &c_merge/3)
end
defp c_merge(:custom, first_context, second_context) do
Map.merge(first_context, second_context)
end
  # Entries in the second context win, matching the documented merge behaviour.
  defp c_merge(_key, _first_context, second_context) do
    second_context
  end
@doc """
Removes a key from the provided context structure.
"""
@spec remove_key(t, atom) :: t
def remove_key(context, key) do
Map.delete(context, key)
end
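
  # Illustrative build-up (assumes these structs/keywords implement Contextable,
  # as the generic add/2 clause requires):
  #
  #     Context.new()
  #     |> Context.add(%Contexts.HTTPContext{})
  #     |> Context.add(custom_key: "custom value")
  #     |> Context.remove_key(:http)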
# Converts a context_element into a map the Timber API expects.
@spec to_api_map(element) :: map
defp to_api_map(%Contexts.CustomContext{type: type, data: data}) do
%{type => data}
|> UtilsMap.recursively_drop_blanks()
end
defp to_api_map(%Contexts.JobContext{id: id} = context_element) when is_integer(id) do
to_api_map(%{context_element | id: Integer.to_string(id)})
end
defp to_api_map(%Contexts.OrganizationContext{id: id} = context_element) when is_integer(id) do
to_api_map(%{context_element | id: Integer.to_string(id)})
end
defp to_api_map(%Contexts.SessionContext{id: id} = context_element) when is_integer(id) do
to_api_map(%{context_element | id: Integer.to_string(id)})
end
defp to_api_map(%Contexts.SystemContext{pid: pid} = context_element) when is_binary(pid) do
pid =
case Integer.parse(pid) do
{pid, _units} -> pid
_ -> nil
end
to_api_map(%{context_element | pid: pid})
end
defp to_api_map(%Contexts.UserContext{id: id} = context_element) when is_integer(id) do
to_api_map(%{context_element | id: Integer.to_string(id)})
end
defp to_api_map(context_element) do
context_element
|> UtilsMap.deep_from_struct()
|> UtilsMap.recursively_drop_blanks()
end
# Determines the key name for the context_element that the Timber API expects.
@spec type(element) :: atom
defp type(%Contexts.CustomContext{}), do: :custom
defp type(%Contexts.HTTPContext{}), do: :http
defp type(%Contexts.JobContext{}), do: :job
defp type(%Contexts.OrganizationContext{}), do: :organization
defp type(%Contexts.RuntimeContext{}), do: :runtime
defp type(%Contexts.SessionContext{}), do: :session
defp type(%Contexts.SystemContext{}), do: :system
defp type(%Contexts.UserContext{}), do: :user
end
# (end of lib/timber/context.ex)

defmodule Mix.Tasks.Yuki.Test do
@shortdoc "Tests all testcase"
@moduledoc """
Tests your source code for the specified problem.
From mix task:
mix yuki.test NO [--problem-id] [--lang LANG] [--source SOURCE] [--time-limit TIME_LIMIT] [--module MODULE]
From escript:
yuki test NO [--problem-id] [--lang LANG] [--source SOURCE] [--time-limit TIME_LIMIT] [--module MODULE]
  In order to test your source code, the task resolves the path of the source file.
  If a prefix is configured, the filename is built from the prefix, the problem number, and the extension.
  For example, if the prefix, problem number, and language are `p`, `10`, and `elixir`,
  respectively, the filename is `p10.ex`.
  The file is searched for recursively under the `src` and `lib` directories.
  > Note: If there are no testcases for the problem yet, its testcases are downloaded first.
## Options
- `--problem-id`: if `true`, `NO` is the problem ID. If `false`, `NO` is the problem number.
- `--lang`: this option specifies language to use.
    See `mix help yuki.lang.list` or `yuki help lang.list` for a list of available languages.
Without `language.primary` in config file, default to `elixir`.
- `--source`: this option specifies a path of source code
if source code is out of scope for auto search on `src` or `lib`.
- `--time-limit`: this option redefines `TIME_LIMIT`.
Default to 5000 ms.
- `--module` : this option is only valid for `elixir` and specifies custom entry point `MODULE.main` on executing.
"""
use Mix.Task
use YukiHelper.Docs
import YukiHelper
alias YukiHelper.{
Config,
Download,
Problem,
Language
}
alias YukiHelper.Exceptions.CompileError
@arguments [:integer]
@switches [
problem_id: :boolean,
version: :boolean,
source: :string,
lang: :atom,
time_limit: :integer
]
@version Mix.Project.config()[:version]
@name Mix.Project.config()[:name]
@impl true
def run(argv) do
{:ok, _} = Application.ensure_all_started(:yuki_helper)
argv
|> parse_options(@arguments, @switches)
|> case do
:version -> Mix.shell().info("#{@name} v#{@version}")
:help -> Mix.Tasks.Help.run(["yuki.test"])
{:invalid_option, msg} -> Mix.raise(msg)
{:invalid_arguments, msg} -> Mix.raise(msg)
{opts, [no]} -> run_test(no, opts)
end
end
defp run_test(no, opts) do
config = Config.load_all()
{language, compiler} = Language.verify!(config, opts)
Mix.shell().info("Language : #{language}")
Mix.shell().info("Compiler : #{compiler}")
src = Problem.source_file!(config, no, opts)
Mix.shell().info("Source : #{src}\n")
testcase_list = Download.get_testcases!(config, no, opts)
testcase_list
|> Download.download_tastcases?(config, no)
|> if do
Mix.shell().info("download testcases : [skipped]")
else
problem_path = Path.expand(Problem.problem_path(config, no))
paths =
%{}
|> Map.put(:in, Path.join(problem_path, "in"))
|> Map.put(:out, Path.join(problem_path, "out"))
if not File.exists?(problem_path) do
:ok = File.mkdir_p(paths[:in])
:ok = File.mkdir_p(paths[:out])
Mix.shell().info("create directories\n #{paths[:in]}\n #{paths[:out]}")
end
Mix.shell().info("download testcases : #{length(testcase_list)} files")
Enum.each(testcase_list, fn file ->
[:in, :out]
|> Enum.each(fn filetype ->
path = Path.join(paths[filetype], file)
data = YukiHelper.Download.get_testcase!(config, no, file, filetype)
:ok = File.write(path, data)
end)
Mix.shell().info(" #{file} : [#{success("ok")}]")
end)
end
Mix.shell().info("")
case Language.compile(config, src, opts) do
{:error, %CompileError{}} ->
Mix.shell().info("compile : [#{warning("CE")}]")
{:ok, msg} ->
Mix.shell().info("compile : [#{success("ok")}]")
if 0 < String.length(msg) do
Mix.shell().info(warning(msg))
end
Mix.shell().info("run testcases: #{length(testcase_list)} files")
execute_cmd(config, no, testcase_list, src, opts)
end
Language.clean_up(config, opts)
end
defp execute_cmd(config, no, testcase_list, source, opts) do
Enum.reduce(testcase_list, true, fn
_testcase, false = next ->
next
testcase, true = next ->
case Language.run(config, no, testcase, source, opts) do
:runtime_error ->
Mix.shell().info(" #{testcase} : [#{warning("RE")}]")
next && false
:time_limit ->
Mix.shell().info(" #{testcase} : [#{warning("TLE")}]")
next
{:wrong_answer, time} ->
Mix.shell().info(" #{testcase} : [#{warning("WA")}] / #{time} ms")
next
{:accept, time} ->
Mix.shell().info(" #{testcase} : [#{success("AC")}] / #{time} ms")
next
end
end)
end
end
# (end of lib/mix/tasks/yuki.test.ex)

defmodule DgraphEx.Core.Expr.Uid do
@moduledoc """
https://docs.dgraph.io/query-language/#uid
Syntax Examples:
q(func: uid(<uid>))
predicate @filter(uid(<uid1>, ..., <uidn>))
predicate @filter(uid(a)) for variable a
q(func: uid(a,b)) for variables a and b
"""
alias DgraphEx.Util
defstruct [
:value,
:type
]
@types [
:literal,
:expression
]
defmacro __using__(_) do
quote do
def uid(value), do: unquote(__MODULE__).new(value)
end
end
defguard is_uid(value, type)
when (is_atom(value) or is_binary(value) or is_list(value)) and type in @types
@doc """
lists of uid literals are rendered inside a `uid(<uids_here>)` function (as in @filter)
lists of uid variables are rendered inside a `uid(<uids_here>)` function (as in @filter)
therefore any list is an uid expression
"""
  # Binaries are uid literals; atoms (variables) and lists are uid expressions,
  # per the @doc above.
  def new(value) when is_binary(value), do: new(value, :literal)
  def new(value) when is_atom(value), do: new(value, :expression)
  def new(uids) when is_list(uids), do: new(uids, :expression)
def new(value, type) when is_uid(value, type) do
%__MODULE__{value: value, type: type}
end
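
  # Illustrative renderings (see render/1 below; the literal form assumes
  # Util.as_literal/2 wraps uids as "<0x9>"):
  #
  #     uid("0x9") |> render()       # => "<0x9>"
  #     uid(:a) |> render()          # => "uid(a)"
  #     uid(["0x1", :a]) |> render() # => "uid(0x1, a)"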
@doc """
This function is used by Func to ensure that a uid string ("0x9") is rendered
as an expression literal `uid(0x9)` instead of an actual literal `<0x9>`
"""
def as_expression(%__MODULE__{} = u), do: %{u | type: :expression}
def as_literal(%__MODULE__{} = u), do: %{u | type: :literal}
def as_naked(%__MODULE__{} = u), do: %{u | type: :naked}
def render(%__MODULE__{value: value})
when is_atom(value),
do: render_expression([value])
def render(%__MODULE__{value: value, type: :literal}) when is_binary(value) do
{:ok, uid_literal} = Util.as_literal(value, :uid)
uid_literal
end
def render(%__MODULE__{value: value, type: :literal})
when is_list(value),
do: render_expression(value)
def render(%__MODULE__{value: value, type: :naked})
when is_binary(value),
do: value
def render(%__MODULE__{value: value, type: :expression})
when is_atom(value) or is_binary(value),
do: render_expression([value])
def render(%__MODULE__{value: value, type: :expression})
when is_list(value),
do: render_expression(value)
defp render_expression(uids) when is_list(uids) do
args =
uids
|> Enum.map(&to_string/1)
|> Enum.join(", ")
"uid(" <> args <> ")"
end
end
defimpl String.Chars, for: DgraphEx.Core.Expr.Uid do
def to_string(uid), do: DgraphEx.Core.Expr.Uid.render(uid)
end
# (end of lib/dgraph_ex/core/expr/uid.ex)

defmodule AdventOfCode.Day14 do
@moduledoc ~S"""
[Advent Of Code day 14](https://adventofcode.com/2018/day/14).
"""
defmodule Recipes do
use GenServer
defstruct [:scores, :elves, :last_index, :iterations]
    @type t :: %__MODULE__{scores: Map.t(), elves: [non_neg_integer], last_index: non_neg_integer, iterations: non_neg_integer}
def start_link(input) do
GenServer.start_link(__MODULE__, input, name: __MODULE__)
end
def init(input) do
scores =
input
|> split_score()
|> Enum.with_index(1)
|> Enum.into(%{}, fn {score, i} -> {i, score} end)
struct = struct(__MODULE__, scores: scores, elves: [1, 2], last_index: Enum.count(scores), iterations: 0)
{:ok, struct}
end
# API
@timeout 50_000
def show_scoreboard(n), do: GenServer.call(__MODULE__, {:show_scoreboard, n}, @timeout)
def iterate, do: GenServer.cast(__MODULE__, :iterate)
def print, do: GenServer.call(__MODULE__, :print, @timeout)
# CALLBACKS
def handle_call(:print, _, state) do
{:reply, do_print(state), state}
end
def handle_call({:show_scoreboard, n}, _, state) do
{:reply, do_get_scoreboard((n + 1)..(n + 10), state.scores), state}
end
def handle_cast(:iterate, state) do
{:noreply, do_iterate(state)}
end
# IMPLEMENTATION
defp do_print(%{elves: [first, second], scores: scores}) do
Enum.map(scores, fn {i, s} ->
cond do
i == first -> "(#{s})"
i == second -> "[#{s}]"
true -> " #{s} "
end
end)
|> Enum.join(" ")
end
defp do_get_scoreboard(range, scores) do
range
|> Enum.map(fn i -> Map.get(scores, i) end)
|> Enum.join()
end
defp do_iterate(%{elves: [first, second]} = state) do
st =
(Map.get(state.scores, first) + Map.get(state.scores, second))
|> split_score()
|> Enum.reduce(state, fn score, st ->
        %{st | last_index: st.last_index + 1, scores: Map.put(st.scores, st.last_index + 1, score)}
end)
%{st | elves: move_elves(st), iterations: st.iterations + 1}
end
defp move_elves(%{elves: elves, last_index: li, scores: scores}) do
Enum.map(elves, fn e ->
case rem(e + Map.get(scores, e) + 1, li) do
0 -> li
n -> n
end
end)
end
defp split_score(int), do: int |> to_string() |> String.codepoints() |> Enum.map(&String.to_integer/1)
end
def solve("1", iterations) do
Recipes.start_link(37)
1..(iterations + 10) |> Enum.each(fn _ -> Recipes.iterate() end)
Recipes.show_scoreboard(iterations)
end
def solve("2", x) do
Recipes.start_link(37)
do_iterate(to_string(x), 0)
end
defp do_iterate(x, n) do
n = n + 1
Recipes.iterate()
if n > 10 && String.starts_with?(Recipes.show_scoreboard(n - 10), x) do
n - 10
else
do_iterate(x, n)
end
end
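
  # Illustrative usage (figures from the 2018 day 14 puzzle examples):
  #
  #     AdventOfCode.Day14.solve("1", 9)     # => "5158916779"
  #     AdventOfCode.Day14.solve("2", 51589) # => 9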
end
# (end of lib/advent_of_code/day_14.ex)

defmodule AbtDid.Type do
@moduledoc """
Represents the type of the DID. A DID is composed of three inner types: `role_type`, `key_type` and `hash_type`.
"""
use TypedStruct
typedstruct do
field(:role_type, atom(), default: :account)
field(:key_type, atom(), default: :ed25519)
field(:hash_type, atom(), default: :sha3)
end
@doc """
Returns the DID type representing a blockchain node.
"""
@spec node() :: AbtDid.Type.t()
def node, do: %AbtDid.Type{role_type: :node, key_type: :ed25519, hash_type: :sha2}
@doc """
Returns the DID type representing a blockchain validator.
"""
@spec validator() :: AbtDid.Type.t()
def validator, do: %AbtDid.Type{role_type: :validator, key_type: :ed25519, hash_type: :sha2}
@doc """
Returns the DID type representing a tether.
"""
@spec tether() :: AbtDid.Type.t()
def tether, do: %AbtDid.Type{role_type: :tether, key_type: :ed25519, hash_type: :sha2}
@doc """
Returns the DID type representing a swap.
"""
@spec swap() :: AbtDid.Type.t()
def swap, do: %AbtDid.Type{role_type: :swap, key_type: :ed25519, hash_type: :sha2}
@doc """
Checks if a Did type is valid or not.
"""
@spec check_did_type!(AbtDid.Type.t()) :: :ok
def check_did_type!(%{role_type: role, hash_type: hash, key_type: key})
when role in [:validator, :node, :tether, :swap] do
if hash == :sha2 and key == :ed25519 do
:ok
else
raise "The hash_type must be :sha2 and key_type must be :ed25519 if the role_type is :node or :validator."
end
end
def check_did_type!(%{role_type: _, hash_type: hash, key_type: _}) do
if hash == :sha2 do
raise "The hash_type :sha2 is only used for role_type :node, :validator, :tether or :swap."
else
:ok
end
end
end
defmodule AbtDid.TypeBytes do
@moduledoc """
  Encodes the DID type information into bytes.
"""
alias AbtDid.Type
@doc """
Converts the DID type struct to type bytes.
## Examples
iex> AbtDid.TypeBytes.struct_to_bytes(%AbtDid.Type{})
<<0, 1>>
iex> AbtDid.TypeBytes.struct_to_bytes(%AbtDid.Type{hash_type: :sha3_512})
<<0, 5>>
      iex> AbtDid.TypeBytes.struct_to_bytes(%AbtDid.Type{role_type: :application, key_type: :secp256k1, hash_type: :sha3_512})
      "\f%"
iex> AbtDid.TypeBytes.struct_to_bytes(%AbtDid.Type{role_type: :application, hash_type: :sha2})
** (RuntimeError) The hash_type :sha2 is only used for role_type :node, :validator, :tether or :swap.
"""
@spec struct_to_bytes(Type.t()) :: binary()
def struct_to_bytes(type) do
AbtDid.Type.check_did_type!(type)
<<_::bitstring-size(2), role::bitstring-size(6)>> = role_type_to_bytes(type.role_type)
<<_::bitstring-size(3), key::bitstring-size(5)>> = key_type_to_bytes(type.key_type)
<<_::bitstring-size(3), hash::bitstring-size(5)>> = hash_type_to_bytes(type.hash_type)
<<role::bitstring, key::bitstring, hash::bitstring>>
end
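
  # Bit layout: role (6 bits) | key (5 bits) | hash (5 bits), packed into 2 bytes.
  # e.g. {:application, :secp256k1, :sha3_512} -> <<3::6, 1::5, 5::5>> == <<12, 37>> == "\f%"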
@doc """
Converts the DID type bytes to DID type struct.
## Examples
iex> AbtDid.TypeBytes.bytes_to_struct(<<0, 1>>)
%AbtDid.Type{hash_type: :sha3, key_type: :ed25519, role_type: :account}
iex> AbtDid.TypeBytes.bytes_to_struct(<<0, 5>>)
%AbtDid.Type{hash_type: :sha3_512, key_type: :ed25519, role_type: :account}
iex> AbtDid.TypeBytes.bytes_to_struct("\f%")
%AbtDid.Type{role_type: :application, key_type: :<KEY>, hash_type: :sha3_512}
iex> AbtDid.TypeBytes.bytes_to_struct(<<196, 5>>)
      ** (RuntimeError) Invalid role type: \"1\"
"""
@spec bytes_to_struct(binary()) :: Type.t()
def bytes_to_struct(bytes) do
<<role::bitstring-size(6), key::bitstring-size(5), hash::bitstring-size(5)>> = bytes
role_type = bytes_to_role_type(<<0::size(2), role::bitstring>>)
key_type = bytes_to_key_type(<<0::size(3), key::bitstring>>)
hash_type = bytes_to_hash_type(<<0::size(3), hash::bitstring>>)
%Type{role_type: role_type, key_type: key_type, hash_type: hash_type}
end
defp role_type_to_bytes(:account), do: <<0>>
defp role_type_to_bytes(:node), do: <<1>>
defp role_type_to_bytes(:device), do: <<2>>
defp role_type_to_bytes(:application), do: <<3>>
defp role_type_to_bytes(:smart_contract), do: <<4>>
defp role_type_to_bytes(:bot), do: <<5>>
defp role_type_to_bytes(:asset), do: <<6>>
defp role_type_to_bytes(:stake), do: <<7>>
defp role_type_to_bytes(:validator), do: <<8>>
defp role_type_to_bytes(:group), do: <<9>>
defp role_type_to_bytes(:tx), do: <<10>>
defp role_type_to_bytes(:tether), do: <<11>>
defp role_type_to_bytes(:swap), do: <<12>>
defp role_type_to_bytes(:delegate), do: <<13>>
defp role_type_to_bytes(:any), do: <<63>>
  defp role_type_to_bytes(role), do: raise("Invalid role type: #{inspect(role)}")
defp bytes_to_role_type(<<0>>), do: :account
defp bytes_to_role_type(<<1>>), do: :node
defp bytes_to_role_type(<<2>>), do: :device
defp bytes_to_role_type(<<3>>), do: :application
defp bytes_to_role_type(<<4>>), do: :smart_contract
defp bytes_to_role_type(<<5>>), do: :bot
defp bytes_to_role_type(<<6>>), do: :asset
defp bytes_to_role_type(<<7>>), do: :stake
defp bytes_to_role_type(<<8>>), do: :validator
defp bytes_to_role_type(<<9>>), do: :group
defp bytes_to_role_type(<<10>>), do: :tx
defp bytes_to_role_type(<<11>>), do: :tether
defp bytes_to_role_type(<<12>>), do: :swap
defp bytes_to_role_type(<<13>>), do: :delegate
defp bytes_to_role_type(<<63>>), do: :any
  defp bytes_to_role_type(role), do: raise("Invalid role type: #{inspect(role)}")
defp key_type_to_bytes(:ed25519), do: <<0>>
defp key_type_to_bytes(:secp256k1), do: <<1>>
  defp key_type_to_bytes(key), do: raise("Invalid key type: #{inspect(key)}")
defp bytes_to_key_type(<<0>>), do: :ed25519
defp bytes_to_key_type(<<1>>), do: :secp256k1
  defp bytes_to_key_type(key), do: raise("Invalid key type: #{inspect(key)}")
defp hash_type_to_bytes(:keccak), do: <<0>>
defp hash_type_to_bytes(:sha3), do: <<1>>
defp hash_type_to_bytes(:keccak_384), do: <<2>>
defp hash_type_to_bytes(:sha3_384), do: <<3>>
defp hash_type_to_bytes(:keccak_512), do: <<4>>
defp hash_type_to_bytes(:sha3_512), do: <<5>>
defp hash_type_to_bytes(:sha2), do: <<6>>
  defp hash_type_to_bytes(hash), do: raise("Invalid hash type: #{inspect(hash)}")
defp bytes_to_hash_type(<<0>>), do: :keccak
defp bytes_to_hash_type(<<1>>), do: :sha3
defp bytes_to_hash_type(<<2>>), do: :keccak_384
defp bytes_to_hash_type(<<3>>), do: :sha3_384
defp bytes_to_hash_type(<<4>>), do: :keccak_512
defp bytes_to_hash_type(<<5>>), do: :sha3_512
defp bytes_to_hash_type(<<6>>), do: :sha2
  defp bytes_to_hash_type(hash), do: raise("Invalid hash type: #{inspect(hash)}")
end
# (end of lib/type_bytes.ex)

defmodule Keypad do
@moduledoc """
`keypad` is implemented as a `__using__` macro so that you can put it in any module you want
to handle the keypress events. Because it is small GenServer, it [accepts the same options for supervision](https://hexdocs.pm/elixir/GenServer.html#module-how-to-supervise)
to configure the child spec and passes them along to `GenServer`:
```elixir
defmodule MyModule do
use Keypad, restart: :transient, shutdown: 10_000
end
```
It also has its own set of options to pass to configure the keypad connections. At a minimum, you must
pass either `:size` or a custom matrix with `:matrix`:
  * `:size` - If supplied without `:matrix` it will select the default matrix for the specified size. The declaration is `row x col`, so `:one_by_four` would be 1 row, 4 columns.
* `:four_by_four` or `"4x4"` - Standard 12-digit keypad with `A`, `B`, `C`, and `D` keys
* `:four_by_three` or `"4x3"` - Standard 12-digit keypad
* `:one_by_four` or `"1x4"`
* `:matrix` - A custom matrix to use for mapping keypresses to. Will take precedence over `:size` if supplied
    * Typically, these are `binary` values. However, these values are pulled from a List and in theory can be
    anything you want, i.e. an atom, integer, or even an anonymous function
  * `:row_pins` - List of integers which map to the corresponding GPIO pins to set as `INPUT` for keypad rows
* On raspberry pi, these will also set the internal resistor to `PULL_UP` and inactive HIGH. For all other hardware, you will probably need to make sure to place some 10K resistors between your pin and ground. see [Setup](SETUP.md) doc for some examples
* defaults to `[17, 27, 23, 24]`
  * `:col_pins` - List of integers which map to the corresponding GPIO pins to set as `OUTPUT` for keypad columns
* defaults to `[5, 6, 13, 26]`
"""
@doc """
Required callback to handle keypress events based on defined matrix values.
It's first argument will be the result of the keypress according to the defined matrix (most typically a
binary string, though you can use anything you'd like). The second argument is the state of the keypad
GenServer. You are required to return the state in this function.
There is an optional field in the state called `:input` which is initialized as an empty string `""`. You can
use this to keep input events from keypresses and build them as needed, such as putting multiple keypresses
together to determine a password. **Note**: You will be responsible for resetting this input as needed.
This is not required and you can optionally use other measures to keep rolling state, such as `Agent`.
```elixir
defmodule MyKeypad do
use Keypad
require Logger
@impl true
def handle_keypress(key, %{input: ""} = state) do
Logger.info("First Keypress: \#{key}")
Process.send_after(self(), :reset, 5000) # Reset input after 5 seconds
%{state | input: key}
end
@impl true
def handle_keypress(key, %{input: input} = state) do
Logger.info("Keypress: \#{key}")
%{state | input: input <> key}
end
@impl true
def handle_info(:reset, state) do
{:noreply, %{state | input: ""}}
end
end
```
"""
@callback handle_keypress(key :: any, map) :: map
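
  # Illustrative wiring for a 2x2 pad (matrix values can be any term, per the
  # moduledoc; the pin numbers here are only examples):
  #
  #     use Keypad, matrix: [["a", "b"], ["c", "d"]],
  #                 row_pins: [17, 27], col_pins: [5, 6]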
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
use GenServer, Keyword.drop(opts, [:row_pins, :col_pins, :matrix, :size])
@behaviour Keypad
alias __MODULE__
defmodule State do
defstruct row_pins: [17, 27, 23, 24], col_pins: [5, 6, 13, 26], input: "", matrix: nil, size: nil, last_message_at: 0
end
      # GPIO timestamps are in nanoseconds; require > 100 ms between presses (debounce)
      defguard valid_press(current, prev) when ((current - prev)/1.0e6) > 100
def start_link do
initial_state = struct(State, unquote(opts))
GenServer.start_link(__MODULE__, initial_state, name: __MODULE__)
end
@impl true
def init(state) do
send self(), :init
{:ok, state}
end
@impl true
def handle_info(:init, state) do
state = state
|> initialize_matrix_and_size()
|> initialize_rows_and_cols()
{:noreply, state}
end
@impl true
def handle_info({:circuits_gpio, pin_num, time, 0}, %{last_message_at: prev} = state) when valid_press(time, prev) do
{row_pin, row_index} = Stream.with_index(state.row_pins)
|> Enum.find(fn {row, _i} -> Circuits.GPIO.pin(row) == pin_num end)
val = state.col_pins
|> Stream.with_index()
|> Enum.reduce_while([], fn {col, col_index}, acc ->
# Write the column pin HIGH then read the row pin again.
# If the row is HIGH, then we've pin-pointed which column it is in
Circuits.GPIO.write(col, 1)
row_val = Circuits.GPIO.read(row_pin)
Circuits.GPIO.write(col, 0)
case row_val do
1 ->
# We can use the row and column indexes as x,y of the matrix
# to get which specific key character the press belongs to.
val = Enum.at(state.matrix, row_index) |> Enum.at(col_index)
{:halt, val}
0 ->
{:cont, []}
end
end)
state = apply(__MODULE__, :handle_keypress, [val, state])
{:noreply, %{state | last_message_at: time}}
end
# ignore messages that are too quick or on button release
@impl true
      def handle_info({:circuits_gpio, _, _time, _}, state), do: {:noreply, state}
defp initialize_rows_and_cols(%{size: <<x::binary-size(1), "x", y::binary-size(1)>>, row_pins: rows, col_pins: cols} = state) do
# x == row
# y == col
        if String.to_integer(x) != length(rows), do: raise ArgumentError, "expected #{x} row pins but #{length(rows)} were given"
        if String.to_integer(y) != length(cols), do: raise ArgumentError, "expected #{y} column pins but #{length(cols)} were given"
row_pins = for pin_num <- rows do
# Just use internal resistor if on a Raspberry Pi
{:ok, pin} = Circuits.GPIO.open(pin_num, :input, pull_mode: :pullup)
:ok = Circuits.GPIO.set_interrupts(pin, :falling)
pin
end
col_pins = for pin_num <- cols do
{:ok, pin} = Circuits.GPIO.open(pin_num, :output, initial_value: 0)
pin
end
%{state | row_pins: row_pins, col_pins: col_pins}
end
defp initialize_matrix_and_size(%{matrix: matrix} = state) when is_list(matrix) do
matrix
|> Enum.map(&length/1)
|> Enum.uniq
|> case do
[y_size] ->
%{state | size: "#{length(matrix)}x#{y_size}"}
_ ->
raise ArgumentError, "matrix columns must be equal\n#{inspect(matrix)}"
end
end
defp initialize_matrix_and_size(%{size: size, matrix: nil} = state) when not is_nil(size) do
%{state | matrix: matrix_for_size(size)}
|> initialize_matrix_and_size()
end
defp initialize_matrix_and_size(_) do
raise ArgumentError, "must provide a keypad size or matrix"
end
defp matrix_for_size(:four_by_four), do: matrix_for_size("4x4")
defp matrix_for_size(:four_by_three), do: matrix_for_size("4x3")
defp matrix_for_size(:one_by_four), do: matrix_for_size("1x4")
defp matrix_for_size("4x4") do
[
["1", "2", "3", "A"],
["4", "5", "6", "B"],
["7", "8", "9", "C"],
["*", "0", "#", "D"]
]
end
defp matrix_for_size("4x3") do
[
["1", "2", "3"],
["4", "5", "6"],
["7", "8", "9"],
["*", "0", "#"]
]
end
defp matrix_for_size("1x4") do
[
["1", "2", "3", "4"]
]
end
defp matrix_for_size(size), do: raise ArgumentError, "unsupported matrix size: #{inspect(size)}"
end
end
end
# (end of lib/keypad.ex)

defmodule Wand.CLI.Commands.Add do
use Wand.CLI.Command
@moduledoc """
# Add
Add elixir packages to wand.json
### Usage
**wand** add [package] [package] ... [flags]
Wand can be used to add packages from three different places: hex, git, or the local filesystem. [package] can either be the name, or name@version.
If a version is provided, the `--around` and `--exact` flags determine how the version is used.
### Hex.pm
Examples:
```
wand add poison
wand add [email protected]
```
### Git
Include the `--git` flag to pass a URI. The URI can be one of two base formats, and can end with an optional hash of the branch, tag, or ref to use
Examples:
```
wand add poison --git="https://github.com/devinus/poison.git"
wand add poison --git="<EMAIL>:devinus/poison"
wand add [email protected] --git="https://github.com/devinus/poison.git#test"
wand add poison --git="https://github.com/devinus/poison.git#3.1.0"
```
### Local Path
Local packages are described by passing in the `--path` flag corresponding to the path location
OR, for an umbrella application, when you need to include a sibling dependency, pass the app name, along with the `--in-umbrella` flag.
Examples:
```
wand add poison --path="/absolute/path/to/poison"
wand add poison --path="../../relative/path/"
wand add sibling_dependency --in-umbrella
```
## Options
The following flags are provided. They are boolean flags unless specified.
### Hex flags
```
--hex=NAME means that the local name of the dependency is different from its name on hex
E.g. wand add mypoison --hex=poison
--organization=ORGANIZATION corresponds to the private org to pull the package(s) from.
--repo=REPO An alternative repository to use. Configure with mix hex.repo. Default: hexpm
```
### Git flags
```
--sparse=FOLDER git checkout only a single folder, and use that
--submodules tells git to also initialize submodules
```
### Environment flags
Setting these flags specifies which environments to install the dependency. If none are provided, all environments are included.
```
--env=ENVIRONMENT where ENVIRONMENT is the environment to add. This flag can be added multiple times. Example: --env=prod --env=test
--dev is shorthand for --env=dev
--test is shorthand for --env=test
--prod is shorthand for --env=prod
--compile-env=ENVIRONMENT doesnt affect which environments the dependency is loaded from. Instead, it says "when compiling the dependency, which environment to use?". Defaults to --compile-env=prod
--optional will include the project for THIS project, but not reuire it should the main project be a dependency of another project.
```
### Dependency configuration
These flags deal with what happens with the dependency once configured
```
--runtime determines whether to start the dependency. Defaults to true
--read-app-file determines if the app file for the dependency is read. Defaults to true.
--download determines if mix deps.get is run after adding the package to wand.json. Defaults to true.
```
"""
defmodule Git do
@moduledoc false
defstruct git: nil,
ref: nil,
sparse: nil,
submodules: false
end
defmodule Hex do
@moduledoc false
defstruct hex: nil,
organization: nil,
repo: :hexpm
end
defmodule Path do
@moduledoc false
defstruct path: nil,
in_umbrella: false
end
defmodule Package do
@moduledoc false
@default_requirement Wand.Mode.get_requirement!(:caret, :latest)
defstruct compile_env: :prod,
details: %Hex{},
download: true,
only: nil,
name: nil,
optional: false,
override: false,
read_app_file: true,
requirement: @default_requirement,
runtime: true
end
@doc false
def moduledoc(), do: @moduledoc
@doc false
@impl true
def help(type), do: Wand.CLI.Commands.Add.Help.help(type)
@doc false
@impl true
def options() do
[
require_core: true,
load_wand_file: true
]
end
@doc false
@impl true
def validate(args), do: Wand.CLI.Commands.Add.Validate.validate(args)
@doc false
@impl true
def execute(packages, extras), do: Wand.CLI.Commands.Add.Execute.execute(packages, extras)
@doc false
@impl true
def after_save(packages), do: Wand.CLI.Commands.Add.Execute.after_save(packages)
@doc false
@impl true
def handle_error(key, data), do: Wand.CLI.Commands.Add.Error.handle_error(key, data)
end | lib/cli/commands/add.ex | 0.807688 | 0.862583 | add.ex | starcoder |
defmodule SmsPartCounter do
@moduledoc """
Module for detecting which encoding is being used and the character count of SMS text.
"""
gsm_7bit_ext_chars =
"@£$¥èéùìòÇ\nØø\rÅåΔ_ΦΓΛΩΠΨΣΘΞÆæßÉ !\"#¤%&'()*+,-./0123456789:;<=>?¡ABCDEFGHIJKLMNOPQRSTUVWXYZÄÖÑܧ¿abcdefghijklmnopqrstuvwxyzäöñüà" <>
"^{}\\[~]|€"
@gsm_7bit_char_set MapSet.new(String.codepoints(gsm_7bit_ext_chars))
@gsm_single_length 160
@gsm_multi_length 153
@unicode_single_length 70
@unicode_multi_length 67
@doc """
Counts the characters in a string.
## Examples
iex> SmsPartCounter.count("Hello")
5
iex> SmsPartCounter.count("আম")
2
"""
@spec count(binary) :: integer()
def count(str) when is_binary(str) do
String.codepoints(str)
|> Enum.count()
end
@doc """
Counts the part of a message that's encoded with GSM 7 Bit encoding.
The GSM 7 Bit Encoded messages have following length requirement:
  Single SMS Part Length: 160 Chars
Multi SMS Part Length: 153 Chars
## Examples
iex> SmsPartCounter.gsm_part_count("asdf")
1
"""
@spec gsm_part_count(binary) :: integer()
def gsm_part_count(sms) when is_binary(sms) do
sms_char_count = count(sms)
part_count(sms_char_count, @gsm_single_length, @gsm_multi_length)
end
@doc """
Counts the part of a message that's encoded with Unicode encoding.
The Unicode Encoded messages have following length requirement:
  Single SMS Part Length: 70 Chars
Multi SMS Part Length: 67 Chars
## Examples
iex> SmsPartCounter.unicode_part_count("আমি")
1
"""
@spec unicode_part_count(binary) :: integer()
def unicode_part_count(sms) when is_binary(sms) do
sms_char_count = count(sms)
part_count(sms_char_count, @unicode_single_length, @unicode_multi_length)
end
defp part_count(sms_char_count, single_count, multi_count) do
cond do
sms_char_count < single_count + 1 ->
1
sms_char_count > single_count ->
div(sms_char_count, multi_count) +
if rem(sms_char_count, multi_count) == 0, do: 0, else: 1
end
end
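
  # Worked example for GSM 7-bit: 160 chars -> 1 part; 161 chars ->
  # div(161, 153) = 1 with rem(161, 153) = 8 leftover -> 2 parts.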
@doc """
Detects the encoding of the SMS message based on the charset of GSM 7 bit Encoding.
It does a set difference between the characters in the sms and the gsm 7 bit encoding char set.
## Examples
iex> SmsPartCounter.detect_encoding("adb abc")
{:ok, "gsm_7bit"}
iex> SmsPartCounter.detect_encoding("আমি")
{:ok, "unicode"}
"""
  @spec detect_encoding(binary) :: {:ok, String.t()}
def detect_encoding(sms) when is_binary(sms) do
sms_char_set = MapSet.new(String.codepoints(sms))
diff = MapSet.difference(sms_char_set, @gsm_7bit_char_set)
empty_map_set?(diff)
|> case do
true ->
{:ok, "gsm_7bit"}
false ->
{:ok, "unicode"}
end
end
defp empty_map_set?(map_set = %MapSet{}) do
empty_map_set = MapSet.new
map_set
|> case do
^empty_map_set ->true
_ -> false
end
end
@doc """
Detects the encoding of the SMS then counts the part, returns all information
as a map of the following format:
%{
"encoding" => encoding,
"parts" => part count
}
## Examples
iex> SmsPartCounter.analyze("abc")
%{
"encoding" => "gsm_7bit",
"parts" => 1
}
"""
@spec analyze(binary) :: %{String.t() => String.t(), String.t() => integer()}
def analyze(sms) when is_binary(sms) do
{:ok, encoding} = detect_encoding(sms)
case encoding do
"gsm_7bit" ->
parts = gsm_part_count(sms)
%{
"encoding" => encoding,
"parts" => parts
}
"unicode" ->
parts = unicode_part_count(sms)
%{
"encoding" => encoding,
"parts" => parts
}
end
end
end
# (end of lib/sms_part_counter.ex)

defmodule Logger.Backends.Gelf do
@moduledoc """
Gelf Logger Backend
# GelfLogger [![Build Status](https://travis-ci.org/jschniper/gelf_logger.svg?branch=master)](https://travis-ci.org/jschniper/gelf_logger)
A logger backend that will generate Graylog Extended Log Format messages. The
current version only supports UDP messages.
## Configuration
In the config.exs, add gelf_logger as a backend like this:
```
config :logger,
backends: [:console, {Logger.Backends.Gelf, :gelf_logger}]
```
In addition, you'll need to pass in some configuration items to the backend
itself:
```
config :logger, :gelf_logger,
host: "127.0.0.1",
port: 12201,
application: "myapp",
compression: :gzip, # Defaults to :gzip, also accepts :zlib or :raw
metadata: [:request_id, :function, :module, :file, :line],
hostname: "hostname-override",
tags: [
list: "of",
extra: "tags"
]
```
In addition to the backend configuration, you might want to check the
[Logger configuration](https://hexdocs.pm/logger/Logger.html) for other
options that might be important for your particular environment. In
particular, modifying the `:utc_log` setting might be necessary
depending on your server configuration.
## Usage
Just use Logger as normal.
## Improvements
- [x] Tests
- [ ] TCP Support
- [x] Options for compression (none, zlib)
- [x] Send timestamp instead of relying on the Graylog server to set it
- [x] Find a better way of pulling the hostname
And probably many more. This is only out here because it might be useful to
someone in its current state. Pull requests are always welcome.
## Notes
Credit where credit is due, this would not exist without
[protofy/erl_graylog_sender](https://github.com/protofy/erl_graylog_sender).
"""
use GenEvent
@gelf_spec_version "1.1"
@max_size 1047040
@max_packet_size 8192
@max_payload_size 8180
@epoch :calendar.datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})
def init({__MODULE__, name}) do
if user = Process.whereis(:user) do
Process.group_leader(self(), user)
{:ok, configure(name, [])}
else
{:error, :ignore}
end
end
def handle_call({:configure, options}, state) do
{:ok, :ok, configure(state[:name], options)}
end
def handle_event({_level, gl, _event}, state) when node(gl) != node() do
{:ok, state}
end
def handle_event({level, _gl, {Logger, msg, ts, md}}, %{level: min_level} = state) do
if is_nil(min_level) or Logger.compare_levels(level, min_level) != :lt do
log_event(level, to_string(msg), ts, md, state)
end
{:ok, state}
end
## Helpers
defp configure(name, options) do
config = Keyword.merge(Application.get_env(:logger, name, []), options)
Application.put_env(:logger, name, config)
{:ok, socket} = :gen_udp.open(0, [:binary, {:active, false}])
{:ok, hostname} = :inet.gethostname
hostname = Keyword.get(config, :hostname, hostname)
    gl_host = Keyword.get(config, :host) |> to_charlist
port = Keyword.get(config, :port)
application = Keyword.get(config, :application)
level = Keyword.get(config, :level)
metadata = Keyword.get(config, :metadata, [])
compression = Keyword.get(config, :compression, :gzip)
tags = Keyword.get(config, :tags, [])
%{name: name, gl_host: gl_host, host: to_string(hostname), port: parse_port(port), metadata: metadata, level: level, application: application, socket: socket, compression: compression, tags: tags}
end
defp log_event(level, msg, ts, md, %{host: host, application: application, compression: compression} = state) do
%{
short_message: String.slice(msg, 0..79),
version: @gelf_spec_version,
host: host,
level: level_to_int(level),
timestamp: format_timestamp(ts),
_facility: application
}
|> full_message(msg)
|> additional_fields(md, state)
|> Poison.encode!()
|> compress(compression)
|> send_to_graylog(state)
end
defp send_to_graylog(data, state), do: do_send(data, byte_size(data), state)
defp do_send(_data, size, _state) when size > @max_size do
raise ArgumentError, message: "message too large"
end
defp do_send(data, size, %{socket: socket, gl_host: gl_host, port: port}) when size > @max_packet_size do
num = div(size, @max_packet_size)
num = if (num * @max_packet_size) < size, do: num + 1, else: num
id = :crypto.strong_rand_bytes(8)
send_chunks(socket, gl_host, port, data, id, :binary.encode_unsigned(num), 0, size)
end
defp do_send(data, _size, %{socket: socket, gl_host: gl_host, port: port}) do
:gen_udp.send(socket, gl_host, port, data)
end
defp send_chunks(socket, host, port, data, id, num, seq, size) when size > @max_payload_size do
<<payload :: binary - size(@max_payload_size), rest :: binary >> = data
:gen_udp.send(socket, host, port, make_chunk(payload, id, num, seq))
send_chunks(socket, host, port, rest, id, num, seq + 1, byte_size(rest))
end
defp send_chunks(socket, host, port, data, id, num, seq, _size) do
:gen_udp.send(socket, host, port, make_chunk(data, id, num, seq))
end
defp make_chunk(payload, id, num, seq) do
bin = :binary.encode_unsigned(seq)
<< 0x1e, 0x0f, id :: binary - size(8), bin :: binary - size(1), num :: binary - size(1), payload :: binary >>
end
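
  # Chunk layout per the GELF chunked-message format: the 0x1e 0x0f magic bytes,
  # an 8-byte message id, a 1-byte sequence number, a 1-byte total chunk count,
  # and then up to @max_payload_size (8180) bytes of payload.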
defp parse_port(port) when is_binary(port) do
{val, ""} = Integer.parse(to_string(port))
val
end
defp parse_port(port), do: port
defp additional_fields(data, metadata, %{metadata: metadata_fields, tags: tags}) do
fields =
metadata
|> Keyword.take(metadata_fields)
|> Keyword.merge(tags)
|> Map.new(fn({k,v}) -> {"_#{k}", to_string(v)} end)
|> Map.drop(["_id"]) # http://docs.graylog.org/en/2.2/pages/gelf.html "Libraries SHOULD not allow to send id as additional field (_id). Graylog server nodes omit this field automatically."
Map.merge(data, fields)
end
defp full_message(data, msg) when byte_size(msg) > 80, do: Map.put(data, :full_message, msg)
defp full_message(data, _msg), do: data
defp compress(data, :gzip), do: :zlib.gzip(data)
defp compress(data, :zlib), do: :zlib.compress(data)
defp compress(data, _), do: data
defp format_timestamp({{year, month, day}, {hour, min, sec, milli}}) do
{{year, month, day}, {hour, min, sec}}
|> :calendar.datetime_to_gregorian_seconds()
|> Kernel.-(@epoch)
|> Kernel.+(milli / 1000)
|> Float.round(3)
end
defp level_to_int(:debug), do: 7
defp level_to_int(:info), do: 6
defp level_to_int(:warn), do: 4
defp level_to_int(:error), do: 3
end
# (end of lib/gelf_logger.ex)

defmodule Neotomex.ExGrammar do
@moduledoc """
## Neotomex.ExGrammar
ExGrammar provides an interface for defining a PEG from within an
Elixir module.
For example:
defmodule Number do
use Neotomex.ExGrammar
@root true
define :digits, "[0-9]+" do
digits when is_list(digits) -> digits |> Enum.join |> String.to_integer
end
end
Number.parse!("42") = 42
Check the `examples/` folder for slightly more useful examples
of grammar specifications via `ExGrammar`.
By default, the grammar is validated at compile time. To disable validation,
add `@validate false` to the module.
## Definitions
A grammar consists of a set of definitions with optional
transformation functions, and a pointer to the root
definition. Using the `ExGrammar` interface, a definition
is specified using the `define` macro.
Here are some example usages of `define`:
# No transformation
define name, expression
# With transformation
define name, expression do
match -> match
end
# With a more complex transformation
# (yup, you could just reduce the list, but that doesn't make the point :)
define name, expression do
[x] ->
String.to_integer(x)
[x, y] when is_binary(x) and is_binary(y) ->
String.to_integer(x) + String.to_integer(y)
end
The root rule must be labeled via `@root true`.
"""
@doc false
defmacro __using__(_opts) do
quote do
import Neotomex.ExGrammar, only: :macros
@before_compile Neotomex.ExGrammar
@validate true
@root false
@_root_def nil
@_neotomex_definitions %{}
end
end
@doc false
defmacro __before_compile__(_env) do
quote unquote: false do
if @_root_def == nil do
raise Neotomex.Error, message: "no root in grammar"
end
@_neotomex_grammar Neotomex.Grammar.new(@_root_def, @_neotomex_definitions)
def grammar do
unquote(Macro.escape(@_neotomex_grammar))
end
@doc """
Parse the input using the defined grammar.
"""
@spec parse(binary) :: {:ok, any} | :mismatch | {:error, term}
def parse(input) do
case Neotomex.Grammar.parse(grammar(), input) do
{:ok, result, ""} ->
{:ok, result}
otherwise ->
otherwise
end
end
@doc """
Parse the input using the defined grammar, raising `Neotomex.Error`
on failure.
"""
@spec parse!(binary) :: any
def parse!(input) do
case parse(input) do
{:ok, result} ->
result
{:ok, _, _} ->
raise Neotomex.Grammar.ParseError, message: "parse incomplete"
:mismatch ->
raise Neotomex.Grammar.ParseError, error: :mismatch, message: "parse failed"
{:error, reason} ->
# TODO -- human readable reason
raise Neotomex.Grammar.ParseError, error: reason, message: "parse error"
end
end
def validate do
Neotomex.Grammar.validate(grammar())
end
def validate! do
case validate() do
:ok ->
:ok
otherwise ->
# TODO -- human readable reason
raise Neotomex.Grammar.ValidationError, error: otherwise
end
end
if @validate do
case Neotomex.Grammar.validate(@_neotomex_grammar) do
:ok ->
:ok
otherwise ->
throw {:error, {:validation, otherwise}}
end
end
end
end
@doc """
Create a new definition for the module's grammar.
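For example, mirroring the usage shown in the moduledoc:

    define :digits, "[0-9]+" do
      digits when is_list(digits) -> digits |> Enum.join() |> String.to_integer()
    end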
"""
defmacro define(identifier, expr, body \\ nil) do
quote bind_quoted: [identifier: identifier,
def_name: identifier_to_name(identifier),
neo_expr: Macro.escape(parse_expression(expr)),
branches: Macro.escape(body_to_branches(body))] do
if @root do
if @_root_def == nil do
@root false
@_root_def identifier
else
throw {:error, :more_than_one_root}
end
end
# Add the new definition with transform, when applicable
transform = if branches != [], do: {:transform, {__ENV__.module, def_name}}
@_neotomex_definitions Map.put(@_neotomex_definitions,
identifier,
{neo_expr, transform})
for {{args, guards}, body} <- branches do
def unquote(def_name)(unquote(args)) when unquote(guards) do
unquote(body)
end
end
end
end
## Private functions
# Wraps the Neotomex parse_expression function's return values
defp parse_expression(expr) do
case Neotomex.PEG.parse_expression(expr) do
:mismatch ->
throw {:error, :bad_expression}
{:ok, expr} ->
expr
end
end
# Returns the definition name for a given identifier
defp identifier_to_name(identifier) do
:"_transform_#{Atom.to_string(identifier)}"
end
# Convert a `define` body to [{{args, guards}, body}, ...], or [] for a nil body
defp body_to_branches(nil), do: []
defp body_to_branches(body) do
for {:->, _, [[branch_head], branch_body]} <- body[:do] do
{split_head(branch_head), branch_body}
end
end
# Split a `define` head into {args, guards}
defp split_head({:when, _, [arg, guards]}) do
{arg, guards}
end
defp split_head(arg), do: {arg, true}
end
defmodule ExWeb3EcRecover.SignedType do
@moduledoc """
This module was written based on nomenclature
and algorithm specified in the [EIP-712](https://eips.ethereum.org/EIPS/eip-712#specification)
"""
defmodule Encoder do
@moduledoc false
@callback encode_value(type :: String.t(), value :: any()) :: binary()
end
@default_encoder __MODULE__.HexStringEncoder
@max_depth 5
@typedoc """
The map shape of this field must conform to:
```
%{
"name" => String.t(),
"type" => String.t()
}
```
"""
@type field :: %{String.t() => String.t()}
@type types :: %{String.t() => [field()]}
@doc """
Returns a hash of a message.
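A hedged sketch (the types and message below are illustrative):

    types = %{"Person" => [%{"name" => "name", "type" => "string"}]}
    message = %{"name" => "Alice"}
    hash = ExWeb3EcRecover.SignedType.hash_message(message, types, "Person")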
"""
@spec hash_message(map(), types(), String.t(), Keyword.t()) :: hash :: binary()
def hash_message(message, types, primary_type, opts \\ []) do
encode(message, types, primary_type, opts)
|> ExKeccak.hash_256()
end
@doc """
Encodes a message according to EIP-712
"""
@spec encode(map(), types(), String.t(), Keyword.t()) :: binary()
def encode(message, types, primary_type, opts \\ []) do
encoder = Keyword.get(opts, :encoder, @default_encoder)
[
encode_types(types, primary_type),
encode_type(message, primary_type, types, encoder)
]
|> :erlang.iolist_to_binary()
end
@spec encode_type(map(), String.t(), types(), module()) :: binary()
def encode_type(data, primary_type, types, encoder) do
types[primary_type]
|> Enum.map_join(fn %{"name" => name, "type" => type} ->
value = data[name]
if custom_type?(types, type) do
hash_message(value, types, type)
else
encoder.encode_value(type, value)
end
end)
end
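# Builds and hashes the EIP-712 "encodeType" string for the primary type,
# e.g. "Mail(Person from,Person to,string contents)Person(...)": referenced
# struct types are appended after the primary type in alphabetical order.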
def encode_types(types, primary_type) do
sorted_deps =
types
|> find_deps(primary_type)
|> MapSet.to_list()
|> Enum.sort()
[primary_type | sorted_deps]
|> Enum.map(&format_dep(&1, types))
|> :erlang.iolist_to_binary()
|> ExKeccak.hash_256()
end
defp find_deps(types, primary_types, acc \\ MapSet.new(), depth \\ @max_depth) do
types[primary_types]
|> Enum.reduce(acc, fn %{"type" => type}, acc ->
if custom_type?(types, type) do
acc = MapSet.put(acc, type)
find_deps(types, type, acc, depth - 1)
else
acc
end
end)
end
defp custom_type?(types, type) do
# TODO verify not a builtin type
Map.has_key?(types, type)
end
defp format_dep(dep, types) do
arguments =
types[dep]
|> Enum.map(fn %{"name" => name, "type" => type} -> [type, " ", name] end)
|> Enum.intersperse(",")
[dep, "(", arguments, ")"]
end
end
defmodule Chunkr do
@external_resource "README.md"
@moduledoc "README.md"
|> File.read!()
|> String.split("<!-- MDOC !-->")
|> Enum.fetch!(1)
alias Chunkr.{Cursor, Opts, Page}
@default_max_limit 100
@doc false
defmacro __using__(opts) do
quote do
@default_chunkr_opts unquote(opts) ++ [{:repo, __MODULE__}, {:max_limit, unquote(@default_max_limit)}]
def paginate!(queryable, strategy, sort_dir, opts) do
unquote(__MODULE__).paginate!(queryable, strategy, sort_dir, opts ++ @default_chunkr_opts)
end
def paginate(queryable, strategy, sort_dir, opts) do
unquote(__MODULE__).paginate(queryable, strategy, sort_dir, opts ++ @default_chunkr_opts)
end
end
end
@spec paginate!(any, atom(), Chunkr.Opts.sort_dir(), keyword) :: Chunkr.Page.t()
@doc """
Same as `paginate/4`, but raises an error for invalid input.
"""
def paginate!(queryable, strategy, sort_dir, opts) do
case paginate(queryable, strategy, sort_dir, opts) do
{:ok, page} -> page
{:error, message} -> raise ArgumentError, message
end
end
@spec paginate(any, atom(), Chunkr.Opts.sort_dir(), keyword) ::
        {:error, String.t()} | {:ok, Chunkr.Page.t()}
@doc """
Paginates a query.
Extends the provided query with the necessary filtering, ordering, and cursor field
selection for the sake of pagination, then executes the query and returns a `Chunkr.Page`
of results.
## Options
* `:max_limit` — The maximum number of results the user can request for this query.
The default is #{@default_max_limit}.
* `:first` — Retrieve the first _n_ results; must be between `0` and `:max_limit`.
* `:last` — Retrieve the last _n_ results; must be between `0` and `:max_limit`.
* `:after` — Return results starting after the provided cursor; optionally pairs with `:first`.
* `:before` — Return results ending at the provided cursor; optionally pairs with `:last`.
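## Example

A hedged sketch, assuming a repo that calls `use Chunkr` and a
`:by_inserted_at` strategy defined in the configured query planner
(`User` is an illustrative schema):

    {:ok, page} = MyApp.Repo.paginate(User, :by_inserted_at, :desc, first: 20)
    page.raw_results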
"""
def paginate(queryable, strategy, sort_dir, options) do
case Opts.new(queryable, strategy, sort_dir, options) do
{:ok, opts} ->
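# Fetch one row more than requested so we can tell whether a further
# page exists in the paging direction.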
extended_rows =
queryable
|> apply_where(opts)
|> apply_order(opts)
|> apply_select(opts)
|> apply_limit(opts.limit + 1, opts)
|> opts.repo.all()
requested_rows = Enum.take(extended_rows, opts.limit)
rows_to_return =
case opts.paging_dir do
:forward -> requested_rows
:backward -> Enum.reverse(requested_rows)
end
{:ok,
%Page{
raw_results: rows_to_return,
has_previous_page: has_previous_page?(opts, extended_rows, requested_rows),
has_next_page: has_next_page?(opts, extended_rows, requested_rows),
start_cursor: List.first(rows_to_return) |> row_to_cursor(),
end_cursor: List.last(rows_to_return) |> row_to_cursor(),
opts: opts
}}
{:invalid_opts, message} ->
{:error, message}
end
end
defp has_previous_page?(%{paging_dir: :forward} = opts, _, _), do: !!opts.cursor
defp has_previous_page?(%{paging_dir: :backward}, rows, requested_rows),
do: rows != requested_rows
defp has_next_page?(%{paging_dir: :forward}, rows, requested_rows), do: rows != requested_rows
defp has_next_page?(%{paging_dir: :backward} = opts, _, _), do: !!opts.cursor
defp row_to_cursor(nil), do: nil
defp row_to_cursor({cursor_values, _record}), do: Cursor.encode(cursor_values)
defp apply_where(query, %{cursor: nil}), do: query
defp apply_where(query, opts) do
cursor_values = Cursor.decode!(opts.cursor)
opts.planner.beyond_cursor(
query,
opts.strategy,
opts.sort_dir,
opts.paging_dir,
cursor_values
)
end
defp apply_order(query, opts) do
opts.planner.apply_order(query, opts.strategy, opts.sort_dir, opts.paging_dir)
end
defp apply_select(query, opts) do
opts.planner.apply_select(query, opts.strategy)
end
defp apply_limit(query, limit, opts) do
opts.planner.apply_limit(query, limit)
end
end
defmodule Play.Asteroid do
@moduledoc """
Represents an asteroid in the game
"""
defstruct [:id, :t, :direction, :speed, :color, :size]
alias Scenic.Math.Vector2
alias Play.Asteroid
@type t :: %__MODULE__{
id: Play.ScenicEntity.id(),
t: Play.Scene.Asteroids.coords(),
direction: Play.Scene.Asteroids.direction(),
speed: float,
color: atom,
size: integer
}
def new(coords, size, direction, speed) do
%__MODULE__{
id: Play.Utils.make_id(),
t: coords,
direction: Vector2.normalize(direction),
speed: speed,
color: :white,
size: size
}
end
defimpl Play.ScenicEntity, for: __MODULE__ do
def id(%Asteroid{id: id}), do: id
def tick(%Asteroid{} = asteroid) do
%{asteroid | t: new_position(asteroid)}
end
def draw(%Asteroid{} = asteroid, graph) do
%{id: id, color: color, size: size, t: t} = asteroid
Scenic.Primitives.circle(graph, size, id: id, stroke: {3, color}, t: t)
end
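# Asteroids wrap around the screen: once one has fully left an edge, it
# re-enters from the opposite edge at the same row/column.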
defp new_position(%Asteroid{} = asteroid) do
{x, y} = asteroid.t
size = asteroid.size
screen_width = Play.Utils.screen_width()
screen_height = Play.Utils.screen_height()
case offscreen(asteroid) do
:north -> {x, screen_height + size}
:east -> {0 - size, y}
:south -> {x, 0 - size}
:west -> {screen_width + size, y}
:onscreen -> next_tick_onscreen_pos(asteroid)
end
end
defp next_tick_onscreen_pos(%Asteroid{} = asteroid) do
%{t: t, direction: direction, speed: speed} = asteroid
Vector2.add(t, Vector2.mul(direction, speed))
end
defp offscreen(%Asteroid{} = asteroid) do
  {x, y} = asteroid.t
  screen_width = Play.Utils.screen_width()
  screen_height = Play.Utils.screen_height()
  cond do
    x - asteroid.size > screen_width -> :east
    x + asteroid.size < 0 -> :west
    y - asteroid.size > screen_height -> :south
    y + asteroid.size < 0 -> :north
    true -> :onscreen
  end
end
end
defimpl Play.Collision, for: __MODULE__ do
def from(%Asteroid{t: {x, y}, size: size, id: entity_id}) do
  %Play.CollisionBox{
    id: Play.CollisionBox.id(entity_id),
    entity_id: entity_id,
    t: {x - size, y - size},
    size: size * 2
}
end
end
end
defmodule Phoenix.Presence do
@moduledoc """
Provides Presence tracking to processes and channels.
This behaviour provides presence features such as fetching
presences for a given topic, as well as handling diffs of
join and leave events as they occur in real-time. Using this
module defines a supervisor and a module that implements the
`Phoenix.Tracker` behaviour that uses `Phoenix.PubSub` to
broadcast presence updates.
In case you want to use only a subset of the functionality
provided by `Phoenix.Presence`, such as tracking processes
but without broadcasting updates, we recommend that you look
at the `Phoenix.Tracker` functionality from the `phoenix_pubsub`
project.
## Example Usage
Start by defining a presence module within your application
which uses `Phoenix.Presence` and provide the `:otp_app` which
holds your configuration, as well as the `:pubsub_server`.
defmodule MyApp.Presence do
use Phoenix.Presence, otp_app: :my_app,
pubsub_server: MyApp.PubSub
end
The `:pubsub_server` must point to an existing pubsub server
running in your application, which is included by default as
`MyApp.PubSub` for new applications.
Next, add the new supervisor to your supervision tree in `lib/my_app.ex`:
children = [
...
MyApp.Presence,
]
Once added, presences can be tracked in your channel after joining:
defmodule MyApp.MyChannel do
use MyAppWeb, :channel
alias MyApp.Presence
def join("some:topic", _params, socket) do
send(self(), :after_join)
{:ok, assign(socket, :user_id, ...)}
end
def handle_info(:after_join, socket) do
push(socket, "presence_state", Presence.list(socket))
{:ok, _} = Presence.track(socket, socket.assigns.user_id, %{
online_at: inspect(System.system_time(:second))
})
{:noreply, socket}
end
end
In the example above, the current presence information for
the socket's topic is pushed to the client as a `"presence_state"` event.
Next, `Presence.track` is used to register this
channel's process as a presence for the socket's user ID, with
a map of metadata.
Finally, a diff of presence join and leave events will be sent to the
client as they happen in real-time with the "presence_diff" event.
The diff structure will be a map of `:joins` and `:leaves` of the form:
%{joins: %{"123" => %{metas: [%{status: "away", phx_ref: ...}]},
leaves: %{"456" => %{metas: [%{status: "online", phx_ref: ...}]},
See `Phoenix.Presence.list/2` for more information on the presence
data structure.
## Fetching Presence Information
Presence metadata should be minimized and used to store small,
ephemeral state, such as a user's "online" or "away" status.
More detailed information, such as user details that need to
be fetched from the database, can be provided by overriding the `fetch/2`
function. The `fetch/2` callback is triggered when using `list/1`
and serves as a mechanism to fetch presence information a single time,
before broadcasting the information to all channel subscribers.
This prevents N query problems and gives you a single place to group
isolated data fetching to extend presence metadata. The function must
return a map of data matching the outlined Presence data structure,
including the `:metas` key, but can extend the map of information
to include any additional information. For example:
def fetch(_topic, presences) do
query =
from u in User,
where: u.id in ^Map.keys(presences),
select: {u.id, u}
users = query |> Repo.all() |> Enum.into(%{})
for {key, %{metas: metas}} <- presences, into: %{} do
{key, %{metas: metas, user: users[key]}}
end
end
The function above fetches all users from the database who
have registered presences for the given topic. The fetched
information is then extended with a `:user` key of the user's
information, while maintaining the required `:metas` field from the
original presence data.
"""
alias Phoenix.Socket.Broadcast
@type presences :: %{String.t => %{metas: [map()]}}
@type presence :: %{key: String.t, meta: map()}
@type topic :: String.t
@doc false
@callback start_link(Keyword.t) ::
{:ok, pid()} |
{:error, reason :: term()} |
:ignore
@doc false
@callback init(Keyword.t) :: {:ok, state :: term} | {:error, reason :: term}
@doc """
Track a channel's process as a presence.
Tracked presences are grouped by `key`, cast as a string. For example, to
group each user's channels together, use user IDs as keys. Each presence can
be associated with a map of metadata to store small, ephemeral state, such as
a user's online status. To store detailed information, see `fetch/2`.
## Example
alias MyApp.Presence
def handle_info(:after_join, socket) do
{:ok, _} = Presence.track(socket, socket.assigns.user_id, %{
online_at: inspect(System.system_time(:second))
})
{:noreply, socket}
end
"""
@callback track(socket :: Phoenix.Socket.t, key :: String.t, meta :: map()) ::
{:ok, ref :: binary()} |
{:error, reason :: term()}
@doc """
Track an arbitrary process as a presence.
Same as `track/3`, except it tracks any process by `topic` and `key`.
"""
@callback track(pid, topic, key :: String.t, meta :: map()) ::
{:ok, ref :: binary()} |
{:error, reason :: term()}
@doc """
Stop tracking a channel's process.
"""
@callback untrack(socket :: Phoenix.Socket.t, key :: String.t) :: :ok
@doc """
Stop tracking a process.
"""
@callback untrack(pid, topic, key :: String.t) :: :ok
@doc """
Update a channel presence's metadata.
Replace a presence's metadata by passing a new map or a function that takes
the current map and returns a new one.
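For example (assuming the user id was stored on the socket at join):

    {:ok, _ref} = Presence.update(socket, socket.assigns.user_id, fn meta ->
      Map.put(meta, :status, "away")
    end)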
"""
@callback update(socket :: Phoenix.Socket.t, key :: String.t, meta :: map() | (map() -> map())) ::
{:ok, ref :: binary()} |
{:error, reason :: term()}
@doc """
Update a process presence's metadata.
Same as `update/3`, but for an arbitrary process.
"""
@callback update(pid, topic, key :: String.t, meta :: map() | (map() -> map())) ::
{:ok, ref :: binary()} |
{:error, reason :: term()}
@doc """
Extend presence information with additional data.
When `list/1` is used to list all presences of the given `topic`, this
callback is triggered once to modify the result before it is broadcasted to
all channel subscribers. This avoids N query problems and provides a single
place to extend presence metadata. You must return a map of data matching the
original result, including the `:metas` key, but can extend the map to include
any additional information.
The default implementation simply passes `presences` through unchanged.
## Example
def fetch(_topic, presences) do
query =
from u in User,
where: u.id in ^Map.keys(presences),
select: {u.id, u}
users = query |> Repo.all() |> Enum.into(%{})
for {key, %{metas: metas}} <- presences, into: %{} do
{key, %{metas: metas, user: users[key]}}
end
end
"""
@callback fetch(topic, presences) :: presences
@doc """
Returns presences for a topic or a socket.
Calls `list/2` with presence module.
"""
@callback list(Phoenix.Socket.t | topic) :: presences
@doc false
@callback handle_diff(%{topic => {joins :: presences, leaves :: presences}}, state :: term) :: {:ok, state :: term}
defmacro __using__(opts) do
quote do
@opts unquote(opts)
@otp_app @opts[:otp_app] || raise "presence expects :otp_app to be given"
@behaviour unquote(__MODULE__)
@task_supervisor Module.concat(__MODULE__, TaskSupervisor)
@doc false
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :supervisor
}
end
def start_link(opts \\ []) do
opts = Keyword.merge(@opts, opts)
Phoenix.Presence.start_link(__MODULE__, @otp_app, @task_supervisor, opts)
end
def init(opts) do
server = Keyword.fetch!(opts, :pubsub_server)
{:ok, %{pubsub_server: server,
node_name: Phoenix.PubSub.node_name(server),
task_sup: @task_supervisor}}
end
def track(%Phoenix.Socket{} = socket, key, meta) do
track(socket.channel_pid, socket.topic, key, meta)
end
def track(pid, topic, key, meta) do
Phoenix.Tracker.track(__MODULE__, pid, topic, key, meta)
end
def untrack(%Phoenix.Socket{} = socket, key) do
untrack(socket.channel_pid, socket.topic, key)
end
def untrack(pid, topic, key) do
Phoenix.Tracker.untrack(__MODULE__, pid, topic, key)
end
def update(%Phoenix.Socket{} = socket, key, meta) do
update(socket.channel_pid, socket.topic, key, meta)
end
def update(pid, topic, key, meta) do
Phoenix.Tracker.update(__MODULE__, pid, topic, key, meta)
end
def fetch(_topic, presences), do: presences
def list(%Phoenix.Socket{topic: topic}), do: list(topic)
def list(topic) do
Phoenix.Presence.list(__MODULE__, topic)
end
def get_by_key(%Phoenix.Socket{topic: topic}, key), do: get_by_key(topic, key)
def get_by_key(topic, key) do
Phoenix.Presence.get_by_key(__MODULE__, topic, key)
end
def handle_diff(diff, state) do
Phoenix.Presence.handle_diff(__MODULE__,
diff, state.node_name, state.pubsub_server, state.task_sup
)
{:ok, state}
end
defoverridable fetch: 2, child_spec: 1
end
end
@doc false
def start_link(module, otp_app, task_supervisor, opts) do
import Supervisor.Spec
opts =
opts
|> Keyword.merge(Application.get_env(otp_app, module) || [])
|> Keyword.put(:name, module)
children = [
supervisor(Task.Supervisor, [[name: task_supervisor]]),
worker(Phoenix.Tracker, [module, opts, opts])
]
Supervisor.start_link(children, strategy: :one_for_one)
end
@doc false
def handle_diff(module, diff, node_name, pubsub_server, sup_name) do
Task.Supervisor.start_child(sup_name, fn ->
for {topic, {joins, leaves}} <- diff do
msg = %Broadcast{topic: topic, event: "presence_diff", payload: %{
joins: module.fetch(topic, group(joins)),
leaves: module.fetch(topic, group(leaves))
}}
Phoenix.PubSub.direct_broadcast!(node_name, pubsub_server, topic, msg)
end
end)
end
@doc """
Returns presences for a topic.
## Presence data structure
The presence information is returned as a map with presences grouped
by key, cast as a string, and accumulated metadata, with the following form:
%{key => %{metas: [%{phx_ref: ..., ...}, ...]}}
For example, imagine a user with id `123` online from two
different devices, as well as a user with id `456` online from
just one device. The following presence information might be returned:
%{"123" => %{metas: [%{status: "away", phx_ref: ...},
%{status: "online", phx_ref: ...}]},
"456" => %{metas: [%{status: "online", phx_ref: ...}]}}
The keys of the map will usually point to a resource ID. The value
will contain a map with a `:metas` key containing a list of metadata
for each resource. Additionally, every metadata entry will contain a
`:phx_ref` key which can be used to uniquely identify metadata for a
given key. In the event that the metadata was previously updated,
a `:phx_ref_prev` key will be present containing the previous
`:phx_ref` value.
"""
def list(module, topic) do
grouped =
module
|> Phoenix.Tracker.list(topic)
|> group()
module.fetch(topic, grouped)
end
@doc """
Returns the map of presence metadata for a topic-key pair.
## Examples
Uses the same data format as `Phoenix.Presence.list/2`, but only
returns metadata for the presences under a topic and key pair. For example,
a user with key `"user1"`, connected to the same chat room `"room:1"` from two
devices, could return:
iex> MyPresence.get_by_key("room:1", "user1")
%{name: "User 1", metas: [%{device: "Desktop"}, %{device: "Mobile"}]}
Like `Phoenix.Presence.list/2`, the presence metadata is passed to the `fetch`
callback of your presence module to fetch any additional information.
"""
def get_by_key(module, topic, key) do
string_key = to_string(key)
case Phoenix.Tracker.get_by_key(module, topic, key) do
[] -> []
[_|_] = pid_metas ->
metas = Enum.map(pid_metas, fn {_pid, meta} -> meta end)
%{^string_key => fetched_metas} = module.fetch(topic, %{string_key => %{metas: metas}})
fetched_metas
end
end
defp group(presences) do
presences
|> Enum.reverse()
|> Enum.reduce(%{}, fn {key, meta}, acc ->
Map.update(acc, to_string(key), %{metas: [meta]}, fn %{metas: metas} ->
%{metas: [meta | metas]}
end)
end)
end
end
defmodule Sanbase.Cryptocompare.HistoricalWorker do
@moduledoc ~s"""
An Oban Worker that processes the jobs in the cryptocompare_historical_jobs_queue
queue.
An Oban Worker has one main function `perform/1` which receives as argument
one record from the oban jobs table. If it returns :ok or {:ok, _}, then the
job is considered successful and is completed. In order to have retries in case
of Kafka downtime, the export to Kafka is done via persist_sync/2. This guarantees
that if get_data/3 and export_data/1 return :ok, then the data is in Kafka.
If perform/1 returns :error or {:error, _} then the task is scheduled for retry.
An exponential backoff algorithm is used to decide when to retry. With the
default of 20 attempts and the default algorithm, the first retry happens after
a few seconds and the last attempt is made after roughly 3 weeks.
"""
use Oban.Worker,
queue: :cryptocompare_historical_jobs_queue,
unique: [period: 60 * 86_400]
import Sanbase.Cryptocompare.HTTPHeaderUtils, only: [parse_value_list: 1]
require Logger
require Sanbase.Utils.Config, as: Config
@url "https://min-api.cryptocompare.com/data/histo/minute/daily"
@oban_conf_name :oban_scrapers
def queue(), do: :cryptocompare_historical_jobs_queue
def conf_name(), do: @oban_conf_name
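# A hedged usage sketch: enqueue one day of minutely OHLCV history for a
# pair. The argument keys follow perform/1 below; the exact call site in
# the app may differ.
#
#     args = %{"base_asset" => "BTC", "quote_asset" => "USD", "date" => "2021-01-01"}
#     Oban.insert(:oban_scrapers, Sanbase.Cryptocompare.HistoricalWorker.new(args))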
@impl Oban.Worker
def perform(%Oban.Job{args: args}) do
%{"base_asset" => base_asset, "quote_asset" => quote_asset, "date" => date} = args
t1 = System.monotonic_time(:millisecond)
should_snooze? = base_asset not in available_base_assets()
case should_snooze? do
true ->
{:snooze, 86_400}
false ->
case get_data(base_asset, quote_asset, date) do
{:ok, data} ->
t2 = System.monotonic_time(:millisecond)
result = export_data(data)
t3 = System.monotonic_time(:millisecond)
log_time_spent(t1, t2, t3)
result
{:error, error} ->
{:error, error}
end
end
end
@impl Oban.Worker
def timeout(_job), do: :timer.minutes(5)
# Private functions
defp log_time_spent(t1, t2, t3) do
get_data_time = ((t2 - t1) / 1000) |> Float.round(2)
export_data_time = ((t3 - t2) / 1000) |> Float.round(2)
Logger.info(
"[Cryptocompare Historical] Get data: #{get_data_time}s, Export data: #{export_data_time}s"
)
end
defp available_base_assets() do
# TODO: Remove once all the used assets are scraped
# In order to prioritize the jobs that are more important, snooze
# the jobs whose base asset is not stored in our DBs.
cache_key = {__MODULE__, :available_base_assets}
{:ok, assets} =
Sanbase.Cache.get_or_store(cache_key, fn ->
data =
Sanbase.Model.Project.SourceSlugMapping.get_source_slug_mappings("cryptocompare")
|> Enum.map(&elem(&1, 0))
{:ok, data}
end)
assets
end
@spec get_data(String.t(), String.t(), String.t()) :: {:ok, list(map())} | {:error, any()}
def get_data(base_asset, quote_asset, date) do
query_params = [
fsym: base_asset,
tsym: quote_asset,
e: "CCCAGG",
date: date
]
headers = [{"authorization", "Apikey #{api_key()}"}]
url = @url <> "?" <> URI.encode_query(query_params)
case HTTPoison.get(url, headers, recv_timeout: 15_000) do
{:ok, %HTTPoison.Response{status_code: 200, body: body} = resp} ->
case rate_limited?(resp) do
false -> csv_to_ohlcv_list(body)
biggest_rate_limited_window -> handle_rate_limit(resp, biggest_rate_limited_window)
end
{:ok, %HTTPoison.Response{status_code: status_code}} ->
  {:error, "Unexpected HTTP status code: #{status_code}"}
{:error, error} ->
{:error, error}
end
end
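# Cryptocompare reports the remaining quota per rate-limit time window in
# the "X-RateLimit-Remaining-All" header. A zero in any window means we are
# rate limited; the pause duration is keyed off the largest exhausted window.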
defp rate_limited?(resp) do
zero_remainings =
get_header(resp, "X-RateLimit-Remaining-All")
|> elem(1)
|> parse_value_list()
|> Enum.filter(&(&1.value == 0))
case zero_remainings do
[] -> false
list -> Enum.max_by(list, & &1.time_period).time_period
end
end
defp handle_rate_limit(resp, biggest_rate_limited_window) do
Sanbase.Cryptocompare.HistoricalScheduler.pause()
header_value =
get_header(resp, "X-RateLimit-Reset-All")
|> elem(1)
Logger.info(
"[Cryptocompare Historical] Rate limited. X-RateLimit-Reset-All header: #{header_value}"
)
reset_after_seconds =
header_value
|> parse_value_list()
|> Enum.find(&(&1.time_period == biggest_rate_limited_window))
|> Map.get(:value)
data =
%{"type" => "resume"}
|> Sanbase.Cryptocompare.PauseResumeWorker.new(schedule_in: reset_after_seconds)
Oban.insert(@oban_conf_name, data)
{:error, :rate_limit}
end
defp get_header(%HTTPoison.Response{} = resp, header) do
Enum.find(resp.headers, &match?({^header, _}, &1))
end
defp csv_to_ohlcv_list(data) do
result =
data
|> String.trim()
|> NimbleCSV.RFC4180.parse_string()
|> Enum.map(&csv_line_to_point/1)
case Enum.find_index(result, &(&1 == :error)) do
nil -> {:ok, result}
_index -> {:error, "[Cryptocompare Historical] NaN values found in place of prices"}
end
end
defp csv_line_to_point([time, fsym, tsym, o, h, l, c, vol_from, vol_to] = list) do
case Enum.any?(list, &(&1 == "NaN")) do
true ->
:error
false ->
[o, h, l, c, vol_from, vol_to] =
[o, h, l, c, vol_from, vol_to] |> Enum.map(&Sanbase.Math.to_float/1)
%{
source: "cryptocompare",
interval_seconds: 60,
datetime: time |> String.to_integer() |> DateTime.from_unix!(),
base_asset: fsym,
quote_asset: tsym,
open: o,
high: h,
low: l,
close: c,
volume_from: vol_from,
volume_to: vol_to
}
end
end
defp csv_line_to_point([time, "CCCAGG", fsym, tsym, c, h, l, o, vol_from, vol_to]) do
csv_line_to_point([time, fsym, tsym, o, h, l, c, vol_from, vol_to])
end
defp export_data(data) do
export_asset_ohlcv_price_pairs_topic(data)
export_asset_price_pairs_only_topic(data)
end
defp export_asset_ohlcv_price_pairs_topic(data) do
data = Enum.map(data, &to_ohlcv_price_point/1)
topic = Config.module_get!(Sanbase.KafkaExporter, :asset_ohlcv_price_pairs_topic)
Sanbase.KafkaExporter.send_data_to_topic_from_current_process(data, topic)
end
defp export_asset_price_pairs_only_topic(data) do
data = Enum.map(data, &to_price_only_point/1)
topic = Config.module_get!(Sanbase.KafkaExporter, :asset_price_pairs_only_topic)
Sanbase.KafkaExporter.send_data_to_topic_from_current_process(data, topic)
end
defp to_ohlcv_price_point(point) do
point
|> Sanbase.Cryptocompare.OHLCVPricePoint.new()
|> Sanbase.Cryptocompare.OHLCVPricePoint.json_kv_tuple()
end
defp to_price_only_point(point) do
%{
price: point.close,
datetime: point.datetime,
base_asset: point.base_asset,
quote_asset: point.quote_asset,
source: point.source
}
|> Sanbase.Cryptocompare.PriceOnlyPoint.new()
|> Sanbase.Cryptocompare.PriceOnlyPoint.json_kv_tuple()
end
defp api_key(), do: Config.module_get(Sanbase.Cryptocompare, :api_key)
end
defmodule AWS.DirectoryService do
@moduledoc """
AWS Directory Service
AWS Directory Service is a web service that makes it easy for you to setup
and run directories in the AWS cloud, or connect your AWS resources with an
existing on-premises Microsoft Active Directory. This guide provides
detailed information about AWS Directory Service operations, data types,
parameters, and errors. For information about AWS Directory Services
features, see [AWS Directory
Service](https://aws.amazon.com/directoryservice/) and the [AWS Directory
Service Administration
Guide](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/what_is.html).
<note> AWS provides SDKs that consist of libraries and sample code for
various programming languages and platforms (Java, Ruby, .Net, iOS,
Android, etc.). The SDKs provide a convenient way to create programmatic
access to AWS Directory Service and other AWS services. For more
information about the AWS SDKs, including how to download and install them,
see [Tools for Amazon Web Services](http://aws.amazon.com/tools/).
</note>
"""
@doc """
If the DNS server for your on-premises domain uses a publicly addressable
IP address, you must add a CIDR address block to correctly route traffic to
and from your Microsoft AD on Amazon Web Services. *AddIpRoutes* adds this
address block. You can also use *AddIpRoutes* to facilitate routing traffic
that uses public IP ranges from your Microsoft AD on AWS to a peer VPC.
Before you call *AddIpRoutes*, ensure that all of the required permissions
have been explicitly granted through a policy. For details about what
permissions are required to run the *AddIpRoutes* operation, see [AWS
Directory Service API Permissions: Actions, Resources, and Conditions
Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def add_ip_routes(client, input, options \\ []) do
request(client, "AddIpRoutes", input, options)
end
@doc """
Adds or overwrites one or more tags for the specified directory. Each
directory can have a maximum of 50 tags. Each tag consists of a key and
optional value. Tag keys must be unique to each resource.
"""
def add_tags_to_resource(client, input, options \\ []) do
request(client, "AddTagsToResource", input, options)
end
@doc """
Cancels an in-progress schema extension to a Microsoft AD directory. Once a
schema extension has started replicating to all domain controllers, the
task can no longer be canceled. A schema extension can be canceled during
any of the following states; `Initializing`, `CreatingSnapshot`, and
`UpdatingSchema`.
"""
def cancel_schema_extension(client, input, options \\ []) do
request(client, "CancelSchemaExtension", input, options)
end
@doc """
Creates an AD Connector to connect to an on-premises directory.
Before you call *ConnectDirectory*, ensure that all of the required
permissions have been explicitly granted through a policy. For details
about what permissions are required to run the *ConnectDirectory*
operation, see [AWS Directory Service API Permissions: Actions, Resources,
and Conditions
Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def connect_directory(client, input, options \\ []) do
request(client, "ConnectDirectory", input, options)
end
@doc """
Creates an alias for a directory and assigns the alias to the directory.
The alias is used to construct the access URL for the directory, such as
`http://<alias>.awsapps.com`.
<important> After an alias has been created, it cannot be deleted or
reused, so this operation should only be used when absolutely necessary.
</important>
"""
def create_alias(client, input, options \\ []) do
request(client, "CreateAlias", input, options)
end
@doc """
Creates a computer account in the specified directory, and joins the
computer to the directory.
"""
def create_computer(client, input, options \\ []) do
request(client, "CreateComputer", input, options)
end
@doc """
Creates a conditional forwarder associated with your AWS directory.
Conditional forwarders are required in order to set up a trust relationship
with another domain. The conditional forwarder points to the trusted
domain.
"""
def create_conditional_forwarder(client, input, options \\ []) do
request(client, "CreateConditionalForwarder", input, options)
end
@doc """
Creates a Simple AD directory.
Before you call *CreateDirectory*, ensure that all of the required
permissions have been explicitly granted through a policy. For details
about what permissions are required to run the *CreateDirectory* operation,
see [AWS Directory Service API Permissions: Actions, Resources, and
Conditions
Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def create_directory(client, input, options \\ []) do
request(client, "CreateDirectory", input, options)
end
@doc """
Creates a Microsoft AD in the AWS cloud.
Before you call *CreateMicrosoftAD*, ensure that all of the required
permissions have been explicitly granted through a policy. For details
about what permissions are required to run the *CreateMicrosoftAD*
operation, see [AWS Directory Service API Permissions: Actions, Resources,
and Conditions
Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def create_microsoft_a_d(client, input, options \\ []) do
request(client, "CreateMicrosoftAD", input, options)
end
@doc """
Creates a snapshot of a Simple AD or Microsoft AD directory in the AWS
cloud.
<note> You cannot take snapshots of AD Connector directories.
</note>
"""
def create_snapshot(client, input, options \\ []) do
request(client, "CreateSnapshot", input, options)
end
@doc """
AWS Directory Service for Microsoft Active Directory allows you to
configure trust relationships. For example, you can establish a trust
between your Microsoft AD in the AWS cloud, and your existing on-premises
Microsoft Active Directory. This would allow you to provide users and
groups access to resources in either domain, with a single set of
credentials.
This action initiates the creation of the AWS side of a trust relationship
between a Microsoft AD in the AWS cloud and an external domain.
"""
def create_trust(client, input, options \\ []) do
request(client, "CreateTrust", input, options)
end
@doc """
Deletes a conditional forwarder that has been set up for your AWS
directory.
"""
def delete_conditional_forwarder(client, input, options \\ []) do
request(client, "DeleteConditionalForwarder", input, options)
end
@doc """
Deletes an AWS Directory Service directory.
Before you call *DeleteDirectory*, ensure that all of the required
permissions have been explicitly granted through a policy. For details
about what permissions are required to run the *DeleteDirectory* operation,
see [AWS Directory Service API Permissions: Actions, Resources, and
Conditions
Reference](http://docs.aws.amazon.com/directoryservice/latest/admin-guide/UsingWithDS_IAM_ResourcePermissions.html).
"""
def delete_directory(client, input, options \\ []) do
request(client, "DeleteDirectory", input, options)
end
@doc """
Deletes a directory snapshot.
"""
def delete_snapshot(client, input, options \\ []) do
request(client, "DeleteSnapshot", input, options)
end
@doc """
Deletes an existing trust relationship between your Microsoft AD in the AWS
cloud and an external domain.
"""
def delete_trust(client, input, options \\ []) do
request(client, "DeleteTrust", input, options)
end
@doc """
Removes the specified directory as a publisher to the specified SNS topic.
"""
def deregister_event_topic(client, input, options \\ []) do
request(client, "DeregisterEventTopic", input, options)
end
@doc """
Obtains information about the conditional forwarders for this account.
If no input parameters are provided for RemoteDomainNames, this request
describes all conditional forwarders for the specified directory ID.
"""
def describe_conditional_forwarders(client, input, options \\ []) do
request(client, "DescribeConditionalForwarders", input, options)
end
@doc """
Obtains information about the directories that belong to this account.
You can retrieve information about specific directories by passing the
directory identifiers in the *DirectoryIds* parameter. Otherwise, all
directories that belong to the current account are returned.
This operation supports pagination with the use of the *NextToken* request
and response parameters. If more results are available, the
*DescribeDirectoriesResult.NextToken* member contains a token that you pass
in the next call to `DescribeDirectories` to retrieve the next set of
items.
You can also specify a maximum number of return results with the *Limit*
parameter.
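## Example

A hedged sketch; constructing `client` depends on the aws-elixir version in
use, but it must carry the credentials plus the `region`, `endpoint`,
`proto`, and `port` fields used when signing and building the request URL:

    {:ok, result, _response} = AWS.DirectoryService.describe_directories(client, %{})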
"""
def describe_directories(client, input, options \\ []) do
request(client, "DescribeDirectories", input, options)
end
@doc """
Obtains information about which SNS topics receive status messages from the
specified directory.
If no input parameters are provided, such as DirectoryId or TopicName, this
request describes all of the associations in the account.
"""
def describe_event_topics(client, input, options \\ []) do
request(client, "DescribeEventTopics", input, options)
end
@doc """
Obtains information about the directory snapshots that belong to this
account.
This operation supports pagination with the use of the *NextToken* request
and response parameters. If more results are available, the
*DescribeSnapshots.NextToken* member contains a token that you pass in the
next call to `DescribeSnapshots` to retrieve the next set of items.
You can also specify a maximum number of return results with the *Limit*
parameter.
"""
def describe_snapshots(client, input, options \\ []) do
request(client, "DescribeSnapshots", input, options)
end
@doc """
Obtains information about the trust relationships for this account.
If no input parameters are provided, such as DirectoryId or TrustIds, this
request describes all the trust relationships belonging to the account.
"""
def describe_trusts(client, input, options \\ []) do
request(client, "DescribeTrusts", input, options)
end
@doc """
Disables multi-factor authentication (MFA) with the Remote Authentication
Dial In User Service (RADIUS) server for an AD Connector directory.
"""
def disable_radius(client, input, options \\ []) do
request(client, "DisableRadius", input, options)
end
@doc """
Disables single-sign on for a directory.
"""
def disable_sso(client, input, options \\ []) do
request(client, "DisableSso", input, options)
end
@doc """
Enables multi-factor authentication (MFA) with the Remote Authentication
Dial In User Service (RADIUS) server for an AD Connector directory.
"""
def enable_radius(client, input, options \\ []) do
request(client, "EnableRadius", input, options)
end
@doc """
Enables single sign-on for a directory.
"""
def enable_sso(client, input, options \\ []) do
request(client, "EnableSso", input, options)
end
@doc """
Obtains directory limit information for the current region.
"""
def get_directory_limits(client, input, options \\ []) do
request(client, "GetDirectoryLimits", input, options)
end
@doc """
Obtains the manual snapshot limits for a directory.
"""
def get_snapshot_limits(client, input, options \\ []) do
request(client, "GetSnapshotLimits", input, options)
end
@doc """
Lists the address blocks that you have added to a directory.
"""
def list_ip_routes(client, input, options \\ []) do
request(client, "ListIpRoutes", input, options)
end
@doc """
Lists all schema extensions applied to a Microsoft AD Directory.
"""
def list_schema_extensions(client, input, options \\ []) do
request(client, "ListSchemaExtensions", input, options)
end
@doc """
Lists all tags on a directory.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Associates a directory with an SNS topic. This establishes the directory as
a publisher to the specified SNS topic. You can then receive email or text
(SMS) messages when the status of your directory changes. You get notified
if your directory goes from an Active status to an Impaired or Inoperable
status. You also receive a notification when the directory returns to an
Active status.
"""
def register_event_topic(client, input, options \\ []) do
request(client, "RegisterEventTopic", input, options)
end
@doc """
Removes IP address blocks from a directory.
"""
def remove_ip_routes(client, input, options \\ []) do
request(client, "RemoveIpRoutes", input, options)
end
@doc """
Removes tags from a directory.
"""
def remove_tags_from_resource(client, input, options \\ []) do
request(client, "RemoveTagsFromResource", input, options)
end
@doc """
Restores a directory using an existing directory snapshot.
When you restore a directory from a snapshot, any changes made to the
directory after the snapshot date are overwritten.
This action returns as soon as the restore operation is initiated. You can
monitor the progress of the restore operation by calling the
`DescribeDirectories` operation with the directory identifier. When the
**DirectoryDescription.Stage** value changes to `Active`, the restore
operation is complete.
"""
def restore_from_snapshot(client, input, options \\ []) do
request(client, "RestoreFromSnapshot", input, options)
end
@doc """
Applies a schema extension to a Microsoft AD directory.
"""
def start_schema_extension(client, input, options \\ []) do
request(client, "StartSchemaExtension", input, options)
end
@doc """
Updates a conditional forwarder that has been set up for your AWS
directory.
"""
def update_conditional_forwarder(client, input, options \\ []) do
request(client, "UpdateConditionalForwarder", input, options)
end
@doc """
Updates the Remote Authentication Dial In User Service (RADIUS) server
information for an AD Connector directory.
"""
def update_radius(client, input, options \\ []) do
request(client, "UpdateRadius", input, options)
end
@doc """
AWS Directory Service for Microsoft Active Directory allows you to
configure and verify trust relationships.
This action verifies a trust relationship between your Microsoft AD in the
AWS cloud and an external domain.
"""
def verify_trust(client, input, options \\ []) do
request(client, "VerifyTrust", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "ds"}
host = get_host("ds", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "DirectoryService_20150416.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
alias Plug.Conn.Unfetched
defmodule Plug.Conn do
@moduledoc """
The Plug connection.
This module defines a `Plug.Conn` struct and the main functions
for working with Plug connections.
Note request headers are normalized to lowercase and response
headers are expected to have lower-case keys.
## Request fields
These fields contain request information:
* `host` - the requested host as a binary, example: `"www.example.com"`
* `method` - the request method as a binary, example: `"GET"`
* `path_info` - the path split into segments, example: `["hello", "world"]`
* `script_name` - the initial portion of the URL's path that corresponds to the application
  routing, as segments, example: `["sub", "app"]`
* `request_path` - the requested path, example: `/trailing/and//double//slashes/`
* `port` - the requested port as an integer, example: `80`
* `peer` - the actual TCP peer that connected, example: `{{127, 0, 0, 1}, 12345}`. Often this
is not the actual IP and port of the client, but rather of a load-balancer or request-router.
* `remote_ip` - the IP of the client, example: `{151, 236, 219, 228}`. This field is meant to
be overwritten by plugs that understand e.g. the `X-Forwarded-For` header or HAProxy's PROXY
protocol. It defaults to peer's IP.
* `req_headers` - the request headers as a list, example: `[{"content-type", "text/plain"}]`.
Note all headers will be downcased.
* `scheme` - the request scheme as an atom, example: `:http`
* `query_string` - the request query string as a binary, example: `"foo=bar"`
## Fetchable fields
The request information in these fields is not populated until it is fetched
using the associated `fetch_` function. For example, the `cookies` field uses
`fetch_cookies/2`.
If you access these fields before fetching them, they will be returned as
`Plug.Conn.Unfetched` structs.
* `cookies`- the request cookies with the response cookies
* `body_params` - the request body params, populated through a `Plug.Parsers` parser.
* `query_params` - the request query params, populated through `fetch_query_params/2`
* `path_params` - the request path params, populated by routers such as `Plug.Router`
* `params` - the request params, the result of merging the `:body_params` and `:query_params`
with `:path_params`
* `req_cookies` - the request cookies (without the response ones)
## Response fields
These fields contain response information:
* `resp_body` - the response body, by default is an empty string. It is set
to nil after the response is set, except for test connections.
* `resp_charset` - the response charset, defaults to "utf-8"
* `resp_cookies` - the response cookies with their name and options
* `resp_headers` - the response headers as a list of tuples, by default `cache-control`
is set to `"max-age=0, private, must-revalidate"`. Note, response headers
are expected to have lower-case keys.
* `status` - the response status
Furthermore, the `before_send` field stores callbacks that are invoked
before the connection is sent. Callbacks are invoked in the reverse order
they are registered (callbacks registered first are invoked last) in order
to reproduce a pipeline ordering.
## Connection fields
* `assigns` - shared user data as a map
* `owner` - the Elixir process that owns the connection
* `halted` - the boolean status on whether the pipeline was halted
* `secret_key_base` - a secret key used to verify and encrypt cookies.
the field must be set manually whenever one of those features are used.
This data must be kept in the connection and never used directly, always
use `Plug.Crypto.KeyGenerator.generate/3` to derive keys from it
* `state` - the connection state
The connection state is used to track the connection lifecycle. It starts
as `:unset` but is changed to `:set` (via `Plug.Conn.resp/3`) or `:file`
(when invoked via `Plug.Conn.send_file/3`). Its final result is
`:sent` or `:chunked` depending on the response model.
## Private fields
These fields are reserved for libraries/framework usage.
* `adapter` - holds the adapter information in a tuple
* `private` - shared library data as a map
## Protocols
`Plug.Conn` implements both the Collectable and Inspect protocols
out of the box. The inspect protocol provides a nice representation
of the connection while the collectable protocol allows developers
to easily chunk data. For example:
# Send the chunked response headers
conn = send_chunked(conn, 200)
# Pipe the given list into a connection
# Each item is emitted as a chunk
Enum.into(~w(each chunk as a word), conn)
## Custom status codes
Plug allows status codes to be overridden or added in order to allow new codes
not directly specified by Plug or its adapters. Adding or overriding a status
code is done through the Mix configuration of the `:plug` application. For
example, to override the existing 404 reason phrase for the 404 status code
("Not Found" by default) and add a new 451 status code, the following config
can be specified:
config :plug, :statuses, %{
404 => "Actually This Was Found",
451 => "Unavailable For Legal Reasons"
}
As this configuration is Plug specific, Plug will need to be recompiled for
the changes to take place: this will not happen automatically as dependencies
are not automatically recompiled when their configuration changes. To recompile
Plug:
mix deps.clean --build plug
The atoms that can be used in place of the status code in many functions are
inflected from the reason phrase of the status code. With the above
configuration, the following will all work:
put_status(conn, :not_found) # 404
put_status(conn, :actually_this_was_found) # 404
put_status(conn, :unavailable_for_legal_reasons) # 451
Even though 404 has been overridden, the `:not_found` atom can still be used
to set the status to 404 as well as the new atom `:actually_this_was_found`
inflected from the reason phrase "Actually This Was Found".
"""
@type adapter :: {module, term}
@type assigns :: %{atom => any}
@type before_send :: [(t -> t)]
@type body :: iodata
@type cookies :: %{binary => binary}
@type halted :: boolean
@type headers :: [{binary, binary}]
@type host :: binary
@type int_status :: non_neg_integer | nil
@type owner :: pid
@type method :: binary
@type param :: binary | %{binary => param} | [param]
@type params :: %{binary => param}
@type peer :: {:inet.ip_address, :inet.port_number}
@type port_number :: :inet.port_number
@type query_string :: String.t
@type resp_cookies :: %{binary => %{}}
@type scheme :: :http | :https
@type secret_key_base :: binary | nil
@type segments :: [binary]
@type state :: :unset | :set | :file | :chunked | :sent
@type status :: atom | int_status
@type t :: %__MODULE__{
adapter: adapter,
assigns: assigns,
before_send: before_send,
body_params: params | Unfetched.t,
cookies: cookies | Unfetched.t,
host: host,
method: method,
owner: owner,
params: params | Unfetched.t,
path_info: segments,
path_params: params,
port: :inet.port_number,
private: assigns,
query_params: params | Unfetched.t,
query_string: query_string,
peer: peer,
remote_ip: :inet.ip_address,
req_cookies: cookies | Unfetched.t,
req_headers: headers,
request_path: binary,
resp_body: body | nil,
resp_cookies: resp_cookies,
resp_headers: headers,
scheme: scheme,
script_name: segments,
secret_key_base: secret_key_base,
state: state,
status: int_status}
defstruct adapter: {Plug.Conn, nil},
assigns: %{},
before_send: [],
body_params: %Unfetched{aspect: :body_params},
cookies: %Unfetched{aspect: :cookies},
halted: false,
host: "www.example.com",
method: "GET",
owner: nil,
params: %Unfetched{aspect: :params},
path_params: %{},
path_info: [],
port: 0,
private: %{},
query_params: %Unfetched{aspect: :query_params},
query_string: "",
peer: nil,
remote_ip: nil,
req_cookies: %Unfetched{aspect: :cookies},
req_headers: [],
request_path: "",
resp_body: nil,
resp_cookies: %{},
resp_headers: [{"cache-control", "max-age=0, private, must-revalidate"}],
scheme: :http,
script_name: [],
secret_key_base: nil,
state: :unset,
status: nil
defmodule NotSentError do
defexception message: "a response was neither set nor sent from the connection"
@moduledoc """
Error raised when no response is sent in a request
"""
end
defmodule AlreadySentError do
defexception message: "the response was already sent"
@moduledoc """
Error raised when trying to modify or send an already sent response
"""
end
defmodule CookieOverflowError do
defexception message: "cookie exceeds maximum size of 4096 bytes"
@moduledoc """
Error raised when the cookie exceeds the maximum size of 4096 bytes.
"""
end
defmodule InvalidHeaderError do
defexception message: "header is invalid"
@moduledoc ~S"""
Error raised when trying to send a header that has errors, for example:
* the header key contains uppercase chars
* the header value contains newlines \n
"""
end
defmodule InvalidQueryError do
@moduledoc """
Raised when the request string is malformed, for example:
* the query has bad utf-8 encoding
* the query fails to www-form decode
"""
defexception message: "query string is invalid", plug_status: 400
end
alias Plug.Conn
@already_sent {:plug_conn, :sent}
@unsent [:unset, :set]
@doc """
Assigns a value to a key in the connection
## Examples
iex> conn.assigns[:hello]
nil
iex> conn = assign(conn, :hello, :world)
iex> conn.assigns[:hello]
:world
"""
@spec assign(t, atom, term) :: t
def assign(%Conn{assigns: assigns} = conn, key, value) when is_atom(key) do
%{conn | assigns: Map.put(assigns, key, value)}
end
@doc """
Starts a task to assign a value to a key in the connection.
`await_assign/2` can be used to wait for the async task to complete and
retrieve the resulting value.
Behind the scenes, it uses `Task.async/1`.
## Examples
iex> conn.assigns[:hello]
nil
iex> conn = async_assign(conn, :hello, fn -> :world end)
iex> conn.assigns[:hello]
%Task{...}
"""
@spec async_assign(t, atom, (() -> term)) :: t
def async_assign(%Conn{} = conn, key, fun) when is_atom(key) and is_function(fun, 0) do
assign(conn, key, Task.async(fun))
end
@doc """
Awaits the completion of an async assign.
Returns a connection with the value resulting from the async assignment placed
under `key` in the `:assigns` field.
Behind the scenes, it uses `Task.await/2`.
## Examples
iex> conn.assigns[:hello]
nil
iex> conn = async_assign(conn, :hello, fn -> :world end)
iex> conn = await_assign(conn, :hello) # blocks until `conn.assigns[:hello]` is available
iex> conn.assigns[:hello]
:world
"""
@spec await_assign(t, atom, timeout) :: t
def await_assign(%Conn{} = conn, key, timeout \\ 5000) when is_atom(key) do
task = Map.fetch!(conn.assigns, key)
assign(conn, key, Task.await(task, timeout))
end
@doc """
Assigns a new **private** key and value in the connection.
This storage is meant to be used by libraries and frameworks to avoid writing
to the user storage (the `:assigns` field). It is recommended for
libraries/frameworks to prefix the keys with the library name.
For example, if some plug needs to store a `:hello` key, it
should do so as `:plug_hello`:
iex> conn.private[:plug_hello]
nil
iex> conn = put_private(conn, :plug_hello, :world)
iex> conn.private[:plug_hello]
:world
"""
@spec put_private(t, atom, term) :: t
def put_private(%Conn{private: private} = conn, key, value) when is_atom(key) do
%{conn | private: Map.put(private, key, value)}
end
@doc """
Stores the given status code in the connection.
The status code can be `nil`, an integer or an atom. The list of allowed
atoms is available in `Plug.Conn.Status`.
Raises a `Plug.Conn.AlreadySentError` if the connection has already been
`:sent`.
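## Examples

A sketch, assuming `conn` is an unsent connection; both atom and
integer status codes are accepted:

    conn = put_status(conn, :not_found)  # sets conn.status to 404
    conn = put_status(conn, 200)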
"""
@spec put_status(t, status) :: t
def put_status(%Conn{state: :sent}, _status),
do: raise AlreadySentError
def put_status(%Conn{} = conn, nil),
do: %{conn | status: nil}
def put_status(%Conn{} = conn, status),
do: %{conn | status: Plug.Conn.Status.code(status)}
@doc """
Sends a response to the client.
It expects the connection state to be `:set`, otherwise raises an
`ArgumentError` for `:unset` connections or a `Plug.Conn.AlreadySentError` for
already `:sent` connections.
At the end sets the connection state to `:sent`.
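## Examples

A minimal sketch of the usual flow, assuming `conn` is an unsent
connection:

    conn
    |> resp(200, "Hello, world!")
    |> send_resp()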
"""
@spec send_resp(t) :: t | no_return
def send_resp(conn)
def send_resp(%Conn{state: :unset}) do
raise ArgumentError, "cannot send a response that was not set"
end
def send_resp(%Conn{adapter: {adapter, payload}, state: :set, owner: owner} = conn) do
conn = run_before_send(conn, :set)
{:ok, body, payload} = adapter.send_resp(payload, conn.status, conn.resp_headers, conn.resp_body)
send owner, @already_sent
%{conn | adapter: {adapter, payload}, resp_body: body, state: :sent}
end
def send_resp(%Conn{}) do
raise AlreadySentError
end
@doc """
Sends a file as the response body with the given `status`
and optionally starting at the given offset until the given length.
If available, the file is sent directly over the socket using
the operating system `sendfile` operation.
It expects a connection that has not been `:sent` yet and sets its
state to `:sent` afterwards. Otherwise raises `Plug.Conn.AlreadySentError`.
## Examples
Plug.Conn.send_file(conn, 200, "README.md")
"""
@spec send_file(t, status, filename :: binary, offset :: integer, length :: integer | :all) :: t | no_return
def send_file(conn, status, file, offset \\ 0, length \\ :all)
def send_file(%Conn{state: state}, status, _file, _offset, _length)
when state not in @unsent do
_ = Plug.Conn.Status.code(status)
raise AlreadySentError
end
def send_file(%Conn{adapter: {adapter, payload}, owner: owner} = conn, status, file, offset, length) when is_binary(file) do
if file =~ "\0" do
raise ArgumentError, "cannot send_file/5 with null byte"
end
conn = run_before_send(%{conn | status: Plug.Conn.Status.code(status), resp_body: nil}, :file)
{:ok, body, payload} = adapter.send_file(payload, conn.status, conn.resp_headers, file, offset, length)
send owner, @already_sent
%{conn | adapter: {adapter, payload}, state: :sent, resp_body: body}
end
@doc """
Sends the response headers as a chunked response.
It expects a connection that has not been `:sent` yet and sets its
state to `:chunked` afterwards. Otherwise raises `Plug.Conn.AlreadySentError`.
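## Examples

A minimal sketch, assuming `conn` has not been sent yet:

    conn = send_chunked(conn, 200)
    {:ok, conn} = chunk(conn, "first part")
    {:ok, conn} = chunk(conn, "second part")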
"""
@spec send_chunked(t, status) :: t | no_return
def send_chunked(%Conn{state: state}, status)
when state not in @unsent do
_ = Plug.Conn.Status.code(status)
raise AlreadySentError
end
def send_chunked(%Conn{adapter: {adapter, payload}, owner: owner} = conn, status) do
conn = run_before_send(%{conn | status: Plug.Conn.Status.code(status), resp_body: nil}, :chunked)
{:ok, body, payload} = adapter.send_chunked(payload, conn.status, conn.resp_headers)
send owner, @already_sent
%{conn | adapter: {adapter, payload}, resp_body: body}
end
@doc """
Sends a chunk as part of a chunked response.
It expects a connection with state `:chunked` as set by
`send_chunked/2`. It returns `{:ok, conn}` in case of success,
otherwise `{:error, reason}`.
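## Examples

For illustration, assuming `conn` was prepared with `send_chunked/2`
(`:closed` is one possible error reason reported by adapters):

    case chunk(conn, "a chunk of data") do
      {:ok, conn} -> conn
      {:error, :closed} -> conn
    end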
"""
@spec chunk(t, body) :: {:ok, t} | {:error, term} | no_return
def chunk(%Conn{state: :chunked} = conn, ""), do: {:ok, conn}
def chunk(%Conn{adapter: {adapter, payload}, state: :chunked} = conn, chunk) do
case adapter.chunk(payload, chunk) do
:ok -> {:ok, conn}
{:ok, body, payload} -> {:ok, %{conn | resp_body: body, adapter: {adapter, payload}}}
{:error, _} = error -> error
end
end
def chunk(%Conn{}, chunk) when is_binary(chunk) or is_list(chunk) do
raise ArgumentError, "chunk/2 expects a chunked response. Please ensure " <>
"you have called send_chunked/2 before you send a chunk"
end
@doc """
Sends a response with the given status and body.
See `send_resp/1` for more information.
"""
@spec send_resp(t, status, body) :: t | no_return
def send_resp(%Conn{} = conn, status, body) do
conn |> resp(status, body) |> send_resp()
end
@doc """
Sets the response to the given `status` and `body`.
It sets the connection state to `:set` (if not already `:set`)
and raises `Plug.Conn.AlreadySentError` if it was already `:sent`.
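## Examples

For illustration, assuming `conn` is an unsent connection; the status
may be an atom or an integer:

    conn = resp(conn, :ok, "hello world")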
"""
@spec resp(t, status, body) :: t
def resp(%Conn{state: state}, status, _body)
when state not in @unsent do
_ = Plug.Conn.Status.code(status)
raise AlreadySentError
end
def resp(%Conn{}, _status, nil) do
raise ArgumentError, "response body cannot be set to nil"
end
def resp(%Conn{} = conn, status, body)
when is_binary(body) or is_list(body) do
%{conn | status: Plug.Conn.Status.code(status), resp_body: body, state: :set}
end
@doc """
Returns the values of the request header specified by `key`.
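## Examples

For illustration, assuming the request carries an
`accept: application/json` header:

    get_req_header(conn, "accept")
    #=> ["application/json"]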
"""
@spec get_req_header(t, binary) :: [binary]
def get_req_header(%Conn{req_headers: headers}, key) when is_binary(key) do
for {k, v} <- headers, k == key, do: v
end
@doc """
Adds a new request header (`key`) if not present, otherwise replaces the
previous value of that header with `value`.
It is recommended for header keys to be lowercase, to avoid sending
duplicate keys in a request. As a convenience, this is validated during
testing, which raises a `Plug.Conn.InvalidHeaderError` if the header key
is not lowercase.
Raises a `Plug.Conn.AlreadySentError` if the connection has already been
`:sent`.
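## Examples

A sketch, assuming `conn` is an unsent connection:

    conn = put_req_header(conn, "accept", "application/json")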
"""
@spec put_req_header(t, binary, binary) :: t
def put_req_header(%Conn{state: :sent}, _key, _value) do
raise AlreadySentError
end
def put_req_header(%Conn{adapter: adapter, req_headers: headers} = conn, key, value) when
is_binary(key) and is_binary(value) do
validate_header_key!(adapter, key)
%{conn | req_headers: List.keystore(headers, key, 0, {key, value})}
end
@doc """
Deletes a request header if present.
Raises a `Plug.Conn.AlreadySentError` if the connection has already been
`:sent`.
"""
@spec delete_req_header(t, binary) :: t
def delete_req_header(%Conn{state: :sent}, _key) do
raise AlreadySentError
end
def delete_req_header(%Conn{req_headers: headers} = conn, key) when
is_binary(key) do
%{conn | req_headers: List.keydelete(headers, key, 0)}
end
@doc """
Updates a request header if present, otherwise it sets it to an initial
value.
Raises a `Plug.Conn.AlreadySentError` if the connection has already been
`:sent`.
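## Examples

For illustration: when the header is absent the initial value is set,
otherwise the function receives the current value:

    conn = update_req_header(conn, "accept", "application/json", fn current ->
      current <> ", application/xml"
    end)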
"""
@spec update_req_header(t, binary, binary, (binary -> binary)) :: t
def update_req_header(%Conn{state: :sent}, _key, _initial, _fun) do
raise AlreadySentError
end
def update_req_header(%Conn{} = conn, key, initial, fun) when
is_binary(key) and is_binary(initial) and is_function(fun, 1) do
case get_req_header(conn, key) do
[] -> put_req_header(conn, key, initial)
[current|_] -> put_req_header(conn, key, fun.(current))
end
end
@doc """
Returns the values of the response header specified by `key`.
## Examples
iex> conn = %{conn | resp_headers: [{"content-type", "text/plain"}]}
iex> get_resp_header(conn, "content-type")
["text/plain"]
"""
@spec get_resp_header(t, binary) :: [binary]
def get_resp_header(%Conn{resp_headers: headers}, key) when is_binary(key) do
for {k, v} <- headers, k == key, do: v
end
@doc ~S"""
Adds a new response header (`key`) if not present, otherwise replaces the
previous value of that header with `value`.
It is recommended for header keys to be lowercase, to avoid sending
duplicate keys in a response. As a convenience, this is validated during
testing, which raises a `Plug.Conn.InvalidHeaderError` if the header key
is not lowercase.
Raises a `Plug.Conn.AlreadySentError` if the connection has already been
`:sent`.
Raises a `Plug.Conn.InvalidHeaderError` if the header value contains
carriage return (\r) or newline (\n) characters.
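## Examples

A sketch, assuming `conn` is an unsent connection:

    conn = put_resp_header(conn, "content-type", "application/json; charset=utf-8")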
"""
@spec put_resp_header(t, binary, binary) :: t
def put_resp_header(%Conn{state: :sent}, _key, _value) do
raise AlreadySentError
end
def put_resp_header(%Conn{adapter: adapter, resp_headers: headers} = conn, key, value) when
is_binary(key) and is_binary(value) do
validate_header_key!(adapter, key)
validate_header_value!(value)
%{conn | resp_headers: List.keystore(headers, key, 0, {key, value})}
end
@doc """
Merges a series of response headers into the connection.
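## Examples

For illustration; an entry in `headers` replaces an existing response
header with the same key (the header names and values below are
placeholders):

    conn = merge_resp_headers(conn, [
      {"cache-control", "no-cache"},
      {"x-custom-header", "value"}
    ])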
"""
@spec merge_resp_headers(t, Enum.t) :: t
def merge_resp_headers(%Conn{state: :sent}, _headers) do
raise AlreadySentError
end
def merge_resp_headers(conn, headers) when headers == %{} do
conn
end
def merge_resp_headers(%Conn{resp_headers: current} = conn, headers) do
headers =
Enum.reduce headers, current, fn
{key, value}, acc when is_binary(key) and is_binary(value) ->
List.keystore(acc, key, 0, {key, value})
end
%{conn | resp_headers: headers}
end
@doc """
Deletes a response header if present.
Raises a `Plug.Conn.AlreadySentError` if the connection has already been
`:sent`.
"""
@spec delete_resp_header(t, binary) :: t
def delete_resp_header(%Conn{state: :sent}, _key) do
raise AlreadySentError
end
def delete_resp_header(%Conn{resp_headers: headers} = conn, key) when
is_binary(key) do
%{conn | resp_headers: List.keydelete(headers, key, 0)}
end
@doc """
Updates a response header if present, otherwise it sets it to an initial
value.
Raises a `Plug.Conn.AlreadySentError` if the connection has already been
`:sent`.
"""
@spec update_resp_header(t, binary, binary, (binary -> binary)) :: t
def update_resp_header(%Conn{state: :sent}, _key, _initial, _fun) do
raise AlreadySentError
end
def update_resp_header(%Conn{} = conn, key, initial, fun) when
is_binary(key) and is_binary(initial) and is_function(fun, 1) do
case get_resp_header(conn, key) do
[] -> put_resp_header(conn, key, initial)
[current|_] -> put_resp_header(conn, key, fun.(current))
end
end
@doc """
Sets the value of the `"content-type"` response header taking into account the
`charset`.
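## Examples

For illustration; the charset defaults to `"utf-8"`:

    conn = put_resp_content_type(conn, "application/json")
    # the "content-type" header is now "application/json; charset=utf-8"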
"""
@spec put_resp_content_type(t, binary, binary | nil) :: t
def put_resp_content_type(conn, content_type, charset \\ "utf-8")
def put_resp_content_type(conn, content_type, nil) when is_binary(content_type) do
put_resp_header(conn, "content-type", content_type)
end
def put_resp_content_type(conn, content_type, charset) when
is_binary(content_type) and is_binary(charset) do
put_resp_header(conn, "content-type", "#{content_type}; charset=#{charset}")
end
@doc """
Fetches query parameters from the query string.
This function does not fetch parameters from the body. To fetch
parameters from the body, use the `Plug.Parsers` plug.
## Options
* `:length` - the maximum query string length. Defaults to 1_000_000 bytes.
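## Examples

A sketch, assuming the request was made with a `?page=2` query string:

    conn = fetch_query_params(conn)
    conn.query_params["page"]
    #=> "2"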
"""
@spec fetch_query_params(t, Keyword.t) :: t
def fetch_query_params(conn, opts \\ [])
def fetch_query_params(%Conn{query_params: %Unfetched{}, params: params,
query_string: query_string} = conn, opts) do
Plug.Conn.Utils.validate_utf8!(query_string, InvalidQueryError, "query string")
length = Keyword.get(opts, :length, 1_000_000)
if byte_size(query_string) > length do
raise InvalidQueryError,
"maximum query string length is #{length}, got a query with #{byte_size(query_string)} bytes"
end
query_params = Plug.Conn.Query.decode(query_string)
case params do
%Unfetched{} -> %{conn | query_params: query_params, params: query_params}
%{} -> %{conn | query_params: query_params, params: Map.merge(query_params, params)}
end
end
def fetch_query_params(%Conn{} = conn, _opts) do
conn
end
@doc """
Reads the request body.
This function reads a chunk of the request body up to a given `:length`. If
there is more data to be read, then `{:more, partial_body, conn}` is
returned. Otherwise `{:ok, body, conn}` is returned. In case of an error
reading the socket, `{:error, reason}` is returned as per `:gen_tcp.recv/2`.
To support slower clients, for instance, you can tune the
`:read_length` and `:read_timeout` options. These control how many bytes
are read from the underlying socket at a time and how long each read may take.
Because the request body can be of any size, reading the body will only
work once, as Plug will not cache the result of these operations. If you
need to access the body multiple times, it is your responsibility to store
it. Finally keep in mind some plugs like `Plug.Parsers` may read the body,
so the body may be unavailable after being accessed by such plugs.
This function is able to handle both chunked and identity transfer-encoding
by default.
## Options
* `:length` - sets the maximum number of bytes to read from the body for each
chunk, defaults to 8_000_000 bytes
* `:read_length` - sets the number of bytes to read at one time from the
underlying socket to fill the chunk, defaults to 1_000_000 bytes
* `:read_timeout` - sets the timeout for each socket read, defaults to
15_000 ms
The values above are not meant to be exact. For example, setting the
length to 8_000_000 may end up reading a few hundred bytes more from
the socket before we halt.
## Examples
{:ok, body, conn} = Plug.Conn.read_body(conn, length: 1_000_000)
"""
@spec read_body(t, Keyword.t) :: {:ok, binary, t} |
{:more, binary, t} |
{:error, term}
def read_body(%Conn{adapter: {adapter, state}} = conn, opts \\ []) do
case adapter.read_req_body(state, opts) do
{:ok, data, state} ->
{:ok, data, %{conn | adapter: {adapter, state}}}
{:more, data, state} ->
{:more, data, %{conn | adapter: {adapter, state}}}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Fetches cookies from the request headers.
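## Examples

A sketch; the `"session_id"` cookie name is illustrative:

    conn = fetch_cookies(conn)
    conn.cookies["session_id"]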
"""
@spec fetch_cookies(t, Keyword.t) :: t
def fetch_cookies(conn, opts \\ [])
def fetch_cookies(%Conn{req_cookies: %Unfetched{},
resp_cookies: resp_cookies,
req_headers: req_headers} = conn, _opts) do
req_cookies =
for {"cookie", cookie} <- req_headers,
kv <- Plug.Conn.Cookies.decode(cookie),
into: %{},
do: kv
cookies = Enum.reduce(resp_cookies, req_cookies, fn
{key, opts}, acc ->
if value = Map.get(opts, :value) do
Map.put(acc, key, value)
else
Map.delete(acc, key)
end
end)
%{conn | req_cookies: req_cookies, cookies: cookies}
end
def fetch_cookies(%Conn{} = conn, _opts) do
conn
end
@doc """
Puts a response cookie.
The cookie value is not automatically escaped. Therefore, if you
want to store values containing commas, quotes, and the like, you need
to explicitly escape them or use a function such as `Base.encode64`
when writing and `Base.decode64` when reading the cookie.
## Options
* `:domain` - the domain the cookie applies to
* `:max_age` - the cookie max-age, in seconds. Providing a value for this
option will set both the _max-age_ and _expires_ cookie attributes
* `:path` - the path the cookie applies to
* `:http_only` - when false, the cookie is accessible beyond HTTP, for example to client-side scripts
* `:secure` - if the cookie must be sent only over https. Defaults
to true when the connection is https
* `:extra` - string to append to cookie. Use this to take advantage of
non-standard cookie attributes.
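## Examples

A sketch, assuming `conn` is an unsent connection; the cookie name
and value are illustrative:

    conn = put_resp_cookie(conn, "session_id", "abc123", max_age: 3600)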
"""
@spec put_resp_cookie(t, binary, binary, Keyword.t) :: t
def put_resp_cookie(%Conn{resp_cookies: resp_cookies, scheme: scheme} = conn, key, value, opts \\ []) when
is_binary(key) and is_binary(value) and is_list(opts) do
cookie = [{:value, value}|opts] |> :maps.from_list() |> maybe_secure_cookie(scheme)
resp_cookies = Map.put(resp_cookies, key, cookie)
update_cookies(%{conn | resp_cookies: resp_cookies}, &Map.put(&1, key, value))
end
defp maybe_secure_cookie(cookie, :https), do: Map.put_new(cookie, :secure, true)
defp maybe_secure_cookie(cookie, _), do: cookie
@epoch {{1970, 1, 1}, {0, 0, 0}}
@doc """
Deletes a response cookie.
Deleting a cookie requires the same options as to when the cookie was put.
Check `put_resp_cookie/4` for more information.
"""
@spec delete_resp_cookie(t, binary, Keyword.t) :: t
def delete_resp_cookie(%Conn{resp_cookies: resp_cookies} = conn, key, opts \\ []) when
is_binary(key) and is_list(opts) do
opts = [universal_time: @epoch, max_age: 0] ++ opts
resp_cookies = Map.put(resp_cookies, key, :maps.from_list(opts))
update_cookies(%{conn | resp_cookies: resp_cookies}, &Map.delete(&1, key))
end
@doc """
Fetches the session from the session store. This will also fetch cookies.
"""
@spec fetch_session(t, Keyword.t) :: t
def fetch_session(conn, opts \\ [])
def fetch_session(%Conn{private: private} = conn, _opts) do
case Map.fetch(private, :plug_session_fetch) do
{:ok, :done} -> conn
{:ok, fun} -> conn |> fetch_cookies |> fun.()
:error -> raise ArgumentError, "cannot fetch session without a configured session plug"
end
end
@doc """
Puts the specified `value` in the session for the given `key`.
The key can be a string or an atom, where atoms are
automatically converted to strings. It can only be invoked on
unsent connections; it will raise otherwise.
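## Examples

A sketch, assuming the session has already been fetched with
`fetch_session/2`:

    conn = put_session(conn, :user_id, 42)
    get_session(conn, "user_id")
    #=> 42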
"""
@spec put_session(t, String.t | atom, any) :: t
def put_session(%Conn{state: state}, _key, _value) when state not in @unsent,
do: raise AlreadySentError
def put_session(conn, key, value) do
put_session(conn, &Map.put(&1, session_key(key), value))
end
@doc """
Returns session value for the given `key`.
The key can be a string or an atom, where atoms are
automatically converted to strings.
"""
@spec get_session(t, String.t | atom) :: any
def get_session(conn, key) do
conn |> get_session |> Map.get(session_key(key))
end
@doc """
Deletes the session for the given `key`.
The key can be a string or an atom, where atoms are
automatically converted to strings.
"""
@spec delete_session(t, String.t | atom) :: t
def delete_session(%Conn{state: state}, _key) when state not in @unsent,
do: raise AlreadySentError
def delete_session(conn, key) do
put_session(conn, &Map.delete(&1, session_key(key)))
end
@doc """
Clears the entire session.
This function removes every key from the session, clearing the session.
Note that, even if `clear_session/1` is used, the session is still sent to the
client. If the session should be effectively *dropped*, `configure_session/2`
should be used with the `:drop` option set to `true`.
"""
@spec clear_session(t) :: t
def clear_session(conn) do
put_session(conn, fn(_existing) -> Map.new end)
end
@doc """
Configures the session.
## Options
* `:renew` - generates a new session id for the cookie
* `:drop` - drops the session, a session cookie will not be included in the
response
* `:ignore` - ignores all changes made to the session in this request cycle
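## Examples

For illustration, renewing the session id after authentication to
mitigate session fixation:

    conn = configure_session(conn, renew: true)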
"""
@spec configure_session(t, Keyword.t) :: t
def configure_session(%Conn{state: state}, _opts) when state not in @unsent,
do: raise AlreadySentError
def configure_session(conn, opts) do
# Ensure the session is available.
_ = get_session(conn)
cond do
opts[:renew] -> put_private(conn, :plug_session_info, :renew)
opts[:drop] -> put_private(conn, :plug_session_info, :drop)
opts[:ignore] -> put_private(conn, :plug_session_info, :ignore)
true -> conn
end
end
@doc """
Registers a callback to be invoked before the response is sent.
Callbacks are invoked in the reverse order they are registered (callbacks
registered first are invoked last).
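## Examples

For illustration; the header name and value are placeholders:

    conn = register_before_send(conn, fn conn ->
      put_resp_header(conn, "x-app-version", "v1")
    end)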
"""
@spec register_before_send(t, (t -> t)) :: t
def register_before_send(%Conn{state: state}, _callback)
when state not in @unsent do
raise AlreadySentError
end
def register_before_send(%Conn{before_send: before_send} = conn, callback)
when is_function(callback, 1) do
%{conn | before_send: [callback|before_send]}
end
@doc """
Halts the Plug pipeline by preventing further plugs downstream from being
invoked. See the docs for `Plug.Builder` for more information on halting a
plug pipeline.
"""
@spec halt(t) :: t
def halt(%Conn{} = conn) do
%{conn | halted: true}
end
## Helpers
defp run_before_send(%Conn{before_send: before_send} = conn, new) do
conn = Enum.reduce before_send, %{conn | state: new}, &(&1.(&2))
if conn.state != new do
raise ArgumentError, "cannot send/change response from run_before_send callback"
end
%{conn | resp_headers: merge_headers(conn.resp_headers, conn.resp_cookies)}
end
defp merge_headers(headers, cookies) do
Enum.reduce(cookies, headers, fn {key, opts}, acc ->
value =
key
|> Plug.Conn.Cookies.encode(opts)
|> verify_cookie!(key)
[{"set-cookie", value}|acc]
end)
end
defp verify_cookie!(cookie, key) when byte_size(cookie) > 4096 do
raise Plug.Conn.CookieOverflowError,
"cookie named #{inspect key} exceeds maximum size of 4096 bytes"
end
defp verify_cookie!(cookie, _key) do
validate_header_value!(cookie)
cookie
end
defp update_cookies(%Conn{state: :sent}, _fun),
do: raise AlreadySentError
defp update_cookies(%Conn{cookies: %Unfetched{}} = conn, _fun),
do: conn
defp update_cookies(%Conn{cookies: cookies} = conn, fun),
do: %{conn | cookies: fun.(cookies)}
defp session_key(binary) when is_binary(binary), do: binary
defp session_key(atom) when is_atom(atom), do: Atom.to_string(atom)
defp get_session(%Conn{private: private}) do
if session = Map.get(private, :plug_session) do
session
else
raise ArgumentError, "session not fetched, call fetch_session/2"
end
end
defp put_session(conn, fun) do
private = conn.private
|> Map.put(:plug_session, fun.(get_session(conn)))
|> Map.put_new(:plug_session_info, :write)
%{conn | private: private}
end
defp validate_header_key!({Plug.Adapters.Test.Conn, _}, key) do
unless valid_header_key?(key) do
raise InvalidHeaderError, "header key is not lowercase: " <> inspect(key)
end
end
defp validate_header_key!(_adapter, _key) do
:ok
end
# Any string containing an uppercase character is not valid.
defp valid_header_key?(<<h, _::binary>>) when h in ?A..?Z, do: false
defp valid_header_key?(<<_, t::binary>>), do: valid_header_key?(t)
defp valid_header_key?(<<>>), do: true
defp valid_header_key?(_), do: false
defp validate_header_value!(value) do
case :binary.match(value, ["\n", "\r"]) do
{_, _} -> raise InvalidHeaderError, "header value contains carriage return (\\r) or newline (\\n): " <> inspect(value)
:nomatch -> :ok
end
end
end
defimpl Inspect, for: Plug.Conn do
def inspect(conn, opts) do
conn =
if opts.limit == :infinity do
conn
else
update_in conn.adapter, fn {adapter, _data} -> {adapter, :...} end
end
Inspect.Any.inspect(conn, opts)
end
end
defimpl Collectable, for: Plug.Conn do
def into(conn) do
{conn, fn
conn, {:cont, x} ->
{:ok, conn} = Plug.Conn.chunk(conn, x)
conn
conn, _ ->
conn
end}
end
end
defmodule ShopifyAPI.Plugs.CustomerAuthenticator do
@moduledoc """
The `ShopifyAPI.Plugs.CustomerAuthenticator` plug authenticates customer calls made from a Shopify shop using a signed payload.
## Liquid Template
You can create the payload and signature that this plug will consume with the following `liquid` template:
```liquid
{% assign auth_expiry = "now" | date: "%s" | plus: 3600 | date: "%Y-%m-%dT%H:%M:%S.%L%z" %}
{% capture json_string %}
{"id":"{{ customer.id }}","expiry":"{{ auth_expiry }}"}
{% endcapture %}
{% assign AUTH_PAYLOAD = json_string | strip %}
{% assign AUTH_SIGNATURE = AUTH_PAYLOAD | hmac_sha256: settings.secret %}
```
The payload itself can be modified to include additional fields, so long as it is valid JSON and contains the `expiry` field.
The original intent was for this to generate a JWT, but Liquid does not include base64 encoding.
The combination of the payload and signature should be considered an access token. If it is compromised, an attacker will be able to make requests with the token until the token expires.
### Including Auth in calls
Include the payload and signature in API calls by adding them to the request data.
```liquid
data: {
auth_payload: {{ AUTH_PAYLOAD | json }},
auth_signature: {{ AUTH_SIGNATURE | json }}
}
```
You can also include the payload and signature in a form.
```liquid
<input
type="hidden"
name="auth_payload"
value="{{ AUTH_PAYLOAD }}"
>
<input
type="hidden"
name="auth_signature"
value="{{ AUTH_SIGNATURE }}"
>
```
## Configuring Secrets
Include a shared secret in your Elixir config and in your Shopify settings. You can provide a list to make rotating secrets easier.
If the shared secret is compromised, an attacker would be able to generate their own payload/signature tokens. Be sure to keep the shared secret safe.
```elixir
# config.exs
config :shopify_api, :customer_api_secret_keys, ["new_secret", "old_secret"]
```
## Example Usage
```elixir
pipeline :customer_auth do
plug ShopifyAPI.Plugs.CustomerAuthenticator
end
scope "/api", YourAppWeb do
pipe_through(:customer_auth)
get "/", CustomerAPIController, :index
end
```
"""
@behaviour Plug
import Plug.Conn
alias ShopifyAPI.JSONSerializer
alias ShopifyAPI.Security
@impl true
def init([]), do: []
@impl true
def call(
%{params: %{"auth_payload" => payload, "auth_signature" => signature}} = conn,
_opts
) do
now = DateTime.utc_now()
with :ok <- validate_signature(payload, signature, customer_api_secret_keys()),
{:ok, auth_context} <- parse_payload(payload),
:ok <- validate_expiry(auth_context, now) do
assign(conn, :auth_payload, auth_context)
else
error -> handle_error(conn, error)
end
end
def call(conn, _), do: send_unauthorized_response(conn, "Authorization failed")
defp validate_signature(auth_payload, signature, secrets) do
secrets
|> List.wrap()
|> Enum.any?(fn secret ->
signature == Security.base16_sha256_hmac(auth_payload, secret)
end)
|> case do
true -> :ok
false -> :bad_signature
end
end
defp validate_expiry(%{"expiry" => expiry_string}, now) do
with {:ok, expiry_datetime, _} <- DateTime.from_iso8601(expiry_string),
:lt <- DateTime.compare(now, expiry_datetime) do
:ok
else
{:error, _} -> :invalid_expiry
_ -> :expired
end
end
defp validate_expiry(_auth_context, _now), do: :no_expiry
defp parse_payload(payload), do: JSONSerializer.decode(payload)
defp customer_api_secret_keys,
do: Application.get_env(:shopify_api, :customer_api_secret_keys, [])
defp send_unauthorized_response(conn, message) do
conn
|> resp(401, message)
|> halt()
end
defp handle_error(conn, :no_expiry),
do: send_unauthorized_response(conn, "A valid expiry must be included in auth_payload")
defp handle_error(conn, :invalid_expiry),
do:
send_unauthorized_response(conn, "A valid ISO8601 expiry must be included in auth_payload")
defp handle_error(conn, :expired),
do: send_unauthorized_response(conn, "auth_payload has expired")
defp handle_error(conn, :bad_signature),
do: send_unauthorized_response(conn, "Authorization failed")
defp handle_error(conn, {:error, _}),
do: send_unauthorized_response(conn, "Could not parse auth_payload")
end
defmodule ConfigCat.CachePolicy do
@moduledoc """
Represents the [polling mode](https://configcat.com/docs/sdk-reference/elixir#polling-modes) used by ConfigCat.
The *ConfigCat SDK* supports 3 different polling mechanisms to
acquire the setting values from *ConfigCat*. After the latest
setting values are downloaded, they are stored in the internal
cache and all requests are served from there.
With the following polling modes, you can customize the SDK to
best fit to your application's lifecycle.
## Auto polling (default)
The *ConfigCat SDK* downloads the latest values and stores them
automatically on a regular schedule.
See `auto/1` below for details.
## Lazy loading
When calling any of the public API functions (like `get_value()`),
the *ConfigCat SDK* downloads the latest setting values if they are
not present or have expired. In this case the function will wait
until the settings have been fetched before returning.
See `lazy/1` below for details.
## Manual polling
Manual polling gives you full control over when the setting
values are downloaded. *ConfigCat SDK* will not update them
automatically. Calling `ConfigCat.force_refresh/1` is your
application's responsibility.
See `manual/0` below for details.
"""
alias ConfigCat.{ConfigCache, ConfigFetcher}
alias __MODULE__.{Auto, Lazy, Manual}
@typedoc "Options for auto-polling mode."
@type auto_options :: [
{:on_changed, on_changed_callback()}
| {:poll_interval_seconds, pos_integer()}
]
@typedoc false
@type id :: atom()
@typedoc "Options for lazy-polling mode."
@type lazy_options :: [{:cache_expiry_seconds, non_neg_integer()}]
@typedoc "Callback to call when configuration changes."
@type on_changed_callback :: (() -> :ok)
@typedoc false
@type option ::
{:cache, module()}
| {:cache_key, ConfigCache.key()}
| {:cache_policy, t()}
| {:fetcher, module()}
| {:fetcher_id, ConfigFetcher.id()}
| {:name, id()}
@typedoc false
@type options :: [option]
@typedoc false
@type refresh_result :: :ok | ConfigFetcher.fetch_error()
@typedoc "The polling mode"
@opaque t :: Auto.t() | Lazy.t() | Manual.t()
@doc """
Auto-polling mode.
The *ConfigCat SDK* downloads the latest values and stores them
automatically on a regular schedule.
Use the `poll_interval_seconds` option to change the
polling interval. Defaults to 60 seconds if not specified.
```elixir
ConfigCat.CachePolicy.auto(poll_interval_seconds: 60)
```
If you want your application to be notified whenever a new
configuration is available, provide a 0-arity callback function
using the `:on_changed` option.
The `on_changed` callback is called asynchronously (using `Task.start`).
Any exceptions raised are caught and logged.
```elixir
ConfigCat.CachePolicy.auto(on_changed: callback)
```
"""
@spec auto(auto_options()) :: t()
def auto(options \\ []) do
Auto.new(options)
end
@doc """
Lazy polling mode.
When calling any of the public API functions (like `get_value()`),
the *ConfigCat SDK* downloads the latest setting values if they are
not present or have expired. In this case the function will wait
until the settings have been fetched before returning.
Use the required `cache_expiry_seconds` option to set the cache
lifetime.
```elixir
ConfigCat.CachePolicy.lazy(cache_expiry_seconds: 300)
```
"""
@spec lazy(lazy_options()) :: t()
def lazy(options) do
Lazy.new(options)
end
@doc """
Manual polling mode.
Manual polling gives you full control over when the setting
values are downloaded. *ConfigCat SDK* will not update them
automatically. Calling `ConfigCat.force_refresh/1` is your
application's responsibility.
```elixir
ConfigCat.CachePolicy.manual()
```
"""
@spec manual :: t()
def manual do
Manual.new()
end
@doc false
@spec policy_name(t()) :: module()
def policy_name(%policy{}), do: policy
@spec policy_name(options()) :: module()
def policy_name(options) when is_list(options) do
options
|> Keyword.fetch!(:cache_policy)
|> policy_name()
end
@doc false
@spec child_spec(options()) :: Supervisor.child_spec()
def child_spec(options) do
policy_name(options).child_spec(options)
end
@doc false
@spec start_link(options()) :: GenServer.on_start()
def start_link(options) do
policy_name(options).start_link(options)
end
end
defmodule Pets do
@moduledoc """
A generic datastore using PersistentEts.
Most Pets functions take a `signature` as the first argument, which
identifies a specific Pets datastore.
The signature is simply a map with two fields:
- tablekey - an atom which identifies the underlying ETS table
- filepath - the path in which to store the PersistentEts datafile
You can create a new Pets signature in many ways:
```
x = %Pets{}
y = Pets.test_sig()
z = %{tablekey: :asdf, filepath: "/tmp/myfile.data"}
```
Pets is generally wrapped in a container module for managing specific types
of records. The container module is responsible for establishing the
signature, and defining a struct that is stored in PersistentEts.
"""
defstruct [:tablekey, :filepath]
@doc "Start the datastore."
def start(sig, opts \\ []) do
unless started?(sig) do
tableopts = Enum.uniq([:named_table, :public] ++ opts)
PersistentEts.new(sig.tablekey, sig.filepath, tableopts)
end
end
@doc "Stop the database if it is running, then start."
def restart(sig, opts \\ []) do
if started?(sig), do: stop(sig)
start(sig, opts)
end
@doc "Stop the datastore."
def stop(sig) do
if started?(sig) do
try do
PersistentEts.delete(sig.tablekey)
rescue
_ -> :error
end
end
:ok
end
@doc "Stop the datastore and remove the data-file."
def cleanup(sig) do
stop(sig)
File.rm(sig.filepath)
:ok
end
@doc """
Insert a tuple into the datastore.
The datakey is the first element in the tuple.
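For example (the table key and record contents are illustrative):

    sig = Pets.test_sig()
    Pets.insert(sig, {:alice, %{species: :cat}})
    Pets.lookup(sig, :alice)  # returns the list of matching tuples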
"""
def insert(sig, tuple) do
start(sig)
case :ets.insert(sig.tablekey, tuple) do
true -> tuple
_ -> :error
end
end
@doc """
Lookup a datakey in the datastore.
The datakey is the first element in the tuple.
"""
def lookup(sig, datakey) do
start(sig)
result = :ets.lookup(sig.tablekey, datakey)
case result do
[] -> nil
_ -> result
end
end
@doc """
Delete an element from the datastore.
This function deletes the key and its associated record.
"""
def delete(sig, datakey) do
start(sig)
:ets.delete(sig.tablekey, datakey)
end
@doc "Return all records in the table."
def all(sig) do
start(sig)
:ets.tab2list(sig.tablekey)
end
@doc "Check for existence of key in data-store."
def has_key?(sig, datakey) do
start(sig)
:ets.lookup(sig.tablekey, datakey) != []
end
@doc "Return true if a table has been started."
def started?(sig) do
:ets.whereis(sig.tablekey) != :undefined
end
@doc "Generate a test context."
def test_sig do test_sig(prefix: "pets") end
def test_sig([prefix: pref]) do
with num <- Enum.random(10000..99999),
do: %{
tablekey: String.to_atom("#{pref}_test_#{num}"),
filepath: "/tmp/#{pref}_test_#{num}.dat"
}
end
end