defmodule CCSP.Chapter4.Start do
alias CCSP.Chapter4.Graph
alias CCSP.Chapter4.WeightedGraph
alias CCSP.Chapter4.WeightedEdge
alias CCSP.Chapter4.MST
alias CCSP.Chapter4.Dijkstra
alias CCSP.Chapter2.GenericSearch
@moduledoc """
Convenience module for setting up and running the chapter's more elaborate sections.
It would be a good idea to migrate these into some form of test.
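
Example (hypothetical IEx session; the exact printed and returned path
depends on `GenericSearch.node_to_path/1`):

    iex> CCSP.Chapter4.Start.find_shortest_path()
    Path from Boston to Miami:
    ["Boston", "Detroit", "Washington", "Miami"]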
"""
def undirected_graph() do
city_graph =
Graph.new([
"Seattle",
"San Francisco",
"Los Angeles",
"Riverside",
"Phoenix",
"Chicago",
"Boston",
"New York",
"Atlanta",
"Miami",
"Dallas",
"Houston",
"Detroit",
"Philadelphia",
"Washington"
])
city_graph
|> Graph.add_edge_by_vertices("Seattle", "Chicago")
|> Graph.add_edge_by_vertices("Seattle", "San Francisco")
|> Graph.add_edge_by_vertices("San Francisco", "Riverside")
|> Graph.add_edge_by_vertices("San Francisco", "Los Angeles")
|> Graph.add_edge_by_vertices("Los Angeles", "Riverside")
|> Graph.add_edge_by_vertices("Los Angeles", "Phoenix")
|> Graph.add_edge_by_vertices("Riverside", "Phoenix")
|> Graph.add_edge_by_vertices("Riverside", "Chicago")
|> Graph.add_edge_by_vertices("Phoenix", "Dallas")
|> Graph.add_edge_by_vertices("Phoenix", "Houston")
|> Graph.add_edge_by_vertices("Dallas", "Chicago")
|> Graph.add_edge_by_vertices("Dallas", "Atlanta")
|> Graph.add_edge_by_vertices("Dallas", "Houston")
|> Graph.add_edge_by_vertices("Houston", "Atlanta")
|> Graph.add_edge_by_vertices("Houston", "Miami")
|> Graph.add_edge_by_vertices("Atlanta", "Chicago")
|> Graph.add_edge_by_vertices("Atlanta", "Washington")
|> Graph.add_edge_by_vertices("Atlanta", "Miami")
|> Graph.add_edge_by_vertices("Miami", "Washington")
|> Graph.add_edge_by_vertices("Chicago", "Detroit")
|> Graph.add_edge_by_vertices("Detroit", "Boston")
|> Graph.add_edge_by_vertices("Detroit", "Washington")
|> Graph.add_edge_by_vertices("Detroit", "New York")
|> Graph.add_edge_by_vertices("Boston", "New York")
|> Graph.add_edge_by_vertices("New York", "Philadelphia")
|> Graph.add_edge_by_vertices("Philadelphia", "Washington")
end
def weighted_graph() do
city_graph =
WeightedGraph.new([
"Seattle",
"San Francisco",
"Los Angeles",
"Riverside",
"Phoenix",
"Chicago",
"Boston",
"New York",
"Atlanta",
"Miami",
"Dallas",
"Houston",
"Detroit",
"Philadelphia",
"Washington"
])
city_graph
|> WeightedGraph.add_edge_by_vertices("Seattle", "Chicago", 1737)
|> WeightedGraph.add_edge_by_vertices("Seattle", "San Francisco", 678)
|> WeightedGraph.add_edge_by_vertices("San Francisco", "Riverside", 386)
|> WeightedGraph.add_edge_by_vertices("San Francisco", "Los Angeles", 348)
|> WeightedGraph.add_edge_by_vertices("Los Angeles", "Riverside", 50)
|> WeightedGraph.add_edge_by_vertices("Los Angeles", "Phoenix", 357)
|> WeightedGraph.add_edge_by_vertices("Riverside", "Phoenix", 307)
|> WeightedGraph.add_edge_by_vertices("Riverside", "Chicago", 1704)
|> WeightedGraph.add_edge_by_vertices("Phoenix", "Dallas", 887)
|> WeightedGraph.add_edge_by_vertices("Phoenix", "Houston", 1015)
|> WeightedGraph.add_edge_by_vertices("Dallas", "Chicago", 805)
|> WeightedGraph.add_edge_by_vertices("Dallas", "Atlanta", 721)
|> WeightedGraph.add_edge_by_vertices("Dallas", "Houston", 225)
|> WeightedGraph.add_edge_by_vertices("Houston", "Atlanta", 702)
|> WeightedGraph.add_edge_by_vertices("Houston", "Miami", 968)
|> WeightedGraph.add_edge_by_vertices("Atlanta", "Chicago", 588)
|> WeightedGraph.add_edge_by_vertices("Atlanta", "Washington", 543)
|> WeightedGraph.add_edge_by_vertices("Atlanta", "Miami", 604)
|> WeightedGraph.add_edge_by_vertices("Miami", "Washington", 923)
|> WeightedGraph.add_edge_by_vertices("Chicago", "Detroit", 238)
|> WeightedGraph.add_edge_by_vertices("Detroit", "Boston", 613)
|> WeightedGraph.add_edge_by_vertices("Detroit", "Washington", 396)
|> WeightedGraph.add_edge_by_vertices("Detroit", "New York", 482)
|> WeightedGraph.add_edge_by_vertices("Boston", "New York", 190)
|> WeightedGraph.add_edge_by_vertices("New York", "Philadelphia", 81)
|> WeightedGraph.add_edge_by_vertices("Philadelphia", "Washington", 123)
end
def find_shortest_path(initial \\ "Boston", final \\ "Miami") do
results =
GenericSearch.breadth_first_search(
undirected_graph(),
initial,
&(&1 == final),
&Graph.neighbors_for_vertex/2
)
if results == nil do
IO.puts("No solution found for bfs")
else
path = GenericSearch.node_to_path(results)
IO.puts("Path from Boston to Miami:")
path
end
end
def find_minimum_spanning_tree() do
wg = weighted_graph()
result = MST.mst(wg)
print_weighted_path(wg, result)
end
def find_weighted_shortest_path() do
wg = weighted_graph()
{distances, path_dict} = Dijkstra.dijkstra(wg, "Los Angeles")
name_distance = Dijkstra.distance_array_to_vertex_dict(wg, distances)
IO.puts("Distances from Los Angeles:")
name_distance
|> Map.to_list()
|> Enum.each(fn {key, value} ->
IO.puts("#{key} : #{value}")
end)
IO.puts("")
IO.puts("Shortest path from Los Angeles to Boston:")
start = WeightedGraph.index_of(wg, "Los Angeles")
goal = WeightedGraph.index_of(wg, "Boston")
path = Dijkstra.path_dict_to_path(start, goal, path_dict)
print_weighted_path(wg, path)
end
defp print_weighted_path(wg, wp) do
Enum.each(wp, fn edge ->
u = WeightedGraph.vertex_at(wg, edge.u)
weight = edge.weight
v = WeightedGraph.vertex_at(wg, edge.v)
IO.puts("#{u} #{weight} -> #{v}")
end)
IO.puts("Total weight: #{total_weight(wp)}")
end
@type weighted_path :: list(WeightedEdge.t())
@spec total_weight(weighted_path) :: non_neg_integer
def total_weight(wp) do
Enum.map(wp, fn edge ->
edge.weight
end)
|> Enum.sum()
end
end
defmodule Day2 do
@moduledoc """
Decode the bathroom lock code.
Given a string of commands, figure out the number to enter
"""
# The lookup table for the keypad in part 1
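# Keypad layout encoded by @keypad1 (moves off the edge map a key back to
# itself):
#
#     1 2 3
#     4 5 6
#     7 8 9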
@keypad1 %{
1 => %{?U => 1, ?L => 1, ?D => 4, ?R => 2},
2 => %{?U => 2, ?L => 1, ?D => 5, ?R => 3},
3 => %{?U => 3, ?L => 2, ?D => 6, ?R => 3},
4 => %{?U => 1, ?L => 4, ?D => 7, ?R => 5},
5 => %{?U => 2, ?L => 4, ?D => 8, ?R => 6},
6 => %{?U => 3, ?L => 5, ?D => 9, ?R => 6},
7 => %{?U => 4, ?L => 7, ?D => 7, ?R => 8},
8 => %{?U => 5, ?L => 7, ?D => 8, ?R => 9},
9 => %{?U => 6, ?L => 8, ?D => 9, ?R => 9}
}
# The lookup table for the keypad in part 2
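# Diamond keypad layout encoded by @keypad2 (moves off the edge map a key
# back to itself):
#
#         1
#       2 3 4
#     5 6 7 8 9
#       A B C
#         D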
@keypad2 %{
1 => %{?U => 1, ?L => 1, ?D => 3, ?R => 1},
2 => %{?U => 2, ?L => 2, ?D => 6, ?R => 3},
3 => %{?U => 1, ?L => 2, ?D => 7, ?R => 4},
4 => %{?U => 4, ?L => 3, ?D => 8, ?R => 4},
5 => %{?U => 5, ?L => 5, ?D => 5, ?R => 6},
6 => %{?U => 2, ?L => 5, ?D => ?A, ?R => 7},
7 => %{?U => 3, ?L => 6, ?D => ?B, ?R => 8},
8 => %{?U => 4, ?L => 7, ?D => ?C, ?R => 9},
9 => %{?U => 9, ?L => 8, ?D => 9, ?R => 9},
?A => %{?U => 6, ?L => ?A, ?D => ?A, ?R => ?B},
?B => %{?U => 7, ?L => ?A, ?D => ?D, ?R => ?C},
?C => %{?U => 8, ?L => ?B, ?D => ?C, ?R => ?C},
?D => %{?U => ?B, ?L => ?D, ?D => ?D, ?R => ?D}
}
@doc """
Decode a file
"""
def decode_file_part1(path) do
path
|> File.read!
|> decode_part_1
end
def decode_file_part2(path) do
path
|> File.read!
|> decode_part_2
end
@doc """
Given a string of up, down, left, right commands, return the code to enter (as a string).
A digit is generated for each line of text. Assume we start at '5' in the middle of the keypad.
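
Example (sample commands from the 2016 puzzle; result computed against
@keypad1):

    decode_part_1("ULL\\nRRDDD\\nLURDL\\nUUUUD")
    #=> "1985"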
"""
def decode_part_1(str), do: decode(str, @keypad1)
def decode_part_2(str), do: decode(str, @keypad2)
defp decode(str, keypad) do
str
|> String.trim_trailing
|> String.split("\n")
|> Enum.reduce(%{idx: 5, result: [], keypad: keypad}, &decode_entry/2)
|> extract_result
|> Enum.reverse
|> Enum.map(&convert_to_string/1)
|> Enum.join
end
defp extract_result(%{result: result}), do: result
defp convert_to_string(x) when x <= 9, do: Integer.to_string(x)
defp convert_to_string(x), do: <<x>>
defp decode_entry(entry, %{idx: idx, result: result, keypad: keypad}) do
next_idx = _decode(idx, keypad, entry)
%{idx: next_idx, result: [next_idx | result], keypad: keypad}
end
defp _decode(idx, _keypad, <<>>), do: idx
defp _decode(idx, keypad, <<dir, rest::binary>>) do
next_step(idx, dir, keypad)
|> _decode(keypad, rest)
end
defp next_step(idx, dir, keypad) do
keypad[idx][dir]
end
end
defmodule AdventOfCode.Day7 do
@spec add_get_node({atom, atom}, :digraph.graph()) :: {:digraph.graph(), :digraph.vertex()}
def add_get_node(node_key, graph) do
case :digraph.vertex(graph, node_key) do
false ->
vertex = :digraph.add_vertex(graph, node_key)
{graph, vertex}
{vertex, _} ->
{graph, vertex}
end
end
@spec add_edges({atom, atom}, {atom, atom}, integer(), :digraph.graph()) :: :digraph.graph()
def add_edges(parent_node, this_node, quantity, graph) do
{graph, parent_node} = add_get_node(parent_node, graph)
{graph, this_node} = add_get_node(this_node, graph)
:digraph.add_edge(graph, parent_node, this_node, quantity)
graph
end
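# Input lines follow the AoC 2020 day 7 rule format (assumed), e.g.
#
#   "light red bags contain 1 bright white bag, 2 muted yellow bags."
#   "faded blue bags contain no other bags."
#
# After String.split/1 and convert_atom_integer/1, the contents list for the
# first rule becomes [1, :bright, :white, :"bag,", 2, :muted, :yellow, :"bags."].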
@spec parse_contents(:digraph.vertex(), :digraph.graph(), [atom | integer]) :: :digraph.graph()
def parse_contents(_, graph, []) do
graph
end
def parse_contents(_, graph, [:no | _]) do
graph
end
def parse_contents(parent_node, graph, [quantity, adjective, color, bags | rest])
when bags in [:"bags,", :"bags.", :"bag,", :"bag."] do
this_node = {adjective, color}
graph = add_edges(parent_node, this_node, quantity, graph)
parse_contents(parent_node, graph, rest)
end
@spec convert_atom_integer(binary) :: atom | integer
def convert_atom_integer(str) do
case Integer.parse(str) do
{value, _} -> value
:error -> String.to_atom(str)
end
end
@spec gold_closure(:digraph.graph()) :: [:digraph.vertex()]
def gold_closure(graph) do
gold_neighbors = :digraph.in_neighbours(graph, {:shiny, :gold})
closure(graph, gold_neighbors, MapSet.new())
end
@spec closure(:digraph.graph(), [:digraph.vertex()], MapSet.t()) :: [:digraph.vertex()]
def closure(_, [], acc) do
Enum.to_list(acc)
end
def closure(graph, [node | nodes], acc) do
node_out = :digraph.in_neighbours(graph, node)
closure(graph, node_out ++ nodes, MapSet.put(acc, node))
end
@spec count_bags_closure(:digraph.graph(), :digraph.vertex()) :: integer
def count_bags_closure(graph, node) do
out_edges = :digraph.out_edges(graph, node)
count_bags(graph, out_edges, 0)
end
@spec count_bags(:digraph.graph(), [:digraph.edge()], integer()) :: integer()
def count_bags(_, [], count) do
count
end
def count_bags(graph, [edge | edges], count) do
{_, _from, to, quantity} = :digraph.edge(graph, edge)
count = count + quantity
to_count = count_bags_closure(graph, to)
count = count + quantity * to_count
count_bags(graph, edges, count)
end
@spec day7 :: {non_neg_integer, integer}
def day7() do
{_, graph} =
"day7_input"
|> AdventOfCode.read_file()
|> Enum.map_reduce(:digraph.new(), fn entry, graph ->
components =
entry
|> String.split()
|> Enum.map(&convert_atom_integer/1)
[adjective, color, :bags, :contain | contents] = components
this_node = {adjective, color}
graph = parse_contents(this_node, graph, contents)
{contents, graph}
end)
part1 =
graph
|> gold_closure()
|> length()
part2 = count_bags_closure(graph, {:shiny, :gold})
{part1, part2}
end
end
defmodule Membrane.RawVideo do
@moduledoc """
This module provides a struct (`t:#{inspect(__MODULE__)}.t/0`) describing raw video frames.
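
Example (illustrative values for a 720p NTSC stream, one frame per buffer):

    %Membrane.RawVideo{
      width: 1280,
      height: 720,
      framerate: {30_000, 1001},
      pixel_format: :I420,
      aligned: true
    }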
"""
require Integer
@typedoc """
Width of single frame in pixels.
"""
@type width_t :: pos_integer()
@typedoc """
Height of single frame in pixels.
"""
@type height_t :: pos_integer()
@typedoc """
Number of frames per second. To avoid using floating point numbers,
it is described by 2 integers: the number of frames per timeframe in seconds.
For example, NTSC's framerate of ~29.97 fps is represented by `{30_000, 1001}`
"""
@type framerate_t :: {frames :: non_neg_integer, seconds :: pos_integer}
@typedoc """
Format used to encode the color of every pixel in each video frame.
"""
@type pixel_format_t ::
:I420 | :I422 | :I444 | :RGB | :BGRA | :RGBA | :NV12 | :NV21 | :YV12 | :AYUV
@typedoc """
Determines whether buffers are aligned, i.e. each buffer contains exactly one frame.
"""
@type aligned_t :: boolean()
@type t :: %__MODULE__{
width: width_t(),
height: height_t(),
framerate: framerate_t(),
pixel_format: pixel_format_t(),
aligned: aligned_t()
}
@enforce_keys [:width, :height, :framerate, :pixel_format, :aligned]
defstruct @enforce_keys
@supported_pixel_formats [:I420, :I422, :I444, :RGB, :BGRA, :RGBA, :NV12, :NV21, :YV12, :AYUV]
@doc """
Simple wrapper over `frame_size/3`. Returns the size of a raw video frame
in bytes for the given caps.
"""
@spec frame_size(t()) :: {:ok, pos_integer()} | {:error, reason}
when reason: :invalid_dimensions | :invalid_pixel_format
def frame_size(%__MODULE__{pixel_format: format, width: width, height: height}) do
frame_size(format, width, height)
end
@doc """
Returns the size of a raw video frame in bytes (without padding).
It may result in error when dimensions don't fulfill requirements for the given format
(e.g. I420 requires both dimensions to be divisible by 2).
"""
@spec frame_size(pixel_format_t(), width_t(), height_t()) ::
{:ok, pos_integer()} | {:error, reason}
when reason: :invalid_dimensions | :invalid_pixel_format
def frame_size(format, width, height)
when format in [:I420, :YV12, :NV12, :NV21] and Integer.is_even(width) and
Integer.is_even(height) do
# Subsampling by 2 in both dimensions
# Y = width * height
# V = U = (width / 2) * (height / 2)
{:ok, div(width * height * 3, 2)}
end
def frame_size(:I422, width, height) when Integer.is_even(width) do
# Subsampling by 2 in horizontal dimension
# Y = width * height
# V = U = (width / 2) * height
{:ok, width * height * 2}
end
def frame_size(format, width, height) when format in [:I444, :RGB] do
# No subsampling
{:ok, width * height * 3}
end
def frame_size(format, width, height) when format in [:AYUV, :RGBA, :BGRA] do
# No subsampling and added alpha channel
{:ok, width * height * 4}
end
def frame_size(format, _width, _height) when format in @supported_pixel_formats do
{:error, :invalid_dimensions}
end
def frame_size(_format, _width, _height) do
{:error, :invalid_pixel_format}
end
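# Sanity checks (values chosen for illustration):
#   frame_size(:I420, 1280, 720) #=> {:ok, 1_382_400}  (1280 * 720 * 3 / 2)
#   frame_size(:I420, 1281, 720) #=> {:error, :invalid_dimensions}  (odd width)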
end
defmodule FinTex.Parser.Tokenizer do
@moduledoc false
alias FinTex.Parser.Lexer
require Record
@type t :: record(:tokenization, tokens: [String.t] | String.t, escape_sequences: %{String.t => String.t})
Record.defrecordp :tokenization,
tokens: nil,
escape_sequences: nil
@spec split(String.t) :: [String.t]
def split(raw) when is_binary(raw) do
raw
|> extract_binaries(Map.new)
|> latin1_to_utf8
|> split_segments
|> replace_escape_sequences
end
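# Illustrative walk-through (FinTS-style message, assumed): a marker such as
# "@3@" announces 3 bytes of raw binary data. extract_binaries/3 swaps each
# such payload for a "--N--" placeholder so that segment splitting cannot
# break inside the binary, and replace_escape_sequences/1 restores the
# payloads afterwards.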
@spec extract_binaries(String.t, map, non_neg_integer) :: t
defp extract_binaries(raw, escape_sequences, ref_counter \\ 0) when is_binary(raw) and is_map(escape_sequences) and
is_integer(ref_counter) do
case ~r/@(\d+)@.*/Us |> Regex.run(raw, capture: :all_but_first) do
[length] when is_binary(length) ->
key = "--#{ref_counter}--"
[_, binary_data, _] = length
|> Lexer.escaped_binary
|> Regex.run(raw, capture: :all_but_first)
raw = length
|> Lexer.escaped_binary
|> Regex.replace(raw, "\\1#{key}\\3", global: false) # replace only first occurrence
escape_sequences = escape_sequences |> Map.put(key, binary_data)
ref_counter = ref_counter + 1
extract_binaries(raw, escape_sequences, ref_counter)
_ ->
tokenization(tokens: raw, escape_sequences: escape_sequences)
end
end
@spec latin1_to_utf8(t) :: t
defp latin1_to_utf8(tokenization = tokenization(tokens: tokens)) do
tokenization(tokenization, tokens: Lexer.latin1_to_utf8(tokens))
end
@spec split_segments(t) :: t
defp split_segments(tokenization = tokenization(tokens: tokens)) do
tokens = tokens
|> Lexer.split_segments
tokenization(tokenization, tokens: tokens)
end
@spec replace_escape_sequences(t) :: [String.t]
defp replace_escape_sequences(tokenization(tokens: tokens, escape_sequences: escape_sequences)) do
escape_sequences
|> Enum.reduce(tokens, fn ({k, v}, t) -> replace_escape_sequences(t, k, v) end)
end
@spec replace_escape_sequences([String.t] | String.t, String.pattern | Regex.t, String.t) :: [String.t] | String.t
defp replace_escape_sequences(tokens, k, v) when is_list(tokens) do
tokens |> Enum.map(&replace_escape_sequences(&1, k, v))
end
defp replace_escape_sequences(token, k, v) when is_binary(token) do
token |> String.replace(k, v, global: false) # replace only first occurrence
end
defp replace_escape_sequences(nil, _k, _v) do
nil
end
end
defmodule Talan.CountingBloomFilter do
@moduledoc """
Counting bloom filter implementation with **concurrent accessibility**,
powered by [:atomics](http://erlang.org/doc/man/atomics.html) module.
## Features
* Fixed size Counting Bloom filter
* Concurrent reads & writes
* Custom & default hash functions
* Estimate number of unique elements
* Estimate false positive probability
Counting bloom filters support probabilistic deletion
of elements but have higher memory consumption because
they need to store a counter of N bits for every bloom filter bit.
"""
alias Talan.BloomFilter, as: BF
alias Talan.CountingBloomFilter, as: CBF
@enforce_keys [:bloom_filter, :counter]
defstruct [:bloom_filter, :counter]
@type t :: %__MODULE__{
bloom_filter: reference,
counter: Abit.Counter.t()
}
@doc """
Returns a new `%Talan.CountingBloomFilter{}` struct.
`cardinality` is the expected number of unique items. Duplicated items
can be added an unlimited number of times.
## Options
* `:counters_bit_size` - bit size of counters, defaults to `8`
* `:signed` - to have signed or unsigned counters, defaults to `true`
* `:false_positive_probability` - a float, defaults to `0.01`
* `:hash_functions` - a list of hash functions, defaults to randomly seeded murmur
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("phone")
:ok
iex> cbf |> Talan.CountingBloomFilter.count("hat")
2
iex> cbf |> Talan.CountingBloomFilter.count("phone")
1
"""
@spec new(pos_integer, list) :: t
def new(cardinality, options \\ []) do
bloom_filter = BF.new(cardinality, options)
counters_bit_size = options |> Keyword.get(:counters_bit_size, 8)
signed = options |> Keyword.get(:signed, true)
counter =
Abit.Counter.new(
bloom_filter.filter_length * counters_bit_size,
counters_bit_size,
signed: signed
)
%CBF{
bloom_filter: bloom_filter,
counter: counter
}
end
@doc """
Puts `term` into `bloom_filter` and increments counters in `counter`.
After this the `member?/2` function will return `true`
for the membership of `term` unless bits representing
membership are modified by the `delete/2` function.
Returns `:ok`.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
:ok
"""
@spec put(t, any) :: :ok
def put(%CBF{bloom_filter: bloom_filter, counter: counter}, term) do
hashes = BF.hash_term(bloom_filter, term)
BF.put_hashes(bloom_filter, hashes)
hashes
|> Enum.each(fn hash ->
Abit.Counter.add(counter, hash, 1)
end)
:ok
end
@doc """
Probabilistically delete `term` from `bloom_filter` and
decrement counters in `counter`.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.count("hat")
1
iex> cbf |> Talan.CountingBloomFilter.delete("hat")
:ok
iex> cbf |> Talan.CountingBloomFilter.count("hat")
0
iex> cbf |> Talan.CountingBloomFilter.delete("this wasn't there")
iex> cbf |> Talan.CountingBloomFilter.count("this wasn't there")
-1
"""
@spec delete(t, any) :: :ok
def delete(%CBF{bloom_filter: bloom_filter, counter: counter}, term) do
hashes = BF.hash_term(bloom_filter, term)
hashes
|> Enum.each(fn hash ->
Abit.Counter.add(counter, hash, -1)
if Abit.Counter.get(counter, hash) <= 0 do
Abit.set_bit_at(bloom_filter.atomics_ref, hash, 0)
end
end)
:ok
end
@doc """
See `Talan.BloomFilter.member?/2` for docs.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.member?("hat")
true
"""
@spec member?(t, any) :: boolean
def member?(%CBF{bloom_filter: bloom_filter}, term) do
BF.member?(bloom_filter, term)
end
@doc """
Returns probabilistic count of term in `counter`.
This means that (given no hash collisions) it returns how many times
the item was put into the CountingBloomFilter. A few hash collisions
should be also fine since it returns the average count of the counters.
An single item is hashed with multiple counters.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.count("hat")
3
"""
@spec count(t, any) :: non_neg_integer
def count(%CBF{bloom_filter: bloom_filter, counter: counter}, term) do
hashes = BF.hash_term(bloom_filter, term)
counters =
hashes
|> Enum.map(fn hash ->
Abit.Counter.get(counter, hash)
end)
round(Enum.sum(counters) / length(counters))
end
@doc """
See `Talan.BloomFilter.cardinality/1` for docs.
## Examples
iex> cbf = Talan.CountingBloomFilter.new(10_000)
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("hat")
iex> cbf |> Talan.CountingBloomFilter.put("car keys")
iex> cbf |> Talan.CountingBloomFilter.cardinality()
2
"""
@spec cardinality(t) :: non_neg_integer
def cardinality(%CBF{bloom_filter: bloom_filter}) do
BF.cardinality(bloom_filter)
end
@doc """
See `Talan.BloomFilter.false_positive_probability/1` for
docs.
"""
@spec false_positive_probability(t) :: float
def false_positive_probability(%CBF{bloom_filter: bloom_filter}) do
BF.false_positive_probability(bloom_filter)
end
end
defmodule Taxes.Calculator do
@moduledoc """
Module with logic to calculate taxes based at Taxes tree
"""
alias Taxes.Types
alias Taxes.Logic
@hundred_percents 100
@doc """
Method to get Net price from `raw_price` by exclude inclusive taxes
"""
@spec set_net_price(Types.payload()) :: Types.payload()
def set_net_price(%{inclusive: taxes, raw_price: price, exponent: exponent} = payload) do
%{percent: percent, fixed: fixed} = get_tax_amounts(taxes, %{percent: 0, fixed: 0}, payload)
price_without_fixed = price - fixed
percents_amount =
price_without_fixed -
price_without_fixed / ((@hundred_percents + percent) / @hundred_percents)
Map.put(
payload,
:net_price,
Float.round(price_without_fixed - percents_amount, exponent)
)
end
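# Worked example (rates assumed): a raw_price of 110.0 with a single 10%
# inclusive percent tax and no fixed taxes yields
# percents_amount = 110.0 - 110.0 / 1.1 = 10.0, so net_price = 100.0.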
def set_net_price(%{raw_price: price} = payload) do
Map.put(payload, :net_price, price)
end
@doc """
Method to calculate taxes based at `payload` and type of taxes
"""
@spec calculate_taxes(Types.payload(), :atom) :: Types.payload()
def calculate_taxes(
%{inclusive: taxes, net_price: price, calculated_taxes: acc} = payload,
:inclusive
) do
Map.put(payload, :calculated_taxes, calculate_taxes(taxes, price, acc, payload))
end
def calculate_taxes(
%{exclusive: taxes, raw_price: price, calculated_taxes: acc} = payload,
:exclusive
) do
Map.put(payload, :calculated_taxes, calculate_taxes(taxes, price, acc, payload))
end
def calculate_taxes(payload, _), do: payload
@doc """
Method to calculate tax amounts
"""
@spec get_tax_amounts(map(), map(), Types.payload()) :: map()
def get_tax_amounts(taxes, acc, payload) do
taxes
|> Enum.reduce(acc, fn {_mode, taxes}, acc ->
taxes
|> Enum.reduce(acc, fn tax, acc ->
%{percent: percent, fixed: fixed} =
get_tax_amounts(Map.get(tax, :taxes, %{}), %{percent: 0, fixed: 0}, payload)
case tax.logic do
:percent ->
%{
percent: acc.percent + tax.rate + percent + percent / 100 * (tax.rate / 100) * 100,
fixed: acc.fixed + fixed + fixed * (tax.rate / 100)
}
"percent" ->
%{
percent:
acc.percent + tax.rate + percent + percent / 100 * (tax.rate / 100) * 100,
fixed: acc.fixed + fixed + fixed * (tax.rate / 100)
}
_ ->
{_, tax_amount} = Logic.calculate_tax(tax, payload)
%{
percent: acc.percent + percent,
fixed: acc.fixed + fixed + tax_amount
}
end
end)
end)
end
@doc """
Method to calculate tax values
"""
@spec calculate_taxes(map(), float(), list(), Types.payload()) :: list()
def calculate_taxes(taxes, price, acc, payload) do
taxes
|> Enum.reduce(acc, fn {_mode, taxes}, acc ->
taxes
|> Enum.reduce(acc, fn tax, acc ->
child_taxes = calculate_taxes(Map.get(tax, :taxes, %{}), price, [], payload)
new_price = Enum.reduce(child_taxes, 0, fn {_, amount}, acc -> acc + amount end) + price
[Logic.calculate_tax(tax, payload |> Map.put(:price, new_price)) | child_taxes ++ acc]
end)
end)
end
@doc """
Method to set total price into payload
"""
@spec set_total_price(Types.payload()) :: Types.payload
def set_total_price(%{net_price: net_price, calculated_taxes: taxes} = payload) do
Map.put(
payload,
:total_price,
net_price + Enum.reduce(taxes, 0, fn {_, amount}, acc -> acc + amount end)
)
end
@doc """
Remove duplicated taxes
"""
@spec remove_duplicates(Types.payload()) :: Types.payload
def remove_duplicates(%{calculated_taxes: taxes} = payload) do
Map.put(
payload,
:calculated_taxes,
Enum.uniq(taxes)
)
end
end
defmodule BrDocs.CNPJ.Formatter do
@moduledoc ~S"""
CNPJ Formatter.
"""
alias BrDocs.Doc
@raw_size 14
@regex_replacement "\\1.\\2.\\3/\\4-\\5"
@doc_regex ~r/(\d{2})?(\d{3})?(\d{3})?(\d{4})?(\d{2})/
@doc """
Formats a `BrDocs.Doc` CNPJ value into CNPJ format. Returns a formatted `BrDocs.Doc`.
CNPJ value length should be 14 characters. Otherwise, returns a `BrDocs.Doc` with the raw and unformatted value.
This function accepts either a string containing the CNPJ value or a `BrDocs.Doc`.
## Examples
iex> BrDocs.CNPJ.Formatter.format("")
%BrDocs.Doc{kind: :cnpj, value: ""}
iex> BrDocs.CNPJ.Formatter.format(nil)
%BrDocs.Doc{kind: :cnpj, value: nil}
iex> BrDocs.CNPJ.Formatter.format("123")
%BrDocs.Doc{kind: :cnpj, value: "123"}
iex> BrDocs.CNPJ.Formatter.format("11444777000161")
%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-61"}
iex> BrDocs.CNPJ.Formatter.format("11.444.777/0001-61")
%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-61"}
iex> BrDocs.CNPJ.Formatter.format(%BrDocs.Doc{kind: :cnpj, value: ""})
%BrDocs.Doc{kind: :cnpj, value: ""}
iex> BrDocs.CNPJ.Formatter.format(%BrDocs.Doc{kind: :cnpj, value: nil})
%BrDocs.Doc{kind: :cnpj, value: ""}
iex> BrDocs.CNPJ.Formatter.format(%BrDocs.Doc{kind: :cnpj, value: "123"})
%BrDocs.Doc{kind: :cnpj, value: "123"}
iex> BrDocs.CNPJ.Formatter.format(%BrDocs.Doc{kind: :cnpj, value: "11444777000161"})
%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-61"}
iex> BrDocs.CNPJ.Formatter.format(%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-61"})
%BrDocs.Doc{kind: :cnpj, value: "11.444.777/0001-61"}
"""
@spec format(BrDocs.Doc.t()) :: BrDocs.Doc.t()
def format(%Doc{kind: :cnpj, value: ""}), do: make_cnpj("")
def format(%Doc{kind: :cnpj, value: nil}), do: make_cnpj("")
def format(%Doc{kind: :cnpj, value: value}) do
raw_value = value |> to_string() |> String.replace(~r/\D/, "", global: true)
doc =
if String.length(raw_value) == @raw_size,
do: format_value(raw_value),
else: to_string(value)
make_cnpj(doc)
end
@spec format(String.t()) :: BrDocs.Doc.t()
def format(value) do
value
|> make_cnpj()
|> format()
end
defp format_value(value) do
Regex.replace(@doc_regex, value, @regex_replacement)
end
defp make_cnpj(value) do
%Doc{kind: :cnpj, value: value}
end
end
defmodule Mix.Tasks.Dialyzer do
@shortdoc "Runs dialyzer with default or project-defined flags."
@moduledoc """
This task compiles the mix project, creates a PLT with dependencies if needed and runs `dialyzer`. Much of its behavior can be managed in configuration as described below.
If executed outside of a mix project, it will build the core PLT files and exit.
## Command line options
* `--no-compile` - do not compile even if needed.
* `--no-check` - do not perform (quick) check to see if PLT needs update.
* `--force-check` - force PLT check also if lock file is unchanged.
useful when dealing with local deps.
* `--ignore-exit-status` - display warnings but do not halt the VM or return an exit status code
* `--list-unused-filters` - list unused ignore filters
useful for CI. do not use with `mix do`.
* `--plt` - only build the required plt(s) and exit.
* `--format short` - format the warnings in a compact format.
* `--format raw` - format the warnings in format returned before Dialyzer formatting
* `--format dialyxir` - format the warnings in a pretty printed format
* `--format dialyzer` - format the warnings in the original Dialyzer format
* `--quiet` - suppress all informational messages
Warning flags passed to this task are passed on to `:dialyzer`.
e.g.
`mix dialyzer --unmatched_returns`
## Configuration
All configuration is included under a dialyzer key in the mix project keyword list.
### Flags
You can specify any `dialyzer` command line argument with the :flags keyword.
Dialyzer supports a number of warning flags used to enable or disable certain kinds of analysis features. Until version 0.4, `dialyxir` used by default the additional warning flags shown in the example below. However some of these create warnings that are often more confusing than helpful, particularly to new users of Dialyzer. As of 0.4, there are no longer any flags used by default. To get the old behavior, specify them in your Mix project file. For compatibility reasons you can use either the `-Wwarning` convention of the dialyzer CLI, or (preferred) the `WarnOpts` atoms supported by the [API](http://erlang.org/doc/man/dialyzer.html#gui-1). e.g.
```elixir
def project do
[
app: :my_app,
version: "0.0.1",
deps: deps,
dialyzer: [flags: ["-Wunmatched_returns", :error_handling, :underspecs]]
]
end
```
### PLT Configuration
The task will build a PLT with default core Erlang applications: `:erts :kernel :stdlib :crypto` and re-use this core file in multiple projects - another core file is created for Elixir.
OTP application dependencies are (transitively) added to your project's PLT by default. The applications added are the same as you would see displayed with the command `mix app.tree`. There is also a `:plt_add_deps` option you can set to control the dependencies added. The following options are supported:
* :apps_direct - Only Direct OTP runtime application dependencies - not the entire tree
* :app_tree - Transitive OTP runtime application dependencies e.g. `mix app.tree` (default)
```
def project do
[
app: :my_app,
version: "0.0.1",
deps: deps,
dialyzer: [plt_add_deps: :apps_direct, plt_add_apps: [:wx]]
]
end
```
You can also configure applications to include in the PLT more directly:
* `dialyzer: :plt_add_apps` - applications to include
*in addition* to the core applications and project dependencies.
* `dialyzer: :plt_ignore_apps` - applications to ignore from the list of core
applications and dependencies.
* `dialyzer: :plt_apps` - a list of applications to include that will replace the default,
include all the apps you need, e.g.
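```elixir
def project do
  [
    app: :my_app,
    version: "0.0.1",
    deps: deps,
    # illustrative list - replaces the default apps entirely
    dialyzer: [plt_apps: [:erts, :kernel, :stdlib, :mnesia]]
  ]
end
```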
### Other Configuration
* `dialyzer: :plt_file` - Deprecated - specify the plt file name to create and use - default is to create one in the project's current build environment (e.g. _build/dev/) specific to the Erlang/Elixir version used. Note that use of this key in version 0.4 or later will produce a deprecation warning - you can silence the warning by providing a pair with key :no_warn e.g. `plt_file: {:no_warn,"filename"}`.
* `dialyzer: :plt_core_path` - specify an alternative to MIX_HOME to use to store the Erlang and Elixir core files.
* `dialyzer: :ignore_warnings` - specify file path to filter well-known warnings.
"""
use Mix.Task
import System, only: [user_home!: 0]
import Dialyxir.Output, only: [info: 1, error: 1]
alias Dialyxir.Project
alias Dialyxir.Plt
alias Dialyxir.Dialyzer
defmodule Build do
@shortdoc "Build the required plt(s) and exit."
@moduledoc """
This task compiles the mix project and creates a PLT with dependencies if needed.
It is equivalent to running `mix dialyzer --plt`
## Command line options
* `--no-compile` - do not compile even if needed.
"""
use Mix.Task
def run(args) do
Mix.Tasks.Dialyzer.run(["--plt" | args])
end
end
defmodule Clean do
@shortdoc "Delete plt(s) and exit."
@moduledoc """
This task deletes PLT files and hash files.
## Command line options
* `--all` - delete also core PLTs.
"""
use Mix.Task
@command_options [all: :boolean]
def run(args) do
{opts, _, _dargs} = OptionParser.parse(args, strict: @command_options)
Mix.Tasks.Dialyzer.clean(opts)
end
end
@default_warnings [:unknown]
@old_options [
halt_exit_status: :boolean
]
@command_options Keyword.merge(@old_options,
force_check: :boolean,
ignore_exit_status: :boolean,
list_unused_filters: :boolean,
no_check: :boolean,
no_compile: :boolean,
plt: :boolean,
quiet: :boolean,
raw: :boolean,
format: :string
)
def run(args) do
{opts, _, dargs} = OptionParser.parse(args, strict: @command_options)
original_shell = Mix.shell()
if opts[:quiet], do: Mix.shell(Mix.Shell.Quiet)
opts = Keyword.delete(opts, :quiet)
check_dialyzer()
compatibility_notice()
if Mix.Project.get() do
Project.check_config()
unless opts[:no_compile], do: Mix.Project.compile([])
_ =
unless no_check?(opts) do
info("Finding suitable PLTs")
force_check? = Keyword.get(opts, :force_check, false)
check_plt(force_check?)
end
default = Dialyxir.Project.default_ignore_warnings()
ignore_warnings = Dialyxir.Project.dialyzer_ignore_warnings()
cond do
!ignore_warnings && File.exists?(default) ->
info("""
No :ignore_warnings opt specified in mix.exs. Using default: #{default}.
""")
ignore_warnings && File.exists?(ignore_warnings) ->
info("""
ignore_warnings: #{ignore_warnings}
""")
ignore_warnings ->
info("""
:ignore_warnings opt specified in mix.exs: #{ignore_warnings}, but file does not exist.
""")
true ->
info("""
No :ignore_warnings opt specified in mix.exs and default does not exist.
""")
end
warn_old_options(opts)
unless opts[:plt], do: run_dialyzer(opts, dargs)
else
info("No mix project found - checking core PLTs...")
Project.plts_list([], false) |> Plt.check()
end
Mix.shell(original_shell)
end
def clean(opts, fun \\ &delete_plt/4) do
check_dialyzer()
compatibility_notice()
if opts[:all], do: Project.plts_list([], false) |> Plt.check(fun)
if Mix.Project.get() do
{apps, _hash} = dependency_hash()
info("Deleting PLTs")
Project.plts_list(apps, true, true) |> Plt.check(fun)
info("About to delete PLT hash file: #{plt_hash_file()}")
File.rm(plt_hash_file())
end
end
def delete_plt(plt, _, _, _) do
info("About to delete PLT file: #{plt}")
File.rm(plt)
end
defp no_check?(opts) do
case {in_child?(), no_plt?()} do
{true, true} ->
info("In an Umbrella child and no PLT found - building that first.")
build_parent_plt()
true
{true, false} ->
info("In an Umbrella child, not checking PLT...")
true
_ ->
opts[:no_check]
end
end
defp check_plt(force_check?) do
info("Checking PLT...")
{apps, hash} = dependency_hash()
if not force_check? and check_hash?(hash) do
info("PLT is up to date!")
else
Project.plts_list(apps) |> Plt.check()
File.write(plt_hash_file(), hash)
end
end
defp run_dialyzer(opts, dargs) do
args = [
{:check_plt, opts[:force_check] || false},
{:init_plt, String.to_charlist(Project.plt_file())},
{:files, Project.dialyzer_files()},
{:warnings, dialyzer_warnings(dargs)},
{:format, opts[:format]},
{:raw, opts[:raw]},
{:list_unused_filters, opts[:list_unused_filters]},
{:ignore_exit_status, opts[:ignore_exit_status]}
]
{status, exit_status, [time | result]} = Dialyzer.dialyze(args)
info(time)
report = if status == :ok, do: &info/1, else: &error/1
Enum.each(result, report)
unless exit_status == 0 || opts[:ignore_exit_status] do
error("Halting VM with exit status #{exit_status}")
System.halt(exit_status)
end
end
defp dialyzer_warnings(dargs) do
raw_opts = Project.dialyzer_flags() ++ Enum.map(dargs, &elem(&1, 0))
transform(raw_opts) ++ (@default_warnings -- Project.dialyzer_removed_defaults())
end
defp transform(options) when is_list(options), do: Enum.map(options, &transform/1)
defp transform(option) when is_atom(option), do: option
defp transform(option) when is_binary(option) do
option
|> String.replace_leading("-W", "")
|> String.replace("--", "")
|> String.to_atom()
end
defp in_child? do
String.contains?(Mix.Project.config()[:lockfile], "..")
end
defp no_plt? do
not File.exists?(Project.deps_plt())
end
defp build_parent_plt() do
parent = Mix.Project.config()[:lockfile] |> Path.expand() |> Path.dirname()
opts = [into: IO.stream(:stdio, :line), stderr_to_stdout: true, cd: parent]
# It would seem more natural to use Mix.in_project here to start in our parent project.
# However part of the app.tree resolution includes loading all sub apps, and we will
# hit an exception when we try to do that for *this* child, which is already loaded.
{out, rc} = System.cmd("mix", ["dialyzer", "--plt"], opts)
unless rc == 0 do
info("Error building parent PLT, process returned code: #{rc}\n#{out}")
end
end
defp check_dialyzer do
if not Code.ensure_loaded?(:dialyzer) do
error("""
DEPENDENCY MISSING
------------------------
If you are reading this message, then Elixir and Erlang are installed but the
Erlang Dialyzer is not available. Probably this is because you installed Erlang
with your OS package manager and the Dialyzer package is separate.
On Debian/Ubuntu:
`apt-get install erlang-dialyzer`
Fedora:
`yum install erlang-dialyzer`
Arch and Homebrew include Dialyzer in their base erlang packages. Please report a Github
issue to add or correct distribution-specific information.
""")
:erlang.halt(3)
end
end
defp warn_old_options(opts) do
for {opt, _} <- opts, @old_options[opt] do
error("#{opt} is no longer a valid CLI argument.")
end
nil
end
defp compatibility_notice do
old_plt = "#{user_home!()}/.dialyxir_core_*.plt"
if File.exists?(old_plt) &&
(!File.exists?(Project.erlang_plt()) || !File.exists?(Project.elixir_plt())) do
info("""
COMPATIBILITY NOTICE
------------------------
Previous usage of a pre-0.4 version of Dialyxir detected. Please be aware that the 0.4 release
makes a number of changes to previous defaults. Among other things, the PLT task is automatically
run when dialyzer is run, PLT paths have changed,
transitive dependencies are included by default in the PLT, and no additional warning flags
beyond the dialyzer defaults are included. All these properties can be changed in configuration.
(see `mix help dialyzer`).
If you no longer use the older Dialyxir in any projects and do not want to see this notice each time you upgrade your Erlang/Elixir distribution, you can delete your old pre-0.4 PLT files. ( rm ~/.dialyxir_core_*.plt )
""")
end
end
@spec check_hash?(binary()) :: boolean()
defp check_hash?(hash) do
case File.read(plt_hash_file()) do
{:ok, stored_hash} -> hash == stored_hash
_ -> false
end
end
defp plt_hash_file, do: Project.plt_file() <> ".hash"
@spec dependency_hash :: {[atom()], binary()}
def dependency_hash do
lock_file = Mix.Dep.Lock.read() |> :erlang.term_to_binary()
apps = Project.cons_apps()
apps |> inspect() |> info()
hash = :crypto.hash(:sha, lock_file <> :erlang.term_to_binary(apps))
{apps, hash}
end
end
defmodule Ockam.Examples.Stream.BiDirectional.Local do
@moduledoc """
Ping-pong example for bi-directional stream communication using local subsctiption
Use-case: integrate ockam nodes which implement stream protocol consumer and publisher
Pre-requisites:
Ockam cloud node running with stream service and TCP listener
Two ockam nodes "ping" and "pong"
Expected behaviour:
Two nodes "ping" and "pong" send messages to each other using two streams:
"pong_topic" to send messages to "pong" node
"ping_topic" to send messages to "ping" node
Implementation:
Stream service is running on the cloud node
Ping and pong nodes create local consumers and publishers to exchange messages
"""
alias Ockam.Examples.Ping
alias Ockam.Examples.Pong
alias Ockam.Stream.Client.BiDirectional
alias Ockam.Stream.Client.BiDirectional.PublisherRegistry
alias Ockam.Transport.TCP
def config() do
%{
cloud_ip: "127.0.0.1",
cloud_port: 4000,
service_address: "stream",
index_address: "stream_index"
}
end
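# Sketch of the intended workflow (two separate nodes, as described in the
# moduledoc):
#
#   # on the "pong" node
#   Ockam.Examples.Stream.BiDirectional.Local.init_pong()
#
#   # on the "ping" node
#   Ockam.Examples.Stream.BiDirectional.Local.run()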
def stream_options() do
config = config()
{:ok, cloud_ip_n} = :inet.parse_address(to_charlist(config.cloud_ip))
tcp_address = Ockam.Transport.TCPAddress.new(cloud_ip_n, config.cloud_port)
[
service_route: [tcp_address, config.service_address],
index_route: [tcp_address, config.index_address],
partitions: 1
]
end
## This should be run on the PONG node
def init_pong() do
TCP.start()
## PONG worker
{:ok, "pong"} = Pong.create(address: "pong")
## Create a local subscription to forward pong_topic messages to local node
subscribe("pong_topic", "pong")
end
def run() do
TCP.start()
## PING worker
Ping.create(address: "ping")
## Subscribe to response topic
subscribe("ping_topic", "ping")
## Create local publisher worker to forward to pong_topic and add metadata to
## messages to send responses to ping_topic
{:ok, address} = init_publisher("pong_topic", "ping_topic", "ping")
## Send a message THROUGH the local publisher to the remote worker
send_message([address, "pong"])
end
def init_publisher(publisher_stream, consumer_stream, subscription_id) do
BiDirectional.ensure_publisher(
consumer_stream,
publisher_stream,
subscription_id,
stream_options()
)
end
def send_message(onward_route) do
msg = %{
onward_route: onward_route,
return_route: ["ping"],
payload: "0"
}
Ockam.Router.route(msg)
end
def subscribe(stream, subscription_id) do
## Local subscribe
## Create bidirectional subscription on local node
## using stream service configuration from stream_options
BiDirectional.subscribe(stream, subscription_id, stream_options())
## This is necessary to make sure we don't spawn publisher for each message
PublisherRegistry.start_link([])
end
end
defmodule ParkingTweets.IdMapSet do
@moduledoc """
Similar interface to `MapSet`, but items are unique only by an ID.
"""
defstruct [:id_fun, :map]
@opaque t :: %__MODULE__{}
@doc """
Returns a new IdMapSet.
iex> new(& &1)
#ParkingTweets.IdMapSet<[]>
iex> new(&elem(&1, 0), [a: 1, b: 2, a: 3])
#ParkingTweets.IdMapSet<[a: 3, b: 2]>
IdMapSet also implements the Enumerable protocol:
iex> set = new(& &1, [1, 2, 3])
iex> Enum.count(set)
3
iex> 3 in set
true
iex> Enum.map(set, & &1 + 1)
[2, 3, 4]
"""
def new(id_fun, enum \\ []) when is_function(id_fun, 1) do
map =
for item <- enum, into: %{} do
{id_fun.(item), item}
end
%__MODULE__{id_fun: id_fun, map: map}
end
@doc """
Returns the number of items in the IdMapSet.
iex> size(new(& &1))
0
iex> size(new(& &1, [1, 2, 3]))
3
"""
def size(%__MODULE__{map: map}) do
map_size(map)
end
@doc """
Returns the items in the IdMapSet as a list.
iex> set = new(&rem(&1, 2), [1, 2, 3])
iex> to_list(set)
[2, 3]
"""
def to_list(%__MODULE__{} = id_map_set) do
Map.values(id_map_set.map)
end
@doc """
Insert or update an item in the IdMapSet.
iex> set = new(& &1)
iex> 1 in set
false
iex> new_set = put(set, 1)
#ParkingTweets.IdMapSet<[1]>
iex> 1 in new_set
true
"""
def put(%__MODULE__{} = id_map_set, item) do
%{id_map_set | map: Map.put(id_map_set.map, id_map_set.id_fun.(item), item)}
end
@doc """
Get an item from the IdMapSet by its ID.
iex> set = new(&elem(&1, 0), [a: 1])
iex> get(set, :a)
{:a, 1}
iex> get(set, :b)
nil
"""
def get(%__MODULE__{} = id_map_set, id) do
Map.get(id_map_set.map, id)
end
@doc """
Returns the items from `id_map_set_1` that are not in `id_map_set_2` with the same values.
iex> set_1 = new(&elem(&1, 0), [a: 1, b: 2, c: 3])
iex> set_2 = new(&elem(&1, 0), [a: 1, b: 4])
iex> difference_by(set_1, set_2, &elem(&1, 1) - 2)
#ParkingTweets.IdMapSet<[b: 2, c: 3]>
iex> difference_by(set_1, set_2, &rem(elem(&1, 1), 2))
#ParkingTweets.IdMapSet<[c: 3]>
"""
def difference_by(%__MODULE__{} = id_map_set_1, %__MODULE__{} = id_map_set_2, compare_fn)
when is_function(compare_fn, 1) do
new_set = new(id_map_set_1.id_fun)
:maps.fold(
fn id, item, set ->
old_item = Map.get(id_map_set_2.map, id)
if is_nil(old_item) or compare_fn.(old_item) != compare_fn.(item) do
put(set, item)
else
set
end
end,
new_set,
id_map_set_1.map
)
end
defimpl Inspect do
import Inspect.Algebra
def inspect(id_map_set, opts) do
concat(["#ParkingTweets.IdMapSet<", to_doc(@for.to_list(id_map_set), opts), ">"])
end
end
defimpl Enumerable do
def count(id_map_set) do
{:ok, @for.size(id_map_set)}
end
def member?(id_map_set, element) do
key = id_map_set.id_fun.(element)
{:ok, Map.fetch(id_map_set.map, key) == {:ok, element}}
end
def reduce(id_map_set, acc, fun) do
Enumerable.List.reduce(@for.to_list(id_map_set), acc, fun)
end
def slice(_id_map_set) do
{:error, __MODULE__}
end
end
end
defmodule EVM.Stack do
@moduledoc """
Operations to read / write to the EVM's stack.
"""
@type t :: [EVM.val()]
@doc """
Pushes value onto stack.
## Examples
iex> EVM.Stack.push([], 5)
[5]
iex> EVM.Stack.push([5], 6)
[6, 5]
iex> EVM.Stack.push([], [5, 6])
[5, 6]
"""
@spec push(t, EVM.val() | list(EVM.val())) :: t
def push(stack, val) when is_list(val), do: val ++ stack
def push(stack, val), do: [val | stack]
@doc """
Pops value from stack, returning a new
stack with value popped.
This function raises if stack is empty.
## Examples
iex> EVM.Stack.pop([1, 2, 3])
{1, [2, 3]}
iex> EVM.Stack.pop([5])
{5, []}
iex> EVM.Stack.pop([])
** (FunctionClauseError) no function clause matching in EVM.Stack.pop/1
"""
@spec pop(t) :: {EVM.val(), t}
def pop(_stack = [h | t]), do: {h, t}
@doc """
Peeks at head of stack, returns nil
if stack is empty.
## Examples
iex> EVM.Stack.peek([])
nil
iex> EVM.Stack.peek([1, 2])
1
"""
@spec peek(t) :: EVM.val() | nil
def peek([]), do: nil
def peek([h | _]), do: h
@doc """
Peeks at up to n elements of the stack. If fewer than n elements
exist, all available elements are returned.
## Examples
iex> EVM.Stack.peek_n([1, 2, 3], 2)
[1, 2]
iex> EVM.Stack.peek_n([1, 2, 3], 4)
[1, 2, 3]
"""
@spec peek_n(t, integer()) :: [EVM.val()]
def peek_n(stack, n) do
{r, _} = pop_n(stack, n)
r
end
@doc """
Pops multiple values off of the stack, returning a new stack
with that many elements removed.
If the stack contains fewer elements than requested, all of them are popped.
## Examples
iex> EVM.Stack.pop_n([1, 2, 3], 0)
{[], [1, 2, 3]}
iex> EVM.Stack.pop_n([1, 2, 3], 1)
{[1], [2, 3]}
iex> EVM.Stack.pop_n([1, 2, 3], 2)
{[1, 2], [3]}
iex> EVM.Stack.pop_n([1, 2, 3], 4)
{[1, 2, 3], []}
"""
@spec pop_n(t, integer()) :: {[EVM.val()], t}
def pop_n(stack, 0), do: {[], stack}
def pop_n([h | t], n) do
{a, b} = pop_n(t, n - 1)
{[h | a], b}
end
def pop_n([], _n), do: {[], []}
@doc """
Returns the length of the stack.
## Examples
iex> EVM.Stack.length([1, 2, 3])
3
iex> EVM.Stack.length([])
0
"""
@spec length(t) :: integer()
def length(stack), do: Kernel.length(stack)
end
defmodule FuzzyCompare.ChunkSet do
@moduledoc """
The ChunkSet is ideal for strings which, in addition to shared words, also
contain many dissimilar words.
It works in the following way:
Our input strings are
* `"<NAME>"`
* `"<NAME> was the wife of <NAME>"`
From the input strings, three strings are created.
* `common_words = "claude monet"`
* `common_words_plus_remaining_words_left = "claude monet oscar"`
* `common_words_plus_remaining_words_right = "claude monet <NAME> was the wife of"`
These are then all compared with each other in pairs and the maximum ratio
is returned.
## Examples
iex> FuzzyCompare.ChunkSet.standard_similarity("<NAME>", "<NAME> was the wife of claude monet")
0.8958333333333334
iex> FuzzyCompare.ChunkSet.substring_similarity("<NAME>", "<NAME> was the wife of claude monet")
1.0
"""
alias FuzzyCompare.{
Preprocessed,
Preprocessor,
StandardStringComparison,
SubstringComparison
}
@spec standard_similarity(binary() | Preprocessed.t(), binary() | Preprocessed.t()) :: float()
def standard_similarity(left, right) when is_binary(left) and is_binary(right) do
{processed_left, processed_right} = Preprocessor.process(left, right)
standard_similarity(processed_left, processed_right)
end
def standard_similarity(%Preprocessed{set: left}, %Preprocessed{set: right}) do
similarity(left, right, StandardStringComparison)
end
@spec substring_similarity(binary() | Preprocessed.t(), binary() | Preprocessed.t()) :: float()
def substring_similarity(left, right) when is_binary(left) and is_binary(right) do
{processed_left, processed_right} = Preprocessor.process(left, right)
substring_similarity(processed_left, processed_right)
end
def substring_similarity(%Preprocessed{set: left}, %Preprocessed{set: right}) do
similarity(left, right, SubstringComparison)
end
@spec similarity(
MapSet.t(String.t()),
MapSet.t(String.t()),
StandardStringComparison | SubstringComparison
) :: float()
defp similarity(left, right, ratio_mod) do
{common_words, common_words_plus_remaining_words_left,
common_words_plus_remaining_words_right} = set_operations(left, right)
[
0.0,
ratio_mod.similarity(common_words, common_words_plus_remaining_words_left),
ratio_mod.similarity(common_words, common_words_plus_remaining_words_right),
ratio_mod.similarity(
common_words_plus_remaining_words_left,
common_words_plus_remaining_words_right
)
]
|> Enum.max()
end
@spec set_operations(MapSet.t(String.t()), MapSet.t(String.t())) ::
{binary(), binary(), binary()}
defp set_operations(left, right) do
common_words = MapSet.intersection(left, right)
common_words_string =
common_words
|> Enum.sort()
|> Enum.join()
[
common_words_plus_remaining_words_left_string,
common_words_plus_remaining_words_right_string
] =
[left, right]
|> Enum.map(fn x ->
common_words_string <>
(x
|> MapSet.difference(common_words)
|> Enum.sort()
|> Enum.join())
end)
{common_words_string, common_words_plus_remaining_words_left_string,
common_words_plus_remaining_words_right_string}
end
end
defmodule Concentrate.Filter.Alert.TimeTable do
@moduledoc """
Wrapper for an ETS table which maintains a mapping of keys to values at particular times.
"""
@epoch_seconds :calendar.datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})
@one_day_minus_one 86_399
@doc "Creates a new TimeTable with the given name"
def new(name) when is_atom(name) do
^name = :ets.new(name, [:named_table, :public, :bag])
:ok
end
@doc "Inserts the given 4-tuples into the table"
def update(name, [{_, _, _, _} | _] = records) when is_atom(name) do
:ets.delete_all_objects(name)
:ets.insert(name, records)
end
def update(name, []) do
:ets.delete_all_objects(name)
end
@doc "Queries the table for matching keys that overlap the date/timestamp."
def date_overlaps(name, key, date_or_timestamp) do
{start, stop} = start_stop_times(date_or_timestamp)
selector = selector(key, start, stop)
:ets.select(name, [selector])
rescue
ArgumentError -> []
end
@doc "Queries the table and returns true if there are any matches."
def date_overlaps?(name, key, date_or_timestamp) do
{start, stop} = start_stop_times(date_or_timestamp)
selector = selector(key, start, stop)
:ets.select(name, [selector], 1) != :"$end_of_table"
rescue
ArgumentError -> false
end
defp start_stop_times(timestamp) when is_integer(timestamp) do
{timestamp, timestamp}
end
defp start_stop_times({_, _, _} = date) do
start = :calendar.datetime_to_gregorian_seconds({date, {0, 0, 0}}) - @epoch_seconds
stop = start + @one_day_minus_one
{start, stop}
end
defp start_stop_times({start_timestamp, stop_timestamp} = times)
when is_integer(start_timestamp) and is_integer(stop_timestamp) do
times
end
defp selector(key, start_timestamp, stop_timestamp) do
{
{key, :"$1", :"$2", :"$3"},
[
# DateTime is between the start/end dates
{:"=<", :"$1", stop_timestamp},
{:"=<", start_timestamp, :"$2"}
],
[:"$3"]
}
end
end
defmodule GraphQL.Plug.Endpoint do
@moduledoc """
This is the core plug for mounting a GraphQL server.
You can build your own pipeline by mounting the
`GraphQL.Plug.Endpoint` plug directly.
```elixir
forward "/graphql", GraphQL.Plug.Endpoint, schema: {MyApp.Schema, :schema}
```
You may want to look at how `GraphQL.Plug` configures its pipeline.
Specifically note how `Plug.Parsers` are configured, as this is required
for pre-parsing the various POST bodies depending on `content-type`.
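
Once mounted, a query can be issued with a plain HTTP request, for example
(hypothetical schema exposing a `greeting` field):

    curl 'http://localhost:4000/graphql?query={greeting}'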
"""
import Plug.Conn
alias Plug.Conn
alias GraphQL.Plug.ConfigurableValue
alias GraphQL.Plug.Parameter
@behaviour Plug
def init(opts) do
# NOTE: This code needs to be kept in sync with
# GraphQL.Plug.GraphiQL and GraphQL.Plugs.Endpoint as the
# returned data structure is shared.
schema = case Keyword.get(opts, :schema) do
{mod, func} -> apply(mod, func, [])
s -> s
end
root_value = Keyword.get(opts, :root_value, %{})
query = Keyword.get(opts, :query, nil)
[
schema: schema,
root_value: root_value,
query: query
]
end
def call(%Conn{method: m} = conn, opts) when m in ["GET", "POST"] do
args = extract_arguments(conn, opts)
handle_call(conn, args)
end
def call(%Conn{method: _} = conn, _) do
handle_error(conn, "GraphQL only supports GET and POST requests.")
end
def extract_arguments(conn, opts) do
query = Parameter.query(conn) ||
ConfigurableValue.evaluate(conn, opts[:query], nil)
variables = Parameter.variables(conn)
operation_name = Parameter.operation_name(conn)
root_value = ConfigurableValue.evaluate(conn, opts[:root_value], %{})
%{
query: query,
variables: variables,
operation_name: operation_name,
root_value: root_value,
schema: opts[:schema]
}
end
def handle_error(conn, message) do
{:ok, errors} = Poison.encode(%{errors: [%{message: message}]})
conn
|> put_resp_content_type("application/json")
|> send_resp(400, errors)
end
def handle_call(conn, %{query: nil}) do
handle_error(conn, "Must provide query string.")
end
def handle_call(conn, args) do
conn
|> put_resp_content_type("application/json")
|> execute(args)
end
defp execute(conn, args) do
case GraphQL.execute(args.schema, args.query, args.root_value, args.variables, args.operation_name) do
{:ok, data} ->
case Poison.encode(data) do
{:ok, json} -> send_resp(conn, 200, json)
{:error, errors} -> send_resp(conn, 500, errors)
end
{:error, errors} ->
case Poison.encode(errors) do
{:ok, json} -> send_resp(conn, 400, json)
{:error, errors} -> send_resp(conn, 400, errors)
end
end
end
end
defmodule AWS.Rekognition do
@moduledoc """
This is the Amazon Rekognition API reference.
"""
@doc """
Compares a face in the *source* input image with each of the 100 largest
faces detected in the *target* input image.
<note> If the source image contains multiple faces, the service detects the
largest face and compares it with each face detected in the target image.
</note> You pass the input and target images either as base64-encoded image
bytes or as references to images in an Amazon S3 bucket. If you use the AWS
CLI to call Amazon Rekognition operations, passing image bytes isn't
supported. The image must be formatted as a PNG or JPEG file.
In response, the operation returns an array of face matches ordered by
similarity score in descending order. For each face match, the response
provides a bounding box of the face, facial landmarks, pose details (pitch,
role, and yaw), quality (brightness and sharpness), and confidence value
(indicating the level of confidence that the bounding box contains a face).
The response also provides a similarity score, which indicates how closely
the faces match.
<note> By default, only faces with a similarity score of greater than or
equal to 80% are returned in the response. You can change this value by
specifying the `SimilarityThreshold` parameter.
</note> `CompareFaces` also returns an array of faces that don't match the
source image. For each face, it returns a bounding box, confidence value,
landmarks, pose details, and quality. The response also returns information
about the face in the source image, including the bounding box of the face
and confidence value.
If the image doesn't contain Exif metadata, `CompareFaces` returns
orientation information for the source and target images. Use these values
to display the images with the correct image orientation.
If no faces are detected in the source or target images, `CompareFaces`
returns an `InvalidParameterException` error.
<note> This is a stateless API operation. That is, data returned by this
operation doesn't persist.
</note> For an example, see Comparing Faces in Images in the Amazon
Rekognition Developer Guide.
This operation requires permissions to perform the
`rekognition:CompareFaces` action.
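A minimal request sketch (the bucket and object names are placeholders, and
`client` is assumed to be an AWS client map holding your credentials and
region):

    input = %{
      "SourceImage" => %{"S3Object" => %{"Bucket" => "my-bucket", "Name" => "source.jpg"}},
      "TargetImage" => %{"S3Object" => %{"Bucket" => "my-bucket", "Name" => "target.jpg"}},
      "SimilarityThreshold" => 80
    }
    {:ok, result, _http_response} = AWS.Rekognition.compare_faces(client, input)
    result["FaceMatches"]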
"""
def compare_faces(client, input, options \\ []) do
request(client, "CompareFaces", input, options)
end
@doc """
Creates a collection in an AWS Region. You can add faces to the collection
using the `IndexFaces` operation.
For example, you might create collections, one for each of your application
users. A user can then index faces using the `IndexFaces` operation and
persist results in a specific collection. Then, a user can search the
collection for faces in the user-specific container.
When you create a collection, it is associated with the latest version of
the face model version.
<note> Collection names are case-sensitive.
</note> This operation requires permissions to perform the
`rekognition:CreateCollection` action.
"""
def create_collection(client, input, options \\ []) do
request(client, "CreateCollection", input, options)
end
@doc """
Creates an Amazon Rekognition stream processor that you can use to detect
and recognize faces in a streaming video.
Amazon Rekognition Video is a consumer of live video from Amazon Kinesis
Video Streams. Amazon Rekognition Video sends analysis results to Amazon
Kinesis Data Streams.
You provide as input a Kinesis video stream (`Input`) and a Kinesis data
stream (`Output`) stream. You also specify the face recognition criteria in
`Settings`. For example, the collection containing faces that you want to
recognize. Use `Name` to assign an identifier for the stream processor. You
use `Name` to manage the stream processor. For example, you can start
processing the source video by calling `StartStreamProcessor` with the
`Name` field.
After you have finished analyzing a streaming video, use
`StopStreamProcessor` to stop processing. You can delete the stream
processor by calling `DeleteStreamProcessor`.
"""
def create_stream_processor(client, input, options \\ []) do
request(client, "CreateStreamProcessor", input, options)
end
@doc """
Deletes the specified collection. Note that this operation removes all
faces in the collection. For an example, see `delete-collection-procedure`.
This operation requires permissions to perform the
`rekognition:DeleteCollection` action.
"""
def delete_collection(client, input, options \\ []) do
request(client, "DeleteCollection", input, options)
end
@doc """
Deletes faces from a collection. You specify a collection ID and an array
of face IDs to remove from the collection.
This operation requires permissions to perform the
`rekognition:DeleteFaces` action.
"""
def delete_faces(client, input, options \\ []) do
request(client, "DeleteFaces", input, options)
end
@doc """
Deletes the stream processor identified by `Name`. You assign the value for
`Name` when you create the stream processor with `CreateStreamProcessor`.
You might not be able to use the same name for a stream processor for a few
seconds after calling `DeleteStreamProcessor`.
"""
def delete_stream_processor(client, input, options \\ []) do
request(client, "DeleteStreamProcessor", input, options)
end
@doc """
Describes the specified collection. You can use `DescribeCollection` to get
information, such as the number of faces indexed into a collection and the
version of the model used by the collection for face detection.
For more information, see Describing a Collection in the Amazon Rekognition
Developer Guide.
"""
def describe_collection(client, input, options \\ []) do
request(client, "DescribeCollection", input, options)
end
@doc """
Provides information about a stream processor created by
`CreateStreamProcessor`. You can get information about the input and output
streams, the input parameters for the face recognition being performed, and
the current status of the stream processor.
"""
def describe_stream_processor(client, input, options \\ []) do
request(client, "DescribeStreamProcessor", input, options)
end
@doc """
Detects faces within an image that is provided as input.
`DetectFaces` detects the 100 largest faces in the image. For each face
detected, the operation returns face details. These details include a
bounding box of the face, a confidence value (that the bounding box
contains a face), and a fixed set of attributes such as facial landmarks
(for example, coordinates of eye and mouth), gender, presence of beard,
sunglasses, and so on.
The face-detection algorithm is most effective on frontal faces. For
non-frontal or obscured faces, the algorithm might not detect the faces or
might detect faces with lower confidence.
You pass the input image either as base64-encoded image bytes or as a
reference to an image in an Amazon S3 bucket. If you use the AWS CLI to call
Amazon Rekognition operations, passing image bytes is not supported. The
image must be either a PNG or JPEG formatted file.
<note> This is a stateless API operation. That is, the operation does not
persist any data.
</note> This operation requires permissions to perform the
`rekognition:DetectFaces` action.
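A hedged sketch (placeholder bucket/object names; `client` is an AWS client
map created elsewhere):

    input = %{"Image" => %{"S3Object" => %{"Bucket" => "my-bucket", "Name" => "photo.jpg"}}}
    {:ok, %{"FaceDetails" => faces}, _response} = AWS.Rekognition.detect_faces(client, input)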
"""
def detect_faces(client, input, options \\ []) do
request(client, "DetectFaces", input, options)
end
@doc """
Detects instances of real-world entities within an image (JPEG or PNG)
provided as input. This includes objects like flower, tree, and table;
events like wedding, graduation, and birthday party; and concepts like
landscape, evening, and nature.
For an example, see Analyzing Images Stored in an Amazon S3 Bucket in the
Amazon Rekognition Developer Guide.
<note> `DetectLabels` does not support the detection of activities.
However, activity detection is supported for label detection in videos. For
more information, see StartLabelDetection in the Amazon Rekognition
Developer Guide.
</note> You pass the input image as base64-encoded image bytes or as a
reference to an image in an Amazon S3 bucket. If you use the AWS CLI to
call Amazon Rekognition operations, passing image bytes is not supported.
The image must be either a PNG or JPEG formatted file.
For each object, scene, and concept the API returns one or more labels.
Each label provides the object name, and the level of confidence that the
image contains the object. For example, suppose the input image has a
lighthouse, the sea, and a rock. The response includes all three labels,
one for each object.
`{Name: lighthouse, Confidence: 98.4629}`
`{Name: rock, Confidence: 79.2097}`
`{Name: sea, Confidence: 75.061}`
In the preceding example, the operation returns one label for each of the
three objects. The operation can also return multiple labels for the same
object in the image. For example, if the input image shows a flower (for
example, a tulip), the operation might return the following three labels.
`{Name: flower, Confidence: 99.0562}`
`{Name: plant, Confidence: 99.0562}`
`{Name: tulip, Confidence: 99.0562}`
In this example, the detection algorithm more precisely identifies the
flower as a tulip.
In response, the API returns an array of labels. In addition, the response
also includes the orientation correction. Optionally, you can specify
`MinConfidence` to control the confidence threshold for the labels
returned. The default is 55%. You can also add the `MaxLabels` parameter to
limit the number of labels returned.
<note> If the object detected is a person, the operation doesn't provide
the same facial details that the `DetectFaces` operation provides.
</note> `DetectLabels` returns bounding boxes for instances of common
object labels in an array of `Instance` objects. An `Instance` object
contains a `BoundingBox` object, for the location of the label on the
image. It also includes the confidence by which the bounding box was
detected.
`DetectLabels` also returns a hierarchical taxonomy of detected labels. For
example, a detected car might be assigned the label *car*. The label *car*
has two parent labels: *Vehicle* (its parent) and *Transportation* (its
grandparent). The response returns the entire list of ancestors for a
label. Each ancestor is a unique label in the response. In the previous
example, *Car*, *Vehicle*, and *Transportation* are returned as unique
labels in the response.
This is a stateless API operation. That is, the operation does not persist
any data.
This operation requires permissions to perform the
`rekognition:DetectLabels` action.
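A sketch showing the optional `MaxLabels` and `MinConfidence` parameters
(bucket and object names are illustrative):

    input = %{
      "Image" => %{"S3Object" => %{"Bucket" => "my-bucket", "Name" => "beach.jpg"}},
      "MaxLabels" => 10,
      "MinConfidence" => 75
    }
    {:ok, %{"Labels" => labels}, _response} = AWS.Rekognition.detect_labels(client, input)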
"""
def detect_labels(client, input, options \\ []) do
request(client, "DetectLabels", input, options)
end
@doc """
Detects explicit or suggestive adult content in a specified JPEG or PNG
format image. Use `DetectModerationLabels` to moderate images depending on
your requirements. For example, you might want to filter images that
contain nudity, but not images containing suggestive content.
To filter images, use the labels returned by `DetectModerationLabels` to
determine which types of content are appropriate.
For information about moderation labels, see Detecting Unsafe Content in
the Amazon Rekognition Developer Guide.
You pass the input image either as base64-encoded image bytes or as a
reference to an image in an Amazon S3 bucket. If you use the AWS CLI to
call Amazon Rekognition operations, passing image bytes is not supported.
The image must be either a PNG or JPEG formatted file.
"""
def detect_moderation_labels(client, input, options \\ []) do
request(client, "DetectModerationLabels", input, options)
end
@doc """
Detects text in the input image and converts it into machine-readable text.
Pass the input image as base64-encoded image bytes or as a reference to an
image in an Amazon S3 bucket. If you use the AWS CLI to call Amazon
Rekognition operations, you must pass it as a reference to an image in an
Amazon S3 bucket. For the AWS CLI, passing image bytes is not supported.
The image must be either a .png or .jpeg formatted file.
The `DetectText` operation returns text in an array of `TextDetection`
elements, `TextDetections`. Each `TextDetection` element provides
information about a single word or line of text that was detected in the
image.
A word is one or more ISO Basic Latin script characters that are not
separated by spaces. `DetectText` can detect up to 50 words in an image.
A line is a string of equally spaced words. A line isn't necessarily a
complete sentence. For example, a driver's license number is detected as a
line. A line ends when there is no aligned text after it. Also, a line ends
when there is a large gap between words, relative to the length of the
words. This means, depending on the gap between words, Amazon Rekognition
may detect multiple lines in text aligned in the same direction. Periods
don't represent the end of a line. If a sentence spans multiple lines, the
`DetectText` operation returns multiple lines.
To determine whether a `TextDetection` element is a line of text or a word,
use the `TextDetection` object `Type` field.
To be detected, text must be within +/- 90 degrees orientation of the
horizontal axis.
For more information, see DetectText in the Amazon Rekognition Developer
Guide.
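A sketch that splits the detections into words and lines using the `Type`
field (assumes a configured `client`; names are illustrative):

    input = %{"Image" => %{"S3Object" => %{"Bucket" => "my-bucket", "Name" => "sign.png"}}}
    {:ok, %{"TextDetections" => detections}, _response} = AWS.Rekognition.detect_text(client, input)
    words = Enum.filter(detections, fn d -> d["Type"] == "WORD" end)
    lines = Enum.filter(detections, fn d -> d["Type"] == "LINE" end)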
"""
def detect_text(client, input, options \\ []) do
request(client, "DetectText", input, options)
end
@doc """
Gets the name and additional information about a celebrity based on his or
her Amazon Rekognition ID. The additional information is returned as an
array of URLs. If there is no additional information about the celebrity,
this list is empty.
For more information, see Recognizing Celebrities in an Image in the Amazon
Rekognition Developer Guide.
This operation requires permissions to perform the
`rekognition:GetCelebrityInfo` action.
"""
def get_celebrity_info(client, input, options \\ []) do
request(client, "GetCelebrityInfo", input, options)
end
@doc """
Gets the celebrity recognition results for an Amazon Rekognition Video
analysis started by `StartCelebrityRecognition`.
Celebrity recognition in a video is an asynchronous operation. Analysis is
started by a call to `StartCelebrityRecognition` which returns a job
identifier (`JobId`). When the celebrity recognition operation finishes,
Amazon Rekognition Video publishes a completion status to the Amazon Simple
Notification Service topic registered in the initial call to
`StartCelebrityRecognition`. To get the results of the celebrity
recognition analysis, first check that the status value published to the
Amazon SNS topic is `SUCCEEDED`. If so, call `GetCelebrityDetection` and
pass the job identifier (`JobId`) from the initial call to
`StartCelebrityDetection`.
For more information, see Working With Stored Videos in the Amazon
Rekognition Developer Guide.
`GetCelebrityRecognition` returns detected celebrities and the time(s) they
are detected in an array (`Celebrities`) of `CelebrityRecognition` objects.
Each `CelebrityRecognition` contains information about the celebrity in a
`CelebrityDetail` object and the time, `Timestamp`, the celebrity was
detected.
<note> `GetCelebrityRecognition` only returns the default facial attributes
(`BoundingBox`, `Confidence`, `Landmarks`, `Pose`, and `Quality`). The
other facial attributes listed in the `Face` object of the following
response syntax are not returned. For more information, see FaceDetail in
the Amazon Rekognition Developer Guide.
</note> By default, the `Celebrities` array is sorted by time (milliseconds
from the start of the video). You can also sort the array by celebrity by
specifying the value `ID` in the `SortBy` input parameter.
The `CelebrityDetail` object includes the celebrity identifier and
additional information URLs. If you don't store the additional information
URLs, you can get them later by calling `GetCelebrityInfo` with the
celebrity identifier.
No information is returned for faces not recognized as celebrities.
Use the `MaxResults` parameter to limit the number of celebrities returned. If there
are more results than specified in `MaxResults`, the value of `NextToken`
in the operation response contains a pagination token for getting the next
set of results. To get the next page of results, call
`GetCelebrityDetection` and populate the `NextToken` request parameter with
the token value returned from the previous call to
`GetCelebrityRecognition`.
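A pagination sketch (the `JobId` comes from a prior `StartCelebrityRecognition`
call; error handling is omitted for brevity):

    defp fetch_pages(client, job_id, token, acc) do
      base = %{"JobId" => job_id, "MaxResults" => 100}
      input = if token, do: Map.put(base, "NextToken", token), else: base
      {:ok, result, _response} = AWS.Rekognition.get_celebrity_recognition(client, input)
      celebrities = acc ++ result["Celebrities"]

      case result["NextToken"] do
        nil -> celebrities
        next_token -> fetch_pages(client, job_id, next_token, celebrities)
      end
    end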
"""
def get_celebrity_recognition(client, input, options \\ []) do
request(client, "GetCelebrityRecognition", input, options)
end
@doc """
Gets the content moderation analysis results for an Amazon Rekognition Video
analysis started by `StartContentModeration`.
Content moderation analysis of a video is an asynchronous operation. You
start analysis by calling `StartContentModeration` which returns a job
identifier (`JobId`). When analysis finishes, Amazon Rekognition Video
publishes a completion status to the Amazon Simple Notification Service
topic registered in the initial call to `StartContentModeration`. To get
the results of the content moderation analysis, first check that the status
value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
`GetContentModeration` and pass the job identifier (`JobId`) from the
initial call to `StartContentModeration`.
For more information, see Working with Stored Videos in the Amazon
Rekognition Developer Guide.
`GetContentModeration` returns detected content moderation labels, and the
time they are detected, in an array, `ModerationLabels`, of
`ContentModerationDetection` objects.
By default, the moderated labels are returned sorted by time, in
milliseconds from the start of the video. You can also sort them by
moderated label by specifying `NAME` for the `SortBy` input parameter.
Since video analysis can return a large number of results, use the
`MaxResults` parameter to limit the number of labels returned in a single
call to `GetContentModeration`. If there are more results than specified in
`MaxResults`, the value of `NextToken` in the operation response contains a
pagination token for getting the next set of results. To get the next page
of results, call `GetContentModeration` and populate the `NextToken`
request parameter with the value of `NextToken` returned from the previous
call to `GetContentModeration`.
For more information, see Detecting Unsafe Content in the Amazon
Rekognition Developer Guide.
"""
def get_content_moderation(client, input, options \\ []) do
request(client, "GetContentModeration", input, options)
end
@doc """
Gets face detection results for an Amazon Rekognition Video analysis started
by `StartFaceDetection`.
Face detection with Amazon Rekognition Video is an asynchronous operation.
You start face detection by calling `StartFaceDetection` which returns a
job identifier (`JobId`). When the face detection operation finishes,
Amazon Rekognition Video publishes a completion status to the Amazon Simple
Notification Service topic registered in the initial call to
`StartFaceDetection`. To get the results of the face detection operation,
first check that the status value published to the Amazon SNS topic is
`SUCCEEDED`. If so, call `GetFaceDetection` and pass the job identifier
(`JobId`) from the initial call to `StartFaceDetection`.
`GetFaceDetection` returns an array of detected faces (`Faces`) sorted by
the time the faces were detected.
Use the `MaxResults` parameter to limit the number of faces returned. If there
are more results than specified in `MaxResults`, the value of `NextToken`
in the operation response contains a pagination token for getting the next
set of results. To get the next page of results, call `GetFaceDetection`
and populate the `NextToken` request parameter with the token value
returned from the previous call to `GetFaceDetection`.
"""
def get_face_detection(client, input, options \\ []) do
request(client, "GetFaceDetection", input, options)
end
@doc """
Gets the face search results for Amazon Rekognition Video face search
started by `StartFaceSearch`. The search returns faces in a collection that
match the faces of persons detected in a video. It also includes the
time(s) that faces are matched in the video.
Face search in a video is an asynchronous operation. You start face search
by calling to `StartFaceSearch` which returns a job identifier (`JobId`).
When the search operation finishes, Amazon Rekognition Video publishes a
completion status to the Amazon Simple Notification Service topic
registered in the initial call to `StartFaceSearch`. To get the search
results, first check that the status value published to the Amazon SNS
topic is `SUCCEEDED`. If so, call `GetFaceSearch` and pass the job
identifier (`JobId`) from the initial call to `StartFaceSearch`.
For more information, see Searching Faces in a Collection in the Amazon
Rekognition Developer Guide.
The search results are returned in an array, `Persons`, of `PersonMatch`
objects. Each `PersonMatch` element contains details about the matching
faces in the input collection, person information (facial attributes,
bounding boxes, and person identifier) for the matched person, and the time
the person was matched in the video.
<note> `GetFaceSearch` only returns the default facial attributes
(`BoundingBox`, `Confidence`, `Landmarks`, `Pose`, and `Quality`). The
other facial attributes listed in the `Face` object of the following
response syntax are not returned. For more information, see FaceDetail in
the Amazon Rekognition Developer Guide.
</note> By default, the `Persons` array is sorted by the time, in
milliseconds from the start of the video, that persons are matched. You can
also sort by person by specifying `INDEX` for the `SortBy` input parameter.
"""
def get_face_search(client, input, options \\ []) do
request(client, "GetFaceSearch", input, options)
end
@doc """
Gets the label detection results of an Amazon Rekognition Video analysis
started by `StartLabelDetection`.
The label detection operation is started by a call to `StartLabelDetection`
which returns a job identifier (`JobId`). When the label detection
operation finishes, Amazon Rekognition publishes a completion status to the
Amazon Simple Notification Service topic registered in the initial call to
`StartLabelDetection`. To get the results of the label detection operation,
first check that the status value published to the Amazon SNS topic is
`SUCCEEDED`. If so, call `GetLabelDetection` and pass the job identifier
(`JobId`) from the initial call to `StartLabelDetection`.
`GetLabelDetection` returns an array of detected labels (`Labels`) sorted
by the time the labels were detected. You can also sort by the label name
by specifying `NAME` for the `SortBy` input parameter.
The labels returned include the label name, the percentage confidence in
the accuracy of the detected label, and the time the label was detected in
the video.
The returned labels also include bounding box information for common
objects, a hierarchical taxonomy of detected labels, and the version of the
label model used for detection.
Use the `MaxResults` parameter to limit the number of labels returned. If there
are more results than specified in `MaxResults`, the value of `NextToken`
in the operation response contains a pagination token for getting the next
set of results. To get the next page of results, call `GetLabelDetection`
and populate the `NextToken` request parameter with the token value
returned from the previous call to `GetLabelDetection`.
"""
def get_label_detection(client, input, options \\ []) do
request(client, "GetLabelDetection", input, options)
end
@doc """
Gets the path tracking results of an Amazon Rekognition Video analysis
started by `StartPersonTracking`.
The person path tracking operation is started by a call to
`StartPersonTracking` which returns a job identifier (`JobId`). When the
operation finishes, Amazon Rekognition Video publishes a completion status
to the Amazon Simple Notification Service topic registered in the initial
call to `StartPersonTracking`.
To get the results of the person path tracking operation, first check that
the status value published to the Amazon SNS topic is `SUCCEEDED`. If so,
call `GetPersonTracking` and pass the job identifier (`JobId`) from the
initial call to `StartPersonTracking`.
`GetPersonTracking` returns an array, `Persons`, of tracked persons and the
time(s) their paths were tracked in the video.
<note> `GetPersonTracking` only returns the default facial attributes
(`BoundingBox`, `Confidence`, `Landmarks`, `Pose`, and `Quality`). The
other facial attributes listed in the `Face` object of the following
response syntax are not returned.
For more information, see FaceDetail in the Amazon Rekognition Developer
Guide.
</note> By default, the array is sorted by the time(s) a person's path is
tracked in the video. You can sort by tracked persons by specifying `INDEX`
for the `SortBy` input parameter.
Use the `MaxResults` parameter to limit the number of items returned. If
there are more results than specified in `MaxResults`, the value of
`NextToken` in the operation response contains a pagination token for
getting the next set of results. To get the next page of results, call
`GetPersonTracking` and populate the `NextToken` request parameter with the
token value returned from the previous call to `GetPersonTracking`.
"""
def get_person_tracking(client, input, options \\ []) do
request(client, "GetPersonTracking", input, options)
end
@doc """
Detects faces in the input image and adds them to the specified collection.
Amazon Rekognition doesn't save the actual faces that are detected.
Instead, the underlying detection algorithm first detects the faces in the
input image. For each face, the algorithm extracts facial features into a
feature vector, and stores it in the backend database. Amazon Rekognition
uses feature vectors when it performs face match and search operations
using the `SearchFaces` and `SearchFacesByImage` operations.
For more information, see Adding Faces to a Collection in the Amazon
Rekognition Developer Guide.
To get the number of faces in a collection, call `DescribeCollection`.
If you're using version 1.0 of the face detection model, `IndexFaces`
indexes the 15 largest faces in the input image. Later versions of the face
detection model index the 100 largest faces in the input image.
If you're using version 4 or later of the face model, image orientation
information is not returned in the `OrientationCorrection` field.
To determine which version of the model you're using, call
`DescribeCollection` and supply the collection ID. You can also get the
model version from the value of `FaceModelVersion` in the response from
`IndexFaces`.
For more information, see Model Versioning in the Amazon Rekognition
Developer Guide.
If you provide the optional `ExternalImageId` for the input image, Amazon
Rekognition associates this ID with all faces that it
detects. When you call the `ListFaces` operation, the response returns the
external ID. You can use this external image ID to create a client-side
index to associate the faces with each image. You can then use the index to
find all faces in an image.
You can specify the maximum number of faces to index with the `MaxFaces`
input parameter. This is useful when you want to index the largest faces in
an image and don't want to index smaller faces, such as those belonging to
people standing in the background.
The `QualityFilter` input parameter allows you to filter out detected faces
that don’t meet the required quality bar chosen by Amazon Rekognition. The
quality bar is based on a variety of common use cases. By default,
`IndexFaces` filters detected faces. You can also explicitly filter
detected faces by specifying `AUTO` for the value of `QualityFilter`. If
you do not want to filter detected faces, specify `NONE`.
<note> To use quality filtering, you need a collection associated with
version 3 of the face model. To get the version of the face model
associated with a collection, call `DescribeCollection`.
</note> Information about faces detected in an image, but not indexed, is
returned in an array of `UnindexedFace` objects, `UnindexedFaces`. Faces
aren't indexed for reasons such as:
<ul> <li> The number of faces detected exceeds the value of the `MaxFaces`
request parameter.
</li> <li> The face is too small compared to the image dimensions.
</li> <li> The face is too blurry.
</li> <li> The image is too dark.
</li> <li> The face has an extreme pose.
</li> </ul> In response, the `IndexFaces` operation returns an array of
metadata for all detected faces, `FaceRecords`. This includes:
<ul> <li> The bounding box, `BoundingBox`, of the detected face.
</li> <li> A confidence value, `Confidence`, which indicates the confidence
that the bounding box contains a face.
</li> <li> A face ID, `FaceId`, assigned by the service for each face
that's detected and stored.
</li> <li> An image ID, `ImageId`, assigned by the service for the input
image.
</li> </ul> If you request all facial attributes (by using the
`detectionAttributes` parameter), Amazon Rekognition returns detailed
facial attributes, such as facial landmarks (for example, location of eye
and mouth) and other facial attributes like gender. If you provide the same
image, specify the same collection, and use the same external ID in the
`IndexFaces` operation, Amazon Rekognition doesn't save duplicate face
metadata.
The input image is passed either as base64-encoded image bytes, or as
a reference to an image in an Amazon S3 bucket. If you use the AWS CLI to
call Amazon Rekognition operations, passing image bytes isn't supported.
The image must be formatted as a PNG or JPEG file.
This operation requires permissions to perform the `rekognition:IndexFaces`
action.
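A sketch showing `MaxFaces`, `QualityFilter`, and `ExternalImageId`
(collection, bucket, and object names are placeholders):

    input = %{
      "CollectionId" => "my-collection",
      "Image" => %{"S3Object" => %{"Bucket" => "my-bucket", "Name" => "group.jpg"}},
      "ExternalImageId" => "group.jpg",
      "MaxFaces" => 5,
      "QualityFilter" => "AUTO"
    }
    {:ok, %{"FaceRecords" => records}, _response} = AWS.Rekognition.index_faces(client, input)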
"""
def index_faces(client, input, options \\ []) do
request(client, "IndexFaces", input, options)
end
@doc """
Returns list of collection IDs in your account. If the result is truncated,
the response also provides a `NextToken` that you can use in the subsequent
request to fetch the next set of collection IDs.
For an example, see Listing Collections in the Amazon Rekognition Developer
Guide.
This operation requires permissions to perform the
`rekognition:ListCollections` action.
"""
def list_collections(client, input, options \\ []) do
request(client, "ListCollections", input, options)
end
@doc """
Returns metadata for faces in the specified collection. This metadata
includes information such as the bounding box coordinates, the confidence
(that the bounding box contains a face), and face ID. For an example, see
Listing Faces in a Collection in the Amazon Rekognition Developer Guide.
This operation requires permissions to perform the `rekognition:ListFaces`
action.
"""
def list_faces(client, input, options \\ []) do
request(client, "ListFaces", input, options)
end
@doc """
Gets a list of stream processors that you have created with
`CreateStreamProcessor`.
"""
def list_stream_processors(client, input, options \\ []) do
request(client, "ListStreamProcessors", input, options)
end
@doc """
Returns an array of celebrities recognized in the input image. For more
information, see Recognizing Celebrities in the Amazon Rekognition
Developer Guide.
`RecognizeCelebrities` returns the 100 largest faces in the image. It lists
recognized celebrities in the `CelebrityFaces` array and unrecognized faces
in the `UnrecognizedFaces` array. `RecognizeCelebrities` doesn't return
celebrities whose faces aren't among the largest 100 faces in the image.
For each celebrity recognized, `RecognizeCelebrities` returns a `Celebrity`
object. The `Celebrity` object contains the celebrity name, ID, URL links
to additional information, match confidence, and a `ComparedFace` object
that you can use to locate the celebrity's face on the image.
Amazon Rekognition doesn't retain information about which images a
celebrity has been recognized in. Your application must store this
information and use the `Celebrity` ID property as a unique identifier for
the celebrity. If you don't store the celebrity name or additional
information URLs returned by `RecognizeCelebrities`, you will need the ID
to identify the celebrity in a call to the `GetCelebrityInfo` operation.
You pass the input image either as base64-encoded image bytes or as a
reference to an image in an Amazon S3 bucket. If you use the AWS CLI to
call Amazon Rekognition operations, passing image bytes is not supported.
The image must be either a PNG or JPEG formatted file.
For an example, see Recognizing Celebrities in an Image in the Amazon
Rekognition Developer Guide.
This operation requires permissions to perform the
`rekognition:RecognizeCelebrities` operation.
"""
def recognize_celebrities(client, input, options \\ []) do
request(client, "RecognizeCelebrities", input, options)
end
@doc """
For a given input face ID, searches for matching faces in the collection
the face belongs to. You get a face ID when you add a face to the
collection using the `IndexFaces` operation. The operation compares the
features of the input face with faces in the specified collection.
<note> You can also search faces without indexing faces by using the
`SearchFacesByImage` operation.
</note> The operation response returns an array of faces that match,
ordered by similarity score with the highest similarity first. More
specifically, it is an array of metadata for each face match that is found.
Along with the metadata, the response also includes a `confidence` value
for each face match, indicating the confidence that the specific face
matches the input face.
For an example, see Searching for a Face Using Its Face ID in the Amazon
Rekognition Developer Guide.
This operation requires permissions to perform the
`rekognition:SearchFaces` action.
"""
def search_faces(client, input, options \\ []) do
request(client, "SearchFaces", input, options)
end
@doc """
For a given input image, first detects the largest face in the image, and
then searches the specified collection for matching faces. The operation
compares the features of the input face with faces in the specified
collection.
<note> To search for all faces in an input image, you might first call the
`IndexFaces` operation, and then use the face IDs returned in subsequent
calls to the `SearchFaces` operation.
You can also call the `DetectFaces` operation and use the bounding boxes in
the response to make face crops, which you can then pass in to the
`SearchFacesByImage` operation.
</note> You pass the input image either as base64-encoded image bytes or as
a reference to an image in an Amazon S3 bucket. If you use the AWS CLI to
call Amazon Rekognition operations, passing image bytes is not supported.
The image must be either a PNG or JPEG formatted file.
The response returns an array of faces that match, ordered by similarity
score with the highest similarity first. More specifically, it is an array
of metadata for each face match found. Along with the metadata, the
response also includes a `similarity` indicating how similar the face is to
the input face. In the response, the operation also returns the bounding
box (and a confidence level that the bounding box contains a face) of the
face that Amazon Rekognition used for the input image.
For an example, see Searching for a Face Using an Image in the Amazon
Rekognition Developer Guide.
This operation requires permissions to perform the
`rekognition:SearchFacesByImage` action.
"""
def search_faces_by_image(client, input, options \\ []) do
request(client, "SearchFacesByImage", input, options)
end
@doc """
Starts asynchronous recognition of celebrities in a stored video.
Amazon Rekognition Video can detect celebrities in a video. The video must be
stored in an Amazon S3 bucket. Use `Video` to specify the bucket name and the
filename of the video. `StartCelebrityRecognition` returns a job identifier
(`JobId`) which you use to get the results of the analysis. When celebrity
recognition analysis is finished, Amazon Rekognition Video publishes a
completion status to the Amazon Simple Notification Service topic that you
specify in `NotificationChannel`. To get the results of the celebrity
recognition analysis, first check that the status value published to the
Amazon SNS topic is `SUCCEEDED`. If so, call `GetCelebrityRecognition` and
pass the job identifier (`JobId`) from the initial call to
`StartCelebrityRecognition`.
For more information, see Recognizing Celebrities in the Amazon Rekognition
Developer Guide.
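A sketch of starting the asynchronous job (the bucket, object, topic ARN, and
role ARN are placeholders):

    input = %{
      "Video" => %{"S3Object" => %{"Bucket" => "my-bucket", "Name" => "clip.mp4"}},
      "NotificationChannel" => %{
        "SNSTopicArn" => "arn:aws:sns:us-east-1:123456789012:rekognition-status",
        "RoleArn" => "arn:aws:iam::123456789012:role/rekognition-sns"
      }
    }
    {:ok, %{"JobId" => job_id}, _response} = AWS.Rekognition.start_celebrity_recognition(client, input)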
"""
def start_celebrity_recognition(client, input, options \\ []) do
request(client, "StartCelebrityRecognition", input, options)
end
@doc """
Starts asynchronous detection of explicit or suggestive adult content in a
stored video.
Amazon Rekognition Video can moderate content in a video stored in an
Amazon S3 bucket. Use `Video` to specify the bucket name and the filename
of the video. `StartContentModeration` returns a job identifier (`JobId`)
which you use to get the results of the analysis. When content moderation
analysis is finished, Amazon Rekognition Video publishes a completion
status to the Amazon Simple Notification Service topic that you specify in
`NotificationChannel`.
To get the results of the content moderation analysis, first check that the
status value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
`GetContentModeration` and pass the job identifier (`JobId`) from the
initial call to `StartContentModeration`.
For more information, see Detecting Unsafe Content in the Amazon
Rekognition Developer Guide.
"""
def start_content_moderation(client, input, options \\ []) do
request(client, "StartContentModeration", input, options)
end
@doc """
Starts asynchronous detection of faces in a stored video.
Amazon Rekognition Video can detect faces in a video stored in an Amazon S3
bucket. Use `Video` to specify the bucket name and the filename of the
video. `StartFaceDetection` returns a job identifier (`JobId`) that you use
to get the results of the operation. When face detection is finished,
Amazon Rekognition Video publishes a completion status to the Amazon Simple
Notification Service topic that you specify in `NotificationChannel`. To
get the results of the face detection operation, first check that the
status value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
`GetFaceDetection` and pass the job identifier (`JobId`) from the initial
call to `StartFaceDetection`.
For more information, see Detecting Faces in a Stored Video in the Amazon
Rekognition Developer Guide.
"""
def start_face_detection(client, input, options \\ []) do
request(client, "StartFaceDetection", input, options)
end
@doc """
Starts the asynchronous search for faces in a collection that match the
faces of persons detected in a stored video.
The video must be stored in an Amazon S3 bucket. Use `Video` to specify the
bucket name and the filename of the video. `StartFaceSearch` returns a job
identifier (`JobId`) which you use to get the search results once the
search has completed. When searching is finished, Amazon Rekognition Video
publishes a completion status to the Amazon Simple Notification Service
topic that you specify in `NotificationChannel`. To get the search results,
first check that the status value published to the Amazon SNS topic is
`SUCCEEDED`. If so, call `GetFaceSearch` and pass the job identifier
(`JobId`) from the initial call to `StartFaceSearch`. For more information,
see `procedure-person-search-videos`.
"""
def start_face_search(client, input, options \\ []) do
request(client, "StartFaceSearch", input, options)
end
@doc """
Starts asynchronous detection of labels in a stored video.
Amazon Rekognition Video can detect labels in a video. Labels are instances
of real-world entities. This includes objects like flower, tree, and table;
events like wedding, graduation, and birthday party; concepts like
landscape, evening, and nature; and activities like a person getting out of
a car or a person skiing.
The video must be stored in an Amazon S3 bucket. Use `Video` to specify the
bucket name and the filename of the video. `StartLabelDetection` returns a
job identifier (`JobId`) which you use to get the results of the operation.
When label detection is finished, Amazon Rekognition Video publishes a
completion status to the Amazon Simple Notification Service topic that you
specify in `NotificationChannel`.
To get the results of the label detection operation, first check that the
status value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
`GetLabelDetection` and pass the job identifier (`JobId`) from the initial
call to `StartLabelDetection`.
"""
def start_label_detection(client, input, options \\ []) do
request(client, "StartLabelDetection", input, options)
end
@doc """
Starts the asynchronous tracking of a person's path in a stored video.
Amazon Rekognition Video can track the path of people in a video stored in
an Amazon S3 bucket. Use `Video` to specify the bucket name and the
filename of the video. `StartPersonTracking` returns a job identifier
(`JobId`) which you use to get the results of the operation. When label
detection is finished, Amazon Rekognition publishes a completion status to
the Amazon Simple Notification Service topic that you specify in
`NotificationChannel`.
To get the results of the person detection operation, first check that the
status value published to the Amazon SNS topic is `SUCCEEDED`. If so, call
`GetPersonTracking` and pass the job identifier (`JobId`) from the initial
call to `StartPersonTracking`.
"""
def start_person_tracking(client, input, options \\ []) do
request(client, "StartPersonTracking", input, options)
end
@doc """
Starts processing a stream processor. You create a stream processor by
calling `CreateStreamProcessor`. To tell `StartStreamProcessor` which
stream processor to start, use the value of the `Name` field specified in
the call to `CreateStreamProcessor`.
"""
def start_stream_processor(client, input, options \\ []) do
request(client, "StartStreamProcessor", input, options)
end
@doc """
Stops a running stream processor that was created by
`CreateStreamProcessor`.
"""
def stop_stream_processor(client, input, options \\ []) do
request(client, "StopStreamProcessor", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "rekognition"}
host = get_host("rekognition", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "RekognitionService.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end | lib/aws/rekognition.ex | 0.959564 | 0.930521 | rekognition.ex | starcoder |
defmodule Pandadoc do
@moduledoc """
This library provides an Elixir API for accessing the [Pandadoc Developer APIs](https://developers.pandadoc.com/reference/about).
The API access uses the [Tesla](https://github.com/teamon/tesla) library and
relies on the caller passing in an an API Key to create a
client. The client is then passed into all API calls.
The API returns a 3 element tuple. If the API HTTP status code is less
the 300 (ie. suceeded) it returns `:ok`, the HTTP body as a map and the full
Tesla Env if you need to access more data about thre return. if the API HTTP
status code is greater than 300. it returns `:error`, the HTTP body and the
Telsa Env. If the API doesn't return at all it should return `:error`, a blank
map and the error from Tesla.
## Installation
If [available in Hex](https://hex.pm/docs/publish), the package can be
installed by adding `pandadoc_api` to your list of dependencies in `mix.exs`:
def deps do
[
{:pandadoc_api, "~> 0.0.1"},
]
end
Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at [https://hexdocs.pm/pandadoc_api](https://hexdocs.pm/pandadoc_api).
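## Usage

A minimal sketch — build a client with your API key and match on the
three-element result tuple (`list_documents/1` below is a stand-in for
whatever API module function you call; the key is a placeholder):

    client = Pandadoc.client("your-api-key")

    case list_documents(client) do
      {:ok, body, _env} -> body
      {:error, body, _env_or_reason} -> {:failed, body}
    end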
"""
@type client() :: Tesla.Client.t()
@type result() :: {:ok, map() | String.t(), Tesla.Env.t()} | {:error, map(), any}
@spec client(String.t()) :: client()
def client(api_key) do
middleware = [
{Tesla.Middleware.BaseUrl, "https://api.pandadoc.com/public/v1"},
Tesla.Middleware.JSON,
{Tesla.Middleware.Headers, [{"authorization", "API-Key " <> api_key}]}
]
Tesla.client(middleware, adapter())
end
@spec result({:ok, Tesla.Env.t()}) :: result()
def result({:ok, %{status: status, body: body} = env}) when status < 300 do
{:ok, body, env}
end
@spec result({:ok, Tesla.Env.t()}) :: result()
def result({:ok, %{status: status, body: body} = env}) when status >= 300 do
{:error, body, env}
end
@spec result({:error, any}) :: result()
def result({:error, any}), do: {:error, %{}, any}
@doc false
def adapter do
case Application.get_env(:pandadoc_api, :tesla) do
nil -> {Tesla.Adapter.Hackney, [recv_timeout: 30_000]}
tesla -> tesla[:adapter]
end
end
end | lib/pandadoc.ex | 0.851011 | 0.76388 | pandadoc.ex | starcoder |
defmodule Trans do
@moduledoc """
Manage translations embedded into structs.
Although it can be used with any struct **`Trans` shines when paired with an `Ecto.Schema`**. It
allows you to keep the translations into a field of the schema and avoids requiring extra tables
for translation storage and complex _joins_ when retrieving translations from the database.
`Trans` is split into two main components:
* `Trans.Translator` - provides easy access to struct translations.
* `Trans.QueryBuilder` - provides helpers for querying translations using `Ecto.Query`
(requires `Ecto.SQL`).
When used, `Trans` accepts the following options:
* `:translates` (required) - list of the fields that will be translated.
* `:container` (optional) - name of the field that contains the embedded translations.
Defaults to `:translations`.
## Structured translations
Structured translations are the preferred and recommended way of using `Trans`. To use structured
translations **you must define the translations as embedded schemas**:
defmodule MyApp.Article do
use Ecto.Schema
use Trans, translates: [:title, :body]
schema "articles" do
field :title, :string
field :body, :string
embeds_one :translations, Translations, on_replace: :update, primary_key: false do
embeds_one :es, MyApp.Article.Translation
embeds_one :fr, MyApp.Article.Translation
end
end
end
defmodule MyApp.Article.Translation do
use Ecto.Schema
@primary_key false
embedded_schema do
field :title, :string
field :body, :string
end
end
Although they require more code than free-form translations, **structured
some nice benefits** that make them the preferred way of using `Trans`:
* High flexibility when making validations and transformations using the embedded schema's own
changeset.
* Easy to integrate with HTML forms leveraging the capabilities of `inputs_for`
* Easy navigability using the dot notation.
## Free-form translations
Free-form translations were the main way of using `Trans` until the 2.3.0 version. They are still
supported for compatibility with older versions but not recommended for new projects.
To use free-form translations you must define the translations as a map:
defmodule MyApp.Article do
use Ecto.Schema
use Trans, translates: [:title, :body]
schema "articles" do
field :title, :string
field :body, :string
field :translations, :map
end
end
Although they require less code, **free-form translations provide much less guarantees**:
* There is no way to tell what content, and in which form, will be stored in the translations field.
* Hard to integrate with HTML forms since the Phoenix helpers are not available.
* Difficult navigation requiring the bracket notation from the `Access` protocol.
## The translation container
As we have seen in the previous examples, `Trans` automatically stores and looks for translations
in a field called `:translations`. This is known as the **translations container.**
In certain cases you may want to use a different field for storing the translations, this can
be specified when using `Trans` in your module.
# Use the field `:locales` as translation container instead of the default `:translations`
use Trans, translates: [...], container: :locales
## Reflection
Any module that uses `Trans` will have an autogenerated `__trans__` function that can be used for
runtime introspection of the translation metadata.
* `__trans__(:fields)` - Returns the list of translatable fields.
* `__trans__(:container)` - Returns the name of the translation container.
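For example, given the `MyApp.Article` schema defined above:

    MyApp.Article.__trans__(:fields)    #=> [:title, :body]
    MyApp.Article.__trans__(:container) #=> :translations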
"""
@typedoc """
A translatable struct that uses `Trans`
"""
@type translatable() :: struct()
@typedoc """
A locale that may be a string or an atom
"""
@type locale() :: String.t() | atom()
defmacro __using__(opts) do
quote do
Module.put_attribute(__MODULE__, :trans_fields, unquote(translatable_fields(opts)))
Module.put_attribute(__MODULE__, :trans_container, unquote(translation_container(opts)))
@after_compile {Trans, :__validate_translatable_fields__}
@after_compile {Trans, :__validate_translation_container__}
@spec __trans__(:fields) :: list(atom)
def __trans__(:fields), do: @trans_fields
@spec __trans__(:container) :: atom
def __trans__(:container), do: @trans_container
end
end
@doc """
Checks whether the given field is translatable or not.
Returns true if the given field is translatable. Raises if the given module or struct does not use
`Trans`.
## Examples
Assuming the Article schema defined in [Structured translations](#module-structured-translations).
If we want to know whether a certain field is translatable or not we can use
this function as follows (we can also pass a struct instead of the module
name itself):
iex> Trans.translatable?(Article, :title)
true
May be also used with translatable structs:
iex> article = %Article{}
iex> Trans.translatable?(article, :not_existing)
false
Raises if the given module or struct does not use `Trans`:
iex> Trans.translatable?(Date, :day)
** (RuntimeError) Elixir.Date must use `Trans` in order to be translated
"""
def translatable?(module_or_translatable, field)
@spec translatable?(module | translatable(), String.t() | atom) :: boolean
def translatable?(%{__struct__: module}, field), do: translatable?(module, field)
def translatable?(module, field) when is_atom(module) and is_binary(field) do
translatable?(module, String.to_atom(field))
end
def translatable?(module, field) when is_atom(module) and is_atom(field) do
if Keyword.has_key?(module.__info__(:functions), :__trans__) do
Enum.member?(module.__trans__(:fields), field)
else
raise "#{module} must use `Trans` in order to be translated"
end
end
@doc false
def __validate_translatable_fields__(%{module: module}, _bytecode) do
struct_fields =
module.__struct__()
|> Map.keys()
|> MapSet.new()
translatable_fields =
:fields
|> module.__trans__
|> MapSet.new()
invalid_fields = MapSet.difference(translatable_fields, struct_fields)
case MapSet.size(invalid_fields) do
0 ->
nil
1 ->
raise ArgumentError,
message:
"#{module} declares '#{MapSet.to_list(invalid_fields)}' as translatable but it is not defined in the module's struct"
_ ->
raise ArgumentError,
message:
"#{module} declares '#{MapSet.to_list(invalid_fields)}' as translatable but it they not defined in the module's struct"
end
end
@doc false
def __validate_translation_container__(%{module: module}, _bytecode) do
container = module.__trans__(:container)
unless Enum.member?(Map.keys(module.__struct__()), container) do
raise ArgumentError,
message:
"The field #{container} used as the translation container is not defined in #{module} struct"
end
end
defp translatable_fields(opts) do
case Keyword.fetch(opts, :translates) do
{:ok, fields} when is_list(fields) ->
fields
_ ->
raise ArgumentError,
message:
"Trans requires a 'translates' option that contains the list of translatable fields names"
end
end
defp translation_container(opts) do
case Keyword.fetch(opts, :container) do
:error -> :translations
{:ok, container} -> container
end
end
end | lib/trans.ex | 0.93749 | 0.715264 | trans.ex | starcoder |
defmodule Herd.Pool do
@moduledoc """
Builds a connection pool manager for a given herd. The manager has a number of overridable
functions, including:
* spec_for_node/1 - infers a child spec for a given node in the cluster
* nodename/1 - generates an atom for a given node
* poolname/1 - generates a {:via, Registry, {MyRegistry, name}} tuple for a given node
* worker_config/1 - generates the worker config only for a given node. Note that pool config
is inferred from `Application.get_env(app, __MODULE__)` by default which might be sufficient
for configuring poolboy.
Use with:
```
defmodule MyHerdPool do
use Herd.Pool, otp_app: :myapp, herd: :myherd
end
```
This will spawn a pool for each node in the cluster and register them against a registry named
`MyHerdPool.Registry`. It also is responsible for handling node ups/downs from the clusters
sporadic health check.
"""
defmacro __using__(opts) do
app = Keyword.get(opts, :otp_app)
quote do
use DynamicSupervisor
import Herd.Pool
@otp unquote(app)
@default_pool_config [
strategy: :lifo,
size: 10,
max_overflow: 10
]
@registry Module.concat(__MODULE__, Registry)
def start_link(options) do
DynamicSupervisor.start_link(__MODULE__, options, name: __MODULE__)
end
def init(_options) do
DynamicSupervisor.init(strategy: :one_for_one)
end
def spec_for_node(node) do
pool = pool_conf()
|> Keyword.put(:name, poolname(node))
|> pool_config()
name = nodename(node)
%{id: name, start: {:poolboy, :start_link, [pool, worker_config(node)]}}
end
def poolname(node), do: {:via, Registry, {@registry, nodename(node)}}
def nodename({host, port}), do: :"#{host}_#{port}"
def worker_config(node), do: nodename(node)
def pool_config(config), do: config
def start_node({_host, _port} = node) do
spec = spec_for_node(node)
DynamicSupervisor.start_child(__MODULE__, spec)
end
def terminate_node(node), do: terminate_node(__MODULE__, @registry, nodename(node))
def initialize(servers), do: handle_diff(servers, [])
def handle_diff(adds, removes), do: handle_diff(__MODULE__, adds, removes)
defp config(), do: Application.get_env(@otp, __MODULE__, [])
defp pool_conf(), do: @default_pool_config |> Keyword.merge(config())
defoverridable [spec_for_node: 1, nodename: 1, poolname: 1, worker_config: 1, pool_config: 1]
end
end
def terminate_node(pool, registry, node) do
registry
|> Registry.lookup(node)
|> case do
[{pid, _}] -> DynamicSupervisor.terminate_child(pool, pid)
_ -> :ok
end
end
def handle_diff(pool, adds, removes) do
for add <- adds, do: pool.start_node(add)
for remove <- removes, do: pool.terminate_node(remove)
end
end | lib/herd/pool.ex | 0.820433 | 0.770853 | pool.ex | starcoder |
defmodule Segment.Http.Stub do
@moduledoc """
The `Segment.Http.Stub` is used to replace the Tesla adapter with something that logs and returns success. It is used if `send_to_http` has been set to false
"""
require Logger
def call(env, _opts) do
Logger.debug("[Segment] HTTP API called with #{inspect(env)}")
{:ok, %{env | status: 200, body: ""}}
end
end
defmodule Segment.Http do
@moduledoc """
`Segment.Http` is the underlying implementation for making calls to the Segment HTTP API.
The `send/2` and `batch/4` methods can be used for sending events or batches of events to the API. The sending can be configured with
```elixir
config :segment,
send_to_http: true
retry_attempts: 3,
retry_expiry: 10_000,
retry_start: 100
```
* `config :segment, :retry_attempts` The number of times to retry sending against the segment API. Default value is 3
* `config :segment, :retry_expiry` The maximum time (in ms) spent retrying. Default value is 10000 (10 seconds)
* `config :segment, :retry_start` The time (in ms) to start the first retry. Default value is 100
* `config :segment, :send_to_http` If set to `false`, the library will override the Tesla Adapter implementation to only log segment calls to `debug` but not make any actual API calls. This can be useful if you want to switch off Segment for test or dev. Default value is true
The retry uses a linear back-off strategy when retring the Segment API.
Additionally a different Tesla Adapter can be used if you want to use something other than Hackney.
* `config :segment, :tesla, :adapter` This config option allows for overriding the HTTP Adapter for Tesla (which the library defaults to Hackney).This can be useful if you prefer something else, or want to mock the adapter for testing.
"""
@type client :: Tesla.Client.t()
@type adapter :: Tesla.Client.adapter()
require Logger
use Retry
@segment_api_url Application.get_env(:segment, :api_url, "https://api.segment.io/v1/")
@doc """
Create a Tesla client with the Segment Source Write API Key
"""
@spec client(String.t()) :: client()
def client(api_key) do
adapter =
case Segment.Config.send_to_http() do
true ->
Application.get_env(:segment, :tesla)[:adapter] ||
{Tesla.Adapter.Hackney, [recv_timeout: 30_000]}
false ->
{Segment.Http.Stub, []}
end
client(api_key, adapter)
end
@doc """
Create a Tesla client with the Segment Source Write API Key and the given Tesla adapter
"""
@spec client(String.t(), adapter()) :: client()
def client(api_key, adapter) do
middleware = [
{Tesla.Middleware.BaseUrl, @segment_api_url},
Tesla.Middleware.JSON,
{Tesla.Middleware.BasicAuth, %{username: api_key, password: ""}}
]
Tesla.client(middleware, adapter)
end
@doc """
Send a list of Segment events as a batch
"""
@spec send(client(), list(Segment.segment_event())) :: :ok | :error
def send(client, events) when is_list(events), do: batch(client, events)
@doc """
Send a list of Segment events as a batch
"""
@spec send(client(), Segment.segment_event()) :: :ok | :error
def send(client, event) do
case make_request(client, event.type, prepare_events(event), Segment.Config.retry_attempts()) do
{:ok, %{status: status}} when status == 200 ->
:ok
{:ok, %{status: status}} when status == 400 ->
Logger.error("[Segment] Call Failed. JSON too large or invalid")
:error
{:error, err} ->
Logger.error(
"[Segment] Call Failed after #{Segment.Config.retry_attempts()} retries. #{inspect(err)}"
)
:error
err ->
Logger.error("[Segment] Call Failed #{inspect(err)}")
:error
end
end
@doc """
Send a list of Segment events as a batch.
The `batch` function takes optional arguments for context and integrations which can
be applied to the entire batch of events. See [Segment's docs](https://segment.com/docs/sources/server/http/#batch)
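A hedged sketch (the write key, events list, and context map are illustrative):

    client = Segment.Http.client("my-write-key")
    :ok = Segment.Http.batch(client, events, %{"ip" => "127.0.0.1"}, %{"All" => true})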
"""
@spec batch(client(), list(Segment.segment_event()), map() | nil, map() | nil) :: :ok | :error
def batch(client, events, context \\ nil, integrations \\ nil) do
data =
%{batch: prepare_events(events)}
|> add_if(:context, context)
|> add_if(:integrations, integrations)
case make_request(client, "batch", data, Segment.Config.retry_attempts()) do
{:ok, %{status: status}} when status == 200 ->
:ok
{:ok, %{status: status}} when status == 400 ->
Logger.error(
"[Segment] Batch call of #{length(events)} events failed. JSON too large or invalid"
)
:error
{:error, err} ->
Logger.error(
"[Segment] Batch call of #{length(events)} events failed after #{
Segment.Config.retry_attempts()
} retries. #{inspect(err)}"
)
:error
err ->
Logger.error("[Segment] Batch callof #{length(events)} events failed #{inspect(err)}")
:error
end
end
defp make_request(client, url, data, retries) when retries > 0 do
retry with:
linear_backoff(Segment.Config.retry_start(), 2)
|> cap(Segment.Config.retry_expiry())
|> Stream.take(retries) do
Tesla.post(client, url, data)
after
result -> result
else
error -> error
end
end
defp make_request(client, url, data, _retries) do
Tesla.post(client, url, data)
end
defp prepare_events(items) when is_list(items), do: Enum.map(items, &prepare_events/1)
defp prepare_events(item) do
Map.from_struct(item)
|> prep_context()
|> add_sent_at()
|> drop_nils()
end
defp drop_nils(map) do
map
|> Enum.filter(fn
{_, %{} = item} when map_size(item) == 0 -> false
{_, nil} -> false
{_, _} -> true
end)
|> Enum.into(%{})
end
defp prep_context(%{context: nil} = map),
do: %{map | context: map_content(Segment.Analytics.Context.new())}
defp prep_context(%{context: context} = map), do: %{map | context: map_content(context)}
defp prep_context(map),
do: Map.put_new(map, :context, map_content(Segment.Analytics.Context.new()))
defp map_content(%Segment.Analytics.Context{} = context), do: Map.from_struct(context)
defp map_content(context) when is_map(context), do: context
defp add_sent_at(%{sentAt: nil} = map), do: Map.put(map, :sentAt, DateTime.utc_now())
defp add_sent_at(map), do: Map.put_new(map, :sentAt, DateTime.utc_now())
defp add_if(map, _key, nil), do: map
defp add_if(map, key, value), do: Map.put_new(map, key, value)
end
defmodule Supermemo do
@default_ef 2.5
@min_ef 1.3
@first_interval 1
@second_interval 6
@iteration_reset_boundary 0.4
@repeat_boundary 4.0 / 5.0
@doc """
Given a value between 0.0 and 1.0, returns an initial `%Supermemo.Rep{}`.
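## Example

The `due` field depends on the current time, so only the stable fields are shown:

    iex> rep = Supermemo.rep(0.9)
    iex> {rep.interval, rep.iteration, rep.repeat}
    {1, 1, false}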
"""
def rep(score) do
%Supermemo.Rep{
due: first_due_date(),
repeat: repeat?(score),
e_factor: adjust_efactor_or_min(@default_ef, score),
interval: @first_interval,
iteration: 1
}
end
@doc """
Given a score between 0.0 and 1.0, and a `%Supermemo.Rep{}` struct, returns
a new struct with updated `due` date, `interval`, `iteration` and `e_factor`.
"""
def rep(score, %Supermemo.Rep{
e_factor: ef,
interval: interval,
iteration: iteration}) do
new_interval = set_interval(score, iteration, interval, ef)
new_ef = adjust_efactor_or_min(ef, score)
_rep(score, new_ef, new_interval, iteration)
end
defp _rep(score, ef, interval, iteration) do
%Supermemo.Rep{
due: due_date(interval),
repeat: repeat?(score),
e_factor: ef,
interval: interval,
iteration: find_iteration(score, iteration) + 1
}
end
def due_date(interval) do
DateTime.utc_now()
|> DateTime.add(days_to_seconds(interval))
end
def set_interval(score, iteration, interval, ef) do
case find_iteration(score, iteration) do
0 -> @first_interval
1 -> @second_interval
_ -> adjust_interval(interval, ef)
end
end
def adjust_interval(interval, ef) do
round(interval * ef)
end
def find_iteration(score, iteration) do
cond do
score < @iteration_reset_boundary -> 0
true -> iteration
end
end
def first_due_date do
DateTime.utc_now()
|> DateTime.add(days_to_seconds(@first_interval))
end
def repeat?(score) do
cond do
score < @repeat_boundary -> true
true -> false
end
end
def adjust_efactor_or_min(ef, score) do
adjusted = adjust_efactor(ef, score)
cond do
adjusted < @min_ef -> @min_ef
true -> adjusted
end
end
def adjust_efactor(ef, score) do
(score * 5)
|> adjust_efactor_formula(ef)
end
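# Implements the SM-2 e-factor update:
# EF' = EF + (0.1 - (5 - q) * (0.08 + (5 - q) * 0.02))
# where q is the response quality rescaled from 0.0..1.0 to the 0..5 range.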
defp adjust_efactor_formula(q, ef) do
ef + (0.1 - (5.0 - q) * (0.08 + (5.0 - q) * 0.02))
end
defp days_to_seconds(days) do
days * 60 * 60 * 24
end
end
defmodule Phoenix.PubSub.PG2 do
use Supervisor
@moduledoc """
Phoenix PubSub adapter based on [PG2](http://erlang.org/doc/man/pg2.html).
To use it as your PubSub adapter, simply add it to your Endpoint's config:
config :my_app, MyApp.Endpoint,
pubsub: [name: MyApp.PubSub,
adapter: Phoenix.PubSub.PG2]
To use this adapter outside of Phoenix, you must start an instance of
this module as part of your supervision tree:
children = [
{Phoenix.PubSub.PG2, name: MyApp.PubSub},
...
]
## Options
* `:name` - The required registered name and optional node name for pubsub
processes, for example: `MyApp.PubSub`, `{MyApp.PubSub, :node@host}`.
When only a server name is provided, the node name defaults to `node()`.
* `:pool_size` - Both the size of the local pubsub pool and subscriber
shard size. Defaults to the number of schedulers (cores). A single pool is
often enough for most use-cases, but for high subscriber counts on single
topics or greater than 1M clients, a pool size equal to the number of
schedulers (cores) is a well rounded size.
"""
def child_spec(opts) when is_list(opts) do
_name = name!(opts)
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :supervisor
}
end
def start_link(opts) when is_list(opts) do
start_link(name!(opts), opts)
end
def start_link(name, opts) do
supervisor_name = Module.concat(name, Supervisor)
Supervisor.start_link(__MODULE__, [name, opts], name: supervisor_name)
end
defp name!(opts) do
case Keyword.fetch(opts, :name) do
{:ok, name} ->
name
:error ->
raise ArgumentError, """
a registered name is required for PubSub supervisors,
for example as a child spec:
children = [
{Phoenix.PubSub.PG2, name: MyApp.PubSub},
...
]
or starting directly:
Phoenix.PubSub.PG2.start_link(name: MyApp.PubSub)
got: #{inspect(opts)}
"""
end
end
@doc false
def init([server, opts]) do
scheduler_count = :erlang.system_info(:schedulers)
pool_size = Keyword.get(opts, :pool_size, scheduler_count)
node_name = opts[:node_name]
dispatch_rules = [
{:broadcast, Phoenix.PubSub.PG2Server, [opts[:fastlane], server, pool_size]},
{:direct_broadcast, Phoenix.PubSub.PG2Server, [opts[:fastlane], server, pool_size]},
{:node_name, __MODULE__, [node_name]}
]
children = [
supervisor(Phoenix.PubSub.LocalSupervisor, [server, pool_size, dispatch_rules]),
worker(Phoenix.PubSub.PG2Server, [server, pool_size])
]
supervise(children, strategy: :rest_for_one)
end
@doc false
def node_name(nil), do: node()
def node_name(configured_name), do: configured_name
end
defmodule Cloudevents.Format.V_1_0.Event do
@desc "Cloudevents format v1.0."
@moduledoc @desc
use TypedStruct
alias Cloudevents.Format.ParseError
@typedoc @desc
typedstruct do
field(:specversion, String.t(), default: "1.0")
field(:type, String.t(), enforce: true)
field(:source, String.t(), enforce: true)
field(:id, String.t(), enforce: true)
field(:subject, String.t())
field(:time, String.t())
field(:datacontenttype, String.t())
field(:dataschema, String.t())
field(:data, any)
field(:extensions, %{optional(String.t()) => any})
end
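@doc """
Parses a map (typically decoded from JSON) into an event struct, returning
`{:ok, %__MODULE__{}}` on success or `{:error, %ParseError{}}` when required
context attributes are missing or invalid.

A minimal sketch (the attribute values are illustrative):

    {:ok, event} =
      Cloudevents.Format.V_1_0.Event.from_map(%{
        "specversion" => "1.0",
        "type" => "com.example.object.created",
        "source" => "/example",
        "id" => "1234"
      })

    event.type
    #=> "com.example.object.created"
"""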
def from_map(map) when is_map(map) do
# Cloudevents carry the actual payload in the "data" field. The other fields are
# called "context attributes" (abbreviated `ctx_attrs` here). Extensions are all
# context attributes that are not well-known, i.e., defined in the spec. They are
# context attributes as well but also called "extension attributes".
{event_data, ctx_attrs} = Map.pop(map, "data")
{_, extension_attrs} =
Map.split(ctx_attrs, [
"specversion",
"type",
"source",
"id",
"subject",
"time",
"datacontenttype",
"dataschema",
"data"
])
with :ok <- parse_specversion(ctx_attrs),
{:ok, type} <- parse_type(ctx_attrs),
{:ok, source} <- parse_source(ctx_attrs),
{:ok, id} <- parse_id(ctx_attrs),
{:ok, subject} <- parse_subject(ctx_attrs),
{:ok, time} <- parse_time(ctx_attrs),
{:ok, datacontenttype} <- parse_datacontenttype(ctx_attrs),
{:ok, dataschema} <- parse_dataschema(ctx_attrs),
{:ok, data} <- parse_data(event_data),
{:ok, extensions} <- validated_extensions_attributes(extension_attrs) do
datacontenttype =
if is_nil(datacontenttype) and not is_nil(data),
do: "application/json",
else: datacontenttype
event = %__MODULE__{
type: type,
source: source,
id: id,
subject: subject,
time: time,
datacontenttype: datacontenttype,
dataschema: dataschema,
data: data,
extensions: extensions
}
{:ok, event}
else
{:error, parse_error} ->
{:error, %ParseError{message: parse_error}}
end
end
# ---
defp parse_specversion(%{"specversion" => "1.0"}), do: :ok
defp parse_specversion(%{"specversion" => x}), do: {:error, "unexpected specversion #{x}"}
defp parse_specversion(_), do: {:error, "missing specversion"}
defp parse_type(%{"type" => type}) when byte_size(type) > 0, do: {:ok, type}
defp parse_type(_), do: {:error, "missing type"}
defp parse_source(%{"source" => source}) when byte_size(source) > 0, do: {:ok, source}
defp parse_source(_), do: {:error, "missing source"}
defp parse_id(%{"id" => id}) when byte_size(id) > 0, do: {:ok, id}
defp parse_id(_), do: {:error, "missing id"}
defp parse_subject(%{"subject" => sub}) when byte_size(sub) > 0, do: {:ok, sub}
defp parse_subject(%{"subject" => ""}), do: {:error, "subject given but empty"}
defp parse_subject(_), do: {:ok, nil}
defp parse_time(%{"time" => time}) when byte_size(time) > 0, do: {:ok, time}
defp parse_time(%{"time" => ""}), do: {:error, "time given but empty"}
defp parse_time(_), do: {:ok, nil}
defp parse_datacontenttype(%{"datacontenttype" => ct}) when byte_size(ct) > 0, do: {:ok, ct}
defp parse_datacontenttype(%{"datacontenttype" => ""}),
do: {:error, "datacontenttype given but empty"}
defp parse_datacontenttype(_), do: {:ok, nil}
defp parse_dataschema(%{"dataschema" => schema}) when byte_size(schema) > 0, do: {:ok, schema}
defp parse_dataschema(%{"dataschema" => ""}),
do: {:error, "dataschema given but empty"}
defp parse_dataschema(_), do: {:ok, nil}
defp parse_data(""), do: {:error, "data field given but empty"}
defp parse_data(data), do: {:ok, data}
# ---
defp try_decode(key, val) when is_binary(val) do
case Jason.decode(val) do
{:ok, val_map} ->
{key, val_map}
_ ->
{key, val}
end
end
defp try_decode(key, val), do: {key, val}
# ---
defp validated_extensions_attributes(extension_attrs) do
invalid =
extension_attrs
|> Map.keys()
|> Enum.map(fn key -> {key, valid_extension_attribute_name(key)} end)
|> Enum.filter(fn {_, valid?} -> not valid? end)
case invalid do
[] ->
extensions = Map.new(extension_attrs, fn {key, val} -> try_decode(key, val) end)
{:ok, extensions}
_ ->
{:error,
"invalid extension attributes: #{Enum.map(invalid, fn {key, _} -> inspect(key) end)}"}
end
end
# ---
defp valid_extension_attribute_name(name) do
# Cloudevents attribute names MUST consist of lower-case letters ('a' to 'z') or
# digits ('0' to '9') from the ASCII character set. Attribute names SHOULD be
# descriptive and terse and SHOULD NOT exceed 20 characters in length.
# https://github.com/cloudevents/spec/blob/v1.0/spec.md#attribute-naming-convention
name =~ ~r/^[a-z0-9]+$/
end
end
defmodule Dogmatix do
@moduledoc """
This module provides the main API to interface with a StasD/DogStatsD agent.
## Getting started
A new instance of Dogmatix can be started via the `start_link/2` function:
{:ok, pid} = Dogmatix.start_link("my_dogmatix", "localhost", 8125)
This will create a new instance named "my_dogmatix", communicating via UDP with an agent located at `localhost:8125`.
This instance will use all the default options.
### Instantiation via a custom module
defmodule MyApp.Dogmatix do
use Dogmatix
end
MyApp.Dogmatix.start_link("localhost", 8125)
Your custom module can then be used without having to provide the name of your Dogmatix instance:
MyApp.Dogmatix.increment("my_counter")
### Configuration
Various options can be provided to `start_link` to configure the instance:
* `:worker_count` - (positive integer) - number of UDP sockets and workers used to distribute the metrics. Defaults to `4`.
* `:prefix` - (binary) - all metrics sent to the agent will be prefixed with this value. Not set by default.
* `:tags` - ([binary]) - a list of tags to be sent with all metrics. Format: `["tag1:value1", "tag2:value2"]`. Not set by default.
* `:max_datagram_size` - (integer) - the maximum number of bytes for a message that can be sent. Defaults to `1432`.
* `:buffer_flush_ms` - (integer) - metric flush interval in milliseconds. Defaults to `500`.
## Sending metrics
The module provides the functions to send all the supported metrics, event and service checks to the agent.
All the functions accept the following options:
* `:tags` - ([binary]) - to add metric specific tags to the global tags
* `:timeout` - (integer) - a timeout value in milliseconds to be used when calling a worker from the pool. Defaults to `1000`.
## Sampling
All metric-sending functions support a `:sample_rate` option. A sample rate is a float between 0 and 1 representing
the percentage of packets that are effectively sent to the agent.
Dogmatix.count("my_dogmatix", "current_users", 1, sample_rate: 0.2)
In the example above, only 20% of the calls will effectively send a packet to the agent. Agents supporting sampling
will adjust the value according to the sample rate.
## Tagging
Metrics, events and service checks can be tagged to add dimensions to their information. Note that not all agents
support this feature.
Constant tags can be defined when instantiating a Dogmatix client:
Dogmatix.start_link("my_dogmatix", "localhost", 8125, tags: ["env:dev"])
In the example above, all metrics, events and service checks sent with this instance will be tagged with "env:dev".
Additionally, all functions support the `:tags` option to add ad-hoc tags.
Dogmatix.increment("my_dogmatix", "page_views", tags: ["page:home"])
In the example above, the metric "page_views" will be tagged with both "page:home" (and "env:dev").
## Pooling
For each instance of Dogmatix, a pool of worker/socket is started to format and send datagrams. The amount of workers
can be configured via the `:worker_count` options when starting the instance.
## Metric Buffering
In order to reduce network traffic, Dogmatix supports metric buffering. It attempts to group as many metrics as
possible into a single datagram before sending to the agent. This behavior can be configured via two instantiation
options:
### `:max_datagram_size`
An integer representing the maximum size in bytes of a datagram. Make sure to configure a size that does not exceed
the Agent-side per-datagram buffer size or the network/OS max datagram size. The default value is `1432` - the largest
possible size given the Ethernet MTU of 1514 bytes.
### `:buffer_flush_ms`
An integer representing in milliseconds the frequency of datagram buffer flush. Each worker/socket maintains its
own local buffered datagram, i.e. an accumulation of metrics to be sent once the size of the datagram reaches the
maximum possible size of a packet. For the case where your application does not capture metrics frequently, Dogmatix
will regularly flush these buffers to make sure that buffered metrics are sent to the agent in a timely manner.
The default value is `500`.
If your application is so "metric intensive" that there is no chance of seeing your metrics lingering in the buffer,
you can disable this behavior completely by setting this option to `0`.
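For example, a sketch with illustrative values that raises the datagram size and
disables the periodic flush:

    Dogmatix.start_link("my_dogmatix", "localhost", 8125,
      max_datagram_size: 8192,
      buffer_flush_ms: 0
    )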
"""
use Supervisor
alias Dogmatix.Event
alias Dogmatix.ServiceCheck
alias Dogmatix.SocketWorker
@default_call_timeout 1000
@default_worker_count 4
@type id :: binary | atom
@type start_option ::
{:worker_count, pos_integer}
| {:prefix, binary}
| {:tags, [binary]}
| {:max_datagram_size, pos_integer}
| {:buffer_flush_ms, non_neg_integer}
@type start_options :: [start_option]
@type metric_option ::
{:tags, [binary]}
| {:timeout, non_neg_integer}
@type metric_options :: [metric_option]
@type metric_result :: :ok | {:error, binary}
@type metric_value :: integer | float
@doc """
Starts a new pool of connections to an agent.
* `name` - a binary or an atom to identify this instance of Dogmatix
* `host` - a binary, the agent's host
* `port` - a positive integer, the agent's port
## Options
See the module documentation for details.
* `:worker_count` - (positive integer) - number of UDP sockets and workers used to distribute the metrics. Defaults to `4`.
* `:prefix` - (binary) - all metrics sent to the agent will be prefixed with this value. Not set by default.
* `:tags` - ([binary]) - a list of tags to be sent with all metrics. Format: `["tag1:value1", "tag2:value2"]`. Not set by default.
* `:max_datagram_size` - (integer) - the maximum number of bytes for a message that can be sent. Defaults to `1432`.
* `:buffer_flush_ms` - (integer) - metric flush interval in milliseconds. Defaults to `500`. `0` to disable.
"""
@spec start_link(id, binary, pos_integer, start_options) :: Supervisor.on_start()
def start_link(name, host, port, opts \\ []) when is_binary(host) and is_integer(port) do
Supervisor.start_link(__MODULE__, {name, host, port, opts})
end
@doc """
Increments the counter identified by `metric_name` by 1.
Equivalent to calling `count/4` with a `value` of 1.
## Examples
iex> Dogmatix.increment("my_dogmatix", "page_views")
:ok
"""
@spec increment(id, binary, metric_options) :: metric_result
def increment(name, metric_name, opts \\ []), do: count(name, metric_name, 1, opts)
@doc """
Decrements the counter identified by `metric_name` by 1.
Equivalent to calling `count/4` with a `value` of -1.
## Examples
iex> Dogmatix.decrement("my_dogmatix", "current_users")
:ok
"""
@spec decrement(id, binary, metric_options) :: metric_result
def decrement(name, metric_name, opts \\ []), do: count(name, metric_name, -1, opts)
@doc """
Changes the counter identified by `metric_name` by the given `value`.
## Examples
iex> Dogmatix.count("my_dogmatix", "cache_hits", 4)
:ok
"""
@spec count(id, binary, metric_value, metric_options) :: metric_result
def count(name, metric_name, value, opts \\ []), do: metric(name, metric_name, value, "c", opts)
@doc """
Sets the value of the gauge identified by `metric_name` to the given `value`.
## Examples
iex> Dogmatix.gauge("my_dogmatix", "disk_usage", 0.75)
:ok
"""
@spec gauge(id, binary, metric_value, metric_options) :: metric_result
def gauge(name, metric_name, value, opts \\ []), do: metric(name, metric_name, value, "g", opts)
@doc """
Writes `value` to the timer identified by `metric_name`.
## Examples
iex> Dogmatix.timer("my_dogmatix", "query_latency", 15)
:ok
"""
@spec timer(id, binary, metric_value, metric_options) :: metric_result
def timer(name, metric_name, value, opts \\ []), do: metric(name, metric_name, value, "ms", opts)
@doc """
Writes `value` to the histogram identified by `metric_name`.
## Examples
iex> Dogmatix.histogram("my_dogmatix", "page_views", 15)
:ok
"""
@spec histogram(id, binary, metric_value, metric_options) :: metric_result
def histogram(name, metric_name, value, opts \\ []), do: metric(name, metric_name, value, "h", opts)
@doc """
Writes `value` to the set identified by `metric_name`.
## Examples
iex> Dogmatix.set("my_dogmatix", "metric.set", 42)
:ok
"""
@spec set(id, binary, metric_value, metric_options) :: metric_result
def set(name, metric_name, value, opts \\ []), do: metric(name, metric_name, value, "s", opts)
@doc """
Writes `value` to the distribution identified by `metric_name`.
## Examples
iex> Dogmatix.set("my_dogmatix", "response_time", 9)
:ok
"""
@spec distribution(id, binary, metric_value, metric_options) :: metric_result
def distribution(name, metric_name, value, opts \\ []), do: metric(name, metric_name, value, "d", opts)
@doc """
Sends the provided event.
## Examples
iex> Dogmatix.event("my_dogmatix", %Dogmatix.Event{title: "An error occurred", text: "Error message"})
:ok
"""
@spec event(id, Dogmatix.Event.t(), metric_options) :: metric_result
def event(name, %Event{} = event, opts \\ []),
do: send_to_worker(name, {:event, event, opts}, opts[:timeout] || @default_call_timeout)
@doc """
Sends the provided service check.
## Examples
iex> Dogmatix.event("my_dogmatix", %Dogmatix.ServiceCheck{name: "application_check", status: :ok, message: "All good!"})
:ok
"""
@spec service_check(id, Dogmatix.ServiceCheck.t(), metric_options) :: metric_result
def service_check(name, %ServiceCheck{} = sc, opts \\ []),
do: send_to_worker(name, {:service_check, sc, opts}, opts[:timeout] || @default_call_timeout)
defp metric(name, metric_name, value, type, opts) do
case apply_sampling(opts[:sample_rate]) do
:send ->
send_to_worker(name, {:metric, metric_name, value, opts, type}, opts[:timeout] || @default_call_timeout)
:drop ->
:ok
end
end
defp apply_sampling(nil), do: :send
defp apply_sampling(rate) do
if :rand.uniform() > rate,
do: :drop,
else: :send
end
defp send_to_worker(name, message, timeout) do
workers = Registry.lookup(registry_name(name), :workers)
{pid, _value = nil} = Enum.random(workers)
GenServer.call(pid, message, timeout)
end
defp registry_name(name), do: :"dogmatix_#{name}_registry"
## Callbacks
@doc false
@impl true
def init({name, host, port, opts}) do
worker_count = opts[:worker_count] || @default_worker_count
registry_name = registry_name(name)
worker_specs =
for idx <- 1..worker_count do
Supervisor.child_spec({SocketWorker, {registry_name, host, port, opts}}, id: {name, SocketWorker, idx})
end
worker_supervisor_spec = %{
id: :"#{name}_worker_supervisor",
type: :supervisor,
start: {Supervisor, :start_link, [worker_specs, [strategy: :one_for_one]]}
}
children = [
{Registry, name: registry_name, keys: :duplicate},
worker_supervisor_spec
]
Supervisor.init(children, strategy: :rest_for_one)
end
## Macro syntax
defmacro __using__(_opts) do
quote do
def start_link(host, port, opts \\ []) when is_binary(host) and is_integer(port) do
Dogmatix.start_link(__MODULE__, host, port, opts)
end
def increment(metric_name, opts \\ []), do: Dogmatix.increment(__MODULE__, metric_name, opts)
def decrement(metric_name, opts \\ []), do: Dogmatix.decrement(__MODULE__, metric_name, opts)
def count(metric_name, value, opts \\ []), do: Dogmatix.count(__MODULE__, metric_name, value, opts)
def gauge(metric_name, value, opts \\ []), do: Dogmatix.gauge(__MODULE__, metric_name, value, opts)
def timer(metric_name, value, opts \\ []), do: Dogmatix.timer(__MODULE__, metric_name, value, opts)
def histogram(metric_name, value, opts \\ []), do: Dogmatix.histogram(__MODULE__, metric_name, value, opts)
def set(metric_name, value, opts \\ []), do: Dogmatix.set(__MODULE__, metric_name, value, opts)
def distribution(metric_name, value, opts \\ []), do: Dogmatix.distribution(__MODULE__, metric_name, value, opts)
def event(%Event{} = event, opts \\ []), do: Dogmatix.event(__MODULE__, event, opts)
def service_check(%ServiceCheck{} = sc, opts \\ []), do: Dogmatix.service_check(__MODULE__, sc, opts)
end
end
end
defmodule Xema.Utils do
@moduledoc """
Some utilities for Xema.
"""
@doc """
Converts the given `string` to an existing atom. Returns `nil` if the
atom does not exist.
## Examples
iex> import Xema.Utils
iex> to_existing_atom(:my_atom)
:my_atom
iex> to_existing_atom("my_atom")
:my_atom
iex> to_existing_atom("not_existing_atom")
nil
"""
@spec to_existing_atom(String.t() | atom) :: atom | nil
def to_existing_atom(atom) when is_atom(atom), do: atom
def to_existing_atom(string) when is_binary(string) do
String.to_existing_atom(string)
rescue
_ -> nil
end
@doc """
Returns whether the given `key` exists in the given `value`.
Returns true if
* `value` is a map and contains `key` as a key.
* `value` is a keyword and contains `key` as a key.
* `value` is a list of tuples with `key` as the first element.
## Example
iex> alias Xema.Utils
iex> Utils.has_key?(%{foo: 5}, :foo)
true
iex> Utils.has_key?([foo: 5], :foo)
true
iex> Utils.has_key?([{"foo", 5}], "foo")
true
"""
@spec has_key?(map | keyword | [{String.t(), any}], any) :: boolean
def has_key?([], _), do: false
def has_key?(value, key) when is_map(value), do: Map.has_key?(value, key)
def has_key?(value, key) when is_list(value) do
case Keyword.keyword?(value) do
true -> Keyword.has_key?(value, key)
false -> Enum.any?(value, fn {k, _} -> k == key end)
end
end
@doc """
Returns `nil` if `uri_1` and `uri_2` are `nil`.
Parses a URI when the other URI is `nil`.
Merges URIs if both are not nil.
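## Examples

    iex> Xema.Utils.update_uri(nil, nil)
    nil

    iex> Xema.Utils.update_uri("http://foo.com/a", "b") |> to_string()
    "http://foo.com/b"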
"""
@spec update_uri(URI.t() | String.t() | nil, URI.t() | String.t() | nil) ::
URI.t() | nil
def update_uri(nil, nil), do: nil
def update_uri(uri_1, nil), do: URI.parse(uri_1)
def update_uri(nil, uri_2), do: URI.parse(uri_2)
def update_uri(uri_1, uri_2), do: URI.merge(uri_1, uri_2)
@doc """
Returns the size of a `list` or `tuple`.
"""
@spec size(list | tuple) :: integer
def size(list) when is_list(list), do: length(list)
def size(tuple) when is_tuple(tuple), do: tuple_size(tuple)
end
defmodule Weaver.IntegrationCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a store.
Such tests rely on `Weaver.Store` and also
import other functionality to make it easier
to build common data structures and query the schema.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test resets
the store in use at the beginning of the test.
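A test module would typically start like this (a sketch; the module name is
illustrative):

    defmodule MyApp.SomeIntegrationTest do
      use Weaver.IntegrationCase, async: false

      setup :use_graph
    end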
"""
use ExUnit.CaseTemplate
using do
quote location: :keep do
alias Weaver.{Marker, Ref}
# Default test schema
alias Weaver.Absinthe.Schema
alias Weaver.ExTwitter.Mock, as: Twitter
alias ExTwitter.Model.User, as: TwitterUser
alias ExTwitter.Model.Tweet
import Weaver.IntegrationCase
import Test.Support.Factory
require Weaver.IntegrationCase
end
end
# Mock helpers
def twitter_mock_for(user, tweets) do
fn [id: user_id, tweet_mode: :extended, count: count] ->
assert user_id == user.id
Enum.take(tweets, count)
end
end
def twitter_mock_for(user, tweets, max_id: max_id) do
fn [id: user_id, tweet_mode: :extended, count: count, max_id: ^max_id] ->
assert user_id == user.id
{_skipped, tweets} = Enum.split_while(tweets, &(&1.id > max_id))
Enum.take(tweets, count)
end
end
alias Weaver.Step.Result
def use_graph(_context) do
Weaver.Graph.reset!()
end
def weave_initial({:ok, step}, mock, fn_name, mock_fun) do
Mox.expect(mock, fn_name, mock_fun)
weave_step(step)
end
def weave_dispatched(result, index \\ 0, mock, fn_name, mock_fun) do
Mox.expect(mock, fn_name, mock_fun)
result
|> Result.dispatched()
|> Enum.at(index)
|> weave_step()
end
def weave_next(result, mock, fn_name, mock_fun) do
Mox.expect(mock, fn_name, mock_fun)
weave_next(result)
end
def weave_next(result) do
result
|> Result.next()
|> weave_step()
end
defp weave_step(step) do
{:ok, result} = Weaver.weave(step)
Mox.verify!()
case step.execution.context do
%{cache: {mod, opts}} when mod != nil ->
assert mod.store!(Result.data(result), Result.meta(result), opts)
%{cache: mod} when mod != nil ->
assert mod.store!(Result.data(result), Result.meta(result))
_else ->
nil
end
result
end
@doc "Matches the given expression against the result's `data`."
defmacro assert_has_data(result_expr, match_expr) do
quote do
result = unquote(result_expr)
data = Result.data(result)
case unquote(match_expr) do
subset when is_list(subset) -> Enum.each(subset, &assert(&1 in data))
tuple when is_tuple(tuple) -> assert tuple in data
end
result
end
end
@doc "Matches the given expression against the result's `data`."
defmacro refute_has_data(result_expr, match_expr) do
quote do
result = unquote(result_expr)
data = Result.data(result)
case unquote(match_expr) do
subset when is_list(subset) -> Enum.each(subset, &refute(&1 in data))
tuple when is_tuple(tuple) -> refute tuple in data
end
result
end
end
@doc "Compares the result's `data` with the given term."
def assert_data(result, match) do
assert Result.data(result) == match
result
end
@doc "Compares the result's `meta` with the given term."
def assert_meta(result, match) do
assert Result.meta(result) == match
result
end
@doc "Matches the given expression against the result's `dispatched` paths."
defmacro assert_dispatched_paths(result_expr, match_expr) do
quote do
result = unquote(result_expr)
paths =
result
|> Result.dispatched()
|> Enum.map(fn
%{execution: %{acc: %{resolution: paths}}} -> paths
end)
assert unquote(match_expr) = paths
result
end
end
@doc "Matches the given expression against the result's `next` path."
defmacro assert_next_path(result_expr, match_expr) do
quote do
result = unquote(result_expr)
assert %{
execution: %{
acc: %{
resolution: unquote(match_expr)
}
}
} = Result.next(result)
result
end
end
@doc "Matches the given expression against the result's `next` Weaver state."
defmacro assert_next_state(result_expr, match_expr) do
quote do
result = unquote(result_expr)
assert %{
execution: %{
acc: %{
Weaver.Absinthe.Middleware.Continue => unquote(match_expr)
}
}
} = Result.next(result)
result
end
end
@doc "Expects the result's `next` to be nil."
def refute_next(result) do
refute Result.next(result)
result
end
def assert_done(result) do
assert result == Result.empty()
result
end
end
defmodule ExQueb do
@moduledoc """
Build Ecto filter Queries.
"""
import Ecto.Query
@doc """
Create the filter
Uses the :q query parameter to build the filter.
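## Example

A sketch, assuming a `MyApp.User` schema with an integer `:age` field:

    # builds a query equivalent to: from(u in MyApp.User, where: u.age > 21)
    ExQueb.filter(MyApp.User, %{q: %{age_gt: 21}})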
"""
def filter(query, params) do
q = params[Application.get_env(:ex_queb, :filter_param, :q)]
if q do
filters = Map.to_list(q)
|> Enum.filter(&(not elem(&1,1) in ["", nil]))
|> Enum.map(&({Atom.to_string(elem(&1, 0)), elem(&1, 1)}))
query
|> ExQueb.StringFilters.string_filters(filters)
|> integer_filters(filters)
|> date_filters(filters)
else
query
end
end
defp integer_filters(builder, filters) do
builder
|> build_integer_filters(filters, :eq)
|> build_integer_filters(filters, :lt)
|> build_integer_filters(filters, :gt)
end
defp date_filters(builder, filters) do
builder
|> build_date_filters(filters, :gte)
|> build_date_filters(filters, :lte)
end
defp build_integer_filters(builder, filters, condition) do
filters
|> Enum.filter(& String.match?(elem(&1,0), ~r/_#{condition}$/))
|> Enum.map(& {String.replace(elem(&1, 0), "_#{condition}", ""), elem(&1, 1)})
|> Enum.reduce(builder, fn({k,v}, acc) ->
_build_integer_filter(acc, String.to_atom(k), v, condition)
end)
end
defp _build_integer_filter(query, fld, value, :eq) do
where(query, [q], field(q, ^fld) == ^value)
end
defp _build_integer_filter(query, fld, value, :lt) do
where(query, [q], field(q, ^fld) < ^value)
end
defp _build_integer_filter(query, fld, value, :gte) do
where(query, [q], field(q, ^fld) >= ^value)
end
defp _build_integer_filter(query, fld, value, :lte) do
where(query, [q], field(q, ^fld) <= ^value)
end
defp _build_integer_filter(query, fld, value, :gt) do
where(query, [q], field(q, ^fld) > ^value)
end
defp build_date_filters(builder, filters, condition) do
filters
|> Enum.filter(& String.match?(elem(&1,0), ~r/_#{condition}$/))
|> Enum.map(& {String.replace(elem(&1, 0), "_#{condition}", ""), elem(&1, 1)})
|> Enum.reduce(builder, fn({k,v}, acc) ->
_build_date_filter(acc, String.to_atom(k), cast_date_time(v), condition)
end)
end
defp _build_date_filter(query, fld, value, :gte) do
where(query, [q], fragment("? >= ?", field(q, ^fld), type(^value, Ecto.DateTime)))
end
defp _build_date_filter(query, fld, value, :lte) do
where(query, [q], fragment("? <= ?", field(q, ^fld), type(^value, Ecto.DateTime)))
end
defp cast_date_time(%NaiveDateTime{} = datetime), do: datetime
defp cast_date_time(value) do
{:ok, date} = Ecto.Date.cast(value)
date
|> Ecto.DateTime.from_date
|> Ecto.DateTime.to_string
end
@doc """
Build order for a given query.
"""
def build_order_bys(query, opts, action, params) when action in ~w(index csv)a do
case Keyword.get(params, :order, nil) do
nil ->
build_default_order_bys(query, opts, action, params)
order ->
case get_sort_order(order) do
nil ->
build_default_order_bys(query, opts, action, params)
{name, sort_order} ->
name_atom = String.to_existing_atom name
if sort_order == "desc" do
order_by query, [c], [desc: field(c, ^name_atom)]
else
order_by query, [c], [asc: field(c, ^name_atom)]
end
end
end
end
def build_order_bys(query, _, _, _), do: query
defp build_default_order_bys(query, opts, action, _params) when action in ~w(index csv)a do
case query.order_bys do
[] ->
index_opts = Map.get(opts, action, []) |> Enum.into(%{})
{order, primary_key} = get_default_order_by_field(query, index_opts)
order_by(query, [c], [{^order, field(c, ^primary_key)}])
_ -> query
end
end
defp build_default_order_bys(query, _opts, _action, _params), do: query
@doc """
Get the sort order for a params entry.
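## Example

    iex> ExQueb.get_sort_order("name_desc")
    {"name", "desc"}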
"""
def get_sort_order(nil), do: nil
def get_sort_order(order) do
case Regex.scan ~r/(.+)_(desc|asc)$/, order do
[] -> nil
[[_, name, sort_order]] -> {name, sort_order}
end
end
defp get_default_order_by_field(_query, %{default_sort: [{order, field}]}) do
{order, field}
end
defp get_default_order_by_field(query, %{default_sort_order: order}) do
{order, get_default_order_by_field(query)}
end
defp get_default_order_by_field(_query, %{default_sort_field: field}) do
{:desc, field}
end
defp get_default_order_by_field(query, _) do
{:desc, get_default_order_by_field(query)}
end
defp get_default_order_by_field(query) do
case query do
%{from: {_, mod}} ->
case mod.__schema__(:primary_key) do
[name |_] -> name
_ -> mod.__schema__(:fields) |> List.first
end
_ -> :id
end
end
end
defmodule Sippet.Transports.UDP do
@moduledoc """
Implements a UDP transport.
The UDP transport consists of a single process, implemented by this module,
that both listens for and sends datagrams.
This process creates a UDP socket and keeps listening for datagrams in
active mode. Its job is to forward the datagrams to the processing receiver
defined in `Sippet.Transports.Receiver`.
"""
use GenServer
alias Sippet.Message
require Logger
defstruct socket: nil,
family: :inet,
sippet: nil
@doc """
Starts the UDP transport.
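## Example (illustrative values)

    Sippet.Transports.UDP.start_link(
      name: :mysippet,
      port: 5060,
      address: "0.0.0.0"
    )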
"""
def start_link(options) when is_list(options) do
name =
case Keyword.fetch(options, :name) do
{:ok, name} when is_atom(name) ->
name
{:ok, other} ->
raise ArgumentError, "expected :name to be an atom, got: #{inspect(other)}"
:error ->
raise ArgumentError, "expected :name option to be present"
end
port =
case Keyword.fetch(options, :port) do
{:ok, port} when is_integer(port) and port > 0 and port < 65536 ->
port
{:ok, other} ->
raise ArgumentError,
"expected :port to be an integer between 1 and 65535, got: #{inspect(other)}"
:error ->
5060
end
{address, family} =
case Keyword.fetch(options, :address) do
{:ok, {address, family}} when family in [:inet, :inet6] and is_binary(address) ->
{address, family}
{:ok, address} when is_binary(address) ->
{address, :inet}
{:ok, other} ->
raise ArgumentError,
"expected :address to be an address or {address, family} tuple, got: " <>
"#{inspect(other)}"
:error ->
{"0.0.0.0", :inet}
end
ip =
case resolve_name(address, family) do
{:ok, ip} ->
ip
{:error, reason} ->
raise ArgumentError,
":address contains an invalid IP or DNS name, got: #{inspect(reason)}"
end
GenServer.start_link(__MODULE__, {name, ip, port, family}, name: __MODULE__)
end
@impl true
def init({name, ip, port, family}) do
Sippet.register_transport(name, :udp, false)
{:ok, nil, {:continue, {name, ip, port, family}}}
end
@impl true
def handle_continue({name, ip, port, family}, nil) do
case :gen_udp.open(port, [:binary, {:active, true}, {:ip, ip}, family]) do
{:ok, socket} ->
Logger.debug(
"#{inspect(self())} started transport " <>
"#{stringify_sockname(socket)}/udp"
)
state = %__MODULE__{
socket: socket,
family: family,
sippet: name
}
{:noreply, state}
{:error, reason} ->
Logger.error(
"#{inspect(self())} port #{port}/udp " <>
"#{inspect(reason)}, retrying in 10s..."
)
Process.sleep(10_000)
{:noreply, nil, {:continue, {name, ip, port, family}}}
end
end
@impl true
def handle_info({:udp, _socket, from_ip, from_port, packet}, %{sippet: sippet} = state) do
Sippet.Router.handle_transport_message(sippet, packet, {:udp, from_ip, from_port})
{:noreply, state}
end
@impl true
def handle_call(
{:send_message, message, to_host, to_port, key},
_from,
%{socket: socket, family: family, sippet: sippet} = state
) do
Logger.debug([
"sending message to #{stringify_hostport(to_host, to_port)}/udp",
", #{inspect(key)}"
])
with {:ok, to_ip} <- resolve_name(to_host, family),
iodata <- Message.to_iodata(message),
:ok <- :gen_udp.send(socket, {to_ip, to_port}, iodata) do
:ok
else
{:error, reason} ->
Logger.warn("udp transport error for #{to_host}:#{to_port}: #{inspect(reason)}")
if key != nil do
Sippet.Router.receive_transport_error(sippet, key, reason)
end
end
{:reply, :ok, state}
end
@impl true
def terminate(reason, %{socket: socket}) do
Logger.debug(
"stopped transport #{stringify_sockname(socket)}/udp, reason: #{inspect(reason)}"
)
:gen_udp.close(socket)
end
defp resolve_name(host, family) do
host
|> String.to_charlist()
|> :inet.getaddr(family)
end
defp stringify_sockname(socket) do
{:ok, {ip, port}} = :inet.sockname(socket)
address =
ip
|> :inet_parse.ntoa()
|> to_string()
"#{address}:#{port}"
end
defp stringify_hostport(host, port) do
"#{host}:#{port}"
end
end
defmodule HoneylandWeb.Schema.AstarteTypes do
use Absinthe.Schema.Notation
use Absinthe.Relay.Schema.Notation, :modern
alias HoneylandWeb.Middleware
alias HoneylandWeb.Resolvers
@desc """
Describes a set of filters to apply when fetching a list of devices.
When multiple filters are specified, they are applied in an AND fashion to \
further refine the results.
"""
input_object :device_filter do
@desc "Whether to return devices connected or not to Astarte."
field :online, :boolean
@desc """
A string to match against the device ID. The match is case-insensitive \
and tests whether the string is included in the device ID.
"""
field :device_id, :string
@desc """
A string to match against the part number of the device's appliance model.
The match is case-insensitive and tests whether the string is included in \
the part number of the device's appliance model.
"""
field :appliance_model_part_number, :string
@desc """
A string to match against the handle of the device's appliance model.
The match is case-insensitive and tests whether the string is included in \
the handle of the device's appliance model.
"""
field :appliance_model_handle, :string
@desc """
A string to match against the name of the device's appliance model.
The match is case-insensitive and tests whether the string is included in \
the name of the device's appliance model.
"""
field :appliance_model_name, :string
@desc """
A string to match against the part number of the device's hardware type.
The match is case-insensitive and tests whether the string is included in \
the part number of the device's hardware type.
"""
field :hardware_type_part_number, :string
@desc """
A string to match against the handle of the device's hardware type.
The match is case-insensitive and tests whether the string is included in \
the handle of the device's hardware type.
"""
field :hardware_type_handle, :string
@desc """
A string to match against the name of the device's hardware type.
The match is case-insensitive and tests whether the string is included in \
the name of the device's hardware type.
"""
field :hardware_type_name, :string
end
@desc """
Describes hardware-related info of a device.
It exposes data read by a device's operating system about the underlying \
hardware.
"""
object :hardware_info do
@desc "The architecture of the CPU."
field :cpu_architecture, :string
@desc "The reference code of the CPU model."
field :cpu_model, :string
@desc "The display name of the CPU model."
field :cpu_model_name, :string
@desc "The vendor's name."
field :cpu_vendor, :string
@desc "The Bytes count of memory."
field :memory_total_bytes, :integer
end
@desc "Describes the current usage of a storage unit on a device."
object :storage_unit do
@desc "The label of the storage unit."
field :label, non_null(:string)
@desc "The total number of bytes of the storage unit."
field :total_bytes, :integer
@desc "The number of free bytes of the storage unit."
field :free_bytes, :integer
end
@desc """
Describes the position of a device.
The position is estimated by means of Honeyland's Geolocation modules and the \
data published by the device.
"""
object :device_location do
@desc "The latitude coordinate."
field :latitude, non_null(:float)
@desc "The longitude coordinate."
field :longitude, non_null(:float)
@desc "The accuracy of the measurement, in meters."
field :accuracy, :float
@desc "The formatted address estimated for the position."
field :address, :string
@desc "The date at which the measurement was made."
field :timestamp, non_null(:datetime)
end
@desc """
Describes the current status of the operating system of a device.
"""
object :system_status do
@desc "The identifier of the performed boot sequence."
field :boot_id, :string
@desc "The number of free bytes of memory."
field :memory_free_bytes, :integer
@desc "The number of running tasks on the system."
field :task_count, :integer
@desc "The number of milliseconds since the last system boot."
field :uptime_milliseconds, :integer
@desc "The date at which the system status was read."
field :timestamp, non_null(:datetime)
end
@desc """
Describes the list of WiFi Access Points found by the device.
"""
object :wifi_scan_result do
@desc "The channel used by the Access Point."
field :channel, :integer
@desc "The ESSID advertised by the Access Point."
field :essid, :string
@desc "The MAC address advertised by the Access Point."
field :mac_address, :string
@desc "The power of the radio signal, measured in dBm."
field :rssi, :integer
@desc "The date at which the device found the Access Point."
field :timestamp, non_null(:datetime)
end
@desc """
Denotes a device instance that connects and exchanges data.
Each Device is associated to a specific ApplianceModel, which in turn is \
associated to a specific HardwareType.
A Device also exposes info about its connection status and some sets of \
data read by its operating system.
"""
node object(:device) do
@desc "The display name of the device."
field :name, non_null(:string)
@desc "The device ID used to connect to the Astarte cluster."
field :device_id, non_null(:string)
@desc "Tells whether the device is connected or not to Astarte."
field :online, non_null(:boolean)
@desc "The date at which the device last connected to Astarte."
field :last_connection, :datetime
@desc "The date at which the device last disconnected from Astarte."
field :last_disconnection, :datetime
@desc "The appliance model of the device."
field :appliance_model, :appliance_model
@desc "Info read from the device's hardware."
field :hardware_info, :hardware_info do
resolve &Resolvers.Astarte.get_hardware_info/3
middleware Middleware.ErrorHandler
end
@desc "The estimated location of the device."
field :location, :device_location do
resolve &Resolvers.Astarte.fetch_device_location/3
middleware Middleware.ErrorHandler
end
@desc "The current usage of the storage units of the device."
field :storage_usage, list_of(non_null(:storage_unit)) do
resolve &Resolvers.Astarte.fetch_storage_usage/3
middleware Middleware.ErrorHandler
end
@desc "The current status of the operating system of the device."
field :system_status, :system_status do
resolve &Resolvers.Astarte.fetch_system_status/3
middleware Middleware.ErrorHandler
end
@desc "The list of WiFi Access Points found by the device."
field :wifi_scan_results, list_of(non_null(:wifi_scan_result)) do
resolve &Resolvers.Astarte.fetch_wifi_scan_results/3
middleware Middleware.ErrorHandler
end
end
object :astarte_queries do
@desc "Fetches the list of all devices."
field :devices, non_null(list_of(non_null(:device))) do
@desc "An optional set of filters to apply when fetching the devices."
arg :filter, :device_filter
resolve &Resolvers.Astarte.list_devices/3
end
@desc "Fetches a single device."
field :device, :device do
@desc "The ID of the device."
arg :id, non_null(:id)
middleware Absinthe.Relay.Node.ParseIDs, id: :device
resolve &Resolvers.Astarte.find_device/2
end
end
end
defmodule OcrNumbers do
@doc """
Given a 3 x 4 grid of pipes, underscores, and spaces, determine which number is represented, or
whether it is garbled.
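## Example

    iex> OcrNumbers.convert([" _ ", "| |", "|_|", "   "])
    {:ok, "0"}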
"""
@spec convert([String.t()]) :: {:ok, String.t()} | {:error, charlist()}
def convert(input) do
Enum.chunk_every(input, 4)
|> Enum.map(fn row_set -> convert(row_set, "") end)
|> format_output()
end
defp format_output([]), do: {:error, 'invalid line count'}
defp format_output(rows), do: format_output(Enum.any?(rows, &error?/1), rows)
defp format_output(true, rows), do: Enum.find(rows, &error?/1)
defp format_output(false, output), do: {:ok, Enum.join(output, ",")}
defp error?({:error, _}), do: true
defp error?(_), do: false
defp convert(_, {:error, _} = error), do: error
defp convert(input, _) when length(input) != 4, do: {:error, 'invalid line count'}
defp convert(["", "", "", ""], output), do: output
defp convert(input, output) do
split_strings = Enum.map(input, fn a -> String.split_at(a, 3) end)
this_character = Enum.map(split_strings, fn {a, _} -> a end)
other_characters = Enum.map(split_strings, fn {_, a} -> a end)
lengths = Enum.map(this_character, fn a -> String.length(a) end)
convert(other_characters, update_output(lengths, this_character, output))
end
defp update_output([3, 3, 3, 3], chars, output), do: output <> recognize_character(chars)
defp update_output(_, _, _), do: {:error, 'invalid column count'}
defp recognize_character([" _ ", "| |", "|_|", " "]), do: "0"
defp recognize_character([" ", " |", " |", " "]), do: "1"
defp recognize_character([" _ ", " _|", "|_ ", " "]), do: "2"
defp recognize_character([" _ ", " _|", " _|", " "]), do: "3"
defp recognize_character([" ", "|_|", " |", " "]), do: "4"
defp recognize_character([" _ ", "|_ ", " _|", " "]), do: "5"
defp recognize_character([" _ ", "|_ ", "|_|", " "]), do: "6"
defp recognize_character([" _ ", " |", " |", " "]), do: "7"
defp recognize_character([" _ ", "|_|", "|_|", " "]), do: "8"
defp recognize_character([" _ ", "|_|", " _|", " "]), do: "9"
defp recognize_character(_), do: "?"
end
defmodule Cashtrail.Users.User do
@moduledoc """
This is an `Ecto.Schema` struct that represents a user of the application.
The user is any individual that uses the application. They can create their
entities or be assigned to an entity as a member. See `Cashtrail.Entities.Entity`
to know more about what is an Entity.
## Fields
* `:id` - The unique id of the user.
* `:email` - The email address of the user. This must be unique in the whole
application.
* `:first_name` - The first name of the user.
* `:last_name` - The last name of the user.
* `:password` - This is a virtual field used to the users input their passwords.
When a user is retrieved, this value is empty.
* `:password_hash` - This field keeps the hashed password. You can search more
about hashing algorithms or see `Comeonin` to know more about it.
* `:inserted_at` - When the user was inserted at the first time.
* `:updated_at` - When the user was updated at the last time.
See `Cashtrail.Users` to know how to list, get, insert, update, delete, and
authenticate users.
"""
use Ecto.Schema
import Ecto.Changeset
import Cashtrail.Users.PasswordHash, only: [hash_pwd_salt: 1]
@type t :: %Cashtrail.Users.User{
id: Ecto.UUID.t() | nil,
email: String.t() | nil,
first_name: String.t() | nil,
last_name: String.t() | nil,
password: String.t() | nil,
password_hash: String.t() | nil,
avatar_url: String.t() | nil,
inserted_at: NaiveDateTime.t() | nil,
updated_at: NaiveDateTime.t() | nil,
__meta__: Ecto.Schema.Metadata.t()
}
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
schema "users" do
field :email, :string
field :first_name, :string
field :last_name, :string
field :avatar_url, :string
field :password_hash, :string
field :password, :string, virtual: true
timestamps()
end
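# Validation patterns: a permissive email shape check, a password pattern
# requiring at least one digit and one letter, and a general http(s) URL
# matcher used for `:avatar_url`.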
@email_regex ~r/[^@ \t\r\n]+@[^@ \t\r\n]+\.[^@ \t\r\n]+/
@password_regex ~r/^(?=.*\d)(?=.*[a-zA-Z]).*/
@url_regex ~r/https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&\/\/=]*)/
@spec changeset(t() | Ecto.Changeset.t(t()), map) :: Ecto.Changeset.t(t())
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [:first_name, :last_name, :email, :password, :avatar_url])
|> validate_required([:first_name, :email, :password])
|> validate_format(:email, @email_regex, message: "is not a valid email")
|> unique_constraint(:email)
|> validate_length(:password, min: 6)
|> validate_format(:password, @password_regex,
message: "should have at least one number, and one letter"
)
|> validate_format(:avatar_url, @url_regex, message: "is not a valid url")
|> validate_confirmation(:password)
|> change_password()
|> downcase_email()
end
defp change_password(changeset) do
case changeset do
%Ecto.Changeset{changes: %{password: password}, valid?: true} ->
put_change(changeset, :password_hash, hash_pwd_salt(password))
_ ->
changeset
end
end
defp downcase_email(changeset) do
case get_field(changeset, :email) do
nil -> changeset
email -> put_change(changeset, :email, String.downcase(email))
end
end
end
defmodule Cldr.Unit.Additional do
@moduledoc """
Additional domain-specific units can be defined
to suit application requirements. In the context
of `ex_cldr` there are two parts of configuring
additional units.
1. Configure the unit, base unit and conversion in
`config.exs`. This is a requirement since units are
compiled into code.
2. Configure the localizations for the additional
unit in a CLDR backend module.
Once configured, additional units act and behave
like any of the predefined units of measure defined
by CLDR.
## Configuring a unit in config.exs
Under the application `:ex_cldr_units` define
a key `:additional_units` with the required
unit definitions. For example:
```elixir
config :ex_cldr_units, :additional_units,
vehicle: [base_unit: :unit, factor: 1, offset: 0, sort_before: :all],
person: [base_unit: :unit, factor: 1, offset: 0, sort_before: :all]
```
This example defines two additional units: `:vehicle` and
`:person`. The keys `:base_unit`, and `:factor` are required.
The key `:offset` is optional and defaults to `0`. The
key `:sort_before` is optional and defaults to `:none`.
### Configuration keys
* `:base_unit` is the common denominator that is used
to support conversion between like units. It can be
any atom value. For example `:liter` is the base unit
for volume units, `:meter` is the base unit for length
units.
* `:factor` is used to convert a unit to its base unit in
order to support conversion. When converting a unit to
another compatible unit, the unit is first multiplied by
this units factor then divided by the target units factor.
* `:offset` is added to a unit after applying its base factor
in order to convert to another unit.
* `:sort_before` determines where this *base unit* sorts
relative to other base units. Typically this is set to
`:all`, in which case this base unit sorts before all other
base units, or `:none`, in which case this base unit sorts
after all other base units. The default is `:none`. If in
doubt, leave this key at its default.
* `:systems` is a list of measurement systems to which this
unit belongs. The known measurement systems are `:metric`,
`:uksystem` and `:ussystem`. The default is
`[:metric, :ussystem, :uksystem]`.
## Defining localizations
Localizations are defined in a backend module through adding
`use Cldr.Unit.Additional` to the top of the backend module
and invoking `Cldr.Unit.Additional.unit_localization/4` for
each localization.
See `Cldr.Unit.Additional.unit_localization/4` for further
information.
Note that one invocation of the macro is required for
each combination of locale, style and unit. An exception
will be raised at runtime is a localization is expected
but is not found.
"""
@root_locale_name Cldr.Config.root_locale_name()
defmacro __using__(_opts) do
module = __CALLER__.module
quote do
@before_compile Cldr.Unit.Additional
@after_compile Cldr.Unit.Additional
import Cldr.Unit.Additional
Module.register_attribute(unquote(module), :custom_localizations, accumulate: true)
end
end
@doc false
defmacro __before_compile__(_ast) do
caller = __CALLER__.module
target_module = Module.concat(caller, Unit.Additional)
caller
|> Module.get_attribute(:custom_localizations)
|> Cldr.Unit.Additional.group_localizations()
|> Cldr.Unit.Additional.define_localization_module(target_module)
end
@doc """
Although defining a unit in `config.exs` is enough to create,
operate on and serialize an additional unit, it cannot be
localised without defining localizations in an `ex_cldr`
backend module. For example:
```elixir
defmodule MyApp.Cldr do
use Cldr.Unit.Additional
use Cldr,
locales: ["en", "fr", "de", "bs", "af", "af-NA", "se-SE"],
default_locale: "en",
providers: [Cldr.Number, Cldr.Unit, Cldr.List]
unit_localization(:person, "en", :long,
one: "{0} person",
other: "{0} people",
display_name: "people"
)
unit_localization(:person, "en", :short,
one: "{0} per",
other: "{0} pers",
display_name: "people"
)
unit_localization(:person, "en", :narrow,
one: "{0} p",
other: "{0} p",
display_name: "p"
)
end
```
Note the additions to a typical `ex_cldr`
backend module:
* `use Cldr.Unit.Additional` is required to
define additional units
* use of the `unit_localization/4` macro in
order to define a localization.
One invocation of `unit_localization` should
made for each combination of unit, locale and
style.
### Parameters to unit_localization/4
* `unit` is the name of the additional
unit as an `atom`.
* `locale` is the locale name for this
localization. It should be one of the locales
configured in this backend, although this
cannot currently be confirmed at compile time.
* `style` is one of `:long`, `:short`, or
`:narrow`.
* `localizations` is a keyword list of localization
strings. Two keys, `:display_name` and `:other`,
are mandatory: `:display_name` is the localization for
a non-count display name, and `:other` is the
localization used for a unit when no other pluralization
matches.
### Localisations
Localization keyword list defines localizations that
match the plural rules for a given locale. Plural rules
for a given number in a given locale resolve to one of
six keys:
* `:zero`
* `:one` (singular)
* `:two` (dual)
* `:few` (paucal)
* `:many` (also used for fractions if they have a separate class)
* `:other` (required—general plural form—also used if the language only has a single form)
Only the `:other` key is required. For english,
providing keys for `:one` and `:other` is enough. Other
languages have different grammatical requirements.
The key `:display_name` is used by the function
`Cldr.Unit.display_name/1` which is primarly used
to support UI applications.
"""
defmacro unit_localization(unit, locale, style, localizations) do
module = __CALLER__.module
{localizations, _} = Code.eval_quoted(localizations)
localization = Cldr.Unit.Additional.validate_localization!(unit, locale, style, localizations)
quote do
Module.put_attribute(
unquote(module),
:custom_localizations,
unquote(Macro.escape(localization))
)
end
end
# This is the empty module created if the backend does not
# include `use Cldr.Unit.Additional`
@doc false
def define_localization_module(%{} = localizations, module) when localizations == %{} do
IO.warn(
"The CLDR backend #{inspect(module)} calls `use Cldr.Unit.Additional` " <>
"but does not have any localizations defined",
[]
)
quote bind_quoted: [module: module] do
defmodule module do
def units_for(_locale, _style) do
%{}
end
def known_locale_names do
unquote([])
end
def additional_units do
unquote([])
end
end
end
end
def define_localization_module(localizations, module) do
additional_units =
localizations
|> Map.values()
|> hd()
|> Map.values()
|> hd()
|> Map.keys()
quote bind_quoted: [
module: module,
localizations: Macro.escape(localizations),
additional_units: additional_units
] do
defmodule module do
for {locale, styles} <- localizations do
for {style, formats} <- styles do
def units_for(unquote(locale), unquote(style)) do
unquote(Macro.escape(formats))
end
end
end
def units_for(_locale, _style) do
%{}
end
def known_locale_names do
unquote(Map.keys(localizations))
end
def additional_units do
unquote(additional_units)
end
end
end
end
@doc false
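# After the backend module compiles, check that every configured locale
# defines localizations for every additional unit (for each style), and
# warn about any gaps.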
def __after_compile__(env, _bytecode) do
additional_module = Module.concat(env.module, Unit.Additional)
additional_units = additional_module.additional_units()
additional_locales = MapSet.new(additional_module.known_locale_names())
backend_locales = MapSet.new(env.module.known_locale_names() -- [@root_locale_name])
styles = Cldr.Unit.known_styles()
case MapSet.to_list(MapSet.difference(backend_locales, additional_locales)) do
[] ->
:ok
other ->
IO.warn(
"The locales #{inspect(other)} configured in " <>
"the CLDR backend #{inspect(env.module)} " <>
"do not have localizations defined for additional units #{inspect(additional_units)}.",
[]
)
end
for locale <- MapSet.intersection(backend_locales, additional_locales),
style <- styles do
with found_units when is_map(found_units) <- additional_module.units_for(locale, style),
[] <- additional_units -- Map.keys(found_units) do
:ok
else
:error ->
IO.warn(
"#{inspect(env.module)} does not define localizations " <>
"for locale #{inspect(locale)} with style #{inspect(style)}",
[]
)
not_defined when is_list(not_defined) ->
IO.warn(
"#{inspect(env.module)} does not define localizations " <>
"for locale #{inspect(locale)} with style #{inspect(style)} " <>
"for units #{inspect(not_defined)}",
[]
)
end
end
end
@doc false
def group_localizations(localizations) when is_list(localizations) do
localizations
|> Enum.group_by(
fn localization -> localization.locale end,
fn localization -> Map.take(localization, [:style, :unit, :localizations]) end
)
|> Enum.map(fn {locale, rest} ->
value =
Enum.group_by(
rest,
fn localization -> localization.style end,
fn localization -> {localization.unit, parse(localization.localizations)} end
)
|> Enum.map(fn {style, list} -> {style, Map.new(list)} end)
{locale, Map.new(value)}
end)
|> Map.new()
end
defp parse(localizations) do
Enum.map(localizations, fn
{:display_name, name} ->
{:display_name, name}
{:gender, gender} ->
{:gender, gender}
{grammatical_case, counts} ->
counts =
Enum.map(counts, fn {count, template} ->
{count, Cldr.Substitution.parse(template)}
end)
{grammatical_case, Map.new(counts)}
end)
|> Map.new()
end
@doc false
def validate_localization!(unit, locale, style, localizations) do
unless is_atom(unit) do
raise ArgumentError, "Unit name must be an atom. Found #{inspect(unit)}"
end
unless style in [:short, :long, :narrow] do
raise ArgumentError, "Style must be one of :short, :long or :narrow. Found #{inspect(style)}"
end
unless is_binary(locale) do
raise ArgumentError, "Locale name must be a string. Found #{inspect(locale)}"
end
unless Keyword.keyword?(localizations) do
raise ArgumentError, "Localizations must be a keyword list. Found #{inspect(localizations)}"
end
unless Keyword.has_key?(localizations, :nominative) do
raise ArgumentError, "Localizations must have an :nominative key"
end
unless Map.has_key?(localizations[:nominative], :other) do
raise ArgumentError, "The nominative case must have an :other key"
end
unless Keyword.has_key?(localizations, :display_name) do
raise ArgumentError, "Localizations must have a :display_name key"
end
%{unit: unit, locale: locale, style: style, localizations: localizations}
end
@doc false
@default_systems [:metric, :uksystem, :ussystem]
@default_sort_before :none
@default_offset 0
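# Additional units are read from the `:ex_cldr_units` application
# environment. A minimal sketch of that configuration (the unit name and
# values are assumptions):
#
#     config :ex_cldr_units, :additional_units,
#       vehicle: [base_unit: :unit, factor: 1, systems: [:metric]]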
def conversions do
:ex_cldr_units
|> Application.get_env(:additional_units, [])
|> conversions()
end
defp conversions(config) when is_list(config) do
config
|> Enum.map(fn {unit, config} ->
if Keyword.keyword?(config) do
new_config =
config
|> Keyword.put_new(:offset, @default_offset)
|> Keyword.put_new(:sort_before, @default_sort_before)
|> Keyword.put_new(:systems, @default_systems)
|> validate_unit!
{unit, new_config}
else
raise ArgumentError,
"Additional unit configuration for #{inspect(unit)} must be a keyword list. Found #{
inspect(config)
}"
end
end)
end
defp conversions(config) do
raise ArgumentError,
"Additional unit configuration must be a keyword list. Found #{inspect(config)}"
end
defp validate_unit!(unit) do
unless Keyword.keyword?(unit) do
raise ArgumentError,
"Additional unit configuration must be a keyword list. Found #{inspect(unit)}"
end
unless Keyword.has_key?(unit, :factor) do
raise ArgumentError, "Additional unit configuration must have a :factor configured"
end
unless (list = Keyword.fetch!(unit, :systems)) |> is_list() do
raise ArgumentError, "Additional unit systems must be a list. Found #{inspect(list)}"
end
unless Enum.all?(Keyword.fetch!(unit, :systems), &(&1 in @default_systems)) do
raise ArgumentError,
"Additional unit valid measurement systems are " <>
"#{inspect(@default_systems)}. Found #{inspect(Keyword.fetch!(unit, :systems))}"
end
unless (base = Keyword.fetch!(unit, :base_unit)) |> is_atom() do
raise ArgumentError, "Additional unit :base_unit must be an atom. Found #{inspect(base)}"
end
case Keyword.fetch!(unit, :factor) do
x when is_number(x) ->
:ok
%{numerator: numerator, denominator: denominator}
when is_number(numerator) and is_number(denominator) ->
:ok
other ->
raise ArgumentError,
"Additional unit factor must be a number or a rational " <>
"of the form %{numerator: number, denominator: number}. Found #{inspect(other)}"
end
unit
end
@doc false
def additional_units do
Keyword.keys(conversions())
end
@doc false
def systems_for_units do
conversions()
|> Enum.map(fn {k, v} -> {k, v[:systems]} end)
end
@doc false
def merge_base_units(core_base_units) do
additional_base_units =
orderable_base_units()
|> Enum.reject(fn {k, _v} -> Keyword.has_key?(core_base_units, k) end)
merge_base_units(core_base_units, additional_base_units)
end
def merge_base_units(core_base_units, additional_base_units, acc \\ [])
# Insert units at the head
def merge_base_units(core_base_units, [{k, :all} | rest], acc) do
merge_base_units(core_base_units, rest, [{k, k} | acc])
end
# Insert units at the tail. Since the additional units are sorted
# we can guarantee that when we hit one with :none we can just take
# everything left
def merge_base_units(core_base_units, [{_k, :none} | _rest] = additional, acc) do
tail_base_units = Enum.map(additional, fn {k, _v} -> {k, k} end)
acc ++ core_base_units ++ tail_base_units
end
def merge_base_units(core_base_units, [], acc) do
acc ++ core_base_units
end
def merge_base_units([], additional, acc) do
tail_base_units = Enum.map(additional, fn {k, _v} -> {k, k} end)
acc ++ tail_base_units
end
def merge_base_units([{k1, _v1} = head | other] = core_base_units, additional, acc) do
case Keyword.pop(additional, k1) do
{nil, _rest} -> merge_base_units(other, additional, acc ++ [head])
{{v2, _}, rest} -> merge_base_units(core_base_units, rest, acc ++ [{v2, v2}])
end
end
@doc false
def base_units do
conversions()
|> Enum.map(fn {_k, v} -> {v[:base_unit], v[:base_unit]} end)
|> Enum.uniq()
|> Keyword.new()
end
@doc false
def orderable_base_units do
conversions()
|> Enum.sort(fn {_k1, v1}, {_k2, v2} ->
cond do
Keyword.get(v1, :sort_before) == :all ->
true
Keyword.get(v1, :sort_before) == :none ->
false
true ->
  Keyword.get(v1, :sort_before) < Keyword.get(v2, :sort_before)
end
end)
|> Keyword.values()
|> Enum.map(&{&1[:base_unit], &1[:sort_before]})
|> Enum.uniq()
|> Keyword.new()
end
end
| lib/cldr/unit/additional.ex | 0.950445 | 0.95297 | additional.ex | starcoder
defmodule ExthCrypto.AES do
@moduledoc """
Defines standard functions for use with AES symmetric cryptography in block mode.
"""
@block_size 32
@doc """
Returns the blocksize for AES encryption when used as block mode encryption.
## Examples
iex> ExthCrypto.AES.block_size
32
"""
@spec block_size :: integer()
def block_size, do: @block_size
@doc """
Encrypts a given binary with the given symmetric key. For block mode, this is the
standard encrypt operation. For streaming mode, this will return the final block
of the stream.
Note: for streaming modes, `init_vector` is the same as ICB.
## Examples
iex> ExthCrypto.AES.encrypt("obi wan", :cbc, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
<<86, 16, 7, 47, 97, 219, 8, 46, 16, 170, 70, 100, 131, 140, 241, 28>>
iex> ExthCrypto.AES.encrypt("obi wan", :cbc, ExthCrypto.Test.symmetric_key(:key_b), ExthCrypto.Test.init_vector)
<<219, 181, 173, 235, 88, 139, 229, 61, 172, 142, 36, 195, 83, 203, 237, 39>>
iex> ExthCrypto.AES.encrypt("obi wan", :cbc, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector(2))
<<134, 126, 59, 64, 83, 197, 85, 40, 155, 178, 52, 165, 27, 190, 60, 170>>
iex> ExthCrypto.AES.encrypt("jedi knight", :cbc, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
<<54, 252, 188, 111, 221, 182, 65, 54, 77, 143, 127, 188, 176, 178, 50, 160>>
iex> ExthCrypto.AES.encrypt("Did you ever hear the story of <NAME> The Wise? I thought not.", :cbc, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector) |> ExthCrypto.Math.bin_to_hex
"3ee326e03303a303df6eac828b0bdc8ed67254b44a6a79cd0082bc245977b0e7d4283d63a346744d2f1ecaafca8be906d9f3d27db914d80b601d7e0c598418380e5fe2b48c0e0b8454c6d251f577f28f"
iex> ExthCrypto.AES.encrypt("obi wan", :ctr, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
<<32, 99, 57, 7, 64, 82, 28>>
iex> ExthCrypto.AES.encrypt("obi wan", :ctr, ExthCrypto.Test.symmetric_key(:key_b), ExthCrypto.Test.init_vector)
<<156, 176, 33, 64, 69, 16, 173>>
iex> ExthCrypto.AES.encrypt("obi wan", :ctr, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector(2))
<<214, 99, 7, 241, 219, 189, 178>>
iex> ExthCrypto.AES.encrypt("jedi knight", :ctr, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
<<37, 100, 52, 78, 23, 88, 28, 22, 254, 47, 32>>
iex> ExthCrypto.AES.encrypt("Did you ever hear the story of D<NAME>is The Wise? I thought not.", :ctr, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector) |> ExthCrypto.Math.bin_to_hex
"<KEY>"
iex> ExthCrypto.AES.encrypt("jedi knight", :ecb, ExthCrypto.Test.symmetric_key)
<<98, 60, 215, 107, 189, 132, 176, 63, 62, 225, 92, 13, 70, 53, 187, 240>>
"""
@spec encrypt(
ExthCrypto.Cipher.plaintext(),
ExthCrypto.Cipher.mode(),
ExthCrypto.Key.symmetric_key(),
ExthCrypto.Cipher.init_vector()
) :: ExthCrypto.Cipher.ciphertext()
def encrypt(plaintext, :cbc, symmetric_key, init_vector) do
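# Left-pad the plaintext with zero bytes to a multiple of the 16-byte AES
# block size; as the decrypt doctests show, this zero padding is returned
# as part of the decrypted plaintext.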
padding_bits = (16 - rem(byte_size(plaintext), 16)) * 8
:crypto.block_encrypt(
:aes_cbc,
symmetric_key,
init_vector,
<<0::size(padding_bits)>> <> plaintext
)
end
def encrypt(plaintext, :ctr, symmetric_key, init_vector) do
{_state, ciphertext} =
:crypto.stream_init(:aes_ctr, symmetric_key, init_vector)
|> :crypto.stream_encrypt(plaintext)
ciphertext
end
@spec encrypt(
ExthCrypto.Cipher.plaintext(),
ExthCrypto.Cipher.mode(),
ExthCrypto.Key.symmetric_key()
) :: ExthCrypto.Cipher.ciphertext()
def encrypt(plaintext, :ecb, symmetric_key) do
padding_bits = (16 - rem(byte_size(plaintext), 16)) * 8
:crypto.block_encrypt(:aes_ecb, symmetric_key, <<0::size(padding_bits)>> <> plaintext)
end
@doc """
Decrypts the given binary with the given symmetric key.
## Examples
iex> <<86, 16, 7, 47, 97, 219, 8, 46, 16, 170, 70, 100, 131, 140, 241, 28>>
...> |> ExthCrypto.AES.decrypt(:cbc, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
<<0, 0, 0, 0, 0, 0, 0, 0, 0>> <> "obi wan"
iex> <<219, 181, 173, 235, 88, 139, 229, 61, 172, 142, 36, 195, 83, 203, 237, 39>>
...> |> ExthCrypto.AES.decrypt(:cbc, ExthCrypto.Test.symmetric_key(:key_b), ExthCrypto.Test.init_vector)
<<0, 0, 0, 0, 0, 0, 0, 0, 0>> <> "obi wan"
iex> <<134, 126, 59, 64, 83, 197, 85, 40, 155, 178, 52, 165, 27, 190, 60, 170>>
...> |> ExthCrypto.AES.decrypt(:cbc, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector(2))
<<0, 0, 0, 0, 0, 0, 0, 0, 0>> <> "obi wan"
iex> <<54, 252, 188, 111, 221, 182, 65, 54, 77, 143, 127, 188, 176, 178, 50, 160>>
...> |> ExthCrypto.AES.decrypt(:cbc, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
<<0, 0, 0, 0, 0>> <> "jedi knight"
iex> "3ee326e03303a303df6eac828b0bdc8ed67254b44a6a79cd0082bc245977b0e7d4283d63a346744d2f1ecaafca8be906d9f3d27db914d80b601d7e0c598418380e5fe2b48c0e0b8454c6d251f577f28f"
...> |> ExthCrypto.Math.hex_to_bin
...> |> ExthCrypto.AES.decrypt(:cbc, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0>> <> "Did you ever hear the story of <NAME> The Wise? I thought not."
iex> <<32, 99, 57, 7, 64, 82, 28>>
...> |> ExthCrypto.AES.decrypt(:ctr, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
"obi wan"
iex> <<156, 176, 33, 64, 69, 16, 173>>
...> |> ExthCrypto.AES.decrypt(:ctr, ExthCrypto.Test.symmetric_key(:key_b), ExthCrypto.Test.init_vector)
"obi wan"
iex> <<214, 99, 7, 241, 219, 189, 178>>
...> |> ExthCrypto.AES.decrypt(:ctr, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector(2))
"obi wan"
iex> <<37, 100, 52, 78, 23, 88, 28, 22, 254, 47, 32>>
...> |> ExthCrypto.AES.decrypt(:ctr, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
"jedi knight"
iex> "0b6834074e5c075ffc31318cc03cba1fe35648a6f149a74952661473b73570fb98332e31870c111d3ae5ccff2154bd4083a7ee4bfd19bc85eba77835aac4cea881ada2630cdd"
...> |> ExthCrypto.Math.hex_to_bin
...> |> ExthCrypto.AES.decrypt(:ctr, ExthCrypto.Test.symmetric_key, ExthCrypto.Test.init_vector)
"Did you ever hear the story of <NAME> The Wise? I thought not."
iex> ExthCrypto.AES.decrypt(<<98, 60, 215, 107, 189, 132, 176, 63, 62, 225, 92, 13, 70, 53, 187, 240>>, :ecb, ExthCrypto.Test.symmetric_key)
<<0, 0, 0, 0, 0>> <> "jedi knight"
"""
@spec decrypt(
ExthCrypto.Cipher.ciphertext(),
ExthCrypto.Cipher.mode(),
ExthCrypto.Key.symmetric_key(),
ExthCrypto.Cipher.init_vector()
) :: ExthCrypto.Cipher.plaintext()
def decrypt(ciphertext, :cbc, symmetric_key, init_vector) do
:crypto.block_decrypt(:aes_cbc, symmetric_key, init_vector, ciphertext)
end
def decrypt(ciphertext, :ctr, symmetric_key, init_vector) do
{_state, plaintext} =
:crypto.stream_init(:aes_ctr, symmetric_key, init_vector)
|> :crypto.stream_decrypt(ciphertext)
plaintext
end
@spec decrypt(
ExthCrypto.Cipher.ciphertext(),
ExthCrypto.Cipher.mode(),
ExthCrypto.Key.symmetric_key()
) :: ExthCrypto.Cipher.plaintext()
def decrypt(ciphertext, :ecb, symmetric_key) do
:crypto.block_decrypt(:aes_ecb, symmetric_key, ciphertext)
end
@doc """
Initializes an AES stream in the given mode with a given
key and init vector.
## Examples
iex> stream = ExthCrypto.AES.stream_init(:ctr, ExthCrypto.Test.symmetric_key(), ExthCrypto.Test.init_vector)
iex> is_nil(stream)
false
"""
@spec stream_init(
ExthCrypto.Cipher.mode(),
ExthCrypto.Key.symmetric_key(),
ExthCrypto.Cipher.init_vector()
) :: ExthCrypto.Cipher.stream()
def stream_init(:ctr, symmetric_key, init_vector) do
:crypto.stream_init(:aes_ctr, symmetric_key, init_vector)
end
@doc """
Encrypts data with an already initialized AES stream, returning a
stream with updated state, as well as the ciphertext.
## Examples
iex> stream = ExthCrypto.AES.stream_init(:ctr, ExthCrypto.Test.symmetric_key(), ExthCrypto.Test.init_vector)
iex> {_stream_2, ciphertext} = ExthCrypto.AES.stream_encrypt("hello", stream)
iex> ciphertext
"'d<KX"
"""
@spec stream_encrypt(ExthCrypto.Cipher.plaintext(), ExthCrypto.Cipher.stream()) ::
{ExthCrypto.Cipher.stream(), ExthCrypto.Cipher.ciphertext()}
def stream_encrypt(plaintext, stream) do
:crypto.stream_encrypt(stream, plaintext)
end
@doc """
Decrypts data from an already initialized AES stream, returning a
stream with updated state, as well as the plaintext.
## Examples
iex> stream = ExthCrypto.AES.stream_init(:ctr, ExthCrypto.Test.symmetric_key(), ExthCrypto.Test.init_vector)
iex> {_stream_2, ciphertext} = ExthCrypto.AES.stream_encrypt("hello", stream)
iex> {_stream_3, plaintext} = ExthCrypto.AES.stream_decrypt(ciphertext, stream)
iex> plaintext
"hello"
"""
@spec stream_decrypt(ExthCrypto.Cipher.ciphertext(), ExthCrypto.Cipher.stream()) ::
{ExthCrypto.Cipher.stream(), ExthCrypto.Cipher.plaintext()}
def stream_decrypt(plaintext, stream) do
:crypto.stream_decrypt(stream, plaintext)
end
end
| apps/exth_crypto/lib/exth_crypto/aes.ex | 0.852859 | 0.4165 | aes.ex | starcoder
defmodule BetTelemetry.Instrumentation do
@moduledoc ~S"""
Provides a way to generate telemetry events
We can generate `start`, `stop` and `exception` events.
This works in conjunction with `BetTelemetry.OpenTelemetryReporter`: if we
generate a telemetry event and we have subscribed to that event with the OpenTelemetryReporter,
a span will be opened/closed.
See the OpenTelemetryReporter moduledoc for more info.
"""
require OpenTelemetry.Ctx, as: Ctx
@type namespace :: atom()
@type event :: atom()
@type span_action :: :start | :stop | :exception
@type metadata :: map()
@type extra_measurements :: map()
@type otel_ctx :: map()
@type w3c_trace :: binary()
@type parent_otel_ctx :: nil | otel_ctx() | w3c_trace()
@type start_time :: integer()
@type span_function(t) ::
(metadata() -> {t, metadata()})
| (metadata(), parent_otel_ctx() -> {t, metadata()})
@span_ctx_key {:otel_tracer, :span_ctx}
defguardp is_valid_event(namespace, event)
when is_atom(namespace) and is_atom(event)
defguardp is_valid_span(namespace, event, function)
when is_valid_event(namespace, event) and
is_function(function, 1)
defguardp is_valid_parent_otel_ctx(ctx)
when is_nil(ctx) or is_map(ctx) or is_binary(ctx)
@doc """
Get the current span context
This is useful when you want to open spans across processes.
The span context is lost when crossing process boundaries, so we need to store the current span
context in a variable and explicitly pass it in the call/cast/info message so we can use it
in the child process when starting a new span.
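
A minimal sketch (the namespace and event name are assumptions):

    ctx = BetTelemetry.Instrumentation.get_current_metadata()
    Task.start(fn ->
      BetTelemetry.Instrumentation.start(:my_app, :job, %{}, %{}, ctx)
    end)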
"""
@spec get_current_metadata() :: otel_ctx()
def get_current_metadata do
Ctx.get_current()
end
@doc """
Set the current span context.
This is useful when you want to explicitly set the context after changing
to a different process. This is done implicitly when you pass `metadata` to `start/4` so this is
almost never used directly.
"""
@spec set_current_metadata(otel_ctx()) :: reference()
def set_current_metadata(ctx) do
Ctx.attach(ctx)
end
@doc """
Trigger a start event for a given namespace and event.
It returns the start time so we can
calculate the duration when calling `stop/6` or `exception/9`.
Triggering a start event will also start a new span if we have subscribed to this event on the
application config (See `BetTelemetry.OpenTelemetryReporter.setup/1`)
If we pass a span context in the `metadata` it will set up the passed context as the current context
before starting a new span (if we have subscribed to this event)
Any information we pass on `metadata` or `extra_measurements` will be added as additional info to
the telemetry event triggered.
"""
@spec start(namespace(), event(), metadata(), extra_measurements()) :: start_time()
def start(namespace, event, meta \\ %{}, extra \\ %{})
when is_valid_event(namespace, event) do
start(namespace, event, meta, extra, nil)
end
@spec start(
namespace(),
event(),
metadata(),
extra_measurements(),
parent_otel_ctx()
) :: start_time()
def start(namespace, event, meta, extra, nil = _parent_ctx)
when is_valid_event(namespace, event) and is_map(meta) and is_map(extra) do
parent_ctx = get_current_metadata()
start(namespace, event, meta, extra, parent_ctx)
end
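# This clause accepts a W3C `traceparent` value
# ("00-<trace-id>-<parent-id>-<flags>") and decodes it into an
# OpenTelemetry span context to use as the parent.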
def start(
namespace,
event,
meta,
extra,
w3c_trace =
<<_version::bytes-size(2), "-", _trace_id_string::bytes-size(32), "-",
_parent_id_string::bytes-size(16), "-", _trace_flags_string::bytes-size(2)>>
)
when is_valid_event(namespace, event) and is_map(meta) and
is_map(extra) and is_binary(w3c_trace) do
parent_ctx = %{
@span_ctx_key => :otel_propagator_http_w3c.decode(w3c_trace)
}
start(namespace, event, meta, extra, parent_ctx)
end
def start(namespace, event, meta, extra, %{} = parent_ctx)
when is_valid_event(namespace, event) and is_map(meta) and is_map(extra) do
start_time = System.monotonic_time()
meta = ensure_current_context(meta, parent_ctx)
:telemetry.execute(
[namespace, event, :start],
Map.put(extra, :system_time, System.system_time()),
meta
)
start_time
end
@doc """
Trigger a stop event for a given namespace and event.
It calculates the event duration based on
the current time and the supplied start_time.
Triggering a stop event will also close the current span if we have subscribed to this event on the
application config (See `BetTelemetry.OpenTelemetryReporter.setup/1`)
Any information we pass on `metadata` or `extra_measurements` will be added as additional info to
the telemetry event triggered.
"""
@spec stop(namespace(), event(), start_time(), metadata(), extra_measurements(), parent_otel_ctx()) :: :ok
def stop(namespace, event, start_time, meta \\ %{}, extra \\ %{}, parent_ctx \\ nil)
when is_valid_event(namespace, event) and is_integer(start_time) and
is_map(meta) and is_map(extra) and
is_valid_parent_otel_ctx(parent_ctx) do
end_time = System.monotonic_time()
extra = Map.put(extra, :duration, end_time - start_time)
meta = ensure_current_context(meta, parent_ctx)
:telemetry.execute(
[namespace, event, :stop],
extra,
meta
)
end
@doc """
Execute a given function and record the time taken to complete.
It executes the given function and returns its end result.
If the function raises an error, the error will be reraised and an exception event will
be recorded.
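
A minimal sketch (the namespace, event and function body are assumptions):

    BetTelemetry.Instrumentation.span(:my_app, :fetch_user, fn meta ->
      result = do_expensive_work()
      {result, meta}
    end)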
"""
@spec span(namespace(), event(), span_function(t)) :: t when t: term()
def span(namespace, event, function)
when is_valid_span(namespace, event, function) do
span(namespace, event, function, %{})
end
@type span_opt ::
{:parent, parent_otel_ctx()}
| {:meta, map()}
| {:extra, map()}
@spec span(
namespace(),
event(),
span_function(t),
[span_opt()] | metadata()
) :: t
when t: term()
def span(namespace, event, function, opts)
when is_valid_span(namespace, event, function) and is_list(opts) do
parent = opts[:parent]
meta = opts[:meta] || %{}
extra = opts[:extra] || %{}
span(namespace, event, function, meta, extra, parent)
end
def span(namespace, event, function, meta)
when is_valid_span(namespace, event, function) and is_map(meta) do
span(namespace, event, function, meta, %{})
end
@spec span(
namespace(),
event(),
span_function(t),
metadata(),
extra_measurements(),
parent_otel_ctx()
) :: t
when t: term()
def span(namespace, event, function, meta, extra, parent_ctx \\ nil)
when is_valid_span(namespace, event, function) and
is_map(meta) and is_map(extra) do
start_time = start(namespace, event, meta, extra, parent_ctx)
try do
{result, stop_meta} = function.(meta)
stop(namespace, event, start_time, stop_meta, extra, parent_ctx)
result
catch
kind, value ->
exception(
namespace,
event,
start_time,
kind,
value,
__STACKTRACE__,
meta,
extra,
parent_ctx
)
:erlang.raise(kind, value, __STACKTRACE__)
end
end
@doc """
Trigger an exception event for a given namespace and event.
It calculates the event duration based on
the current time and the supplied start_time.
Triggering an exception event will also close the current span if we have subscribed to this event on the
application config (See `BetTelemetry.OpenTelemetryReporter.setup/1`)
Any information we pass on `metadata` or `extra_measurements` will be added as additional info to
the telemetry event triggered.
"""
@spec exception(
namespace(),
event(),
start_time(),
kind :: :exit | :throw | :error,
reason :: term(),
stacktrace :: list(),
metadata(),
extra_measurements(),
parent_otel_ctx()
) :: :ok
def exception(
namespace,
event,
start_time,
kind,
reason,
stack,
meta \\ %{},
extra \\ %{},
parent_ctx \\ nil
)
when is_valid_event(namespace, event) and is_integer(start_time) and
is_valid_parent_otel_ctx(parent_ctx) do
end_time = System.monotonic_time()
extra = Map.put(extra, :duration, end_time - start_time)
meta =
meta
|> ensure_current_context(parent_ctx)
|> Map.merge(%{
kind: kind,
reason: reason,
stacktrace: stack
})
:telemetry.execute(
[namespace, event, :exception],
extra,
meta
)
end
defp ensure_current_context(%{} = meta, _)
when is_map_key(meta, @span_ctx_key),
do: meta
defp ensure_current_context(%{} = meta, nil) do
meta
|> Map.put(@span_ctx_key, get_current_metadata())
|> ensure_current_context(nil)
end
defp ensure_current_context(%{} = meta, %{} = parent_ctx)
when map_size(parent_ctx) == 0 do
ensure_current_context(meta, nil)
end
defp ensure_current_context(%{} = meta, %{} = parent_ctx) do
meta
|> Map.put(@span_ctx_key, get_current_metadata())
|> put_in([@span_ctx_key, :parent_ctx], parent_ctx)
|> ensure_current_context(nil)
end
end
| lib/instrumentation.ex | 0.8339 | 0.479321 | instrumentation.ex | starcoder
defmodule Plymio.Ast.Vorm.Field do
@moduledoc ~S"""
Convenience wrappers for using a *vorm* held in a *struct*.
Most of these wrappers assume a *vorm* is, or will be, held in the `:vorm` field of a *struct* and allow functions such as `add` to be called where the first argument is the struct. For example, `struct_vorm_add` looks like:
    def struct_vorm_add(struct, new_forms) do
      with {:ok, %{vorm: %Plymio.Ast.Vorm{} = vorm} = struct} <- struct |> struct_vorm_ensure,
           {:ok, %Plymio.Ast.Vorm{} = vorm} <- vorm |> Plymio.Ast.Vorm.add(new_forms) do
        {:ok, struct |> struct!(vorm: vorm)}
      else
        {:error, _} = result -> result
      end
    end
> Since most of the wrappers work in the same way, only `struct_vorm_add` will be documented below with illustrative examples.
## Documentation Terms
In the documentation below these terms, usually in *italics*, are used to mean the same thing (e.g. *state*).
### *vorm*
An instance of `Plymio.Ast.Vorm` (*%Plymio.Ast.Vorm{}*).
### *vorm field*
The field called `:vorm` in a struct intended to hold a *vorm*.
### *state*
A instance of a struct with a *vorm field*.
"""
alias Plymio.Ast.Vorm, as: PAV
alias Plymio.Ast.Vorm.Utility, as: PAVU
import Plymio.Ast.Vorm.Utility, only: [
new_error_result: 1,
]
use Plymio.Ast.Vorm.Attribute
@type t :: struct
@type vorm :: %Plymio.Ast.Vorm{}
@type error :: struct
@type form :: Macro.t
@type forms :: [form]
@doc ~S"""
`struct_vorm_ensure/1` takes a *state* and checks if the *vorm field* already holds a *vorm*.
If it has `{:ok, state}` is returned.
If the *vorm field* is `nil`, a new, empty *vorm* is created, stored in the *vorm field*, and `{:ok, state}` returned.
Any other value in the *vorm field* will cause an error.
## Examples
iex> t = %PlymioAstVormFieldTest{}
...> match?(nil, t.vorm)
true
iex> {:ok, t} = %PlymioAstVormFieldTest{}
...> |> struct_vorm_ensure
...> match?(%Plymio.Ast.Vorm{}, t.vorm)
true
iex> {:ok, vorm} = Plymio.Ast.Vorm.new(form: quote(do: x = x + 1))
...> {:ok, t} = %PlymioAstVormFieldTest{vorm: vorm}
...> |> struct_vorm_ensure
...> t.vorm |> helper_vorm_test_forms!(binding: [x: 3])
{4, ["x = x + 1"]}
iex> %PlymioAstVormFieldTest{vorm: :not_a_vorm}
...> |> struct_vorm_ensure
{:error, %Plymio.Ast.Vorm.Error{error: nil, message: "vorm field invalid",
value: %PlymioAstVormFieldTest{vorm: :not_a_vorm}}}
"""
@spec struct_vorm_ensure(t) :: {:ok, t} | {:error, error}
def struct_vorm_ensure(t)
def struct_vorm_ensure(%{@pav_key_struct_id => _, @pav_key_vorm => %PAV{}} = state) do
{:ok, state}
end
def struct_vorm_ensure(%{@pav_key_struct_id => _, @pav_key_vorm => form} = state)
when @pav_value_vorm_initial_value == form do
with {:ok, form_state} <- PAV.new() do
{:ok, state |> struct!([{@pav_key_vorm, form_state}])}
else
{:error, _} = result -> result
end
end
def struct_vorm_ensure(state) do
new_error_result(m: "vorm field invalid", v: state)
end
@doc ~S"""
`struct_vorm_update/2` takes a *state* and a second argument.
If the second argument is a *vorm*, the *state* is updated with it and `{:ok, state}` returned.
Otherwise the second argument is passed to `new/1` and the new *vorm* stored in the *state*.
## Examples
iex> {:ok, vorm} = Plymio.Ast.Vorm.new(form: quote(do: x = x + 1))
...> t = %PlymioAstVormFieldTest{}
...> {:ok, t} = t |> struct_vorm_update(vorm)
...> t.vorm |> helper_vorm_test_forms!(binding: [x: 3])
{4, ["x = x + 1"]}
iex> t = %PlymioAstVormFieldTest{}
...> {:ok, t} = t |> struct_vorm_update(forms: quote(do: x = x + 1))
...> t.vorm |> helper_vorm_test_forms!(binding: [x: 3])
{4, ["x = x + 1"]}
iex> t = %PlymioAstVormFieldTest{}
...> t |> struct_vorm_update(:not_new_opts)
{:error, %Plymio.Ast.Vorm.Error{error: nil, message: "new vorm opts invalid", value: :not_new_opts}}
"""
@spec struct_vorm_update(t, any) :: {:ok, t} | {:error, error}
def struct_vorm_update(t, new_forms)
def struct_vorm_update(%{@pav_key_struct_id => _} = state, %PAV{} = vorm) do
{:ok, state |> struct!([{@pav_key_vorm, vorm}])}
end
def struct_vorm_update(%{@pav_key_struct_id => _} = state, opts) do
with true <- opts |> Keyword.keyword? do
with {:ok, %PAV{} = vorm} <- PAV.new(opts) do
{:ok, state |> struct!([{@pav_key_vorm, vorm}])}
else
{:error, _} = result -> result
end
else
false -> new_error_result(m: "new vorm opts invalid", v: opts)
end
end
@doc ~S"""
`struct_forms_update/2` takes a *state* and a second argument.
If the second argument is a *vorm*, the *state* is updated with the *vorm's* *forms* and `{:ok, state}` returned.
Otherwise the second argument is assumed to be one or more forms and the *state* is updated with them, returning `{:ok, state}`.
## Examples
iex> {:ok, vorm} = Plymio.Ast.Vorm.new(form: quote(do: x = x + 1))
...> t = %PlymioAstVormFieldTest{}
...> {:ok, t} = t |> struct_forms_update(vorm)
...> t.vorm |> helper_vorm_test_forms!(binding: [x: 3])
{4, ["x = x + 1"]}
iex> t = %PlymioAstVormFieldTest{}
...> {:ok, t} = t |> struct_forms_update(forms: quote(do: x = x + 1))
...> t.vorm |> helper_vorm_test_forms!(binding: [x: 3])
{4, ["x = x + 1"]}
iex> t = %PlymioAstVormFieldTest{}
...> {:ok, t} = t |> struct_forms_update(:a_valid_ast)
...> t.vorm |> helper_vorm_test_forms!(binding: [x: 3])
{:a_valid_ast, [":a_valid_ast"]}
"""
@spec struct_forms_update(t, any) :: {:ok, t} | {:error, error}
def struct_forms_update(t, new_forms)
def struct_forms_update(%{@pav_key_struct_id => _} = state, %PAV{} = vorm) do
state |> struct_vorm_update(vorm)
end
def struct_forms_update(%{@pav_key_struct_id => _} = state, forms) when is_list(forms) do
with true <- forms |> Keyword.keyword? do
state |> struct_vorm_update(forms)
else
false ->
with {:ok, %{@pav_key_struct_id => _, @pav_key_vorm => %PAV{} = vorm} = state} <- state |> struct_vorm_ensure,
{:ok, %PAV{} = vorm} <- vorm |> PAV.vorm_update_forms(forms) do
{:ok, state |> struct!([{@pav_key_vorm, vorm}])}
else
{:error, _} = result -> result
end
end
end
def struct_forms_update(%{@pav_key_struct_id => _} = state, value) do
with {:ok, _} <- value |> PAVU.form_validate do
state |> struct_forms_update([value])
else
_ -> new_error_result(m: "new vorm forms invalid", v: value)
end
end
@doc ~S"""
`struct_vorm_reset/1` takes a *state* and resets its *vorm field* to `nil`, returning `{:ok, state}`.
## Examples
iex> t = %PlymioAstVormFieldTest{}
...> {:ok, t} = t |> struct_vorm_reset
...> match?(nil, t.vorm)
true
iex> {:ok, vorm} = Plymio.Ast.Vorm.new(form: quote(do: x = x + 1))
...> {:ok, t} = %PlymioAstVormFieldTest{vorm: vorm}
...> |> struct_vorm_reset
...> match?(nil, t.vorm)
true
"""
@spec struct_vorm_reset(t) :: {:ok, t} | {:error, error}
def struct_vorm_reset(%{@pav_key_struct_id => _} = state) do
{:ok, state |> struct!([{@pav_key_vorm, @pav_value_vorm_initial_value}])}
end
@doc ~S"""
`struct_vorm_add/2` appends *new_forms* to the *vorm forms* of the *vorm* in the *vorm* field.
`struct_vorm_ensure/1` is called first to ensure the *vorm field* is a *vorm*.
If the `add` succeeds, `{:ok, state}` is returned.
## Examples
iex> t = %PlymioAstVormFieldTest{}
...> {:ok, t} = t |> struct_vorm_add(quote(do: x = x + 1))
...> t.vorm |> helper_vorm_test_forms!(binding: [x: 7])
{8, ["x = x + 1"]}
iex> t = %PlymioAstVormFieldTest{}
...> {:ok, t} = t |> struct_vorm_add(quote(do: x = x + 1))
...> {:ok, t} = t |> struct_vorm_add([quote(do: x = x * x), quote(do: x = x - 1)])
...> t.vorm |> helper_vorm_test_forms!(binding: [x: 7])
{63, ["x = x + 1", "x = x * x", "x = x - 1"]}
"""
@struct_var :state |> Macro.var(nil)
@struct_match quote(do: %{@pav_key_struct_id => _} = unquote(@struct_var))
@vorm_var :vorm |> Macro.var(nil)
@other_vars 5 |> Macro.generate_arguments(nil)
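# Each {arity, struct_fun, vorm_fun} tuple below generates a wrapper that
# ensures the struct's :vorm field holds a vorm, delegates to the
# corresponding Plymio.Ast.Vorm function, and stores the updated vorm back
# in the struct.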
@struct_vorm_delegates_ok_state [
{2, :struct_vorm_add, :add},
{2, :struct_vorm_put, :put},
{2, :struct_vorm_filter, :filter},
{2, :struct_vorm_reject, :reject},
{3, :struct_vorm_insert, :insert},
{3, :struct_vorm_replace, :replace},
{2, :struct_vorm_reduce, :reduce},
{2, :struct_vorm_transform, :transform},
{3, :struct_vorm_transform, :transform},
{3, :struct_vorm_postwalk, :postwalk},
{3, :struct_vorm_prewalk, :prewalk},
{5, :struct_vorm_traverse, :traverse},
{3, :struct_vorm_pipe_before, :pipe_before},
{3, :struct_vorm_pipe_after, :pipe_after},
]
for {arity, struct_fun, form_fun} <- @struct_vorm_delegates_ok_state do
struct_args = [@struct_match | Enum.take(@other_vars,arity-1)]
form_args = Enum.take(@other_vars, arity-1)
def unquote(struct_fun)(unquote_splicing(struct_args)) do
with {:ok, %{@pav_key_struct_id => _, @pav_key_vorm => %Plymio.Ast.Vorm{} = unquote(@vorm_var)} = unquote(@struct_var)} <- unquote(@struct_var) |> struct_vorm_ensure,
{:ok, %Plymio.Ast.Vorm{} = unquote(@vorm_var)} <- unquote(@vorm_var) |> Plymio.Ast.Vorm.unquote(form_fun)(unquote_splicing(form_args)) do
{:ok, unquote(@struct_var) |> struct!([{@pav_key_vorm, unquote(@vorm_var)}])}
else
{:error, _} = result -> result
end
end
end
@struct_vorm_delegates_ok_forms_state [
{1, :struct_vorm_express, :express},
{1, :struct_vorm_produce, :produce},
]
for {arity, struct_fun, form_fun} <- @struct_vorm_delegates_ok_forms_state do
struct_args = [@struct_match | Enum.take(@other_vars,arity-1)]
form_args = Enum.take(@other_vars, arity-1)
def unquote(struct_fun)(unquote_splicing(struct_args)) do
with {:ok, %{@pav_key_struct_id => _, @pav_key_vorm => %Plymio.Ast.Vorm{} = unquote(@vorm_var)} = unquote(@struct_var)} <- unquote(@struct_var) |> struct_vorm_ensure,
{:ok, {forms, %Plymio.Ast.Vorm{} = unquote(@vorm_var) = unquote(@vorm_var)}} <- unquote(@vorm_var) |> Plymio.Ast.Vorm.unquote(form_fun)(unquote_splicing(form_args)) do
{:ok, {forms, unquote(@struct_var) |> struct!([{@pav_key_vorm, unquote(@vorm_var)}])}}
else
{:error, _} = result -> result
end
end
end
@struct_vorm_delegates_ok_forms [
{2, :struct_vorm_fetch, :fetch},
]
for {arity, struct_fun, form_fun} <- @struct_vorm_delegates_ok_forms do
struct_args = [@struct_match | Enum.take(@other_vars,arity-1)]
form_args = Enum.take(@other_vars, arity-1)
def unquote(struct_fun)(unquote_splicing(struct_args)) do
with {:ok, %{@pav_key_struct_id => _, @pav_key_vorm => %Plymio.Ast.Vorm{} = unquote(@vorm_var)}} <- unquote(@struct_var) |> struct_vorm_ensure,
{:ok, _forms} = result <- unquote(@vorm_var) |> Plymio.Ast.Vorm.unquote(form_fun)(unquote_splicing(form_args)) do
result
else
{:error, _} = result -> result
end
end
end
end
| lib/ast/vorm/field.ex | 0.880605 | 0.721651 | field.ex | starcoder
defimpl Timex.Protocol, for: NaiveDateTime do
@moduledoc """
This module implements Timex functionality for NaiveDateTime
"""
alias Timex.{Types, Duration}
import Timex.Macros
@epoch_seconds :calendar.datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}})
@spec now() :: NaiveDateTime.t
def now() do
Timex.to_naive_datetime(Timex.from_unix(:os.system_time, :native))
end
@spec to_julian(NaiveDateTime.t) :: integer
def to_julian(%NaiveDateTime{:year => y, :month => m, :day => d}) do
Timex.Calendar.Julian.julian_date(y, m, d)
end
@spec to_gregorian_seconds(NaiveDateTime.t) :: integer
def to_gregorian_seconds(date), do: to_seconds(date, :zero)
@spec to_gregorian_microseconds(NaiveDateTime.t) :: integer
def to_gregorian_microseconds(%NaiveDateTime{microsecond: {us,_}} = date) do
s = to_seconds(date, :zero)
(s*(1_000*1_000))+us
end
@spec to_unix(NaiveDateTime.t) :: integer
def to_unix(date), do: trunc(to_seconds(date, :epoch))
@spec to_date(NaiveDateTime.t) :: Date.t
def to_date(date), do: NaiveDateTime.to_date(date)
@spec to_datetime(NaiveDateTime.t, timezone :: Types.valid_timezone) :: DateTime.t | {:error, term}
def to_datetime(%NaiveDateTime{:microsecond => {us,_}} = d, timezone) do
{date,{h,mm,s}} = NaiveDateTime.to_erl(d)
Timex.DateTime.Helpers.construct({date,{h,mm,s,us}}, timezone)
end
@spec to_naive_datetime(NaiveDateTime.t) :: NaiveDateTime.t
def to_naive_datetime(%NaiveDateTime{} = date), do: date
@spec to_erl(NaiveDateTime.t) :: Types.datetime
def to_erl(%NaiveDateTime{} = d), do: NaiveDateTime.to_erl(d)
@spec century(NaiveDateTime.t) :: non_neg_integer
def century(%NaiveDateTime{:year => year}), do: Timex.century(year)
@spec is_leap?(NaiveDateTime.t) :: boolean
def is_leap?(%NaiveDateTime{year: year}), do: :calendar.is_leap_year(year)
@spec beginning_of_day(NaiveDateTime.t) :: NaiveDateTime.t
def beginning_of_day(%NaiveDateTime{:microsecond => {_, _precision}} = datetime) do
%{datetime | :hour => 0, :minute => 0, :second => 0, :microsecond => {0, 0}}
end
@spec end_of_day(NaiveDateTime.t) :: NaiveDateTime.t
def end_of_day(%NaiveDateTime{microsecond: {_, _precision}} = datetime) do
%{datetime | :hour => 23, :minute => 59, :second => 59, :microsecond => {999_999, 6}}
end
@spec beginning_of_week(NaiveDateTime.t, Types.weekday) :: NaiveDateTime.t
def beginning_of_week(%NaiveDateTime{} = date, weekstart) do
case Timex.days_to_beginning_of_week(date, weekstart) do
{:error, _} = err -> err
days ->
beginning_of_day(shift(date, [days: -days]))
end
end
@spec end_of_week(NaiveDateTime.t, Types.weekday) :: NaiveDateTime.t
def end_of_week(%NaiveDateTime{} = date, weekstart) do
case Timex.days_to_end_of_week(date, weekstart) do
{:error, _} = err -> err
days_to_end ->
end_of_day(shift(date, [days: days_to_end]))
end
end
@spec beginning_of_year(NaiveDateTime.t) :: NaiveDateTime.t
def beginning_of_year(%NaiveDateTime{:year => y}) do
{:ok, nd} = NaiveDateTime.new(y, 1, 1, 0, 0, 0)
nd
end
@spec end_of_year(NaiveDateTime.t) :: NaiveDateTime.t
def end_of_year(%NaiveDateTime{} = date),
do: %{date | :month => 12, :day => 31, :hour => 23, :minute => 59, :second => 59, :microsecond => {999_999, 6}}
@spec beginning_of_quarter(NaiveDateTime.t) :: NaiveDateTime.t
def beginning_of_quarter(%NaiveDateTime{month: month} = date) do
month = 1 + (3 * (Timex.quarter(month) - 1))
beginning_of_month(%{date | :month => month, :day => 1})
end
@spec end_of_quarter(NaiveDateTime.t) :: NaiveDateTime.t
def end_of_quarter(%NaiveDateTime{month: month} = date) do
month = 3 * Timex.quarter(month)
end_of_month(%{date | :month => month, :day => 1})
end
@spec beginning_of_month(NaiveDateTime.t) :: NaiveDateTime.t
def beginning_of_month(%NaiveDateTime{} = datetime),
do: %{datetime | :day => 1, :hour => 0, :minute => 0, :second => 0, :microsecond => {0,0}}
@spec end_of_month(NaiveDateTime.t) :: NaiveDateTime.t
def end_of_month(%NaiveDateTime{} = date),
do: %{date | :day => days_in_month(date), :hour => 23, :minute => 59, :second => 59, :microsecond => {999_999, 6}}
@spec quarter(NaiveDateTime.t) :: integer
def quarter(%NaiveDateTime{month: month}), do: Timex.quarter(month)
def days_in_month(%NaiveDateTime{:year => y, :month => m}), do: Timex.days_in_month(y, m)
def week_of_month(%NaiveDateTime{:year => y, :month => m, :day => d}), do: Timex.week_of_month(y,m,d)
def weekday(%NaiveDateTime{:year => y, :month => m, :day => d}), do: :calendar.day_of_the_week({y, m, d})
def day(%NaiveDateTime{} = date) do
{:ok, nd} = NaiveDateTime.new(date.year,1,1,0,0,0)
1 + Timex.diff(date, nd, :days)
end
def is_valid?(%NaiveDateTime{:year => y, :month => m, :day => d,
:hour => h, :minute => min, :second => sec}) do
:calendar.valid_date({y,m,d}) and Timex.is_valid_time?({h,min,sec})
end
def iso_week(%NaiveDateTime{:year => y, :month => m, :day => d}),
do: Timex.iso_week(y, m, d)
def from_iso_day(%NaiveDateTime{year: year} = date, day) when is_day_of_year(day) do
{year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(year, day)
%{date | :year => year, :month => month, :day => day_of_month}
end
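# Sets one or more date/time components in bulk, e.g. (illustrative):
#
#   set(naive_datetime, date: {2020, 1, 1}, time: {12, 0, 0})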
@spec set(NaiveDateTime.t, list({atom(), term})) :: NaiveDateTime.t | {:error, term}
def set(%NaiveDateTime{} = date, options) do
validate? = Keyword.get(options, :validate, true)
Enum.reduce(options, date, fn
_option, {:error, _} = err ->
err
option, result ->
case option do
{:validate, _} -> result
{:datetime, {{y, m, d}, {h, min, sec}}} ->
if validate? do
%{result |
:year => Timex.normalize(:year, y),
:month => Timex.normalize(:month, m),
:day => Timex.normalize(:day, {y,m,d}),
:hour => Timex.normalize(:hour, h),
:minute => Timex.normalize(:minute, min),
:second => Timex.normalize(:second, sec)
}
else
%{result | :year => y, :month => m, :day => d, :hour => h, :minute => min, :second => sec}
end
{:date, {y, m, d}} ->
if validate? do
{yn,mn,dn} = Timex.normalize(:date, {y,m,d})
%{result | :year => yn, :month => mn, :day => dn}
else
%{result | :year => y, :month => m, :day => d}
end
{:time, {h, m, s}} ->
if validate? do
%{result |
:hour => Timex.normalize(:hour, h),
:minute => Timex.normalize(:minute, m),
:second => Timex.normalize(:second, s)}
else
%{result | :hour => h, :minute => m, :second => s}
end
{:day, d} ->
if validate? do
%{result | :day => Timex.normalize(:day, {result.year, result.month, d})}
else
%{result | :day => d}
end
{name, val} when name in [:year, :month, :hour, :minute, :second, :microsecond] ->
if validate? do
Map.put(result, name, Timex.normalize(name, val))
else
Map.put(result, name, val)
end
{name, _} when name in [:timezone] ->
result
{option_name, _} ->
{:error, {:bad_option, option_name}}
end
end)
end
@spec shift(NaiveDateTime.t, list({atom(), term})) :: NaiveDateTime.t | {:error, term}
def shift(%NaiveDateTime{} = datetime, shifts) when is_list(shifts) do
apply_shifts(datetime, shifts)
end
defp apply_shifts(datetime, []),
do: datetime
defp apply_shifts(datetime, [{:duration, %Duration{} = duration} | rest]) do
total_microseconds = Duration.to_microseconds(duration)
seconds = div(total_microseconds, 1_000*1_000)
rem_microseconds = rem(total_microseconds, 1_000*1_000)
shifted = shift_by(datetime, seconds, :seconds)
shifted = %{shifted | :microsecond => Timex.DateTime.Helpers.construct_microseconds(rem_microseconds)}
apply_shifts(shifted, rest)
end
defp apply_shifts(datetime, [{unit, 0} | rest]) when is_atom(unit),
do: apply_shifts(datetime, rest)
defp apply_shifts(datetime, [{unit, value} | rest]) when is_atom(unit) and is_integer(value) do
shifted = shift_by(datetime, value, unit)
apply_shifts(shifted, rest)
end
defp apply_shifts({:error, _} = err, _),
do: err
defp shift_by(%NaiveDateTime{:year => y} = datetime, value, :years) do
shifted = %{datetime | :year => y + value}
# If a plain shift of the year fails, then it likely falls on a leap day,
# so set the day to the last day of that month
case :calendar.valid_date({shifted.year,shifted.month,shifted.day}) do
false ->
last_day = :calendar.last_day_of_the_month(shifted.year, shifted.month)
cond do
shifted.day <= last_day ->
shifted
:else ->
%{shifted | :day => last_day}
end
true ->
shifted
end
end
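# Shifting by months may cross year boundaries, so the year is adjusted
# with div/rem before validating (and clamping) the day of the month.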
defp shift_by(%NaiveDateTime{:year => year, :month => month} = datetime, value, :months) do
m = month + value
shifted = cond do
m > 0 -> %{datetime | :year => year + div(m - 1, 12), :month => rem(m - 1, 12) + 1}
m <= 0 -> %{datetime | :year => year + div(m, 12) - 1, :month => 12 + rem(m, 12)}
end
# If the shift fails, it's because it's a high day number, and the month
# shifted to does not have that many days. This will be handled by always
# shifting to the last day of the month shifted to.
case :calendar.valid_date({shifted.year,shifted.month,shifted.day}) do
false ->
last_day = :calendar.last_day_of_the_month(shifted.year, shifted.month)
cond do
shifted.day <= last_day ->
shifted
:else ->
%{shifted | :day => last_day}
end
true ->
shifted
end
end
defp shift_by(%NaiveDateTime{microsecond: {current_usecs, _}} = datetime, value, :microseconds) do
usecs_from_zero = :calendar.datetime_to_gregorian_seconds({
{datetime.year,datetime.month,datetime.day},
{datetime.hour,datetime.minute,datetime.second}
}) * (1_000*1_000) + current_usecs + value
secs_from_zero = div(usecs_from_zero, 1_000*1_000)
rem_microseconds = rem(usecs_from_zero, 1_000*1_000)
shifted = :calendar.gregorian_seconds_to_datetime(secs_from_zero)
shifted = Timex.to_naive_datetime(shifted)
%{shifted | :microsecond => Timex.DateTime.Helpers.construct_microseconds(rem_microseconds)}
end
defp shift_by(%NaiveDateTime{microsecond: {current_usecs, _}} = datetime, value, :milliseconds) do
usecs_from_zero = :calendar.datetime_to_gregorian_seconds({
{datetime.year,datetime.month,datetime.day},
{datetime.hour,datetime.minute,datetime.second}
}) * (1_000*1_000) + current_usecs + (value*1_000)
secs_from_zero = div(usecs_from_zero, 1_000*1_000)
rem_microseconds = rem(usecs_from_zero, 1_000*1_000)
shifted = :calendar.gregorian_seconds_to_datetime(secs_from_zero)
shifted = Timex.to_naive_datetime(shifted)
%{shifted | :microsecond => Timex.DateTime.Helpers.construct_microseconds(rem_microseconds)}
end
defp shift_by(%NaiveDateTime{microsecond: {us, _}} = datetime, value, units) do
secs_from_zero = :calendar.datetime_to_gregorian_seconds({
{datetime.year,datetime.month,datetime.day},
{datetime.hour,datetime.minute,datetime.second}
})
shift_by = case units do
:microseconds -> div(value + us, 1_000*1_000)
:milliseconds -> div((value*1_000 + us), 1_000*1_000)
:seconds -> value
:minutes -> value * 60
:hours -> value * 60 * 60
:days -> value * 60 * 60 * 24
:weeks -> value * 60 * 60 * 24 * 7
_ ->
{:error, {:unknown_shift_unit, units}}
end
case shift_by do
{:error, _} = err -> err
0 when units in [:microseconds] ->
%{datetime | :microsecond => Timex.DateTime.Helpers.construct_microseconds(value+us)}
0 when units in [:milliseconds] ->
%{datetime | :microsecond => Timex.DateTime.Helpers.construct_microseconds((value*1_000)+us)}
0 ->
datetime
_ ->
new_secs_from_zero = secs_from_zero + shift_by
cond do
new_secs_from_zero <= 0 ->
{:error, :shift_to_invalid_date}
:else ->
shifted = :calendar.gregorian_seconds_to_datetime(new_secs_from_zero)
shifted = Timex.to_naive_datetime(shifted)
%{shifted | :microsecond => Timex.DateTime.Helpers.construct_microseconds(us)}
end
end
end
defp to_seconds(%NaiveDateTime{year: y, month: m, day: d, hour: h, minute: mm, second: s}, :zero) do
:calendar.datetime_to_gregorian_seconds({{y,m,d},{h,mm,s}})
end
defp to_seconds(%NaiveDateTime{year: y, month: m, day: d, hour: h, minute: mm, second: s}, :epoch) do
:calendar.datetime_to_gregorian_seconds({{y,m,d},{h,mm,s}}) - @epoch_seconds
end
end
| data/web/deps/timex/lib/datetime/naivedatetime.ex | 0.776792 | 0.582432 | naivedatetime.ex | starcoder
defmodule Terp.Evaluate.Boolean do
@moduledoc """
Boolean values and conditional evaluation.
"""
alias Terp.Evaluate
@doc """
true
## Examples
iex> Terp.Evaluate.Boolean.t
true
"""
def t(), do: true
@doc """
false
## Examples
iex> Terp.Evaluate.Boolean.f
false
"""
def f(), do: false
@doc """
If - then - else conditional logic.
## Examples
iex> [RoseTree.new(true), RoseTree.new(5), RoseTree.new(3)]
...> |> Terp.Evaluate.Boolean.conditional(fn x -> x end)
5
iex> [RoseTree.new(false), RoseTree.new(5), RoseTree.new(3)]
...> |> Terp.Evaluate.Boolean.conditional(fn x -> x end)
3
"""
def conditional([test | [consequent | [alternative | []]]], env) do
if Evaluate.eval_expr(test, env) do
Evaluate.eval_expr(consequent, env)
else
Evaluate.eval_expr(alternative, env)
end
end
@doc """
`cond/2` evaluates a list of conditions one by one until
a true condition is found; when one is true, the body is evaluated.
## Examples
iex> "(cond [(equal? 1 5) (9)] [#t 5])"
...> |> Terp.eval()
5
iex> "(cond [(equal? 1 5) (9)] [#f 5])"
...> |> Terp.eval()
{:error, {:cond, "no true condition"}}
iex> "(cond [(equal? (+ 2 3) 5) 9] [#f 5])"
...> |> Terp.eval()
9
"""
def cond([], _env), do: {:error, {:cond, "no true condition"}}
def cond([%{node: [condition | consequent]} | conditions], env) do
if Evaluate.eval_expr(condition, env) do
# An artifact of the parser; pulls in consequent as a list.
consequent
|> Enum.map(&Evaluate.eval_expr(&1, env))
|> List.first
else
cond(conditions, env)
end
end
@doc """
Test whether two values are equal.
## Examples
iex> "(equal? 5 3)" |> Terp.eval()
false
iex> "(equal? 5 5)" |> Terp.eval()
true
"""
def equal?(operands, environment) do
case Enum.map(operands, &Evaluate.eval_expr(&1, environment)) do
[x | [y | []]] ->
x == y
_ ->
{:error, {:equal?, "invalid number of arguments"}}
end
end
end
| lib/evaluate/boolean.ex | 0.783077 | 0.434521 | boolean.ex | starcoder
defmodule Delugex.MessageStore.Mnesia do
@moduledoc """
This is the real implementation of MessageStore.
You should be able to infer what to write, it's just passing the
required arguments to the SQL functions and converting any returned value.
Whenever a stream name is expected, please use the %StreamName struct and
make sure to convert it to string.
"""
use Delugex.MessageStore
import Delugex.MessageStore,
only: [
is_version: 1,
is_expected_version: 1,
is_batch_size: 1
]
alias Delugex.StreamName
alias Delugex.Event
alias Delugex.Event.Raw
alias Delugex.Event.Metadata
alias Delugex.MessageStore.Mnesia.Repo
alias Delugex.MessageStore.Mnesia.ExpectedVersionError, as: MnesiaVersionError
def start do
Repo.create()
end
def stop do
Repo.delete()
end
@impl Delugex.MessageStore
@doc """
Write has an optional expected_version argument. This argument could be one of:
- nil: no version expected
- no_stream: no message has ever been written to this stream (the stream
version is nil)
- An integer (0+): Representing the expected version
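
For example (illustrative; the field values are assumptions):

    event = %Delugex.Event{
      id: Ecto.UUID.generate(),
      stream_name: stream_name,
      type: "Created",
      data: %{},
      metadata: %{}
    }
    Delugex.MessageStore.Mnesia.write!(event, :no_stream)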
"""
def write!(%Event{} = event, expected_version \\ nil)
when is_expected_version(expected_version) do
expected_version = to_number_version(expected_version)
params = encode_event(event)
params = params ++ [expected_version]
case Repo.write_message(params) do
{:atomic, version} ->
version
{_, %MnesiaVersionError{} = error} ->
as_known_error!(error)
end
end
@impl Delugex.MessageStore
@doc """
- `events` list of events to write
- `stream_name` stream where events will be written to (will overwrite
any stream_name provided in the events)
- optional `expected_version` argument. This argument could be one of:
- `nil`: no version expected
- `:no_stream`: no message has ever been written to this stream (the stream
version is nil)
- An integer (0+): Representing the expected version
"""
def write_batch!(
events,
stream_name,
expected_version \\ nil
)
when is_list(events) and is_expected_version(expected_version) do
insertables =
events
|> Stream.map(fn event -> Map.put(event, :stream_name, stream_name) end)
|> Stream.with_index()
|> Stream.map(fn {event, index} ->
case index do
0 -> encode_event(event) ++ [to_number_version(expected_version)]
_ -> encode_event(event) ++ [nil]
end
end)
{:atomic, version} = Repo.write_messages(insertables)
version
end
@impl Delugex.MessageStore
@doc """
Retrieves the last message in the stream given by stream_name (based on greatest position).
"""
def read_last(stream_name) do
{:atomic, message} = Repo.get_last_message(stream_name)
row_to_event_raw(message)
end
@impl Delugex.MessageStore
@doc """
Retrieves messages by stream_name, in batches of 10 by default.
"""
def read_batch(stream_name, position \\ 0, batch_size \\ 10)
when is_version(position) and is_batch_size(batch_size) do
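# Category stream names (no entity id) read across the whole category,
# while entity stream names read a single stream.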
query_fun =
case StreamName.category?(stream_name) do
true -> &Repo.get_category_messages/3
false -> &Repo.get_stream_messages/3
end
{
:atomic,
{records, _cont}
} = query_fun.(stream_name, position, batch_size)
rows_to_events(records)
end
@impl Delugex.MessageStore
@doc """
Retrieves the last message position, or :no_stream if none are present
"""
def read_version(stream_name) do
{:atomic, version} = Repo.stream_version(stream_name)
case version do
nil -> :no_stream
_ -> version
end
end
@impl Delugex.MessageStore
@doc """
Receives notifications as GenServer casts. Two types of notifications are
received:
- `{:notification, connection_pid, ref, channel, payload}` with a notify
from Postgres (check
[Postgrex documentation](https://hexdocs.pm/postgrex/Postgrex.Notifications.html#listen/3))
- `{:reminder}` which is received every X seconds
"""
def listen(stream_name, opts \\ []) do
stream_name = StreamName.to_string(stream_name)
Repo.Notifications.listen(stream_name, opts)
end
@impl Delugex.MessageStore
@doc """
Stops notifications
"""
def unlisten(ref, opts \\ []) do
Repo.Notifications.unlisten(ref, opts)
end
defp to_number_version(:no_stream), do: -1
defp to_number_version(nil), do: nil
defp to_number_version(expected_version), do: expected_version
defp encode_event(%Event{
id: id,
stream_name: stream_name,
type: type,
data: data,
metadata: metadata
}) do
id = cast_uuid_as_string(id)
stream_name = StreamName.to_string(stream_name)
[id, stream_name, type, data, metadata]
end
defp rows_to_events(rows) do
rows
|> Enum.map(&row_to_event_raw/1)
end
defp row_to_event_raw([
id,
stream_name,
type,
position,
global_position,
data,
metadata,
time
]) do
id = cast_uuid_as_string(id)
%Raw{
id: decode_id(id),
stream_name: decode_stream_name(stream_name),
type: type,
position: position,
global_position: global_position,
data: decode_data(data),
metadata: decode_metadata(metadata),
time: time
}
end
defp symbolize(map) do
map
|> Map.new(fn {k, v} -> {String.to_existing_atom(k), v} end)
end
defp as_known_error!(%MnesiaVersionError{} = error) do
raise Delugex.MessageStore.ExpectedVersionError, message: error.message
end
defp as_known_error!(error) do
raise error
end
defp cast_uuid_as_string(id) do
Ecto.UUID.cast!(id)
end
defp decode_stream_name(text_stream_name) do
decoder =
__MODULE__
|> Delugex.Config.get(:stream_name, [])
|> Keyword.get(:decoder, Delugex.Stream.Name)
decoder.decode(text_stream_name)
end
defp decode_metadata(map) do
metadata =
map
|> decode_json()
|> symbolize()
struct(Metadata, metadata)
end
defp decode_data(map) do
map
|> decode_json()
|> symbolize()
end
defp decode_id(id) do
cast_uuid_as_string(id)
end
defp decode_json(text) do
decoder =
__MODULE__
|> Delugex.Config.get(:json, [])
|> Keyword.get(:decoder, Jason)
decoder.decode!(text)
end
end
| lib/delugex/message_store/mnesia.ex | 0.824427 | 0.563858 | mnesia.ex | starcoder
defmodule Absinthe.Phoenix.Controller do
@moduledoc """
Supports use of GraphQL documents inside Phoenix controllers.
## Example
First, `use Absinthe.Phoenix.Controller`, passing your `schema`:
```elixir
defmodule MyAppWeb.UserController do
use MyAppWeb, :controller
use Absinthe.Phoenix.Controller, schema: MyAppWeb.Schema
# ... actions
end
```
For each action you want Absinthe to process, provide a GraphQL document using
the `@graphql` module attribute (before the action):
```
@graphql \"""
query ($filter: UserFilter) {
users(filter: $filter, limit: 10)
}
\"""
def index(conn, %{data: data}) do
render conn, "index.html", data
end
```
The params for the action will be intercepted by the
`Absinthe.Phoenix.Controller.Action` plug, and used as variables for
the GraphQL document you've specified.
For instance, given a definition for a `:user_filter` input object
type like this:
```
input_object :user_filter do
field :name_matches, :string
field :age_above, :integer
field :age_below, :integer
end
```
And a query that looks like this (assuming you have the normal
`Plug.Parsers` configuration for param parsing):
```
?filter[name_matches]=joe&filter[age_above]=42
```
Then Absinthe will receive variable definitions of:
```
%{"filter" => %{"name_matches" => "joe", "age_above" => 42}}
```
(For how the string `"42"` was converted into `42`, see `cast_param/3`).
The params on the `conn` will then be replaced by the result of the
execution by Absinthe. The action function can then match against
that result to respond correctly to the user.
It's up to you to handle the three possible results:
- When there's `:data` but no `:errors`, everything went perfectly.
- When there's `:errors` but no `:data`, a validation error occurred and the document could not be
executed.
- When there's `:data` and `:errors`, partial data is available but some fields reported errors
during execution.
Notice the keys are atoms, not strings as in normal Phoenix action invocations.
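
One way to pattern match the three cases in an action (illustrative; the
template names are assumptions):

```
def index(conn, %{data: data, errors: errors}) do
  # Partial data: render what's available, surfacing the errors
  render conn, "index.html", Map.put(data, :errors, errors)
end

def index(conn, %{data: data}) do
  render conn, "index.html", data
end

def index(conn, %{errors: errors}) do
  conn
  |> put_status(:bad_request)
  |> render("errors.html", errors: errors)
end
```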
## Differences with the GraphQL Specification
There are some important differences between GraphQL documents as
processed in an HTTP API and the GraphQL documents that this module
supports.
In an effort to make use of GraphQL ergonomic in Phoenix controllers
and views, Absinthe supports some slight structural modifications to
the GraphQL documents provided using the `@graphql` module attribute
in controller modules.
In a way, you can think of these changes as a specialized GraphQL
dialect. The following are the differences you need to keep in mind.
### Objects can be leaf nodes
Let's look at the `users` example mentioned before:
```
@graphql \"""
query ($filter: UserFilter) {
users(filter: $filter, limit: 10)
}
\"""
```
You'll notice that in the above example, `users` doesn't have an
accompanying _selection set_ (that is, a set of child fields bounded
by `{ ... }`). The GraphQL specification dictates that only scalar
values can be "leaf nodes" in a GraphQL document... but to support
unmodified struct values being returned (for example, Ecto schemas),
if no selection set is provided for an object value (or list
thereof), the entire value is returned.
The template can then use `users` as needed:
```
<ul>
<%= for user <- @users do %>
<li><%= link user.full_name, to: user_path(@conn, :show, user) %></li>
<% end %>
</ul>
```
This is useful for `Phoenix.HTML` helper functions that expect
structs with specific fields (especially `form_for`).
One way to think of this change is that, for objects, no selection
set is equivalent to a "splat" operator (except, of course, even
fields not defined in your GraphQL schema are returned as part of
the value).
But, never fear, nothing is stopping you from ignoring this behavior
and providing a selection set if you want a traditionally narrow set
of fields:
```
@graphql \"""
query ($filter: UserFilter) {
users(filter: $filter, limit: 10) {
id
full_name
}
}
\"""
```
### Scalar values aren't serialized
To remove the need for reparsing values, scalar values aren't serialized;
Phoenix actions receive the original, unserialized values of GraphQL fields.
This is especially useful for custom scalar types. Using a couple of the
additional types packaged in `Absinthe.Type.Custom`, for example:
- `:decimal` values are returned as `%Decimal{}` structs, not strings.
- `:datetime` values are returned as `%DateTime{}` structs, not strings.
  In short, GraphQL used in controllers is a query language to retrieve the
  requested values; there's no need to serialize the values to send them
  across HTTP.
### Fields use snake_case
Unlike in the GraphQL notation scheme we prefer for GraphQL APIs (that is,
`camelCase` fields, which better match up with the expectations of JavaScript
clients), fields used in documents provided as `@graphql` should use
`snake_case` naming, as Elixir conventions use that notation style for atoms,
etc.
### Atom keys
Because you are writing the GraphQL document in your controller and Absinthe
is validating the document against your schema, atom keys are returned for
field names.
"""
defmacro __using__(opts \\ []) do
schema = Keyword.fetch!(opts, :schema)
quote do
@behaviour unquote(__MODULE__)
@before_compile unquote(__MODULE__)
@on_definition {unquote(__MODULE__), :register_graphql_action}
Module.register_attribute(__MODULE__, :graphql_actions, accumulate: true)
import unquote(__MODULE__), only: [variables: 1]
@absinthe_schema unquote(schema)
plug unquote(__MODULE__).Action, unquote(opts)
@impl unquote(__MODULE__)
@spec cast_param(value :: any, target_type :: Absinthe.Type.t, schema :: Absinthe.Schema.t) :: any
def cast_param(value, %Absinthe.Type.NonNull{of_type: inner_target_type}, schema) do
cast_param(value, inner_target_type, schema)
end
def cast_param(values, %Absinthe.Type.List{of_type: inner_target_type}, schema) when is_list(values) do
for value <- values do
cast_param(value, inner_target_type, schema)
end
end
def cast_param(value, %Absinthe.Type.InputObject{} = target_type, schema) when is_map(value) do
for {name, field_value} <- value, into: %{} do
case Map.values(target_type.fields) |> Enum.find(&(to_string(&1.identifier) == name)) do
nil ->
# Pass through value for error reporting by validations
{name, field_value}
field ->
{
name,
cast_param(field_value, Absinthe.Schema.lookup_type(schema, field.type), schema)
}
end
end
end
def cast_param(value, %Absinthe.Type.Scalar{__reference__: %{identifier: :integer}}, _schema) when is_binary(value) do
case Integer.parse(value) do
{result, _} ->
result
:error ->
# Pass through value for error reporting by validations
value
end
end
def cast_param(value, %Absinthe.Type.Scalar{__reference__: %{identifier: :float}}, _schema) when is_binary(value) do
case Float.parse(value) do
{result, _} ->
result
:error ->
# Pass through value for error reporting by validations
value
end
end
      def cast_param(value, _target_type, _schema) do
value
end
defoverridable [cast_param: 3]
@impl unquote(__MODULE__)
@spec absinthe_pipeline(schema :: Absinthe.Schema.t, Keyword.t) :: Absinthe.Pipeline.t
def absinthe_pipeline(schema, opts) do
unquote(__MODULE__).default_pipeline(schema, opts)
end
defoverridable [absinthe_pipeline: 2]
end
end
def variables(conn) do
conn.private[:absinthe_variables]
end
def default_pipeline(schema, options) do
alias Absinthe.{Phase, Pipeline}
options = Pipeline.options(options)
schema
|> Pipeline.for_document(options)
|> Pipeline.from(Phase.Document.Variables)
|> Pipeline.insert_before(Phase.Document.Variables, {Absinthe.Phoenix.Controller.Blueprint, options})
|> Pipeline.without(Phase.Document.Validation.ScalarLeafs)
|> Pipeline.insert_after(Phase.Document.Directives, {Absinthe.Phoenix.Controller.Action, options})
end
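  # Compiles the collected @graphql documents into a `GraphQL` submodule that
  # serves as a precompiled document provider for Absinthe.Plug.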
defmacro __before_compile__(env) do
actions = Module.get_attribute(env.module, :graphql_actions)
provides = for {name, doc, _} <- actions, do: {name, doc}
schemas = for {name, _, schema} <- actions, do: {to_string(name), schema}
quote do
defmodule GraphQL do
use Absinthe.Plug.DocumentProvider.Compiled
provide unquote(provides)
@absinthe_schemas %{unquote_splicing(schemas)}
def lookup_schema(name) do
@absinthe_schemas[name]
end
end
end
end
@doc false
def register_graphql_action(env, :def, name, _args, _guards, _body) do
default_schema = Module.get_attribute(env.module, :absinthe_schema)
case Module.get_attribute(env.module, :graphql) do
nil ->
:ok
{document, schema} ->
Module.delete_attribute(env.module, :graphql)
Module.put_attribute(env.module, :graphql_actions, {name, document, schema})
document ->
Module.delete_attribute(env.module, :graphql)
Module.put_attribute(env.module, :graphql_actions, {name, document, default_schema})
end
end
def register_graphql_action(_env, _kind, _name, _args, _guards, _body) do
:ok
end
@doc """
Cast string param values to values Absinthe expects for variable input.
  Some scalar types, like `:integer` (GraphQL `Int`), require that the raw,
  incoming value be a non-string type. This isn't a problem in
  GraphQL-over-HTTP because the variable values are provided as a JSON
  payload (which supports, e.g., integer values).
To support converting incoming param values to the format that
certain scalars expect, we support a `cast_param/3` callback
function that takes a raw value, target type (e.g., the scalar
type), and the schema, and returns the transformed
value. `cast_param/3` is overridable and the implementation already
supports `:integer` and `:float` types.
  If you override `cast_param/3`, make sure you call `super` or handle lists,
  non-nulls, and input object values yourself; they're also processed
  using this function.
Important: In the event that a value is _invalid_, just return it
unchanged so that Absinthe's usual validation logic can report it as
invalid.
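  A minimal sketch of an override (the `:lowercase_string` scalar is
  hypothetical), delegating everything else to the default implementation:
  ```
  def cast_param(value, %Absinthe.Type.Scalar{__reference__: %{identifier: :lowercase_string}}, _schema) when is_binary(value) do
    # Hypothetical custom scalar that expects lowercase input
    String.downcase(value)
  end
  def cast_param(value, target_type, schema) do
    # Fall back to the built-in handling (lists, non-nulls, input objects)
    super(value, target_type, schema)
  end
  ```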
"""
@callback cast_param(value :: any, target_type :: Absinthe.Type.t, schema :: Absinthe.Schema.t) :: any
@doc """
Customize the Absinthe processing pipeline.
Only implement this function if you need to change the pipeline used
to process documents.
"""
@callback absinthe_pipeline(schema :: Absinthe.Schema.t, Keyword.t) :: Absinthe.Pipeline.t
end | lib/absinthe/phoenix/controller.ex | 0.941034 | 0.837221 | controller.ex | starcoder |
defmodule BitcrowdEcto.DateTime do
@moduledoc """
Functions to work with date and time values.
"""
@moduledoc since: "0.2.0"
@type unit :: :second | :minute | :hour | :day | :week
@type period :: {integer(), unit()}
@doc """
Converts a `{<value>, <unit>}` tuple into seconds.
  ## Examples
iex> in_seconds({99, :second})
99
iex> in_seconds({1, :minute})
60
iex> in_seconds({1, :hour})
3600
iex> in_seconds({1, :day})
86400
iex> in_seconds({1, :week})
604800
"""
@doc since: "0.2.0"
@spec in_seconds(period()) :: integer()
def in_seconds({seconds, :second}), do: seconds
def in_seconds({minutes, :minute}), do: 60 * minutes
def in_seconds({hours, :hour}), do: 3600 * hours
def in_seconds({days, :day}), do: in_seconds({days * 24, :hour})
def in_seconds({weeks, :week}),
do: in_seconds({weeks * 7 * 24, :hour})
@doc """
Works similar to `Timex.shift/3`, but way more simple.
## Behaviour
  Semantics are like `DateTime.add/3`, and shifting is time zone aware when
  using tzdata: e.g. "2020-03-29 14:00 Europe/Berlin" - 1 day =
  "2020-03-28 13:00", as March 29th only had 23 hours due to DST.
## Examples
iex> shift(~U[2022-04-07 07:21:22.036Z], 15)
~U[2022-04-07 07:21:37.036Z]
iex> shift(~U[2022-04-07 07:21:22.036Z], -3600)
~U[2022-04-07 06:21:22.036Z]
iex> shift(~U[2022-04-07 07:21:22.036Z], {1, :day})
~U[2022-04-08 07:21:22.036Z]
iex> ~U[2020-03-29 12:00:00.000Z]
...> |> DateTime.shift_zone!("Europe/Berlin")
...> |> shift({-1, :day})
...> |> DateTime.to_iso8601()
"2020-03-28T13:00:00.000+01:00"
"""
@doc since: "0.10.0"
@spec shift(DateTime.t(), integer() | period()) :: DateTime.t()
def shift(datetime, period) when is_tuple(period), do: shift(datetime, in_seconds(period))
def shift(datetime, seconds), do: DateTime.add(datetime, seconds)
@doc """
Works similar to `Timex.beginning_of_day/3`, but way more simple.
## Behaviour
Nulls the time-field of the `DateTime` and keeps the rest.
## Examples
iex> beginning_of_day(~U[2022-04-07 07:21:22.036Z])
~U[2022-04-07 00:00:00.000000Z]
"""
@doc since: "0.10.0"
@spec beginning_of_day(DateTime.t()) :: DateTime.t()
def beginning_of_day(datetime) do
%{datetime | hour: 0, minute: 0, second: 0, microsecond: {0, 6}}
end
end | lib/bitcrowd_ecto/date_time.ex | 0.913334 | 0.547887 | date_time.ex | starcoder |
defmodule KafkaEx.GenConsumer do
@moduledoc """
A behaviour module for implementing a Kafka consumer.
A `KafkaEx.GenConsumer` is an Elixir process that consumes messages from
Kafka. A single `KafkaEx.GenConsumer` process consumes from a single
partition of a Kafka topic. Several `KafkaEx.GenConsumer` processes can be
used to consume from multiple partitions or even multiple topics. Partition
assignments for a group of `KafkaEx.GenConsumer`s can be defined manually
using `KafkaEx.GenConsumer.Supervisor` or coordinated across a cluster of
nodes using `KafkaEx.ConsumerGroup`.
A `KafkaEx.GenConsumer` must implement three callbacks. Two of these will be
defined with default behavior if you add `use KafkaEx.GenConsumer` to your
module, leaving just `c:handle_message_set/2` to be implemented. This is the
recommended usage.
## Example
The `KafkaEx.GenConsumer` behaviour abstracts common Kafka consumer
interactions. `KafkaEx.GenConsumer` will take care of the details of
determining a starting offset, fetching messages from a Kafka broker, and
committing offsets for consumed messages. Developers are only required to
implement `c:handle_message_set/2` to process messages.
The following is a minimal example that logs each message as it's consumed:
```
defmodule ExampleGenConsumer do
use KafkaEx.GenConsumer
alias KafkaEx.Protocol.Fetch.Message
require Logger
# note - messages are delivered in batches
def handle_message_set(message_set, state) do
for %Message{value: message} <- message_set do
Logger.debug(fn -> "message: " <> inspect(message) end)
end
{:async_commit, state}
end
end
```
`c:handle_message_set/2` will be called with the batch of messages fetched
from the broker. The number of messages in a batch is determined by the
number of messages available and the `max_bytes` and `min_bytes` parameters
of the fetch request (which can be configured in KafkaEx). In this example,
because `c:handle_message_set/2` always returns `{:async_commit, new_state}`,
the message offsets will be automatically committed asynchronously.
## Committing Offsets
`KafkaEx.GenConsumer` manages a consumer's offsets by committing the offsets
of consumed messages. KafkaEx supports two commit strategies: asynchronous
and synchronous. The return value of `c:handle_message_set/2` determines
which strategy is used:
* `{:sync_commit, new_state}` causes synchronous offset commits.
* `{:async_commit, new_state}` causes asynchronous offset commits.
  Note that with both of the offset commit strategies, only the final offset
  in the message set is committed, and this is done after the messages are
consumed. If you want to commit the offset of every message consumed, use
the synchronous offset commit strategy and implement calls to
`KafkaEx.offset_commit/2` within your consumer as appropriate.
### Synchronous offset commits
When `c:handle_message_set/2` returns `{:sync_commit, new_state}`, the offset
of the final message in the message set is committed immediately before
fetching any more messages. This strategy requires a significant amount of
communication with the broker and could correspondingly degrade consumer
performance, but it will keep the offset commits tightly synchronized with
the consumer state.
Choose the synchronous offset commit strategy if you want to favor
consistency of offset commits over performance, or if you have a low rate of
message arrival. The definition of a "low rate" depends on the situation,
but tens of messages per second could be considered a "low rate" in most
situations.
### Asynchronous offset commits
When `c:handle_message_set/2` returns `{:async_commit, new_state}`, KafkaEx
will not commit offsets after every message set consumed. To avoid
excessive network calls, the offsets are committed periodically (and when
the worker terminates).
How often a `KafkaEx.GenConsumer` auto-commits offsets is controlled by the two
configuration values `:commit_interval` and `:commit_threshold`.
* `:commit_interval` is the maximum time (in milliseconds) that a
`KafkaEx.GenConsumer` will delay committing the offset for an acknowledged
message.
* `:commit_threshold` is the maximum number of acknowledged messages that a
`KafkaEx.GenConsumer` will allow to be uncommitted before triggering a
commit.
These can be set globally in the `:kafka_ex` app's environment or on a
per-consumer basis by passing options to `start_link/5`:
```
# In config/config.exs
config :kafka_ex,
commit_interval: 5000,
commit_threshold: 100
# As options to start_link/5
KafkaEx.GenConsumer.start_link(MyConsumer, "my_group", "topic", 0,
commit_interval: 5000,
commit_threshold: 100)
```
For low-volume topics, `:commit_interval` is the dominant factor for how
often a `KafkaEx.GenConsumer` auto-commits. For high-volume topics,
`:commit_threshold` is the dominant factor.
## Handler state and interaction
  Use the `c:init/2` callback to initialize consumer state and `c:handle_call/3`,
  `c:handle_cast/2`, or `c:handle_info/2` to interact with the consumer.
Example:
```
defmodule MyConsumer do
use KafkaEx.GenConsumer
defmodule State do
defstruct messages: [], calls: 0
end
def init(_topic, _partition) do
{:ok, %State{}}
end
def init(_topic, _partition, extra_args) do
{:ok, %State{}}
end
def handle_message_set(message_set, state) do
{:async_commit, %{state | messages: state.messages ++ message_set}}
end
def handle_call(:messages, _from, state) do
{:reply, state.messages, %{state | calls: state.calls + 1}}
end
end
{:ok, pid} = GenConsumer.start_link(MyConsumer, "consumer_group", "topic", 0)
GenConsumer.call(pid, :messages)
```
**NOTE** If you do not implement `c:handle_call/3` or `c:handle_cast/2`, any
calls to `GenConsumer.call/3` or casts to `GenConsumer.cast/2` will raise an
error. Similarly, any messages sent to a `GenConsumer` will log an error if
there is no corresponding `c:handle_info/2` callback defined.
## Testing
A `KafkaEx.GenConsumer` can be unit-tested without a running Kafka broker by sending
messages directly to its `c:handle_message_set/2` function. The following
recipe can be used as a starting point when testing a `KafkaEx.GenConsumer`:
```
defmodule ExampleGenConsumerTest do
use ExUnit.Case, async: true
alias KafkaEx.Protocol.Fetch.Message
@topic "topic"
@partition 0
setup do
{:ok, state} = ExampleGenConsumer.init(@topic, @partition)
{:ok, %{state: state}}
end
test "it acks a message", %{state: state} do
message_set = [%Message{offset: 0, value: "hello"}]
{response, _new_state} =
ExampleGenConsumer.handle_message_set(message_set, state)
assert response == :async_commit
end
end
```
"""
use GenServer
alias KafkaEx.Protocol.OffsetCommit.Request, as: OffsetCommitRequest
alias KafkaEx.Protocol.OffsetCommit.Response, as: OffsetCommitResponse
alias KafkaEx.Protocol.OffsetFetch.Request, as: OffsetFetchRequest
alias KafkaEx.Protocol.OffsetFetch.Response, as: OffsetFetchResponse
alias KafkaEx.Protocol.Offset.Response, as: OffsetResponse
alias KafkaEx.Protocol.Fetch.Response, as: FetchResponse
alias KafkaEx.Protocol.Fetch.Message
require Logger
@typedoc """
Option values used when starting a `KafkaEx.GenConsumer`.
"""
@type option ::
{:commit_interval, non_neg_integer}
| {:commit_threshold, non_neg_integer}
| {:auto_offset_reset, :none | :earliest | :latest}
| {:api_versions, map()}
| {:extra_consumer_args, map()}
@typedoc """
Options used when starting a `KafkaEx.GenConsumer`.
"""
@type options :: [option | GenServer.option()]
@doc """
Invoked when the server is started. `start_link/5` will block until it
returns.
`topic` and `partition` are the arguments passed to `start_link/5`. They
identify the Kafka partition that the `KafkaEx.GenConsumer` will consume from.
Returning `{:ok, state}` will cause `start_link/5` to return `{:ok, pid}` and
the process to start consuming from its assigned partition. `state` becomes
the consumer's state.
  Any other return value will cause `start_link/5` to return `{:error,
error}` and the process to exit.
"""
@callback init(topic :: binary, partition :: non_neg_integer) ::
{:ok, state :: term}
| {:stop, reason :: term}
@doc """
Invoked when the server is started. `start_link/5` will block until it
returns.
`topic` and `partition` are the arguments passed to `start_link/5`. They
identify the Kafka partition that the `KafkaEx.GenConsumer` will consume from.
`extra_args` is the value of the `extra_consumer_args` option to `start_link/5`.
The default implementation of this function calls `init/2`.
Returning `{:ok, state}` will cause `start_link/5` to return `{:ok, pid}` and
the process to start consuming from its assigned partition. `state` becomes
the consumer's state.
  Any other return value will cause `start_link/5` to return `{:error,
error}` and the process to exit.
"""
@callback init(
topic :: binary,
partition :: non_neg_integer,
extra_args :: map()
) :: {:ok, state :: term} | {:stop, reason :: term}
@doc """
Invoked for each message set consumed from a Kafka topic partition.
`message_set` is a message set fetched from a Kafka broker and `state` is the
current state of the `KafkaEx.GenConsumer`.
  Returning `{:async_commit, new_state}` acknowledges `message_set` and continues
to consume from the Kafka queue with new state `new_state`. Acknowledged
messages will be auto-committed (possibly at a later time) based on the
`:commit_interval` and `:commit_threshold` options.
  Returning `{:sync_commit, new_state}` commits `message_set` synchronously before
continuing to consume from the Kafka queue with new state `new_state`.
Committing a message synchronously means that no more messages will be
consumed until the message's offset is committed. `:sync_commit` should be
used sparingly, since committing every message synchronously would impact a
consumer's performance and could result in excessive network traffic.
"""
@callback handle_message_set(message_set :: [Message.t()], state :: term) ::
{:async_commit, new_state :: term}
| {:sync_commit, new_state :: term}
@doc """
Invoked by `KafkaEx.GenConsumer.call/3`.
Note the default implementation will cause a `RuntimeError`. If you want to
interact with your consumer, you must implement a handle_call function.
"""
@callback handle_call(call :: term, from :: GenServer.from(), state :: term) ::
{:reply, reply_value :: term, new_state :: term}
| {:stop, reason :: term, reply_value :: term, new_state :: term}
| {:stop, reason :: term, new_state :: term}
@doc """
Invoked by `KafkaEx.GenConsumer.cast/2`.
Note the default implementation will cause a `RuntimeError`. If you want to
interact with your consumer, you must implement a handle_cast function.
"""
@callback handle_cast(cast :: term, state :: term) ::
{:noreply, new_state :: term}
| {:stop, reason :: term, new_state :: term}
@doc """
Invoked by sending messages to the consumer.
Note the default implementation will log error messages. If you want to
interact with your consumer, you must implement a handle_info function.
"""
@callback handle_info(info :: term, state :: term) ::
{:noreply, new_state :: term}
| {:stop, reason :: term, new_state :: term}
defmacro __using__(_opts) do
quote do
@behaviour KafkaEx.GenConsumer
alias KafkaEx.Protocol.Fetch.Message
def init(_topic, _partition) do
{:ok, nil}
end
def init(topic, partition, _extra_args) do
init(topic, partition)
end
def handle_call(msg, _from, consumer_state) do
# taken from the GenServer handle_call implementation
proc =
case Process.info(self(), :registered_name) do
{_, []} -> self()
{_, name} -> name
end
# We do this to trick Dialyzer to not complain about non-local returns.
case :erlang.phash2(1, 1) do
0 ->
raise "attempted to call KafkaEx.GenConsumer #{inspect(proc)} " <>
"but no handle_call/3 clause was provided"
1 ->
{:reply, {:bad_call, msg}, consumer_state}
end
end
def handle_cast(msg, consumer_state) do
# taken from the GenServer handle_cast implementation
proc =
case Process.info(self(), :registered_name) do
{_, []} -> self()
{_, name} -> name
end
# We do this to trick Dialyzer to not complain about non-local returns.
case :erlang.phash2(1, 1) do
0 ->
raise "attempted to cast KafkaEx.GenConsumer #{inspect(proc)} " <>
" but no handle_cast/2 clause was provided"
1 ->
{:noreply, consumer_state}
end
end
def handle_info(msg, consumer_state) do
# taken from the GenServer handle_info implementation
proc =
case Process.info(self(), :registered_name) do
{_, []} -> self()
{_, name} -> name
end
pattern = '~p ~p received unexpected message in handle_info/2: ~p~n'
:error_logger.error_msg(pattern, [__MODULE__, proc, msg])
{:noreply, consumer_state}
end
defoverridable init: 2,
init: 3,
handle_call: 3,
handle_cast: 2,
handle_info: 2
end
end
defmodule State do
@moduledoc false
defstruct [
:consumer_module,
:consumer_state,
:commit_interval,
:commit_threshold,
:worker_name,
:group,
:topic,
:partition,
:member_id,
:generation_id,
:current_offset,
:committed_offset,
:acked_offset,
:last_commit,
:auto_offset_reset,
:fetch_options,
:api_versions
]
end
@commit_interval 5_000
@commit_threshold 100
@auto_offset_reset :none
# Client API
@doc """
Starts a `KafkaEx.GenConsumer` process linked to the current process.
This can be used to start the `KafkaEx.GenConsumer` as part of a supervision tree.
Once the consumer has been started, the `c:init/2` function of
`consumer_module` is called with `topic` and `partition` as its arguments.
`group_name` is the consumer group name that will be used for managing
consumer offsets.
### Options
* `:commit_interval` - The interval in milliseconds that the consumer will
wait to commit offsets of handled messages. Default 5_000.
* `:commit_threshold` - Threshold number of messages consumed to commit
offsets to the broker. Default 100.
* `:auto_offset_reset` - The policy for resetting offsets when an
`:offset_out_of_range` error occurs. `:earliest` will move the offset to
the oldest available, `:latest` moves to the most recent. If anything else
is specified, the error will simply be raised.
* `:fetch_options` - Optional keyword list that is passed along to the
`KafkaEx.fetch` call.
* `:extra_consumer_args` - Optional parameter that is passed along to the
`GenConsumer.init` call in the consumer module. Note that if `init/3` is not
implemented, the default implementation calls to `init/2`, dropping the extra
arguments.
  **NOTE** `:commit_interval`, `:auto_offset_reset` and `:commit_threshold` default to the
application config (e.g., `Application.get_env/2`) if that value is present, or the stated
default if the application config is not present.
Any valid options for `GenServer.start_link/3` can also be specified.
### Return Values
This function has the same return values as `GenServer.start_link/3`.
"""
@spec start_link(
consumer_module :: module,
consumer_group_name :: binary,
topic_name :: binary,
partition_id :: non_neg_integer,
options
) :: GenServer.on_start()
def start_link(consumer_module, group_name, topic, partition, opts \\ []) do
{server_opts, consumer_opts} =
Keyword.split(opts, [:debug, :name, :timeout, :spawn_opt])
GenServer.start_link(
__MODULE__,
{consumer_module, group_name, topic, partition, consumer_opts},
server_opts
)
end
@doc """
Returns the topic and partition id for this consumer process
"""
  @spec partition(GenServer.server(), timeout) ::
          {topic :: binary, partition_id :: non_neg_integer}
def partition(gen_consumer, timeout \\ 5000) do
GenServer.call(gen_consumer, :partition, timeout)
end
@doc """
Forwards a `GenServer.call/3` to the consumer implementation with the
consumer's state.
The implementation must return a `GenServer.call/3`-compatible value of the
form `{:reply, reply_value, new_consumer_state}`. The GenConsumer will
turn this into an immediate timeout, which drives continued message
consumption.
See the moduledoc for an example.
"""
@spec call(GenServer.server(), term, timeout) :: term
def call(gen_consumer, message, timeout \\ 5000) do
GenServer.call(gen_consumer, {:consumer_call, message}, timeout)
end
@doc """
Forwards a `GenServer.cast/2` to the consumer implementation with the
consumer's state.
The implementation must return a `GenServer.cast/2`-compatible value of the
form `{:noreply, new_consumer_state}`. The GenConsumer will turn this into an
immediate timeout, which drives continued message consumption.
"""
@spec cast(GenServer.server(), term) :: term
def cast(gen_consumer, message) do
GenServer.cast(gen_consumer, {:consumer_cast, message})
end
# GenServer callbacks
def init({consumer_module, group_name, topic, partition, opts}) do
commit_interval =
Keyword.get(
opts,
:commit_interval,
Application.get_env(:kafka_ex, :commit_interval, @commit_interval)
)
commit_threshold =
Keyword.get(
opts,
:commit_threshold,
Application.get_env(:kafka_ex, :commit_threshold, @commit_threshold)
)
auto_offset_reset =
Keyword.get(
opts,
:auto_offset_reset,
Application.get_env(:kafka_ex, :auto_offset_reset, @auto_offset_reset)
)
extra_consumer_args =
Keyword.get(
opts,
:extra_consumer_args
)
generation_id = Keyword.get(opts, :generation_id)
member_id = Keyword.get(opts, :member_id)
default_api_versions = %{fetch: 0, offset_fetch: 0, offset_commit: 0}
api_versions = Keyword.get(opts, :api_versions, %{})
api_versions = Map.merge(default_api_versions, api_versions)
case consumer_module.init(topic, partition, extra_consumer_args) do
{:ok, consumer_state} ->
worker_opts = Keyword.take(opts, [:uris, :use_ssl, :ssl_options])
{:ok, worker_name} =
KafkaEx.create_worker(
:no_name,
[consumer_group: group_name] ++ worker_opts
)
default_fetch_options = [
auto_commit: false,
worker_name: worker_name
]
given_fetch_options = Keyword.get(opts, :fetch_options, [])
fetch_options =
Keyword.merge(default_fetch_options, given_fetch_options)
state = %State{
consumer_module: consumer_module,
consumer_state: consumer_state,
commit_interval: commit_interval,
commit_threshold: commit_threshold,
auto_offset_reset: auto_offset_reset,
worker_name: worker_name,
group: group_name,
topic: topic,
partition: partition,
generation_id: generation_id,
member_id: member_id,
fetch_options: fetch_options,
api_versions: api_versions
}
Process.flag(:trap_exit, true)
{:ok, state, 0}
{:stop, reason} ->
{:stop, reason}
end
end
def handle_call(:partition, _from, state) do
{:reply, {state.topic, state.partition}, state, 0}
end
def handle_call(
{:consumer_call, message},
from,
%State{
consumer_module: consumer_module,
consumer_state: consumer_state
} = state
) do
# NOTE we only support the {:reply, _, _} result format here
# which we turn into a timeout = 0 clause so that we continue to consume.
# any other GenServer flow control could have unintended consequences,
# so we leave that for later consideration
consumer_reply =
consumer_module.handle_call(
message,
from,
consumer_state
)
case consumer_reply do
{:reply, reply, new_consumer_state} ->
{:reply, reply, %{state | consumer_state: new_consumer_state}, 0}
{:stop, reason, new_consumer_state} ->
{:stop, reason, %{state | consumer_state: new_consumer_state}}
{:stop, reason, reply, new_consumer_state} ->
{:stop, reason, reply, %{state | consumer_state: new_consumer_state}}
end
end
def handle_cast(
{:consumer_cast, message},
%State{
consumer_module: consumer_module,
consumer_state: consumer_state
} = state
) do
# NOTE we only support the {:noreply, _} result format here
# which we turn into a timeout = 0 clause so that we continue to consume.
# any other GenServer flow control could have unintended consequences,
# so we leave that for later consideration
consumer_reply =
consumer_module.handle_cast(
message,
consumer_state
)
case consumer_reply do
{:noreply, new_consumer_state} ->
{:noreply, %{state | consumer_state: new_consumer_state}, 0}
{:stop, reason, new_consumer_state} ->
{:stop, reason, %{state | consumer_state: new_consumer_state}}
end
end
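  # First :timeout after init/1 (triggered by the 0 timeout returned there):
  # load the committed offsets from the broker before starting to consume.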
def handle_info(
:timeout,
%State{current_offset: nil, last_commit: nil} = state
) do
new_state = %State{
load_offsets(state)
| last_commit: :erlang.monotonic_time(:milli_seconds)
}
{:noreply, new_state, 0}
end
def handle_info(:timeout, %State{} = state) do
case consume(state) do
{:error, reason} ->
{:stop, reason, state}
new_state ->
{:noreply, new_state, 0}
end
end
def handle_info(
message,
%State{
consumer_module: consumer_module,
consumer_state: consumer_state
} = state
) do
# NOTE we only support the {:noreply, _} result format here
# which we turn into a timeout = 0 clause so that we continue to consume.
# any other GenServer flow control could have unintended consequences,
# so we leave that for later consideration
consumer_reply =
consumer_module.handle_info(
message,
consumer_state
)
case consumer_reply do
{:noreply, new_consumer_state} ->
{:noreply, %{state | consumer_state: new_consumer_state}, 0}
{:stop, reason, new_consumer_state} ->
{:stop, reason, %{state | consumer_state: new_consumer_state}}
end
end
def terminate(_reason, %State{} = state) do
commit(state)
Process.unlink(state.worker_name)
KafkaEx.stop_worker(state.worker_name)
end
# Helpers
defp consume(
%State{
topic: topic,
partition: partition,
current_offset: offset,
fetch_options: fetch_options
} = state
) do
response =
KafkaEx.fetch(
topic,
partition,
Keyword.merge(
fetch_options,
offset: offset,
api_version: Map.fetch!(state.api_versions, :fetch)
)
)
response
|> handle_fetch_response(state)
end
defp handle_fetch_response(
[
%FetchResponse{
topic: _topic,
partitions: [
response = %{error_code: error_code, partition: _partition}
]
}
],
state
) do
state =
case error_code do
:offset_out_of_range ->
handle_offset_out_of_range(state)
:no_error ->
state
end
case response do
%{message_set: []} ->
handle_commit(:async_commit, state)
%{last_offset: _, message_set: message_set} ->
handle_message_set(message_set, state)
end
end
defp handle_fetch_response(error, _state) do
{:error, error}
end
defp handle_message_set(
message_set,
%State{
consumer_module: consumer_module,
consumer_state: consumer_state
} = state
) do
{sync_status, new_consumer_state} =
consumer_module.handle_message_set(message_set, consumer_state)
%Message{offset: last_offset} = List.last(message_set)
state_out = %State{
state
| consumer_state: new_consumer_state,
acked_offset: last_offset + 1,
current_offset: last_offset + 1
}
handle_commit(sync_status, state_out)
end
defp handle_offset_out_of_range(
%State{
worker_name: worker_name,
topic: topic,
partition: partition,
auto_offset_reset: auto_offset_reset
} = state
) do
[
%OffsetResponse{
topic: ^topic,
partition_offsets: [
%{partition: ^partition, error_code: :no_error, offset: [offset]}
]
}
] =
case auto_offset_reset do
:earliest ->
KafkaEx.earliest_offset(topic, partition, worker_name)
:latest ->
KafkaEx.latest_offset(topic, partition, worker_name)
_ ->
raise "Offset out of range while consuming topic #{topic}, partition #{
partition
}."
end
%State{
state
| current_offset: offset,
committed_offset: offset,
acked_offset: offset
}
end
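  # Synchronous commits always hit the broker; asynchronous commits are
  # batched and only sent once the uncommitted backlog reaches
  # :commit_threshold or :commit_interval milliseconds have passed since
  # the last commit.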
defp handle_commit(:sync_commit, %State{} = state), do: commit(state)
defp handle_commit(
:async_commit,
%State{
acked_offset: acked,
committed_offset: committed,
commit_threshold: threshold,
last_commit: last_commit,
commit_interval: interval
} = state
) do
case acked - committed do
0 ->
%State{state | last_commit: :erlang.monotonic_time(:milli_seconds)}
n when n >= threshold ->
commit(state)
_ ->
if :erlang.monotonic_time(:milli_seconds) - last_commit >= interval do
commit(state)
else
state
end
end
end
defp commit(%State{acked_offset: offset, committed_offset: offset} = state) do
state
end
defp commit(
%State{
worker_name: worker_name,
group: group,
topic: topic,
partition: partition,
member_id: member_id,
generation_id: generation_id,
acked_offset: offset
} = state
) do
request = %OffsetCommitRequest{
consumer_group: group,
topic: topic,
partition: partition,
offset: offset,
member_id: member_id,
generation_id: generation_id,
      api_version: Map.fetch!(state.api_versions, :offset_commit)
}
[%OffsetCommitResponse{topic: ^topic, partitions: [partition_response]}] =
KafkaEx.offset_commit(worker_name, request)
# one of these needs to match, depending on which client
case partition_response do
# old client
^partition ->
:ok
# new client
%{error_code: :no_error, partition: ^partition} ->
:ok
end
Logger.debug(fn ->
"Committed offset #{topic}/#{partition}@#{offset} for #{group}"
end)
%State{
state
| committed_offset: offset,
last_commit: :erlang.monotonic_time(:milli_seconds)
}
end
defp load_offsets(
%State{
worker_name: worker_name,
group: group,
topic: topic,
partition: partition
} = state
) do
request = %OffsetFetchRequest{
consumer_group: group,
topic: topic,
partition: partition,
api_version: Map.fetch!(state.api_versions, :offset_fetch)
}
[
%OffsetFetchResponse{
topic: ^topic,
partitions: [
%{partition: ^partition, error_code: error_code, offset: offset}
]
}
] = KafkaEx.offset_fetch(worker_name, request)
# newer api versions will return -1 if the consumer group does not exist
offset = max(offset, 0)
case error_code do
:no_error ->
%State{
state
| current_offset: offset,
committed_offset: offset,
acked_offset: offset
}
:unknown_topic_or_partition ->
[
%OffsetResponse{
topic: ^topic,
partition_offsets: [
%{partition: ^partition, error_code: :no_error, offset: [offset]}
]
}
] = KafkaEx.earliest_offset(topic, partition, worker_name)
%State{
state
| current_offset: offset,
committed_offset: offset,
acked_offset: offset
}
end
end
end | lib/kafka_ex/gen_consumer.ex | 0.946138 | 0.894927 | gen_consumer.ex | starcoder |
defmodule JsonApiClient.Middleware.StatsTracker do
@moduledoc """
Stats Tracking Middleware
### Options
- `:name` - name of the stats (used in logging)
- `:log` - The log level to log at. No logging is done if `false`. Defaults to `false`
Middleware that adds stats data to response, and optionally logs it.
The `JsonApiClient.Middleware.StatsTracker` middleware provides
instrumentation for your requests. `StatsTracker` can be added to the
middleware stack to track the time spent in the middleware that comes after
it and add that data to the `Response` struct. If `log` is spoecified in the
options `StatsTracker` will then log all stats data in the `Response` struct
at the specified log level. Here's a sample configuration to add stats
tracking to the http request and parsing.
```elixir
config :json_api_client,
middlewares: [
      {JsonApiClient.Middleware.StatsTracker, name: :parse_response, log: :info},
{JsonApiClient.Middleware.DocumentParser, nil},
{JsonApiClient.Middleware.StatsTracker, name: :http_request},
{JsonApiClient.Middleware.HTTPClient, nil},
]
```
That would cause something like the following to be logged on each request:
```
  15:57:30.198 [info] total_ms=73.067 url=http://example.com/articles/123 parse_response_ms=7.01 http_request_ms=66.057
```
Note that the `StatsTracker` middleware tracks the time spent in all the
middleware that comes after it in the stack. When it logs this data it
  subtracts the time recorded by the next StatsTracker in the stack so that you
  can see the time spent in distinct portions of the middleware stack.
Consider this stack, for example:
```elixir
config :json_api_client,
middlewares: [
{JsonApiClient.Middleware.StatsTracker, name: :custom_middleware, log: :info},
{CustomMiddleware1, nil},
{CustomMiddleware2, nil},
{CustomMiddleware3, nil},
{JsonApiClient.Middleware.StatsTracker, name: :request_and_parsing},
{JsonApiClient.Middleware.DocumentParser, nil},
{JsonApiClient.Middleware.HTTPClient, nil},
]
```
  This will log the time spent in all three custom middlewares as one value and
  the time spent performing the HTTP request and parsing the response as another.
```
  15:57:30.198 [info] total_ms=100 url=http://example.com/articles/123 custom_middleware_ms=12 request_and_parsing_ms=88
```
"""
@behaviour JsonApiClient.Middleware
require Logger
alias JsonApiClient.Request
@impl JsonApiClient.Middleware
def call(%Request{} = request, next, opts) do
name = Access.get(opts, :name)
log_level = Access.get(opts, :log, false)
{microseconds, {status, response}} = :timer.tc(fn -> next.(request) end)
timer_tuple = {name, microseconds / 1_000}
attributes =
response.attributes
|> update_in([:stats], &(&1 || %{}))
|> update_in([:stats, :timers], &(&1 || []))
|> update_in([:stats, :timers], &[timer_tuple | &1])
response = %{response | attributes: attributes}
log_level && log_stats(request, response, log_level)
{status, response}
end
defp log_stats(request, response, log_level) do
[]
|> Enum.concat(stats_from_request(request))
|> Enum.concat(stats_from_response(response))
|> log(log_level)
end
@doc false
def stats_from_response(response) do
timers = get_in(response.attributes, [:stats, :timers]) || []
[{_, total_ms} | _] = timers
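    # Timers are prepended as the response bubbles back up the middleware
    # stack, so the head of the list holds the outermost (total) time.
    # Walking the reversed list and subtracting the time already attributed
    # to inner segments yields the time spent in each distinct portion.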
{stats, _} =
Enum.reduce(Enum.reverse(timers), {[], 0}, fn {name, ms}, {stats, ms_spent_elsewhere} ->
{[{:"#{name}_ms", ms - ms_spent_elsewhere} | stats], ms}
end)
[{:total_ms, total_ms} | stats]
end
@doc false
def stats_from_request(%Request{} = request) do
[url: Request.get_url(request)]
end
defp log(stats, log_level) do
Logger.log(log_level, fn ->
to_logfmt(stats)
end)
end
defp to_logfmt(enum) do
enum
|> Enum.map(fn {k, v} -> "#{k}=#{v}" end)
|> Enum.join(" ")
end
end | lib/json_api_client/middleware/stats_tracker.ex | 0.908156 | 0.792665 | stats_tracker.ex | starcoder |
defmodule Parser do
use Platform.Parsing.Behaviour
## test payloads
# 0211c90003119b117611bc119e118a119411a811a81194006401990abd
# 0211c900020abd
def fields do
[
%{field: "distance_average", display: "Distance: average", unit: "mm"},
%{field: "distance_minimum", display: "Distance: minimum", unit: "mm"},
%{field: "distance_maximum", display: "Distance: maximum", unit: "mm"},
%{field: "distance_median", display: "Distance: median", unit: "mm"},
%{field: "distance_10th_percentile", display: "Distance: 10th percentile", unit: "mm"},
%{field: "distance_25th_percentile", display: "Distance: 25th percentile", unit: "mm"},
%{field: "distance_75th_percentile", display: "Distance: 75th percentile", unit: "mm"},
%{field: "distance_90th_percentile", display: "Distance: 90th percentile", unit: "mm"},
%{field: "distance_most_frequent_value", display: "Distance: most frequent value", unit: "mm"},
%{field: "number_of_samples", display: "Number of samples", unit: ""},
%{field: "total_acquisition_time", display: "Total acquisition time", unit: "ms"},
%{field: "battery_voltage", display: "Battery voltage", unit: "V"}
]
end
def parse(<<2, device_id::size(16), flags::binary-size(2), words::binary>>, _meta) do
{_remaining, result} =
{words, %{:device_id => device_id, :protocol_version => 2}}
|> sensor0(flags)
|> sensor1(flags)
result
end
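  # The 16-bit flags field advertises which data blocks are present in the
  # payload: bit 0 (LSB) selects the distance statistics block, bit 1 the
  # battery voltage block.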
defp sensor0({<<x0::size(16), x1::size(16), x2::size(16), x3::size(16), x4::size(16), x5::size(16), x6::size(16), x7::size(16), x8::size(16), x9::size(16), x10::size(16), remaining::binary>>, result},
<<_::size(15), 1::size(1), _::size(0)>>) do
{remaining,
Map.merge(result,
%{
:distance_average => x0,
:distance_minimum => x1,
:distance_maximum => x2,
:distance_median => x3,
:distance_10th_percentile => x4,
:distance_25th_percentile => x5,
:distance_75th_percentile => x6,
:distance_90th_percentile => x7,
:distance_most_frequent_value => x8,
:number_of_samples => x9,
:total_acquisition_time => x10 / 1.024
})}
end
defp sensor0(result, _flags), do: result
defp sensor1({<<x0::size(16), remaining::binary>>, result},
<<_::size(14), 1::size(1), _::size(1)>>) do
{remaining,
Map.merge(result,
%{
:battery_voltage => x0 / 1000
})}
end
defp sensor1(result, _flags), do: result
end | DL-LID/DL-LID.ELEMENT-IoT.ex | 0.551695 | 0.54825 | DL-LID.ELEMENT-IoT.ex | starcoder |
defmodule DiscoveryApi.Search.Elasticsearch.Document do
@moduledoc """
Manages basic CRUD operations for a Dataset Document
"""
alias DiscoveryApi.Data.Model
import DiscoveryApi.Search.Elasticsearch.Shared
require Logger
def get(id) do
case elastic_get(id) do
{:ok, document} ->
{:ok, struct(Model, document)}
error ->
error
end
end
def update(%Model{} = dataset) do
put(dataset, &elastic_update_document/1)
end
def replace(%Model{} = dataset) do
put(dataset, &elastic_index_document/1)
end
def delete(dataset_id) do
elastic_delete_document(dataset_id)
end
def replace_all(datasets) do
case DiscoveryApi.Search.Elasticsearch.DatasetIndex.reset(dataset_index()) do
{:ok, _} -> elastic_bulk_document_load(datasets)
error -> error
end
end
defp put(%Model{id: _id} = dataset, operation_function) do
dataset_as_map = dataset_to_map(dataset)
case DiscoveryApi.Search.Elasticsearch.DatasetIndex.elastic_datasets_index_exists?() do
true -> operation_function.(dataset_as_map)
error -> error
end
end
defp put(_dataset, _operation) do
{:error, "Please provide a dataset with an id to update function."}
end
defp elastic_get(id) do
case Elastix.Document.get(
url(),
dataset_index_name(),
"_doc",
id
)
|> handle_get_document_response() do
{:ok, %{_source: document}} -> {:ok, document}
error -> error
end
end
defp elastic_update_document(dataset_as_map) do
Elastix.Document.update(
url(),
dataset_index_name(),
"_doc",
dataset_as_map.id,
%{doc: dataset_as_map, doc_as_upsert: true},
refresh: true
)
|> handle_response()
end
defp elastic_index_document(dataset_as_map) do
Elastix.Document.index(
url(),
dataset_index_name(),
"_doc",
dataset_as_map.id,
dataset_as_map,
refresh: true
)
|> handle_response()
end
defp elastic_delete_document(dataset_id) do
Elastix.Document.delete(
url(),
dataset_index_name(),
"_doc",
dataset_id
)
end
defp elastic_bulk_document_load(datasets) do
bulk_list = datasets_to_bulk_list(datasets)
Elastix.Bulk.post(url(), bulk_list, [], refresh: true)
|> handle_response()
end
defp datasets_to_bulk_list(datasets) do
Enum.map(datasets, fn dataset ->
[
%{index: %{_id: dataset.id, _index: dataset_index_name()}},
dataset_to_map(dataset)
]
end)
|> List.flatten()
end
defp dataset_to_map(dataset) do
dataset
|> Map.from_struct()
|> Map.drop([:completeness])
|> Enum.reject(fn {_k, v} -> is_nil(v) end)
|> Map.new()
|> populate_org_facets()
|> populate_keyword_facets()
|> populate_optimized_fields()
|> populate_sort_date()
end
defp populate_sort_date(%{sourceType: "ingest", modifiedDate: sort_date} = model) when sort_date != "",
do: Map.put(model, :sortDate, sort_date)
defp populate_sort_date(%{sourceType: "stream", lastUpdatedDate: sort_date} = model) when sort_date != "",
do: Map.put(model, :sortDate, sort_date)
defp populate_sort_date(%{issuedDate: sort_date} = model), do: Map.put(model, :sortDate, sort_date)
defp populate_sort_date(model), do: model
defp populate_org_facets(%{organizationDetails: %{orgTitle: org_title}} = dataset) do
Map.put_new(dataset, :facets, %{})
|> put_in([:facets, :orgTitle], org_title)
end
defp populate_org_facets(dataset), do: dataset
defp populate_keyword_facets(%{keywords: keywords} = dataset) do
Map.put_new(dataset, :facets, %{})
|> put_in([:facets, :keywords], keywords)
end
defp populate_keyword_facets(dataset), do: dataset
defp populate_optimized_fields(dataset) do
put_in(dataset, [:titleKeyword], Map.get(dataset, :title))
end
defp handle_get_document_response({:ok, %{body: %{_id: id, found: false}}}), do: {:error, "Dataset with id #{id} not found!"}
defp handle_get_document_response(response), do: handle_response_with_body(response)
defp handle_response({:ok, %{body: %{error: error}}}) do
Logger.error("Error from elasticsearch: #{inspect(error)}")
{:error, error}
end
defp handle_response(response), do: response
end | apps/discovery_api/lib/discovery_api/search/elasticsearch/document.ex | 0.651577 | 0.408129 | document.ex | starcoder |
defmodule ToyRobot.Robot do
alias ToyRobot.Robot
defstruct north: 0, east: 0, facing: :north
@doc """
Move the robot forward one space in the facing direction
## Examples
iex> alias ToyRobot.Robot
ToyRobot.Robot
iex> robot = %Robot{north: 0, facing: :north}
%Robot{north: 0, facing: :north}
iex> robot |> Robot.move
%Robot{north: 1, facing: :north}
"""
def move(%Robot{facing: facing} = robot) do
case facing do
:east -> move_east(robot)
:west -> move_west(robot)
:north -> move_north(robot)
:south -> move_south(robot)
end
end
@doc """
Turn the robot left
## Examples
iex> alias ToyRobot.Robot
ToyRobot.Robot
iex> robot = %Robot{facing: :north}
%Robot{facing: :north}
iex> robot |> Robot.turn_left
%Robot{facing: :west}
"""
def turn_left(%Robot{facing: facing} = robot) do
new_facing =
case facing do
:east -> :north
:west -> :south
:north -> :west
:south -> :east
end
%Robot{robot | facing: new_facing}
end
@doc """
Turn the robot right
## Examples
iex> alias ToyRobot.Robot
ToyRobot.Robot
iex> robot = %Robot{facing: :north}
%Robot{facing: :north}
iex> robot |> Robot.turn_right
%Robot{facing: :east}
"""
def turn_right(%Robot{facing: facing} = robot) do
new_facing =
case facing do
:east -> :south
:west -> :north
:north -> :east
:south -> :west
end
%Robot{robot | facing: new_facing}
end
@doc """
  Turn the robot to the opposite direction
## Examples
iex> alias ToyRobot.Robot
ToyRobot.Robot
iex> robot = %Robot{facing: :north}
%Robot{facing: :north}
iex> robot |> Robot.uturn
%Robot{facing: :south}
"""
def uturn(%Robot{facing: facing} = robot) do
new_facing =
case facing do
:east -> :west
:west -> :east
:north -> :south
:south -> :north
end
%Robot{robot | facing: new_facing}
end
defp move_east(robot) do
%Robot{robot | east: robot.east + 1}
end
defp move_west(robot) do
%Robot{robot | east: robot.east - 1}
end
defp move_north(robot) do
%Robot{robot | north: robot.north + 1}
end
defp move_south(robot) do
%Robot{robot | north: robot.north - 1}
end
end | lib/toy_robot/robot.ex | 0.889954 | 0.764232 | robot.ex | starcoder |
defmodule Crux.Gateway.Connection.Gun do
@moduledoc false
# Wrapper module for `:gun`, making it easier to swap out the WebSocket clients if necessary in the future.
# Sends messages to the invoking process:
# - `{:connected, pid}`
# - `{:disconnected, pid, {:close, code, message}}`
# - `{:packet, pid, packet}`
alias :erlang, as: Erlang
alias :gen_statem, as: GenStateM
alias :gun, as: Gun
alias :http_uri, as: HttpUri
alias :zlib, as: Zlib
alias Crux.Gateway.Util
require Logger
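  # Refuse to send frames larger than this many bytes (see send_frame/2).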
@max_size 4096
### Client API
@doc """
Starts a gun process linked to the current process.
"""
def start_link(uri) do
GenStateM.start_link(__MODULE__, {uri, self()}, [])
end
@doc """
Instructs the gun process to reconnect to the initially provided url.
"""
def reconnect(conn) do
GenStateM.call(conn, :reconnect)
end
@doc """
Instructs the gun process to disconnect. The process will not reconnect on its own.
"""
def disconnect(conn, code, message) do
GenStateM.call(conn, {:disconnect, code, message})
end
@doc """
Instructs the gun process to disconnect and stop.
"""
def stop(conn, code, message) do
GenStateM.call(conn, {:stop, code, message})
end
@doc """
Instructs the gun process to send a frame.
"""
def send_frame(conn, frame)
def send_frame(_conn, {:binary, binary})
when byte_size(binary) > @max_size do
{:error, :too_large}
end
def send_frame(conn, frame) do
GenStateM.call(conn, {:send, frame})
end
# Messages going from the server back to the client.
defp send_disconnected(%{parent: parent}, code, message) do
Kernel.send(parent, {:disconnected, self(), {:close, code, message}})
end
defp send_connected(%{parent: parent}) do
Kernel.send(parent, {:connected, self()})
end
defp send_packet(%{parent: parent}, packet) do
Kernel.send(parent, {:packet, self(), packet})
end
### Server API
# States
@disconnected :disconnected
@connecting :connecting
@connected :connected
@attempt_limit 5
defstruct [
:parent,
:host,
:port,
:path,
:query,
:zlib,
:buffer,
:conn,
:attempts,
:expect_disconnect
]
@typep t :: %__MODULE__{
# The spawning process
parent: pid(),
# Where to connect to
host: charlist(),
port: pos_integer(),
path: String.t(),
query: String.t(),
# zlib stream context and its buffer
zlib: Zlib.zstream() | nil,
buffer: binary(),
# WS connection wrapper process
conn: pid() | nil,
# Limit the amount of attempts to establish a connection
attempts: non_neg_integer(),
# Whether we are expecting a gun_down / disconnect
# and do not want to notify the spawning process again
expect_disconnect: boolean()
}
@behaviour GenStateM
def callback_mode(), do: [:state_functions, :state_enter]
@spec init({String.t(), pid()}) :: {:ok, :connecting, t()} | {:stop, :bad_uri}
def init({uri, parent}) do
case HttpUri.parse(uri, [{:scheme_defaults, [{:wss, 443}]}]) do
{:error, term} ->
Logger.error(fn -> "Failed to parse uri #{inspect(uri)}, reason #{inspect(term)}." end)
{:stop, :bad_uri}
{:ok, {:wss, "", host, port, path, query}} ->
data = %__MODULE__{
parent: parent,
host: String.to_charlist(host),
port: port,
path: path,
query: query,
zlib: nil,
buffer: <<>>,
conn: nil,
attempts: 0,
expect_disconnect: false
}
{:ok, @disconnected, data}
end
end
# From init
def disconnected(:enter, @disconnected, _data) do
:keep_state_and_data
end
def disconnected(:enter, _old_state, data) do
try do
Zlib.inflateEnd(data.zlib)
rescue
_ -> nil
end
:ok = Zlib.close(data.zlib)
:ok = Gun.close(data.conn)
data = %{data | conn: nil, zlib: nil}
{:keep_state, data}
end
def disconnected({:call, from}, :reconnect, data) do
{:next_state, @connecting, data, {:reply, from, :ok}}
end
def disconnected({:call, from}, {:disconnect, _code, _message}, _data) do
{:keep_state_and_data, {:reply, from, :ok}}
end
def disconnected({:call, from}, {:stop, _code, _message}, _data) do
{:stop_and_reply, :normal, {:reply, from, :ok}}
end
def disconnected({:call, from}, {:send, _frame}, _data) do
{:keep_state_and_data, {:reply, from, {:error, :disconnected}}}
end
def connecting(:enter, _old_state, data) do
z = Zlib.open()
Zlib.inflateInit(z)
attempts = data.attempts + 1
data = %{data | attempts: attempts}
Logger.debug(fn ->
"Starting a process to connect to #{data.host}:#{data.port} (Attempt: #{attempts} / #{@attempt_limit})"
end)
# > Gun does not currently support Websocket over HTTP/2.
{:ok, conn} = Gun.open(data.host, data.port, %{protocols: [:http]})
Logger.debug(fn -> "Process started, waiting for its connection to be up." end)
conn
|> Gun.await_up()
|> case do
{:ok, :http} ->
Logger.debug(fn ->
"Connection is up, now upgrading it to use the WebSocket protocol, using " <>
data.path <> data.query
end)
stream_ref = Gun.ws_upgrade(conn, data.path <> data.query)
:ok = await_upgrade(conn, stream_ref)
Logger.debug(fn ->
"Connection upgraded to use the WebSocket protocol, we are good to go!"
end)
send_connected(data)
data = %{data | conn: conn, zlib: z, attempts: 0}
{:keep_state, data, {:timeout, 0, :connected}}
{:error, :timeout} when attempts >= @attempt_limit ->
Logger.error(fn ->
"Connection timed out, no attempts remaining, won't retry. (#{attempts} / #{@attempt_limit})"
end)
{:stop, :connection_failure, data}
{:error, :timeout} ->
Logger.warn(fn ->
"Connection timed out, will retry. (#{attempts} / #{@attempt_limit})"
end)
{:repeat_state, data}
end
end
def connecting(:timeout, :connected, data) do
{:next_state, @connected, data}
end
# The connecting state can not receive any messages due to its blocking nature.
def connected(:enter, _old_state, _data) do
:keep_state_and_data
end
def connected(
:info,
        {:gun_down, conn, _protocol, reason, _killed_stream, _unprocessed_stream},
%{conn: conn, expect_disconnect: expect_disconnect} = data
) do
if expect_disconnect do
{code, message} = expect_disconnect
send_disconnected(data, code, message)
else
Logger.warn(fn -> "Unexpected gun_down! Connection down! Reason: #{inspect(reason)}" end)
send_disconnected(data, :unknown, "gun_down")
end
data = %{data | expect_disconnect: false}
{:next_state, @disconnected, data}
end
def connected(:info, {:gun_error, conn, reason}, %{conn: conn}) do
Logger.warn(fn -> "Connection error: #{inspect(reason)}" end)
:keep_state_and_data
end
def connected(:info, {:gun_error, conn, _stream_ref, reason}, %{conn: conn}) do
Logger.warn(fn -> "Stream error: #{inspect(reason)}" end)
:keep_state_and_data
end
def connected(:info, {:gun_ws, conn, _stream_ref, {:binary, frame}}, %{conn: conn} = data) do
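    # Transport compression: all binary frames belong to a single continuous
    # zlib stream. A frame ending in the Z_SYNC_FLUSH marker
    # <<0x00, 0x00, 0xFF, 0xFF>> completes a packet; otherwise keep buffering.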
frame_size = byte_size(frame) - 4
<<_data::binary-size(frame_size), suffix::binary>> = frame
buffer = data.buffer <> frame
new_buffer =
if suffix == <<0x00, 0x00, 0xFF, 0xFF>> do
packet =
data.zlib
|> Zlib.inflate(buffer)
|> Erlang.iolist_to_binary()
|> Erlang.binary_to_term()
|> Util.atomify()
send_packet(data, packet)
<<>>
else
buffer
end
data = %{data | buffer: new_buffer}
{:keep_state, data}
end
  def connected(:info, {:gun_ws, conn, _stream_ref, {:text, text}}, %{conn: conn}) do
    Logger.warn(fn -> "Received unexpected text frame: #{inspect(text)}" end)
    :keep_state_and_data
  end
def connected(:info, {:gun_ws, conn, _stream_ref, frame}, %{conn: conn} = data) do
data =
case maybe_close(frame) do
{:close, code, message} ->
Logger.warn(fn -> "Disconnected with code #{code} and message #{message}" end)
%{data | expect_disconnect: {code, message}}
:error ->
Logger.warn(fn -> "Received an unexpected frame: #{frame}" end)
data
end
{:keep_state, data}
end
def connected({:call, from}, :reconnect, _data) do
{:keep_state_and_data, {:reply, from, :ok}}
end
def connected({:call, from}, {:disconnect, code, message}, data) do
:ok = Gun.ws_send(data.conn, {:close, code, message})
data = %{data | expect_disconnect: {code, message}}
{:keep_state, data, {:reply, from, :ok}}
end
def connected({:call, from}, {:stop, code, message}, data) do
:ok = Gun.ws_send(data.conn, {:close, code, message})
:ok = Gun.close(data.conn)
send_disconnected(data, code, message)
{:stop_and_reply, :normal, {:reply, from, :ok}}
end
def connected({:call, from}, {:send, frame}, data) do
:ok = Gun.ws_send(data.conn, frame)
{:keep_state_and_data, {:reply, from, :ok}}
end
# Handle all possible close frame options
defp maybe_close(:close), do: {:close, :unknown, "No message received."}
defp maybe_close({:close, message}), do: {:close, :unknown, message}
defp maybe_close({:close, close_code, message}), do: {:close, close_code, message}
defp maybe_close(_frame), do: :error
# Since gun does not implement one itself for some reason?
defp await_upgrade(conn, stream_ref) do
receive do
{:gun_upgrade, ^conn, ^stream_ref, _protocols, _headers} ->
:ok
{:gun_response, ^conn, ^stream_ref, _is_fin, status, headers} ->
{:error, status, headers}
{:gun_error, ^conn, ^stream_ref, reason} ->
{:error, reason}
{:gun_error, ^conn, reason} ->
{:error, reason}
after
5000 ->
{:error, :timeout}
end
end
end | lib/gateway/connection/gun.ex | 0.794066 | 0.508605 | gun.ex | starcoder |
defmodule Change do
@error_message "cannot change"
@doc """
Determine the least number of coins to be given to the user such
that the sum of the coins' value would equal the correct amount of change.
It returns {:error, "cannot change"} if it is not possible to compute the
right amount of coins. Otherwise returns the tuple {:ok, list_of_coins}
## Examples
iex> Change.generate([5, 10, 15], 3)
{:error, "cannot change"}
iex> Change.generate([1, 5, 10], 18)
{:ok, [1, 1, 1, 5, 10]}
"""
@spec generate(list, integer) :: {:ok, list} | {:error, String.t()}
def generate(_coins, 0), do: {:ok, []}
def generate(_coins, target) when target < 0, do: {:error, @error_message}
def generate(coins, target) do
if target < Enum.min(coins) do
{:error, @error_message}
else
{_coins, change_candidates, _acc} =
coins
|> generate_useable_coins(target)
|> generate_change_candidates(target)
case change_candidates do
[] ->
{:error, @error_message}
_ ->
fewest_coin_set = Enum.min_by(change_candidates, &length/1)
{:ok, fewest_coin_set}
end
end
end
defp generate_useable_coins(coins, target) do
coins
|> Enum.filter(&(&1 <= target))
|> Enum.sort(&(&1 >= &2))
end
defp generate_change_candidates(coins, target) do
coins
|> Enum.reduce({coins, [], []}, &generate_change(coins, target, &1, &2))
end
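  # Depth-first search over coin combinations: keep stacking `coin` while the
  # running sum stays below the target, back off to the next smaller coin on
  # overshoot, and record the combination as a candidate on an exact match.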
defp generate_change(coins, target, coin, {change_coins, candidates, acc}) do
combination = [coin | acc]
sum = Enum.sum(combination)
cond do
sum > target ->
case change_coins do
[] ->
handle_possible_change_without_unit_coins(coins, candidates, acc)
[head | []] ->
generate_change(coins, target, head, {[], candidates, acc})
[_head | tail] ->
generate_change(coins, target, hd(tail), {tail, candidates, acc})
end
sum == target ->
change_coins =
if Enum.empty?(change_coins) do
[]
else
tl(change_coins)
end
{change_coins, [combination | candidates], []}
# sum < target
true ->
generate_change(
coins,
target,
coin,
{change_coins, candidates, combination}
)
end
end
defp handle_possible_change_without_unit_coins(coins, candidates, acc) do
case length(candidates) do
0 ->
{coins, candidates, Enum.drop(acc, 1)}
_ ->
{[], candidates, acc}
end
end
end | elixir/change/lib/change.ex | 0.829388 | 0.560614 | change.ex | starcoder |
defmodule Edeliver.Relup.Instructions.SoftPurge do
@moduledoc """
Upgrade instruction which replaces `:brutal_purge` with `:soft_purge`
for `:load_module`, `:load`, `:update` and `:remove` relup instructions.
If `:brutal_purge` is used, processes running old code are killed.
If `:soft_purge` is used, the release handler will refuse to start
the upgrade while any process is still running old code.
"""
use Edeliver.Relup.Instruction
def modify_relup(instructions = %Instructions{}, _config = %{}) do
%{instructions|
up_instructions: replace_brutal_purge_with_soft_purge(instructions.up_instructions, []),
down_instructions: replace_brutal_purge_with_soft_purge(instructions.down_instructions, [])
}
end
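# A minimal sketch of the rewrite this pass performs, shown on a
# hypothetical one-entry instruction list:
#
#   replace_brutal_purge_with_soft_purge(
#     [{:load, {MyApp.Worker, :brutal_purge, :brutal_purge}}], []
#   )
#   #=> [{:load, {MyApp.Worker, :soft_purge, :brutal_purge}}]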
defp replace_brutal_purge_with_soft_purge([{:load, {module, :brutal_purge, post_purge}}|rest], modified_instructions), do: \
replace_brutal_purge_with_soft_purge(rest, [{:load, {module, :soft_purge, post_purge}}|modified_instructions])
defp replace_brutal_purge_with_soft_purge([{:update, module, change, :brutal_purge, post_purge, dep_mods}|rest], modified_instructions), do: \
replace_brutal_purge_with_soft_purge(rest, [{:update, module, change, :soft_purge, post_purge, dep_mods}|modified_instructions])
defp replace_brutal_purge_with_soft_purge([{:update, module, timeout, change, :brutal_purge, post_purge, dep_mods}|rest], modified_instructions), do: \
replace_brutal_purge_with_soft_purge(rest, [{:update, module, timeout, change, :soft_purge, post_purge, dep_mods}|modified_instructions])
defp replace_brutal_purge_with_soft_purge([{:update, module, mod_type, timeout, change, :brutal_purge, post_purge, dep_mods}|rest], modified_instructions), do: \
replace_brutal_purge_with_soft_purge(rest, [{:update, module, mod_type, timeout, change, :soft_purge, post_purge, dep_mods}|modified_instructions])
defp replace_brutal_purge_with_soft_purge([{:load_module, module, :brutal_purge, post_purge, dep_mods}|rest], modified_instructions), do: \
replace_brutal_purge_with_soft_purge(rest, [{:load_module, module, :soft_purge, post_purge, dep_mods}|modified_instructions])
defp replace_brutal_purge_with_soft_purge([{:remove, {module, :brutal_purge, post_purge}}|rest], modified_instructions), do: \
replace_brutal_purge_with_soft_purge(rest, [{:remove, {module, :soft_purge, post_purge}}|modified_instructions])
defp replace_brutal_purge_with_soft_purge([first|rest], modified_instructions), do: \
replace_brutal_purge_with_soft_purge(rest, [first|modified_instructions])
defp replace_brutal_purge_with_soft_purge([], modified_instructions), do: Enum.reverse(modified_instructions)
end | lib/edeliver/relup/instructions/soft_purge.ex | 0.635901 | 0.457924 | soft_purge.ex | starcoder |
defmodule Molasses do
alias Molasses.StorageAdapter.Redis
alias Molasses.StorageAdapter.Postgres
alias Molasses.StorageAdapter.MongoDB
@moduledoc ~S"""
A feature toggle library using Redis, MongoDB, or SQL (via Ecto) as a backing service. It allows you to roll a feature out to a percentage of users, or alternatively to a specific group of users or user ids.
## Installation
1. Add `molasses` to your list of dependencies in `mix.exs` and run `mix deps.get`:
```elixir
def deps do
[{:molasses, "~> 0.2.0"}]
end
```
2. Install the related dependencies by including `ExRedis`, `MongoDB`, or `Ecto` together with one of its adapter libraries for Postgres or MySQL.
2A. Redis
For Redis, you will just need to include exredis:
```elixir
def deps do
[
{:molasses, "~> 0.2.0"},
{:exredis, ">= 0.2.4"}
]
end
```
2B. SQL using Ecto
For Ecto with Postgres, install `ecto` and `postgrex`. You will also need to start ecto and postgrex as applications :
```elixir
def deps do
[
{:molasses, "~> 0.2.0"},
{:ecto, "~> 2.1.1"},
{:postgrex, ">= 0.0.0"}
]
end
def application do
[applications: [:ecto, :postgrex]]
end
```
Your config will also need to change. You will need to set up an Ecto Repo as described [here](https://hexdocs.pm/ecto/Ecto.html#module-repositories), and set the Molasses adapter to `"ecto"`.
```elixir
# molasses adapter setting
config :molasses, adapter: "ecto"
```
You will need to create an Ecto migration that adds the features table.
```elixir
defmodule Repo.CreateTestMocks do
use Ecto.Migration
def change do
create table(:features) do
add :name, :string
add :percentage, :integer
add :users, :string
add :active, :boolean
end
create index(:features, [:name])
end
end
```
2C. MongoDB
```elixir
def deps do
[
{:molasses, "~> 0.2.0"},
{:mongodb, ">= 0.0.0"},
]
end
```
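Then set the Molasses adapter to MongoDB (mirroring the Ecto setup above):
```elixir
# molasses adapter setting
config :molasses, adapter: "mongo"
```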
## Usage
Molasses uses the same interface whether you are using Redis, MongoDB, or SQL. Each function takes the storage client (an `Ecto.Repo`, a MongoDB connection, or the `ExRedis` client) as the first argument.
"""
@doc """
Check to see if a feature is active for all users.
"""
def is_active(client, key) do
case get_feature(client, key) do
{:error, _} -> false
%{active: false} -> false
%{active: true, percentage: 100, users: []} -> true
%{active: true, percentage: 100} -> false
%{active: true, percentage: _} -> false
end
end
defp get_id(id) when is_integer(id), do: Integer.to_string(id)
defp get_id(id), do: id
@doc """
Check to see if a feature is active for a specific user.
"""
def is_active(client, key, id) do
feature = get_feature(client, key)
is_feature_active(feature,id)
end
defp is_feature_active(feature, id) do
id = get_id id
case feature do
{:error, _} -> false
%{active: false} -> false
%{active: true, percentage: 100, users: []} -> true
%{active: true, percentage: 100, users: users} -> Enum.member?(users, id)
%{active: true, percentage: percentage} when is_bitstring(id) ->
value = id |> :erlang.crc32 |> rem(100) |> abs
value <= percentage
end
end
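# Percentage rollouts hash the (stringified) user id into the 0..99
# range and compare it against the rollout percentage, e.g.:
#   "user42" |> :erlang.crc32() |> rem(100) |> abs()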
def are_features_active(client, id) do
features = get_features(client)
Enum.map(features, fn(x) ->
%{
name: x[:name],
active: is_feature_active(x, id)
}
end)
end
@doc """
Returns a struct of the feature in question.
"""
def get_feature(client, key) do
adapter().get_feature(client,key)
end
@doc """
Activates a feature for all users.
"""
def activate(client, key) do
adapter().activate(client,key)
end
@doc """
Activates a feature for some users.
When the group argument is an integer, the feature is activated for that percentage of users.
When the group argument is a string, the feature is activated for that specific user or user group.
When the group argument is a list, the feature is activated for that specific list of users or user groups.
## Examples
# activate a feature for a percentage of users
Molasses.activate(client, "my_feature", 75)
# activate a feature for a subset of integer based userIds
Molasses.activate(client, "my_feature", [2, 4, 5])
# activate a feature for a subset of string based userIds (think a mongoId) or a list of groups
Molasses.activate(client, "my_feature", ["admins", "super admins"])
# activate a feature for only one group of users
Molasses.activate(client, "my_feature", "powerusers")
"""
def activate(client, key, group) do
adapter().activate(client, key, group)
end
@doc """
Dectivates a feature for all users.
"""
def deactivate(client, key) do
adapter().deactivate(client, key)
end
def get_features(client) do
adapter().get_features(client)
end
def adapter do
case Application.get_env(:molasses, :adapter) do
"ecto" -> Postgres
"mongo" -> MongoDB
_ -> Redis
end
end
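# e.g. `config :molasses, adapter: "ecto"` selects the Postgres storage
# adapter; any unset or unrecognized value falls back to Redis.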
end | lib/molasses.ex | 0.750004 | 0.863161 | molasses.ex | starcoder |
defmodule PasswordValidator.Validators.CharacterSetValidator do
@moduledoc """
Validates a password by checking the different types of characters contained
within.
"""
@behaviour PasswordValidator.Validator
@initial_counts %{
upper_case: 0,
lower_case: 0,
numbers: 0,
special: 0,
other: []
}
@character_sets [:lower_case, :upper_case, :numbers, :special]
alias PasswordValidator.Validators.CharacterSetValidator.Config
@doc """
Example config
[
character_set: [
upper_case: [1, :infinity],
lower_case: 1,
numbers: 1,
special: [0, 0],
allowed_special_characters: "!@#$%^&*()",
]
]
"""
def validate(_, []), do: :ok
def validate(string, opts) when is_list(opts) do
config = Config.from_options(opts)
validate_password(string, config)
end
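# A hedged sketch (assuming, per the config example above, that a bare
# integer is treated as a minimum and that allowed special characters
# default to :all):
#
#   validate("Ab1!", character_set: [upper_case: 1, lower_case: 1, numbers: 1])
#   #=> :ok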
defp validate_password(nil, %Config{} = config) do
validate_password("", config)
end
@spec validate_password(String.t(), %Config{}) :: :ok | {:error, nonempty_list()}
defp validate_password(string, %Config{} = config) do
counts = count_character_sets(string, config.allowed_special_characters)
@character_sets
|> Enum.map(&validate_character_set(&1, counts, config))
|> Enum.concat([validate_other(counts)])
|> PasswordValidator.Validator.return_errors_or_ok()
end
@spec validate_character_set(atom(), map(), %Config{}) :: :ok | {:error, String.t()}
for character_set <- @character_sets do
def validate_character_set(
unquote(character_set),
%{unquote(character_set) => count},
%Config{unquote(character_set) => config}
) do
do_validate_character_set(unquote(character_set), count, config)
end
end
@spec do_validate_character_set(atom(), integer(), list()) :: :ok | {:error, String.t()}
def do_validate_character_set(character_set, count, config)
def do_validate_character_set(_, _, [0, :infinity]) do
:ok
end
def do_validate_character_set(_, count, [min, :infinity]) when count >= min do
:ok
end
def do_validate_character_set(character_set, count, [min, _]) when count < min do
{:error,
{"Not enough %{character_set} characters (only %{count} instead of at least %{min})",
character_set: character_set, count: count, min: min}}
end
def do_validate_character_set(character_set, count, [_, max]) when count > max do
{:error,
{"Too many %{character_set} (%{count} but maximum is %{max})",
character_set: character_set, count: count, max: max}}
end
def do_validate_character_set(_, count, [min, max]) when min <= count and count <= max do
:ok
end
def do_validate_character_set(_, _, config) do
raise "Invalid character set config. (#{inspect(config)})"
end
defp validate_other(%{other: other_characters}) when length(other_characters) == 0,
do: :ok
defp validate_other(%{other: other_characters}) when length(other_characters) > 0,
do:
{:error,
{"Invalid character(s) found. (%{other_characters})", other_characters: other_characters}}
@spec count_character_sets(String.t(), String.t() | :all, map()) :: map()
defp count_character_sets(string, special_characters, counts \\ @initial_counts)
defp count_character_sets("", _, counts), do: counts
defp count_character_sets(string, special_characters, counts) do
{grapheme, rest} = String.next_grapheme(string)
counts =
cond do
String.match?(grapheme, ~r/[a-z]/) ->
update_count(counts, :lower_case)
String.match?(grapheme, ~r/[A-Z]/) ->
update_count(counts, :upper_case)
String.match?(grapheme, ~r/[0-9]/) ->
update_count(counts, :numbers)
is_special_character(grapheme, special_characters) ->
update_count(counts, :special)
true ->
Map.update!(counts, :other, &Enum.concat(&1, [grapheme]))
end
count_character_sets(rest, special_characters, counts)
end
@spec update_count(map(), atom()) :: map()
defp update_count(counts, key) do
Map.update!(counts, key, &(&1 + 1))
end
@spec is_special_character(String.t(), :all | String.t()) :: boolean()
defp is_special_character(_string, :all), do: true
defp is_special_character(string, special_characters) when is_binary(special_characters) do
String.contains?(special_characters, string)
end
end | lib/password_validator/validators/character_set_validator.ex | 0.876397 | 0.464051 | character_set_validator.ex | starcoder |
defmodule SmartCity.Event do
@moduledoc """
Defines macros encoding the event types that the Smart City platform
responds to. Keeping them in this central location ensures the various
microservice components all share the same event names.
"""
@doc """
Defines an update event to a dataset within the system. The
system treats create events as a subset of updates.
"""
defmacro dataset_update(), do: quote(do: "dataset:update")
@doc """
Declares an error occurred during the attempted upsert of a
dataset.
"""
defmacro error_dataset_update(), do: quote(do: "error:dataset:update")
@doc """
Defines an update event to an organization within the system.
The system treats create events as a subset of updates.
"""
defmacro organization_update(), do: quote(do: "organization:update")
@doc """
Declares an error occurred during the attempted upsert of an
organization.
"""
defmacro error_organization_update(), do: quote(do: "error:organization:update")
@doc """
Signals a dataset is about to be retrieved and begin loading into
the ingestion pipeline.
"""
defmacro data_ingest_start(), do: quote(do: "data:ingest:start")
@doc """
Signals a dataset has completed an ingestion process through the
pipeline from end to end and been persisted.
"""
defmacro data_ingest_end(), do: quote(do: "data:ingest:end")
@doc """
Declares an error occurred during an attempted data ingestion.
"""
defmacro error_data_ingest(), do: quote(do: "error:data:ingest")
@doc """
Signals data for a dataset is about to be downloaded into the platform,
parsed, and written to the raw ingestion topic.
"""
defmacro data_extract_start(), do: quote(do: "data:extract:start")
@doc """
Signals a dataset has completed the extraction process and the final
message has been written to the raw ingestion topic.
"""
defmacro data_extract_end(), do: quote(do: "data:extract:end")
@doc """
Declares an error occurred during an attempted data extraction.
"""
defmacro error_data_extract(), do: quote(do: "error:data:extract")
@doc """
Signals a non-ingestable data file is about to be downloaded to the
platform and stored in the object store bucket.
"""
defmacro file_ingest_start(), do: quote(do: "file:ingest:start")
@doc """
Signals a non-ingestable data file has been successfully uploaded
to the object store bucket.
"""
defmacro file_ingest_end(), do: quote(do: "file:ingest:end")
@doc """
Declares an error occurred during an attempted file ingestion.
"""
defmacro error_file_ingest(), do: quote(do: "error:file:ingest")
@doc """
Signals that a dataset should be disabled.
"""
defmacro dataset_disable(), do: quote(do: "dataset:disable")
@doc """
Signals that a dataset should be deleted
"""
defmacro dataset_delete(), do: quote(do: "dataset:delete")
@doc """
Signals that writing some data for a dataset has completed
"""
defmacro data_write_complete(), do: quote(do: "data:write:complete")
@doc """
Signals that data standardization is complete
"""
defmacro data_standardization_end(), do: quote(do: "data:standardization:end")
@doc """
Defines a user organization relationship.
"""
defmacro user_organization_associate(), do: quote(do: "user:organization:associate")
@doc """
Removes a user organization relationship.
"""
defmacro user_organization_disassociate(), do: quote(do: "user:organization:disassociate")
@doc """
Signals a dataset harvest start
"""
defmacro dataset_harvest_start(), do: quote(do: "dataset:harvest:start")
@doc """
Signals a dataset harvest end
"""
defmacro dataset_harvest_end(), do: quote(do: "dataset:harvest:end")
@doc """
Signals a dataset query has been run
"""
defmacro dataset_query(), do: quote(do: "dataset:query")
@doc """
Signals a user has logged in
"""
defmacro user_login(), do: quote(do: "user:login")
@doc """
Signals that a file should be downloaded.
"""
@deprecated "Use file_ingest_start/0"
defmacro hosted_file_start(), do: quote(do: "hosted:file:start")
@doc """
Signals a hosted file has been downloaded and stored.
"""
@deprecated "Use file_ingest_end/0"
defmacro hosted_file_complete(), do: quote(do: "hosted:file:complete")
@doc """
Signals that a new file has been uploaded to the object store
and made available for the rest of the system.
"""
@deprecated "Use file_ingest_end/0"
defmacro file_upload(), do: quote(do: "file:upload")
@doc """
Signals that a dataset extraction is starting
"""
@deprecated "Use data_extract_start/0"
defmacro dataset_extract_start(), do: quote(do: "dataset:extract:start")
@doc """
Signals that dataset extraction has completed
"""
@deprecated "Use data_extract_end/0"
defmacro dataset_extract_complete(), do: quote(do: "dataset:extract:complete")
end | lib/smart_city/event.ex | 0.765856 | 0.609728 | event.ex | starcoder |
defmodule ShopDeed do
@moduledoc """
Documentation for ShopDeed.
"""
alias ShopDeed.{Deck, Decoder, DecodeError, Encoder, EncodeError}
@doc """
Decodes the base64 encoded deck string, returning `{:ok, deck}` or `{:error, decode_error}`.
## Examples
iex> ShopDeed.decode("<KEY>")
{:error, %ShopDeed.DecodeError{message: "Must start with prefix 'ADC'"}}
iex> ShopDeed.decode("<KEY>")
{:ok, %ShopDeed.Deck{cards: [
%ShopDeed.Card{count: 2, id: 3000},
%ShopDeed.Card{count: 1, id: 3001},
%ShopDeed.Card{count: 3, id: 10091},
%ShopDeed.Card{count: 3, id: 10102},
%ShopDeed.Card{count: 3, id: 10128},
%ShopDeed.Card{count: 3, id: 10165},
%ShopDeed.Card{count: 3, id: 10168},
%ShopDeed.Card{count: 3, id: 10169},
%ShopDeed.Card{count: 3, id: 10185},
%ShopDeed.Card{count: 1, id: 10223},
%ShopDeed.Card{count: 3, id: 10234},
%ShopDeed.Card{count: 1, id: 10260},
%ShopDeed.Card{count: 1, id: 10263},
%ShopDeed.Card{count: 3, id: 10322},
%ShopDeed.Card{count: 3, id: 10354}
],
heroes: [
%ShopDeed.Hero{id: 4005, turn: 2},
%ShopDeed.Hero{id: 10014, turn: 1},
%ShopDeed.Hero{id: 10017, turn: 3},
%ShopDeed.Hero{id: 10026, turn: 1},
%ShopDeed.Hero{id: 10047, turn: 1}
],
name: "Green/Black Example"}}
"""
@spec decode(String.t()) :: {:ok, ShopDeed.Deck.t()} | {:error, DecodeError.t()}
def decode(deck_string), do: Decoder.decode(deck_string)
@spec decode!(String.t()) :: ShopDeed.Deck.t()
def decode!(deck_string) do
case decode(deck_string) do
{:ok, deck} -> deck
{:error, error} -> raise error
end
end
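# A hedged usage sketch (deck string shortened to a placeholder):
#
#   %ShopDeed.Deck{name: name} = ShopDeed.decode!("ADC" <> encoded_payload)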
@doc """
Returns the given deck as a base64 encoded string compatible with playartifact.com's deck viewer.
## Examples
iex> ShopDeed.encode(%ShopDeed.Deck{heroes: [], cards: [], name: "Green/Black Example"})
""
"""
@spec encode(ShopDeed.Deck.t()) :: {:ok, String.t()} | {:error, EncodeError.t()}
def encode(deck), do: Encoder.encode(deck)
@spec encode!(Deck.t()) :: String.t()
def encode!(deck) do
case encode(deck) do
{:ok, deck_string} -> deck_string
{:error, error} -> raise error
end
end
end | lib/shop_deed.ex | 0.896294 | 0.429758 | shop_deed.ex | starcoder |
defmodule Plaid.Institution.Status do
@moduledoc """
[Plaid institution status schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-status)
"""
@behaviour Plaid.Castable
alias Plaid.Castable
defmodule Breakdown do
@moduledoc """
[Plaid institution status breakdown schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-breakdown)
"""
@behaviour Castable
@type t :: %__MODULE__{
success: number(),
error_plaid: number(),
error_institution: number(),
refresh_interval: String.t() | nil
}
defstruct [
:success,
:error_plaid,
:error_institution,
:refresh_interval
]
@impl true
def cast(generic_map) do
%__MODULE__{
success: generic_map["success"],
error_plaid: generic_map["error_plaid"],
error_institution: generic_map["error_institution"],
refresh_interval: generic_map["refresh_interval"]
}
end
end
defmodule Auth do
@moduledoc """
[Plaid institution auth status schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-auth)
"""
@behaviour Castable
@type t :: %__MODULE__{
status: String.t(),
last_status_change: String.t(),
breakdown: Breakdown.t()
}
defstruct [
:status,
:last_status_change,
:breakdown
]
@impl true
def cast(generic_map) do
%__MODULE__{
status: generic_map["status"],
last_status_change: generic_map["last_status_change"],
breakdown: Castable.cast(Breakdown, generic_map["breakdown"])
}
end
end
defmodule Balance do
@moduledoc """
[Plaid institution balance status schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-balance)
"""
@behaviour Castable
@type t :: %__MODULE__{
status: String.t(),
last_status_change: String.t(),
breakdown: Breakdown.t()
}
defstruct [
:status,
:last_status_change,
:breakdown
]
@impl true
def cast(generic_map) do
%__MODULE__{
status: generic_map["status"],
last_status_change: generic_map["last_status_change"],
breakdown: Castable.cast(Breakdown, generic_map["breakdown"])
}
end
end
defmodule HealthIncidentUpdate do
@moduledoc """
[Plaid institution status health incident update schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-incident-updates)
"""
@behaviour Castable
@type t :: %__MODULE__{
description: String.t(),
status: String.t(),
updated_date: String.t()
}
defstruct [
:description,
:status,
:updated_date
]
@impl true
def cast(generic_map) do
%__MODULE__{
description: generic_map["description"],
status: generic_map["status"],
updated_date: generic_map["updated_date"]
}
end
end
defmodule HealthIncident do
@moduledoc """
[Plaid institution status health incident schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-health-incidents)
"""
@behaviour Castable
@type t :: %__MODULE__{
start_date: String.t() | nil,
end_date: String.t() | nil,
title: String.t(),
incident_updates: [HealthIncidentUpdate.t()]
}
defstruct [
:start_date,
:end_date,
:title,
:incident_updates
]
@impl true
def cast(generic_map) do
%__MODULE__{
start_date: generic_map["start_date"],
end_date: generic_map["end_date"],
title: generic_map["title"],
incident_updates:
Castable.cast_list(HealthIncidentUpdate, generic_map["incident_updates"])
}
end
end
defmodule Identity do
@moduledoc """
[Plaid institution identity status schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-identity)
"""
@behaviour Castable
@type t :: %__MODULE__{
status: String.t(),
last_status_change: String.t(),
breakdown: Breakdown.t()
}
defstruct [
:status,
:last_status_change,
:breakdown
]
@impl true
def cast(generic_map) do
%__MODULE__{
status: generic_map["status"],
last_status_change: generic_map["last_status_change"],
breakdown: Castable.cast(Breakdown, generic_map["breakdown"])
}
end
end
defmodule InvestmentsUpdates do
@moduledoc """
[Plaid institution investments updates status schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-investments-updates)
"""
@behaviour Castable
@type t :: %__MODULE__{
status: String.t(),
last_status_change: String.t(),
breakdown: Breakdown.t()
}
defstruct [
:status,
:last_status_change,
:breakdown
]
@impl true
def cast(generic_map) do
%__MODULE__{
status: generic_map["status"],
last_status_change: generic_map["last_status_change"],
breakdown: Castable.cast(Breakdown, generic_map["breakdown"])
}
end
end
defmodule ItemLogins do
@moduledoc """
[Plaid institution item logins status schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-item-logins)
"""
@behaviour Castable
@type t :: %__MODULE__{
status: String.t(),
last_status_change: String.t(),
breakdown: Breakdown.t()
}
defstruct [
:status,
:last_status_change,
:breakdown
]
@impl true
def cast(generic_map) do
%__MODULE__{
status: generic_map["status"],
last_status_change: generic_map["last_status_change"],
breakdown: Castable.cast(Breakdown, generic_map["breakdown"])
}
end
end
defmodule TransactionsUpdates do
@moduledoc """
[Plaid institution transactions updates status schema.](https://plaid.com/docs/api/institutions/#institutions-get-response-transactions-updates)
"""
@behaviour Castable
@type t :: %__MODULE__{
status: String.t(),
last_status_change: String.t(),
breakdown: Breakdown.t()
}
defstruct [
:status,
:last_status_change,
:breakdown
]
@impl true
def cast(generic_map) do
%__MODULE__{
status: generic_map["status"],
last_status_change: generic_map["last_status_change"],
breakdown: Castable.cast(Breakdown, generic_map["breakdown"])
}
end
end
@type t :: %__MODULE__{
auth: Auth.t(),
balance: Balance.t(),
health_incidents: [HealthIncident.t()] | nil,
identity: Identity.t(),
investments_updates: InvestmentsUpdates.t(),
item_logins: ItemLogins.t(),
transactions_updates: TransactionsUpdates.t()
}
defstruct [
:auth,
:balance,
:health_incidents,
:identity,
:investments_updates,
:item_logins,
:transactions_updates
]
@impl true
def cast(generic_map) do
%__MODULE__{
auth: Castable.cast(Auth, generic_map["auth"]),
balance: Castable.cast(Balance, generic_map["balance"]),
health_incidents: Castable.cast_list(HealthIncident, generic_map["health_incidents"]),
identity: Castable.cast(Identity, generic_map["identity"]),
investments_updates: Castable.cast(InvestmentsUpdates, generic_map["investments_updates"]),
item_logins: Castable.cast(ItemLogins, generic_map["item_logins"]),
transactions_updates:
Castable.cast(TransactionsUpdates, generic_map["transactions_updates"])
}
end
end | lib/plaid/institution/status.ex | 0.845273 | 0.430088 | status.ex | starcoder |
defmodule MatrixOperation do
@moduledoc """
Documentation for Matrix operation library.
"""
@doc """
Gets the numbers of rows and columns of a matrix.
## Examples
iex> MatrixOperation.row_column_matrix([[3, 2, 3], [2, 1, 2]])
[2, 3]
"""
def row_column_matrix(a) when is_list(hd(a)) do
columns_number = Enum.map(a, &row_column_matrix_sub(&1, 0))
max_number = Enum.max(columns_number)
if(max_number == Enum.min(columns_number), do: [length(a), max_number], else: nil)
end
def row_column_matrix(_) do
nil
end
defp row_column_matrix_sub(row_a, i) when i != length(row_a) do
if(is_number(Enum.at(row_a, i)), do: row_column_matrix_sub(row_a, i + 1), else: nil)
end
defp row_column_matrix_sub(row_a, i) when i == length(row_a) do
i
end
@doc """
Gets the n×n unit (identity) matrix.
## Examples
iex> MatrixOperation.unit_matrix(3)
[[1, 0, 0], [0, 1, 0], [0, 0, 1]]
"""
def unit_matrix(n) when n > 0 and is_integer(n) do
index_list = Enum.to_list(1..n)
Enum.map(index_list, fn x -> Enum.map(index_list, &unit_matrix_sub(x, &1)) end)
end
defp unit_matrix_sub(i, j) when i == j do
1
end
defp unit_matrix_sub(_i, _j) do
0
end
@doc """
Gets an element of a matrix by its one-based [row, column] index.
## Examples
iex> MatrixOperation.get_one_element([[1, 2, 3], [4, 5, 6], [7, 8, 9] ], [1, 1])
1
"""
def get_one_element(matrix, [row_index, column_index]) do
matrix
|> Enum.at(row_index - 1)
|> Enum.at(column_index - 1)
end
@doc """
Gets a row of a matrix by its one-based index.
## Examples
iex> MatrixOperation.get_one_row([[1, 2, 3], [4, 5, 6], [7, 8, 9] ], 1)
[1, 2, 3]
"""
def get_one_row(matrix, row_index) do
matrix
|> Enum.at(row_index - 1)
end
@doc """
Gets a column of a matrix by its one-based index.
## Examples
iex> MatrixOperation.get_one_column([[1, 2, 3], [4, 5, 6], [7, 8, 9] ], 1)
[1, 4, 7]
"""
def get_one_column(matrix, column_index) do
matrix
|> transpose
|> Enum.at(column_index - 1)
end
@doc """
Deletes a row of a matrix by its one-based index.
## Examples
iex> MatrixOperation.delete_one_row([[1, 2, 3], [4, 5, 6], [7, 8, 9]], 3)
[[1, 2, 3], [4, 5, 6]]
"""
def delete_one_row(matrix, delete_index) do
matrix
|> Enum.with_index()
|> Enum.reject(fn {_, i} -> i == delete_index - 1 end)
|> Enum.map(fn {x, _} -> x end)
end
@doc """
Transpose of a matrix
## Examples
iex> MatrixOperation.transpose([[1.0, 2.0], [3.0, 4.0]])
[[1.0, 3.0], [2.0, 4.0]]
"""
def transpose(a) do
Enum.zip(a)
|> Enum.map(&Tuple.to_list(&1))
end
@doc """
Trace of a matrix
## Examples
iex> MatrixOperation.trace([[1.0, 2.0], [3.0, 4.0]])
5.0
"""
def trace(a) do
[row, column] = row_column_matrix(a)
a_index = add_index(a)
Enum.map(a_index, &trace_sub(&1, row, column))
|> Enum.sum()
end
defp trace_sub(_, row, column) when row != column do
nil
end
defp trace_sub([index, row_list], _, _) do
Enum.at(row_list, index - 1)
end
@doc """
Gets the determinant of an n×n square matrix.
## Examples
iex> MatrixOperation.determinant([[1, 2, 1], [2, 1, 0], [1, 1, 2]])
-5
iex> MatrixOperation.determinant([[1, 2, 1, 1], [2, 1, 0, 1], [1, 1, 2, 1], [1, 2, 3, 4]])
-13
iex> MatrixOperation.determinant([ [3,1,1,2,1], [5,1,3,4,1], [2,0,1,0,1], [1,3,2,1,1], [1,1,1,1,1] ])
-14
"""
def determinant(a) do
determinant_sub(1, a)
end
# minor_matrix
defp minor_matrix(a_with_index, row) do
(a_with_index -- [row])
|> Enum.map(&Enum.at(&1, 1))
|> Enum.map(&Enum.drop(&1, 1))
end
# 1×1 matrix (not supported, yields nil)
defp determinant_sub(_, a) when length(a) == 1 do
nil
end
# 2×2 matrix
defp determinant_sub(co, [[a11, a12], [a21, a22]]) do
co * (a11 * a22 - a12 * a21)
end
# 3×3 or over matrix
defp determinant_sub(co, a) do
a_with_index = add_index(a)
Enum.map(
a_with_index,
&determinant_sub(
(-1 + 2 * rem(hd(&1), 2)) * co * hd(Enum.at(&1, 1)),
minor_matrix(a_with_index, &1)
)
)
|> Enum.sum()
end
# add index
defp add_index(a) do
Stream.iterate(1, &(&1 + 1))
|> Enum.zip(a)
|> Enum.map(&(&1 |> Tuple.to_list()))
end
@doc """
Cramer's rule
## Examples
iex> MatrixOperation.cramer([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [[1], [0], [0]], 0)
1.0
"""
def cramer(a, vertical_vec, select_index) do
[t] = transpose(vertical_vec)
det_a = determinant(a)
cramer_sub(a, t, select_index, det_a)
end
defp cramer_sub(_, _, _, nil), do: nil
defp cramer_sub(_, _, _, 0), do: nil
defp cramer_sub(a, t, select_index, det_a) do
rep_det_a = transpose(a) |> replace_element_in_list(select_index, t, 0, []) |> determinant
rep_det_a / det_a
end
defp replace_element_in_list(list, i, replace_element, i, output) when i < length(list) do
replace_element_in_list(list, i, replace_element, i + 1, output ++ [replace_element])
end
defp replace_element_in_list(list, select_index, replace_element, i, output)
when i < length(list) do
replace_element_in_list(
list,
select_index,
replace_element,
i + 1,
output ++ [Enum.at(list, i)]
)
end
defp replace_element_in_list(list, _select_index, _replace_element, i, output)
when i == length(list),
do: output
@doc """
Solves a system of linear equations using Cramer's rule.
## Examples
iex> MatrixOperation.linear_equations([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [[1], [0], [0]])
[1.0, 0.0, 0.0]
iex> MatrixOperation.linear_equations([[0, -2, 1], [-1, 1, -4], [3, 3, 1]], [[3], [-7], [4]])
[2.0, -1.0, 1.0]
"""
def linear_equations(a, vertical_vec) do
[t] = transpose(vertical_vec)
if determinant(a) == 0 do
nil
else
linear_equations_sub(a, t, 0, [])
end
end
defp linear_equations_sub(a, t, i, output) when i < length(a) do
vertical_vec = transpose([t])
linear_equations_sub(a, t, i + 1, output ++ [cramer(a, vertical_vec, i)])
end
defp linear_equations_sub(a, _t, i, output) when i == length(a) do
output
end
@doc """
Multiplies a matrix or vector by a constant.
## Examples
iex> MatrixOperation.const_multiple(-1, [1.0, 2.0, 3.0])
[-1.0, -2.0, -3.0]
iex> MatrixOperation.const_multiple(2, [[1, 2, 3], [2, 2, 2], [3, 8, 9]])
[[2, 4, 6], [4, 4, 4], [6, 16, 18]]
"""
def const_multiple(const, a) when is_number(a) do
const * a
end
def const_multiple(const, a) when is_list(a) do
Enum.map(a, &const_multiple(const, &1))
end
@doc """
Inverse Matrix
## Examples
iex> MatrixOperation.inverse_matrix([[1, 1, -1], [-2, -1, 1], [-1, -2, 1]])
[[-1.0, -1.0, 0.0], [-1.0, 0.0, -1.0], [-3.0, -1.0, -1.0]]
"""
def inverse_matrix(a) when is_list(hd(a)) do
det_a = determinant(a)
create_index_matrix(a)
|> Enum.map(&map_index_row(a, det_a, &1))
|> transpose
end
def inverse_matrix(_) do
nil
end
defp create_index_matrix(a) do
index_list = Enum.to_list(1..length(a))
Enum.map(index_list, fn x -> Enum.map(index_list, &[x, &1]) end)
end
defp map_index_row(_, 0, _) do
nil
end
defp map_index_row(a, det_a, row) do
Enum.map(row, &minor_matrix(a, det_a, &1))
end
# minor_matrix
defp minor_matrix(a, det_a, [row_number, column_number]) do
det_temp_a =
delete_one_row(a, row_number)
|> transpose
|> delete_one_row(column_number)
|> determinant
if(rem(row_number + column_number, 2) == 0,
do: det_temp_a / det_a,
else: -1 * det_temp_a / det_a
)
end
@doc """
Matrix product
## Examples
iex> MatrixOperation.product([[3, 2, 3], [2, 1, 2]], [[2, 3], [2, 1], [3, 5]])
[[19, 26], [12, 17]]
"""
def product(a, b) do
check_product(a, b)
end
defp check_product(a, b) do
column_number_a = row_column_matrix(a) |> Enum.at(1)
row_number_b = row_column_matrix(b) |> Enum.at(0)
if(column_number_a == row_number_b, do: product_sub(a, b), else: nil)
end
defp product_sub(a, b) do
Enum.map(a, fn row_a ->
transpose(b)
|> Enum.map(&inner_product(row_a, &1))
end)
end
defp inner_product(row_a, column_b) do
Enum.zip(row_a, column_b)
|> Enum.map(&Tuple.to_list(&1))
|> Enum.map(&Enum.reduce(&1, fn x, acc -> x * acc end))
|> Enum.sum()
end
@doc """
Matrix addition
## Examples
iex> MatrixOperation.add([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [3, 2, 2]])
[[5, 5, 4], [5, 3, 4]]
"""
def add(a, b) do
check_add(a, b)
end
defp check_add(a, b) do
row_column_a = row_column_matrix(a)
row_column_b = row_column_matrix(b)
if(row_column_a == row_column_b, do: add_sub(a, b), else: nil)
end
defp add_sub(a, b) do
Enum.zip(a, b)
|> Enum.map(fn {x, y} ->
Enum.zip(x, y)
|> Enum.map(&Tuple.to_list(&1))
|> Enum.map(&Enum.reduce(&1, fn x, acc -> x + acc end))
end)
end
@doc """
Hadamard product
## Examples
iex> MatrixOperation.hadamard_product([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [3, 2, 2]])
[[6, 6, 3], [6, 2, 4]]
"""
def hadamard_product(a, b) do
Enum.zip(a, b)
|> Enum.map(fn {x, y} -> hadamard_product_sub(x, y) end)
end
defp hadamard_product_sub(row_a, row_b) do
Enum.zip(row_a, row_b)
|> Enum.map(&Tuple.to_list(&1))
|> Enum.map(&Enum.reduce(&1, fn x, acc -> x * acc end))
end
@doc """
Matrix subtraction
## Examples
iex> MatrixOperation.subtract([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [3, 2, 2]])
[[1, -1, 2], [-1, -1, 0]]
"""
def subtract(a, b) do
check_subtract(a, b)
end
defp check_subtract(a, b) do
row_column_a = row_column_matrix(a)
row_column_b = row_column_matrix(b)
if(row_column_a == row_column_b, do: subtract_sub(a, b), else: nil)
end
defp subtract_sub(a, b) do
Enum.zip(a, b)
|> Enum.map(fn {x, y} ->
Enum.zip(x, y)
|> Enum.map(&Tuple.to_list(&1))
|> Enum.map(&Enum.reduce(&1, fn x, acc -> acc - x end))
end)
end
@doc """
Hadamard division
## Examples
iex> MatrixOperation.hadamard_division([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [3, 2, 2]])
[[1.5, 0.6666666666666666, 3.0], [0.6666666666666666, 0.5, 1.0]]
"""
def hadamard_division(a, b) do
Enum.zip(a, b)
|> Enum.map(fn {x, y} -> hadamard_division_sub(x, y) end)
end
defp hadamard_division_sub(row_a, row_b) do
Enum.zip(row_a, row_b)
|> Enum.map(&Tuple.to_list(&1))
|> Enum.map(&Enum.reduce(&1, fn x, acc -> acc / x end))
end
@doc """
Hadamard power
## Examples
iex> MatrixOperation.hadamard_power([[3, 2, 3], [2, 1, 2]], 2)
[[9.0, 4.0, 9.0], [4.0, 1.0, 4.0]]
"""
def hadamard_power(a, n) do
Enum.map(a, &Enum.map(&1, fn x -> :math.pow(x, n) end))
end
@doc """
Tensor product
## Examples
iex> MatrixOperation.tensor_product([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [2, 1, 2], [3, 5, 3]])
[
[
[[6, 9, 3], [6, 3, 6], [9, 15, 9]],
[[4, 6, 2], [4, 2, 4], [6, 10, 6]],
[[6, 9, 3], [6, 3, 6], [9, 15, 9]]
],
[
[[4, 6, 2], [4, 2, 4], [6, 10, 6]],
[[2, 3, 1], [2, 1, 2], [3, 5, 3]],
[[4, 6, 2], [4, 2, 4], [6, 10, 6]]
]
]
"""
def tensor_product(a, b) when is_list(a) do
Enum.map(a, &tensor_product(&1, b))
end
def tensor_product(a, b) when is_number(a) do
const_multiple(a, b)
end
@doc """
Eigenvalues of a 2×2 or 3×3 matrix, computed algebraically.
## Examples
iex> MatrixOperation.eigenvalue([[3, 1], [2, 2]])
[4.0, 1.0]
iex> MatrixOperation.eigenvalue([[6, -3], [4, -1]])
[3.0, 2.0]
iex> MatrixOperation.eigenvalue([[1, 1, 1], [1, 2, 1], [1, 2, 3]])
[4.561552806429505, 0.43844714673139706, 1.0000000468390973]
iex> MatrixOperation.eigenvalue([[1, 2, 3], [2, 1, 3], [3, 2, 1]])
[5.999999995559568, -2.000000031083018, -0.99999996447655]
"""
# 2×2 algebraic method
def eigenvalue([[a11, a12], [a21, a22]]) do
quadratic_formula(1, -a11 - a22, a11 * a22 - a12 * a21)
end
# 3×3 algebraic method
def eigenvalue([[a11, a12, a13], [a21, a22, a23], [a31, a32, a33]]) do
a = -1
b = a11 + a22 + a33
c = a21 * a12 + a13 * a31 + a32 * a23 - a11 * a22 - a11 * a33 - a22 * a33
d =
a11 * a22 * a33 + a12 * a23 * a31 + a13 * a32 * a21 - a11 * a32 * a23 - a22 * a31 * a13 -
a33 * a21 * a12
cubic_formula(a, b, c, d)
end
def eigenvalue(_a) do
"2×2 or 3×3 matrix only"
end
defp quadratic_formula(a, b, c) do
quadratic_formula_sub(a, b, c)
end
defp quadratic_formula_sub(a, b, c) when b * b < 4 * a * c do
nil
end
defp quadratic_formula_sub(a, b, c) do
d = :math.sqrt(b * b - 4 * a * c)
[0.5 * (-b + d) / a, 0.5 * (-b - d) / a]
end
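# Cardano's method for the real roots of a*x^3 + b*x^2 + c*x + d = 0.
# The guard below rejects a negative cubic discriminant (two of the
# roots would be complex) and returns nil in that case.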
def cubic_formula(a, b, c, d)
when -4 * a * c * c * c - 27 * a * a * d * d + b * b * c * c + 18 * a * b * c * d -
4 * b * b * b * d < 0 do
nil
end
def cubic_formula(a, b, c, d) do
ba = b / a
ca = c / a
da = d / a
const1 = (27 * da + 2 * ba * ba * ba - 9 * ba * ca) / 54
const2 = cubic_formula_sub(const1 * const1 + :math.pow((3 * ca - ba * ba) / 9, 3))
const_plus = csqrt([-const1 + Enum.at(const2, 0), Enum.at(const2, 1)], 3)
const_minus = csqrt([-const1 - Enum.at(const2, 0), -Enum.at(const2, 1)], 3)
root3 = :math.sqrt(3)
x1 = Enum.at(const_plus, 0) + Enum.at(const_minus, 0) - ba / 3
x2 =
-0.5 * Enum.at(const_plus, 0) - 0.5 * root3 * Enum.at(const_plus, 1) -
0.5 * Enum.at(const_minus, 0) + 0.5 * root3 * Enum.at(const_minus, 1) - ba / 3
x3 =
-0.5 * Enum.at(const_plus, 0) + 0.5 * root3 * Enum.at(const_plus, 1) -
0.5 * Enum.at(const_minus, 0) - 0.5 * root3 * Enum.at(const_minus, 1) - ba / 3
[x1, x2, x3]
end
def cubic_formula_sub(x) when x < 0 do
[0, :math.sqrt(-x)]
end
def cubic_formula_sub(x) do
[:math.sqrt(x), 0]
end
def atan(x) when x < 0 do
y = atan(-x)
-1 * y
end
def atan(x) do
atan_sub(x, 0, 0)
end
def atan_sub(x, z, s) when z < x do
del = 0.0000001
z = z + del
s = s + del / (z * z + 1)
atan_sub(x, z, s)
end
def atan_sub(_, _, s) do
s
end
def csqrt([re, _im], _n) when re == 0 do
nil
end
def csqrt([re, im], n) do
r = :math.pow(re * re + im * im, 0.5 / n)
re2 = r * :math.cos(atan(im / re) / n)
im2 = r * :math.sin(atan(im / re) / n)
[re2, im2]
end
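# csqrt/2 computes an n-th root of the complex number [re, im] in polar
# form; the angle comes from atan(im / re), so re == 0 is rejected by
# the guard above.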
@doc """
Power iteration method (maximum eigenvalue and its eigenvector)
## Examples
iex> MatrixOperation.power_iteration([[3, 1], [2, 2]], 100)
[4.0, [2.8284271247461903, 2.8284271247461903]]
iex> MatrixOperation.power_iteration([[1, 1, 2], [0, 2, -1], [0, 0, 3]], 100)
[3.0, [1.0, -2.0, 2.0]]
"""
def power_iteration(a, max_k) do
init_vec = random_column(length(a))
xk_pre = power_iteration_sub(a, init_vec, max_k)
# eigen vector
[xk_vec] = product(a, xk_pre) |> transpose
[xk_pre_vec] = transpose(xk_pre)
# eigen value
eigen_value = inner_product(xk_vec, xk_vec) / inner_product(xk_vec, xk_pre_vec)
[eigen_value, xk_vec]
end
defp random_column(num) when num > 1 do
tmp = Enum.reduce(1..num, [], fn _, acc -> [Enum.random(0..50000) / 10000 | acc] end)
transpose([tmp])
end
defp random_column(_num) do
nil
end
defp power_iteration_sub(a, v, max_k) do
# Normarization is for overflow suppression
Enum.reduce(1..max_k, v, fn _, acc ->
vp = product(a, acc)
[vpt] = transpose(vp)
const_multiple(1 / :math.sqrt(inner_product(vpt, vpt)), vp)
end)
end
end | lib/matrix_operation.ex | 0.81134 | 0.720319 | matrix_operation.ex | starcoder |
defmodule EdgeDB.Sandbox do
@moduledoc since: "0.2.0"
@moduledoc """
Custom connection for tests that involve modifying the database through the driver.
This connection, when started, wraps the actual connection to EdgeDB in a transaction using
the `START TRANSACTION` statement. Further calls to `EdgeDB.transaction/3` will then execute
a `DECLARE SAVEPOINT` statement instead of `START TRANSACTION`. This connection
doesn't affect the availability of the `EdgeDB.subtransaction/2` and `EdgeDB.subtransaction!/2` calls;
you can continue to use them with this module.
To use this module in tests, change the configuration of the `:edgedb` application in the `config/tests.exs`:
```elixir
config :edgedb,
connection: EdgeDB.Sandbox
```
Then modify the test case to initialize the sandbox when you run the test and to clean the sandbox
at the end of the test:
```elixir
defmodule MyApp.TestCase do
use ExUnit.CaseTemplate
# other stuff for this case (e.g. Phoenix setup, Plug configuration, etc.)
setup _context do
EdgeDB.Sandbox.initialize(MyApp.EdgeDB)
on_exit(fn ->
EdgeDB.Sandbox.clean(MyApp.EdgeDB)
end)
:ok
end
end
```
"""
use DBConnection
alias EdgeDB.Connection
alias EdgeDB.Connection.{
InternalRequest,
QueryBuilder
}
defmodule State do
@moduledoc false
defstruct [
:internal_state,
:savepoint,
conn_state: :not_in_transaction
]
@type t() :: %__MODULE__{
conn_state: EdgeDB.Protocol.Enums.TransactionState.t(),
internal_state: Connection.State.t(),
savepoint: String.t() | nil
}
end
@doc """
Wrap a connection in a transaction.
"""
@spec initialize(GenServer.server()) :: :ok
def initialize(conn) do
DBConnection.execute!(conn, %InternalRequest{request: :start_sandbox_transaction}, [], [])
:ok
end
@doc """
Roll back the sandbox transaction wrapping the connection.
"""
@spec clean(GenServer.server()) :: :ok
def clean(conn) do
DBConnection.execute!(conn, %InternalRequest{request: :rollback_sandbox_transaction}, [], [])
:ok
end
@impl DBConnection
def checkout(%State{} = state) do
{:ok, state}
end
@impl DBConnection
def connect(opts \\ []) do
with {:ok, internal_state} <- Connection.connect(opts) do
{:ok, %State{internal_state: internal_state}}
end
end
@impl DBConnection
def disconnect(exc, %State{} = state) do
{:ok, state} = rollback_transaction(state)
Connection.disconnect(exc, state.internal_state)
end
@impl DBConnection
def handle_begin(_opts, %State{conn_state: conn_state} = state)
when conn_state in [:in_transaction, :in_failed_transaction] do
{status(state), state}
end
@impl DBConnection
def handle_begin(_opts, %State{} = state) do
declare_savepoint(state)
end
@impl DBConnection
def handle_close(query, opts, %State{} = state) do
with {reason, result, internal_state} <-
Connection.handle_close(query, opts, state.internal_state) do
{reason, result, %State{state | internal_state: internal_state}}
end
end
@impl DBConnection
def handle_commit(_opts, %State{conn_state: conn_state} = state)
when conn_state in [:not_in_transaction, :in_failed_transaction] do
{status(state), state}
end
@impl DBConnection
def handle_commit(_opts, %State{} = state) do
release_savepoint(state)
end
@impl DBConnection
def handle_deallocate(_query, _cursor, _opts, state) do
exc = EdgeDB.Error.interface_error("handle_deallocate/4 callback hasn't been implemented")
{:error, exc, state}
end
@impl DBConnection
def handle_declare(_query, _params, _opts, state) do
exc = EdgeDB.Error.interface_error("handle_declare/4 callback hasn't been implemented")
{:error, exc, state}
end
@impl DBConnection
def handle_execute(
%InternalRequest{request: :start_sandbox_transaction} = request,
_params,
_opts,
%State{} = state
) do
with {:ok, state} <- start_transaction(state) do
{:ok, request, :ok, state}
end
end
@impl DBConnection
def handle_execute(
%InternalRequest{request: :rollback_sandbox_transaction} = request,
_params,
_opts,
%State{} = state
) do
with {:ok, state} <- rollback_transaction(state) do
{:ok, request, :ok, state}
end
end
@impl DBConnection
def handle_execute(query, params, opts, %State{} = state) do
case Connection.handle_execute(query, params, opts, state.internal_state) do
{:ok, query, result, internal_state} ->
{:ok, query, result, %State{state | internal_state: internal_state}}
{reason, exc, internal_state} ->
{reason, exc, %State{state | internal_state: internal_state}}
end
end
@impl DBConnection
def handle_fetch(_query, _cursor, _opts, state) do
exc = EdgeDB.Error.interface_error("handle_fetch/4 callback hasn't been implemented")
{:error, exc, state}
end
@impl DBConnection
def handle_prepare(query, opts, state) do
case Connection.handle_prepare(query, opts, state.internal_state) do
{:ok, query, internal_state} ->
{:ok, query, %State{state | internal_state: internal_state}}
{reason, exc, internal_state} ->
{reason, exc, %State{state | internal_state: internal_state}}
end
end
@impl DBConnection
def handle_rollback(_opts, %State{conn_state: conn_state} = state)
when conn_state == :not_in_transaction do
{status(state), state}
end
@impl DBConnection
def handle_rollback(_opts, state) do
rollback_to_savepoint(state)
end
@impl DBConnection
def handle_status(_opts, state) do
{status(state), state}
end
@impl DBConnection
def ping(state) do
{:ok, state}
end
defp start_transaction(state) do
case Connection.handle_begin([], state.internal_state) do
{:ok, _result, internal_state} ->
{:ok, %State{state | conn_state: :not_in_transaction, internal_state: internal_state}}
{_status, internal_state} ->
exc = EdgeDB.Error.client_error("unable to start transaction for sandbox connection")
{:error, exc, %State{conn_state: :not_in_transaction, internal_state: internal_state}}
{:disconnect, exc, internal_state} ->
{:disconnect, exc,
%State{conn_state: :not_in_transaction, internal_state: internal_state}}
end
end
defp rollback_transaction(
%State{internal_state: %Connection.State{server_state: :not_in_transaction}} = state
) do
{:ok, state}
end
defp rollback_transaction(%State{} = state) do
case Connection.handle_rollback([], state.internal_state) do
{:ok, _result, internal_state} ->
{:ok, %State{state | internal_state: internal_state}}
{_status, internal_state} ->
exc = EdgeDB.Error.client_error("unable to rollback transaction for sandbox connection")
{:error, exc, %State{conn_state: :in_failed_transaction, internal_state: internal_state}}
{:disconnect, exc, internal_state} ->
exc =
EdgeDB.Error.client_error(
"unable to rollback transaction for sandbox connection: #{exc.message}"
)
{:disconnect, exc,
%State{conn_state: :in_failed_transaction, internal_state: internal_state}}
end
end
defp declare_savepoint(%State{} = state) do
{:ok, _request, next_savepoint_id, internal_state} =
Connection.handle_execute(
%InternalRequest{request: :next_savepoint},
[],
[],
state.internal_state
)
savepoint_name = "edgedb_elixir_sandbox_#{next_savepoint_id}"
statement = QueryBuilder.declare_savepoint_statement(savepoint_name)
case Connection.handle_execute(
%InternalRequest{request: :execute_script_flow},
%{
statement: statement,
headers: %{}
},
[],
internal_state
) do
{:ok, _request, result, internal_state} ->
{:ok, result,
%State{
state
| conn_state: :in_transaction,
internal_state: internal_state,
savepoint: savepoint_name
}}
{reason, exc, internal_state} ->
{reason, exc, %State{state | internal_state: internal_state}}
end
end
defp release_savepoint(%State{} = state) do
statement = QueryBuilder.release_savepoint_statement(state.savepoint)
case Connection.handle_execute(
%InternalRequest{request: :execute_script_flow},
%{
statement: statement,
headers: %{}
},
[],
state.internal_state
) do
{:ok, _request, result, internal_state} ->
{:ok, result,
%State{
state
| conn_state: :not_in_transaction,
internal_state: internal_state,
savepoint: nil
}}
{reason, exc, internal_state} ->
{reason, exc, %State{state | internal_state: internal_state}}
end
end
defp rollback_to_savepoint(%State{} = state) do
statement = QueryBuilder.rollback_to_savepoint_statement(state.savepoint)
case Connection.handle_execute(
%InternalRequest{request: :execute_script_flow},
%{
statement: statement,
headers: %{}
},
[],
state.internal_state
) do
{:ok, _request, result, internal_state} ->
{:ok, result,
%State{
state
| conn_state: :not_in_transaction,
internal_state: internal_state,
savepoint: nil
}}
{reason, exc, internal_state} ->
{reason, exc, %State{state | internal_state: internal_state}}
end
end
defp status(%State{conn_state: :not_in_transaction}) do
:idle
end
defp status(%State{conn_state: :in_transaction}) do
:transaction
end
defp status(%State{conn_state: :in_failed_transaction}) do
:error
end
end | lib/edgedb/sandbox.ex | 0.855535 | 0.773794 | sandbox.ex | starcoder |
defmodule ExAliyunOts.PlainBuffer do
@moduledoc false
@header 0x75
# tag type
@tag_row_pk 0x1
@tag_row_data 0x2
@tag_cell 0x3
@tag_cell_name 0x4
@tag_cell_value 0x5
@tag_cell_type 0x6
@tag_cell_timestamp 0x7
@tag_delete_row_marker 0x8
@tag_row_checksum 0x9
@tag_cell_checksum 0x0A
# cell op type
@op_delete_all_version 0x1
@op_delete_one_version 0x3
@op_increment 0x4
# variant type
@vt_integer 0x0
@vt_double 0x1
@vt_boolean 0x2
@vt_string 0x3
# @vt_null 0x6
@vt_blob 0x7
@vt_inf_min 0x9
@vt_inf_max 0xA
@vt_auto_increment 0xB
# other
@little_endian_32_size 4
@little_endian_64_size 8
@row_data_marker <<@tag_row_data::integer, @tag_cell::integer, @tag_cell_name::integer>>
@pk_tag_marker <<@tag_row_pk::integer, @tag_cell::integer, @tag_cell_name::integer>>
@sum_endian_64_size @little_endian_64_size + 1
alias ExAliyunOts.CRC
alias ExAliyunOts.Const.{PKType, OperationType}
require PKType
require OperationType
require PKType
import ExAliyunOts.Logger, only: [debug: 1]
def serialize_for_put_row(primary_keys, attribute_columns) do
{buffer, row_checksum} = header() |> primary_keys(primary_keys) |> columns(attribute_columns)
row_checksum = CRC.crc_int8(row_checksum, 0)
process_row_checksum(buffer, row_checksum)
end
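# For example, one primary key plus one attribute column (hypothetical
# values) serializes into the plain buffer binary for a PutRow request:
#   serialize_for_put_row([{"id", 1}], [{"name", "name_a"}])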
def serialize_primary_keys(primary_keys) do
{buffer, row_checksum} = header() |> primary_keys(primary_keys)
row_checksum = CRC.crc_int8(row_checksum, 0)
process_row_checksum(buffer, row_checksum)
end
def serialize_column_value(value) when is_boolean(value) do
[byte_to_binary(@vt_boolean), boolean_to_integer(value)]
end
def serialize_column_value(value) when is_integer(value) do
[byte_to_binary(@vt_integer), <<value::little-integer-size(64)>>]
end
def serialize_column_value(value) when is_binary(value) do
value_size = byte_size(value)
[byte_to_binary(@vt_string), <<value_size::little-integer-size(32)>>, value]
end
def serialize_column_value(value) when is_bitstring(value) do
value_size = byte_size(value)
[byte_to_binary(@vt_blob), <<value_size::little-integer-size(32)>>, value]
end
def serialize_column_value(value) when is_float(value) do
value_to_binary = <<value::little-float>>
[byte_to_binary(@vt_double), value_to_binary]
end
def serialize_column_value(value) do
raise ExAliyunOts.RuntimeError, "Unsupported column for value: #{inspect(value)}"
end
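# Assuming byte_to_binary/1 (defined elsewhere in this module) emits a
# single byte, e.g.:
#   serialize_column_value(10) #=> [<<0>>, <<10, 0, 0, 0, 0, 0, 0, 0>>]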
def serialize_for_update_row(primary_keys, attribute_columns) when is_map(attribute_columns) do
# validation
for {key, value} <- attribute_columns do
if key not in OperationType.updates_supported() do
raise ExAliyunOts.RuntimeError,
"Unsupported update type: #{inspect(key)}, in attribute_columns: #{
inspect(attribute_columns)
}"
end
if not is_list(value) do
raise ExAliyunOts.RuntimeError,
"Unsupported update value: #{inspect(value)} to key: #{inspect(key)}, expect value as list"
end
end
{buffer, row_checksum} =
header() |> primary_keys(primary_keys) |> update_grouping_columns(attribute_columns)
row_checksum = CRC.crc_int8(row_checksum, 0)
process_row_checksum(buffer, row_checksum)
end
def serialize_for_delete_row(primary_keys) do
{buffer, row_checksum} = header() |> primary_keys(primary_keys) |> process_delete_marker()
process_row_checksum(buffer, row_checksum)
end
def deserialize_row(<<>>) do
nil
end
def deserialize_row(row) do
debug(fn ->
[
"** deserialize_row:\n",
inspect(row, limit: :infinity)
]
end)
row |> deserialize_filter_header() |> deserialize_row_data()
end
def deserialize_rows(<<>>) do
nil
end
def deserialize_rows(rows) do
debug(fn ->
[
"** deserialize_rows:\n",
inspect(rows, limit: :infinity)
]
end)
rows |> deserialize_filter_header() |> deserialize_rows_data()
end
defp header() do
# the plain buffer starts with the 4-byte little-endian header 0x75; row_checksum is seeded with 0
{<<@header::little-integer-size(32)>>, 0}
end
defp primary_keys({buffer, row_checksum}, primary_keys) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_row_pk)::bitstring>>
do_primary_keys(primary_keys, buffer, row_checksum)
end
defp do_primary_keys([], buffer, row_checksum) do
{buffer, row_checksum}
end
defp do_primary_keys([primary_key | rest], buffer, row_checksum) do
{buffer, row_checksum} = primary_key_column(primary_key, {buffer, row_checksum})
do_primary_keys(rest, buffer, row_checksum)
end
defp columns({buffer, row_checksum}, columns) when is_list(columns) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_row_data)::bitstring>>
do_columns(columns, buffer, row_checksum)
end
defp do_columns([], buffer, row_checksum) do
{buffer, row_checksum}
end
defp do_columns([column | rest], buffer, row_checksum) do
{buffer, row_checksum} = process_column(column, {buffer, row_checksum})
do_columns(rest, buffer, row_checksum)
end
defp update_grouping_columns({buffer, row_checksum}, grouping_columns)
when is_map(grouping_columns) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_row_data)::bitstring>>
grouping_columns
|> Map.keys()
|> Enum.reduce({buffer, row_checksum}, fn update_type, acc ->
columns = Map.get(grouping_columns, update_type)
Enum.reduce(columns, acc, fn column, acc_inner ->
process_update_column(acc_inner, update_type, column)
end)
end)
end
defp primary_key_column({pk_name, pk_value}, {buffer, row_checksum}) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_cell)::bitstring>>
{buffer, cell_checksum} =
{buffer, 0}
|> process_cell_name(pk_name)
|> process_primary_key_value(pk_value)
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_checksum)::bitstring,
byte_to_binary(cell_checksum)::bitstring>>
row_checksum = CRC.crc_int8(row_checksum, cell_checksum)
{buffer, row_checksum}
end
defp primary_key_column(primary_keys, {buffer, row_checksum}) when is_list(primary_keys) do
# nested primary_keys are used for batch operation with multiple pks
Enum.reduce(primary_keys, {buffer, row_checksum}, fn {pk_name, pk_value}, acc ->
primary_key_column({pk_name, pk_value}, acc)
end)
end
defp primary_key_column(primary_keys, _) do
raise ExAliyunOts.RuntimeError, "Invalid primary_keys: #{inspect(primary_keys)}"
end
defp process_cell_name({buffer, cell_checksum}, name) do
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_name)::bitstring,
<<byte_size(name)::little-integer-size(32)>>, name::bitstring>>
cell_checksum = CRC.crc_string(cell_checksum, name)
{buffer, cell_checksum}
end
defp process_primary_key_value({buffer, cell_checksum}, value) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_cell_value)::bitstring>>
do_process_primary_key_value({buffer, cell_checksum}, value)
end
defp do_process_primary_key_value({buffer, cell_checksum}, value)
when value == PKType.inf_min()
when value == :inf_min do
buffer =
<<buffer::bitstring, <<1::little-integer-size(32)>>,
byte_to_binary(@vt_inf_min)::bitstring>>
cell_checksum = CRC.crc_int8(cell_checksum, @vt_inf_min)
{buffer, cell_checksum}
end
defp do_process_primary_key_value({buffer, cell_checksum}, value)
when value == PKType.inf_max()
when value == :inf_max do
buffer =
<<buffer::bitstring, <<1::little-integer-size(32)>>,
byte_to_binary(@vt_inf_max)::bitstring>>
cell_checksum = CRC.crc_int8(cell_checksum, @vt_inf_max)
{buffer, cell_checksum}
end
defp do_process_primary_key_value({buffer, cell_checksum}, value)
when value == PKType.auto_increment() do
buffer =
<<buffer::bitstring, <<1::little-integer-size(32)>>,
byte_to_binary(@vt_auto_increment)::bitstring>>
cell_checksum = CRC.crc_int8(cell_checksum, @vt_auto_increment)
{buffer, cell_checksum}
end
defp do_process_primary_key_value({buffer, cell_checksum}, value) when is_integer(value) do
buffer =
<<buffer::bitstring, <<1 + @little_endian_64_size::little-integer-size(32)>>,
byte_to_binary(@vt_integer)::bitstring, (<<value::little-integer-size(64)>>)>>
cell_checksum = cell_checksum |> CRC.crc_int8(@vt_integer) |> CRC.crc_int64(value)
{buffer, cell_checksum}
end
defp do_process_primary_key_value({buffer, cell_checksum}, value) when is_binary(value) do
prefix_length = @little_endian_32_size + 1
value_size = byte_size(value)
buffer =
<<buffer::bitstring, <<prefix_length + value_size::little-integer-size(32)>>,
byte_to_binary(@vt_string)::bitstring, <<value_size::little-integer-size(32)>>,
value::bitstring>>
cell_checksum =
cell_checksum
|> CRC.crc_int8(@vt_string)
|> CRC.crc_int32(value_size)
|> CRC.crc_string(value)
{buffer, cell_checksum}
end
defp do_process_primary_key_value({buffer, cell_checksum}, value) when is_bitstring(value) do
prefix_length = @little_endian_32_size + 1
value_size = byte_size(value)
buffer =
<<buffer::bitstring, <<prefix_length + value_size::little-integer-size(32)>>,
byte_to_binary(@vt_blob)::bitstring, <<value_size::little-integer-size(32)>>,
value::bitstring>>
cell_checksum =
cell_checksum
|> CRC.crc_int8(@vt_blob)
|> CRC.crc_int32(value_size)
|> CRC.crc_string(value)
{buffer, cell_checksum}
end
defp do_process_primary_key_value(_, value) do
raise ExAliyunOts.RuntimeError, "Unsupported primary key for value: #{inspect(value)}"
end
defp process_column({column_name, column_value}, {buffer, row_checksum}) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_cell)::bitstring>>
{buffer, cell_checksum} =
{buffer, 0}
|> process_cell_name(column_name)
|> process_column_value_with_checksum(column_value)
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_checksum)::bitstring,
byte_to_binary(cell_checksum)::bitstring>>
row_checksum = CRC.crc_int8(row_checksum, cell_checksum)
{buffer, row_checksum}
end
defp process_column({column_name, column_value, timestamp}, {buffer, row_checksum})
when timestamp != nil do
buffer = <<buffer::bitstring, byte_to_binary(@tag_cell)::bitstring>>
{buffer, cell_checksum} =
{buffer, 0}
|> process_cell_name(column_name)
|> process_column_value_with_checksum(column_value)
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_timestamp)::bitstring,
(<<timestamp::little-integer-size(64)>>)>>
cell_checksum = CRC.crc_int64(cell_checksum, timestamp)
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_checksum)::bitstring,
byte_to_binary(cell_checksum)::bitstring>>
row_checksum = CRC.crc_int8(row_checksum, cell_checksum)
{buffer, row_checksum}
end
defp process_column(column, _) do
raise ExAliyunOts.RuntimeError, "Invalid column: #{inspect(column)} is not a tuple"
end
defp process_column_value_with_checksum({buffer, cell_checksum}, nil) do
{buffer, cell_checksum}
end
defp process_column_value_with_checksum({buffer, cell_checksum}, true) do
cell_checksum =
cell_checksum
|> CRC.crc_int8(@vt_boolean)
|> CRC.crc_int8(1)
{
boolean_value_to_buffer(buffer, true),
cell_checksum
}
end
defp process_column_value_with_checksum({buffer, cell_checksum}, false) do
cell_checksum =
cell_checksum
|> CRC.crc_int8(@vt_boolean)
|> CRC.crc_int8(0)
{
boolean_value_to_buffer(buffer, false),
cell_checksum
}
end
defp process_column_value_with_checksum({buffer, cell_checksum}, value)
when is_integer(value) do
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_value)::bitstring,
<<1 + @little_endian_64_size::little-integer-size(32)>>,
byte_to_binary(@vt_integer)::bitstring, (<<value::little-integer-size(64)>>)>>
cell_checksum = cell_checksum |> CRC.crc_int8(@vt_integer) |> CRC.crc_int64(value)
{buffer, cell_checksum}
end
defp process_column_value_with_checksum({buffer, cell_checksum}, value) when is_float(value) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_cell_value)::bitstring>>
value_to_binary = <<value::little-float>>
<<long::unsigned-little-integer-64>> = value_to_binary
buffer =
<<buffer::bitstring, <<1 + @little_endian_64_size::little-integer-size(32)>>,
byte_to_binary(@vt_double)::bitstring, value_to_binary::bitstring>>
cell_checksum = cell_checksum |> CRC.crc_int8(@vt_double) |> CRC.crc_int64(long)
{buffer, cell_checksum}
end
defp process_column_value_with_checksum({buffer, cell_checksum}, value) when is_binary(value) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_cell_value)::bitstring>>
prefix_length = @little_endian_32_size + 1
value_size = byte_size(value)
buffer =
<<buffer::bitstring, <<prefix_length + value_size::little-integer-size(32)>>,
byte_to_binary(@vt_string)::bitstring, <<value_size::little-integer-size(32)>>,
value::bitstring>>
cell_checksum =
cell_checksum
|> CRC.crc_int8(@vt_string)
|> CRC.crc_int32(value_size)
|> CRC.crc_string(value)
{buffer, cell_checksum}
end
defp process_column_value_with_checksum({buffer, cell_checksum}, value)
when is_bitstring(value) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_cell_value)::bitstring>>
prefix_length = @little_endian_32_size + 1
value_size = byte_size(value)
buffer =
<<buffer::bitstring, <<prefix_length + value_size::little-integer-size(32)>>,
byte_to_binary(@vt_blob)::bitstring, <<value_size::little-integer-size(32)>>,
value::bitstring>>
cell_checksum =
cell_checksum
|> CRC.crc_int8(@vt_blob)
|> CRC.crc_int32(value_size)
|> CRC.crc_string(value)
{buffer, cell_checksum}
end
defp process_column_value_with_checksum({_buffer, _cell_checksum}, value) do
raise ExAliyunOts.RuntimeError, "Unsupported column for value: #{inspect(value)}"
end
defp boolean_value_to_buffer(buffer, value) when is_boolean(value) do
<<buffer::bitstring, byte_to_binary(@tag_cell_value)::bitstring,
<<2::little-integer-size(32)>>, byte_to_binary(@vt_boolean)::bitstring,
boolean_to_integer(value)::bitstring>>
end
defp process_update_column({buffer, row_checksum}, update_type, column)
when is_bitstring(column) do
do_process_update_column({buffer, row_checksum}, update_type, column, {nil, nil})
end
defp process_update_column({buffer, row_checksum}, update_type, {column_name, column_value}) do
do_process_update_column(
{buffer, row_checksum},
update_type,
column_name,
{column_value, nil}
)
end
defp process_update_column(
{buffer, row_checksum},
update_type,
{column_name, column_value, timestamp}
) do
do_process_update_column(
{buffer, row_checksum},
update_type,
column_name,
{column_value, timestamp}
)
end
defp process_update_column({_buffer, _row_checksum}, _update_type, column) do
raise ExAliyunOts.RuntimeError,
"Unsupported column when update grouping columns: #{inspect(column)}"
end
defp do_process_update_column(
{buffer, row_checksum},
OperationType.delete(),
column_name,
{column_value, timestamp}
) do
{buffer, cell_checksum} = process_update_column_with_cell(buffer, column_name, column_value)
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_type)::bitstring,
byte_to_binary(@op_delete_one_version)::bitstring>>
{buffer, cell_checksum} =
process_update_column_with_timestamp(buffer, cell_checksum, timestamp)
cell_checksum = CRC.crc_int8(cell_checksum, @op_delete_one_version)
process_update_column_with_row_checksum(buffer, cell_checksum, row_checksum)
end
defp do_process_update_column(
{buffer, row_checksum},
OperationType.delete_all(),
column_name,
{column_value, timestamp}
) do
{buffer, cell_checksum} = process_update_column_with_cell(buffer, column_name, column_value)
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_type)::bitstring,
byte_to_binary(@op_delete_all_version)::bitstring>>
{buffer, cell_checksum} =
process_update_column_with_timestamp(buffer, cell_checksum, timestamp)
cell_checksum = CRC.crc_int8(cell_checksum, @op_delete_all_version)
process_update_column_with_row_checksum(buffer, cell_checksum, row_checksum)
end
defp do_process_update_column(
{buffer, row_checksum},
OperationType.increment(),
column_name,
{column_value, timestamp}
) do
{buffer, cell_checksum} = process_update_column_with_cell(buffer, column_name, column_value)
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_type)::bitstring,
byte_to_binary(@op_increment)::bitstring>>
{buffer, cell_checksum} =
process_update_column_with_timestamp(buffer, cell_checksum, timestamp)
cell_checksum = CRC.crc_int8(cell_checksum, @op_increment)
process_update_column_with_row_checksum(buffer, cell_checksum, row_checksum)
end
defp do_process_update_column(
{buffer, row_checksum},
_update_type,
column_name,
{column_value, timestamp}
) do
{buffer, cell_checksum} = process_update_column_with_cell(buffer, column_name, column_value)
{buffer, cell_checksum} =
process_update_column_with_timestamp(buffer, cell_checksum, timestamp)
process_update_column_with_row_checksum(buffer, cell_checksum, row_checksum)
end
defp process_update_column_with_cell(buffer, column_name, column_value) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_cell)::bitstring>>
{buffer, 0}
|> process_cell_name(column_name)
|> process_column_value_with_checksum(column_value)
end
defp process_update_column_with_timestamp(buffer, cell_checksum, nil) do
{buffer, cell_checksum}
end
defp process_update_column_with_timestamp(buffer, cell_checksum, timestamp) do
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_timestamp)::bitstring,
(<<timestamp::little-integer-size(64)>>)>>
cell_checksum = CRC.crc_int64(cell_checksum, timestamp)
{buffer, cell_checksum}
end
defp process_update_column_with_row_checksum(buffer, cell_checksum, row_checksum) do
buffer =
<<buffer::bitstring, byte_to_binary(@tag_cell_checksum)::bitstring,
byte_to_binary(cell_checksum)::bitstring>>
row_checksum = CRC.crc_int8(row_checksum, cell_checksum)
{buffer, row_checksum}
end
defp process_delete_marker({buffer, row_checksum}) do
buffer = <<buffer::bitstring, byte_to_binary(@tag_delete_row_marker)::bitstring>>
row_checksum = CRC.crc_int8(row_checksum, 1)
{buffer, row_checksum}
end
defp boolean_to_integer(true), do: <<1>>
defp boolean_to_integer(_), do: <<0>>
defp integer_to_boolean(1), do: true
defp integer_to_boolean(_), do: false
defp process_row_checksum(buffer, row_checksum) do
<<buffer::bitstring, byte_to_binary(@tag_row_checksum)::bitstring,
byte_to_binary(row_checksum)::bitstring>>
end
defp byte_to_binary(byte) do
<<byte::integer>>
end
# deserialize processing
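# Note on the wire format (an illustrative summary inferred from the
# serialization code above): a row starts with a 32-bit little-endian header,
# followed by tagged sections - primary-key cells and attribute cells, each
# cell ending with a cell checksum - and ends with a row-checksum tag.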
defp deserialize_filter_header(<<@header::little-integer-size(32), rest_row::binary>>) do
rest_row
end
defp deserialize_filter_header(invalid_row) do
raise ExAliyunOts.RuntimeError, "Invalid row to deserialize, #{inspect(invalid_row)}"
end
defp deserialize_rows_data(rows) do
result =
rows
|> :binary.split(@pk_tag_marker, [:global])
|> do_slice_rows()
debug(fn ->
[
"\nchecked_rows result:\s",
inspect(result, limit: :infinity)
]
end)
Enum.reverse(result.rows)
end
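# Splitting on @pk_tag_marker can cut a single row apart when those marker
# bytes happen to occur inside a value, so do_slice_row_binary/3 buffers the
# partial chunk under :processing and re-joins it until a chunk ends with the
# row-checksum tag.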
defp do_slice_rows(bytes_rows_list) do
do_slice_rows(bytes_rows_list, %{rows: [], processing: <<>>})
end
defp do_slice_rows([], prepared) do
prepared
end
defp do_slice_rows([<<>> | rest], prepared) do
do_slice_rows(rest, prepared)
end
defp do_slice_rows([row | rest], prepared) do
second_last_byte = binary_part(row, byte_size(row) - 2, 1)
prepared = do_slice_row_binary(second_last_byte, row, prepared)
do_slice_rows(rest, prepared)
end
defp do_slice_row_binary(
<<@tag_row_checksum::integer>>,
row,
%{processing: <<>>, rows: rows} = result
) do
row = deserialize_raw_rows(row)
Map.put(result, :rows, [row | rows])
end
defp do_slice_row_binary(
<<@tag_row_checksum::integer>>,
row,
%{processing: processing, rows: rows} = result
) do
row =
deserialize_raw_rows(<<processing::bitstring, @pk_tag_marker::bitstring, row::bitstring>>)
result
|> Map.put(:rows, [row | rows])
|> Map.put(:processing, <<>>)
end
defp do_slice_row_binary(_, row, %{processing: <<>>} = result) do
Map.put(result, :processing, row)
end
defp do_slice_row_binary(_, row, %{processing: processing} = result) do
Map.put(
result,
:processing,
<<processing::bitstring, @pk_tag_marker::bitstring, row::bitstring>>
)
end
defp deserialize_raw_rows(row_values) do
{primary_keys, attribute_columns} =
deserialize_row_data(<<@pk_tag_marker, row_values::bitstring>>)
debug(fn ->
[
"\nprimary_keys:\s",
inspect(primary_keys),
?\n,
"attribute_columns:\s",
inspect(attribute_columns)
]
end)
{primary_keys, attribute_columns}
end
defp deserialize_row_data(values) do
row_data_parts = :binary.split(values, @row_data_marker, [:global])
matched_index = Enum.find_index(row_data_parts, &match_tag_cell_checksum?/1)
debug(fn ->
[
"\ndeserialize_row_data:\n",
inspect(values, limit: :infinity),
?\n,
"matched_index:\n",
inspect(matched_index),
?\n,
"row_data_parts:\n"
| inspect(row_data_parts, limit: :infinity)
]
end)
deserialize_row_data_with_match_index(matched_index, values, row_data_parts)
end
defp deserialize_row_data_with_match_index(
nil,
<<(<<@tag_row_pk::integer>>), primary_keys_binary_rest::binary>>,
_
) do
{deserialize_process_primary_keys(primary_keys_binary_rest), nil}
end
defp deserialize_row_data_with_match_index(
nil,
<<(<<@tag_row_data::integer>>), attribute_columns_binary_rest::binary>>,
_
) do
{nil, deserialize_process_columns(attribute_columns_binary_rest)}
end
defp deserialize_row_data_with_match_index(nil, values, _) do
debug(fn ->
[
"\n** unexcepted row data when deserialize_row_data:\s",
inspect(values, limit: :infinity)
]
end)
nil
end
defp deserialize_row_data_with_match_index(matched_index, _values, row_data_parts) do
{pks, attribute_columns} = Enum.split(row_data_parts, matched_index + 1)
primary_keys_binary = Enum.join(pks, @row_data_marker)
attribute_columns_binary =
<<@tag_cell::integer, @tag_cell_name::integer,
Enum.join(attribute_columns, @row_data_marker)::bitstring>>
primary_keys_binary =
case primary_keys_binary do
<<(<<@tag_row_pk::integer>>), primary_keys_binary_rest::binary>> ->
primary_keys_binary_rest
_ ->
raise ExAliyunOts.RuntimeError,
"Unexcepted row data when processing primary_keys: #{
inspect(primary_keys_binary, limit: :infinity)
}"
end
{
deserialize_process_primary_keys(primary_keys_binary),
deserialize_process_columns(attribute_columns_binary)
}
end
defp deserialize_process_primary_keys(primary_keys_binary) do
primary_keys_binary |> do_deserialize_process_primary_keys([]) |> Enum.reverse()
end
defp do_deserialize_process_primary_keys("", result) do
result
end
defp do_deserialize_process_primary_keys(
<<(<<(<<@tag_cell::integer>>), (<<@tag_cell_name::integer>>)>>),
primary_key_size::little-integer-size(32), rest::binary>> = primary_keys,
result
) do
debug(fn ->
[
"\n** deserializing primary_keys, prepared result:\n",
inspect(result),
?\n,
"pk data:\s"
| inspect(primary_keys, limit: :infinity)
]
end)
primary_key_name = binary_part(rest, 0, primary_key_size)
rest_primary_key_value_and_other_pk =
binary_part(rest, primary_key_size, byte_size(rest) - primary_key_size)
debug(fn ->
[
"\nget primary_key_name:\s",
inspect(primary_key_name),
?\n,
"rest_primary_key_value_and_other_pk:\s"
| inspect(rest_primary_key_value_and_other_pk, limit: :infinity)
]
end)
case calculate_tag_cell_index(rest_primary_key_value_and_other_pk) do
next_cell_index when is_integer(next_cell_index) ->
value_binary = binary_part(rest_primary_key_value_and_other_pk, 0, next_cell_index)
primary_key_value = deserialize_process_primary_key_value(value_binary)
result = [{primary_key_name, primary_key_value} | result]
other_pk =
binary_part(
rest_primary_key_value_and_other_pk,
next_cell_index,
byte_size(rest_primary_key_value_and_other_pk) - next_cell_index
)
debug(fn ->
[
"\nfind next_cell_index:\s",
next_cell_index,
?\n,
"get primary_key_value:\s",
inspect(primary_key_value),
?\n,
"rest to be deserialized data:\s"
| inspect(other_pk, limit: :infinity)
]
end)
do_deserialize_process_primary_keys(other_pk, result)
nil ->
primary_key_value =
deserialize_process_primary_key_value(rest_primary_key_value_and_other_pk)
debug(fn ->
[
"\nno more cells to deserialized, primary_key_value:\n",
inspect(primary_key_value)
]
end)
[{primary_key_name, primary_key_value} | result]
end
end
defp do_deserialize_process_primary_keys(primary_keys, result) do
debug(fn ->
[
"\n** deserializing primary_keys, prepared result:\n",
inspect(result),
?\n,
"pk data:\s"
| inspect(primary_keys, limit: :infinity)
]
end)
result
end
defp deserialize_process_primary_key_value(
<<(<<@tag_cell_value::integer>>), <<@sum_endian_64_size::little-integer-size(32)>>,
<<@vt_integer::integer>>, <<value::signed-little-integer-size(64)>>,
(<<_rest::binary>>)>>
) do
value
end
defp deserialize_process_primary_key_value(
<<(<<@tag_cell_value::integer>>), <<@sum_endian_64_size::little-integer-size(32)>>,
<<@vt_integer::integer>>, (<<value::signed-little-integer-size(64)>>)>>
) do
value
end
defp deserialize_process_primary_key_value(
<<(<<@tag_cell_value::integer>>), <<@sum_endian_64_size::little-integer-size(32)>>,
<<@vt_integer::integer>>, (<<rest::binary>>)>>
) do
raise ExAliyunOts.RuntimeError, "Unexcepted integer value as primary value: #{inspect(rest)}"
end
defp deserialize_process_primary_key_value(
<<(<<@tag_cell_value::integer>>), <<total_size::little-integer-size(32)>>,
<<@vt_string::integer>>, <<_value_size::little-integer-size(32)>>,
(<<value::binary>>)>>
) do
value_size = total_size - @little_endian_32_size - 1
binary_part(value, 0, value_size)
end
defp deserialize_process_primary_key_value(
<<(<<@tag_cell_value::integer>>), <<total_size::little-integer-size(32)>>,
<<@vt_blob::integer>>, <<_value_size::little-integer-size(32)>>, (<<value::binary>>)>>
) do
value_size = total_size - @little_endian_32_size - 1
binary_part(value, 0, value_size)
end
defp deserialize_process_primary_key_value(
<<(<<@tag_cell_value::integer>>), <<_total_size::little-integer-size(32)>>,
(<<rest::binary>>)>>
) do
raise ExAliyunOts.RuntimeError, "Unexcepted string value as primary value: #{inspect(rest)}"
end
defp deserialize_process_primary_key_value(
<<(<<@tag_cell_value::integer>>), (<<rest::binary>>)>>
) do
raise ExAliyunOts.RuntimeError, "Unexcepted value as primary value: #{inspect(rest)}"
end
defp deserialize_process_columns(attribute_columns) do
debug(fn ->
[
"\n>>>> attribute_columns <<<<\n",
inspect(attribute_columns, limit: :infinity)
]
end)
attribute_columns |> do_deserialize_process_columns([]) |> Enum.reverse()
end
defp do_deserialize_process_columns(
<<(<<(<<@tag_cell::integer>>), (<<@tag_cell_name::integer>>)>>),
column_name_size::little-integer-size(32), rest::binary>>,
result
) do
column_name = binary_part(rest, 0, column_name_size)
rest_value_and_other_columns =
binary_part(rest, column_name_size, byte_size(rest) - column_name_size)
case calculate_tag_cell_index(rest_value_and_other_columns) do
next_cell_index when is_integer(next_cell_index) ->
value_binary = binary_part(rest_value_and_other_columns, 0, next_cell_index)
debug(fn ->
[
"\ncolumn_name:\s",
inspect(column_name),
?\n,
"value_binary:\s",
inspect(value_binary, limit: :infinity),
"\nfind next_cell_index:\s",
next_cell_index
]
end)
{column_value, timestamp} = deserialize_process_column_value_with_checksum(value_binary)
result = [{column_name, column_value, timestamp} | result]
other_attribute_columns =
binary_part(
rest_value_and_other_columns,
next_cell_index,
byte_size(rest_value_and_other_columns) - next_cell_index
)
do_deserialize_process_columns(other_attribute_columns, result)
nil ->
{column_value, timestamp} =
deserialize_process_column_value_with_checksum(rest_value_and_other_columns)
debug(fn ->
[
"\ncolumn_name:\s",
inspect(column_name),
"\ncolumn_value:\s",
inspect(column_value),
"\n=== not find next_cell_index ===\n"
]
end)
[{column_name, column_value, timestamp} | result]
end
end
defp do_deserialize_process_columns(_, result) do
result
end
defp deserialize_process_column_value_timestamp(
<<(<<@tag_cell_timestamp::integer>>), (<<timestamp::little-integer-size(64)>>)>>
) do
timestamp
end
defp deserialize_process_column_value_timestamp(
<<(<<@tag_cell_timestamp::integer>>), <<timestamp::little-integer-size(64)>>,
_rest::binary>>
) do
timestamp
end
defp deserialize_process_column_value_timestamp(_) do
nil
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<2::little-integer-size(32)>>,
<<@vt_boolean::integer>>, <<value::integer>>, (<<timestamp_rest::binary>>)>>
) do
value_boolean = integer_to_boolean(value)
timestamp = deserialize_process_column_value_timestamp(timestamp_rest)
{value_boolean, timestamp}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<2::little-integer-size(32)>>,
<<@vt_boolean::integer>>, (<<value::integer>>)>>
) do
value_boolean = integer_to_boolean(value)
{value_boolean, nil}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<2::little-integer-size(32)>>,
<<@vt_boolean::integer>>, (<<rest::binary>>)>>
) do
raise ExAliyunOts.RuntimeError, "Invalid boolean value as: #{inspect(rest)}"
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<@sum_endian_64_size::little-integer-size(32)>>,
<<@vt_integer::integer>>, <<value::signed-little-integer-size(64)>>,
(<<timestamp_rest::binary>>)>>
) do
timestamp = deserialize_process_column_value_timestamp(timestamp_rest)
{value, timestamp}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<@sum_endian_64_size::little-integer-size(32)>>,
<<@vt_integer::integer>>, (<<value::signed-little-integer-size(64)>>)>>
) do
{value, nil}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<@sum_endian_64_size::little-integer-size(32)>>,
<<@vt_integer::integer>>, (<<rest::binary>>)>>
) do
raise ExAliyunOts.RuntimeError, "Invalid integer value as: #{inspect(rest)}"
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<@sum_endian_64_size::little-integer-size(32)>>,
<<@vt_double::integer>>, <<value::signed-little-float-size(64)>>,
(<<timestamp_rest::binary>>)>>
) do
timestamp = deserialize_process_column_value_timestamp(timestamp_rest)
{value, timestamp}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<@sum_endian_64_size::little-integer-size(32)>>,
<<@vt_double::integer>>, (<<value::signed-little-float-size(64)>>)>>
) do
{value, nil}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<@sum_endian_64_size::little-integer-size(32)>>,
<<@vt_double::integer>>, (<<rest::binary>>)>>
) do
raise ExAliyunOts.RuntimeError, "Invalid float value as: #{inspect(rest)}"
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<_total_size::little-integer-size(32)>>,
<<@vt_string::integer>>,
<<value_size::little-integer-size(32), value::binary-size(value_size)>>,
(<<timestamp_rest::binary>>)>>
) do
timestamp = deserialize_process_column_value_timestamp(timestamp_rest)
{value, timestamp}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<_total_size::little-integer-size(32)>>,
<<@vt_string::integer>>,
(<<value_size::little-integer-size(32), value::binary-size(value_size)>>)>>
) do
{value, nil}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<_total_size::little-integer-size(32)>>,
<<@vt_string::integer>>, rest::binary>>
) do
raise ExAliyunOts.RuntimeError, "Unexcepted string value as: #{inspect(rest)}"
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<_total_size::little-integer-size(32)>>,
<<@vt_blob::integer>>,
<<value_size::little-integer-size(32), value::binary-size(value_size)>>,
(<<timestamp_rest::binary>>)>>
) do
timestamp = deserialize_process_column_value_timestamp(timestamp_rest)
{value, timestamp}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<_total_size::little-integer-size(32)>>,
<<@vt_blob::integer>>,
(<<value_size::little-integer-size(32), value::binary-size(value_size)>>)>>
) do
{value, nil}
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), <<_total_size::little-integer-size(32)>>,
<<@vt_blob::integer>>, rest::binary>>
) do
raise ExAliyunOts.RuntimeError, "Unexcepted string value as: #{inspect(rest)}"
end
defp deserialize_process_column_value_with_checksum(
<<(<<@tag_cell_value::integer>>), rest::binary>>
) do
raise ExAliyunOts.RuntimeError, "Unexcepted value as: #{inspect(rest)}"
end
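# Finds the byte offset (within `values`) at which the next
# `<<@tag_cell, @tag_cell_name>>` marker begins, or nil when there is none.
# The `index * 2` term re-adds the two marker bytes that :binary.split/3
# removed between each pair of parts.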
def calculate_tag_cell_index(values) do
splited =
:binary.split(values, <<(<<@tag_cell::integer>>), (<<@tag_cell_name::integer>>)>>, [:global])
index = Enum.find_index(splited, &match_tag_cell_checksum?/1)
debug(fn ->
[
"\ncalculate_tag_cell_index:\s",
inspect(values, limit: :infinity),
"\nsplited:\s",
inspect(splited, limit: :infinity),
"\nindex:\s",
inspect(index)
]
end)
if index == nil do
nil
else
calculated_index =
splited
|> Enum.slice(0..index)
|> Enum.reduce(0, fn item, acc ->
byte_size(item) + acc
end)
calculated_index + index * 2
end
end
defp match_tag_cell_checksum?(<<>>), do: false
defp match_tag_cell_checksum?(<<_>>), do: false
defp match_tag_cell_checksum?(<<@tag_cell_checksum::integer, _>>), do: true
defp match_tag_cell_checksum?(binary) do
binary_part(binary, byte_size(binary) - 2, 1) == <<@tag_cell_checksum::integer>>
end
end
| lib/ex_aliyun_ots/plainbuffer/plainbuffer.ex | 0.550849 | 0.417806 | plainbuffer.ex | starcoder |
defmodule Univrse.Signature do
@moduledoc """
A Univrse Signature is a structure attached to an `t:Univrse.Envelope.t/0`,
containing a set of headers and a cryptographic signature (or MAC).
An Envelope may contain one or multiple Signature structures.
The Signature structure headers must contain an `alg` header and may contain a
`kid` header, to help other parties understand what key and algorithm was used
to generate the signature or MAC. Once understood, the observing party can
verify the signature contained in the structure.
"""
alias Univrse.{Alg, Envelope, Header, Key}
import Univrse.Util, only: [tag_binary: 1]
defstruct header: %Header{},
signature: nil
@typedoc "Signature struct"
@type t :: %__MODULE__{
header: Header.t,
signature: binary
}
@doc """
Signs the Envelope payload using the given key or array of keys.
A map of headers must be given including at least the signature `alg` value.
Where a list of keys is given, it is possible to specify different algorithms
for each key by giving a list of tuple pairs. The first element of each pair
is the key and the second is a map of headers.
## Examples
Creates a signature using a single key:
Signature.sign(env, oct_key, %{"alg" => "HS256"})
Creates multiple signatures using the same algorithm:
Signature.sign(env, [oct_key, app_key], %{"alg" => "HS256"})
Creates multiple signatures using different algorithms:
Signature.sign(env, [
oct_key,
{ec_key_1, %{"alg" => "ES256K"}},
{ec_key_2, %{"alg" => "ES256K"}}
], %{"alg" => "HS256"})
"""
@spec sign(Envelope.t, Key.t | list(Key.t) | list({Key.t, map}), map) :: {:ok, Envelope.t} | {:error, any}
def sign(env, key, headers \\ %{})
def sign(%Envelope{} = env, keys, headers) when is_list(keys) do
Enum.reduce_while(keys, env, fn key, env ->
{key, headers} = merge_key_headers(key, headers)
case sign(env, key, headers) do
{:ok, env} ->
{:cont, env}
{:error, error} ->
{:halt, {:error, error}}
end
end)
|> case do
%Envelope{} = env ->
{:ok, env}
{:error, error} ->
{:error, error}
end
end
def sign(%Envelope{header: header, payload: payload} = env, %Key{} = key, headers)
when is_map(headers)
do
alg = Map.merge(header.headers, headers) |> Map.get("alg")
payload
|> Envelope.wrap(header)
|> Envelope.encode()
|> Alg.sign(alg, key)
|> case do
{:ok, sig} ->
signature = wrap(sig, headers)
{:ok, Envelope.push(env, signature)}
{:error, error} ->
{:error, error}
end
end
@doc """
Verifies the Envelope signature(s) using the given Key or list of Keys.
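## Examples
Verifies a single signature (illustrative; assumes `env` was signed with `key`):
Signature.verify(env, key)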
"""
@spec verify(Envelope.t, Key.t | list(Key.t)) :: boolean | {:error, String.t}
def verify(%Envelope{header: header, payload: payload, signature: signatures}, keys)
when is_list(signatures) and is_list(keys) and length(signatures) == length(keys)
do
n = length(keys) - 1
Enum.reduce_while 0..n, true, fn i, _result ->
signature = Enum.at(signatures, i)
key = Enum.at(keys, i)
payload
|> Envelope.wrap(header)
|> Envelope.push(signature)
|> verify(key)
|> case do
true -> {:cont, true}
result -> {:halt, result}
end
end
end
def verify(%Envelope{header: h1, payload: payload, signature: %__MODULE__{header: h2, signature: sig}}, %Key{} = key) do
alg = Map.merge(h1.headers, h2.headers) |> Map.get("alg")
payload
|> Envelope.wrap(h1)
|> Envelope.encode()
|> Alg.verify(sig, alg, key)
end
@doc """
Wraps the given signature and headers in a new Signature struct.
"""
@spec wrap(binary, map | Header.t) :: t
def wrap(sig, headers \\ %{})
def wrap(sig, %Header{} = header),
do: %__MODULE__{header: header, signature: sig}
def wrap(sig, %{} = headers),
do: %__MODULE__{header: Header.wrap(headers), signature: sig}
# Merges key headers with signature headers
defp merge_key_headers({key, key_headers}, headers),
do: {key, Map.merge(headers, key_headers)}
defp merge_key_headers(key, headers), do: {key, headers}
defimpl CBOR.Encoder do
alias Univrse.Signature
def encode_into(%Signature{header: header, signature: signature}, acc) do
CBOR.Encoder.encode_into([header, tag_binary(signature)], acc)
end
end
end
| lib/univrse/signature.ex | 0.877339 | 0.682137 | signature.ex | starcoder |
defmodule Day08 do
def part1(input) do
parse(input)
|> Enum.flat_map(&(elem(&1, 1)))
|> Enum.filter(fn output ->
byte_size(output) in [2, 3, 4, 7]
end)
|> Enum.count
end
def part2(input) do
input = parse(input)
mappings = Enum.map(?a..?g, fn wire ->
{to_string([wire]), Enum.map(?a..?g, &(to_string([&1])))}
end)
|> Map.new
{segment_map, digit_map} = wirings()
Enum.map(input, fn display ->
solve(display, mappings, segment_map, digit_map)
end)
|> Enum.sum
end
defp solve({digits, output}, mappings, segment_map, digit_map) do
translation =
Enum.reduce(digits, mappings, fn wires, acc ->
solve(wires, acc, segment_map)
end)
|> Enum.map(fn {from, [to]} -> {from, to} end)
|> Map.new
Enum.map(output, &(translate_output(&1, translation, digit_map)))
|> Enum.reduce(0, &(&2 * 10 + &1))
end
defp solve(wires, mappings, segment_map) do
possibilities = Map.fetch!(segment_map, byte_size(wires))
possible = possibilities
|> Enum.concat
|> Enum.sort
|> Enum.dedup
wires = String.codepoints(wires)
all = ~w(a b c d e f g)
other_wires = all -- wires
not_possible = Enum.reduce(possibilities, &:ordsets.intersection/2)
mappings
|> eliminate(wires, &(:ordsets.intersection(&1, possible)))
|> eliminate(other_wires, &(&1 -- not_possible))
end
defp eliminate(mappings, wires, f) do
Enum.reduce(wires, mappings, fn wire, acc ->
mappings = Map.update!(acc, wire, f)
case mappings do
%{^wire => [target]} ->
all = ~w(a b c d e f g)
other_wires = all -- [wire]
Enum.reduce(other_wires, mappings, fn wire, acc ->
Map.update!(acc, wire, &(&1 -- [target]))
end)
%{} ->
mappings
end
end)
end
defp translate_output(wires, translation, digit_map) do
new_wires = String.codepoints(wires)
|> Enum.map(fn wire -> Map.fetch!(translation, wire) end)
|> Enum.sort
Map.fetch!(digit_map, new_wires)
end
defp wirings() do
ws = [{2, [{1, ~w(c f)}]},
{3, [{7, ~w(a c f)}]},
{4, [{4, ~w(b c d f)}]},
{5, [{2, ~w(a c d e g)},
{3, ~w(a c d f g)},
{5, ~w(a b d f g)}]},
{6, [{0, ~w(a b c e f g)},
{6, ~w(a b d e f g)},
{9, ~w(a b c d f g)}]},
{7, [{8, ~w(a b c d e f g)}]}]
segment_map = Enum.map(ws, fn {n, list} ->
{n, Enum.map(list, &(elem(&1, 1)))}
end)
|> Map.new
digit_map = Enum.flat_map(ws, &(elem(&1, 1)))
|> Enum.map(fn {digit, segments} -> {segments, digit} end)
|> Map.new
{segment_map, digit_map}
end
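# Input format (illustrative line): ten unique signal patterns, a "|"
# separator, then the four output digits, e.g.
# "be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe"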
defp parse(input) do
Enum.map(input, fn line ->
[wires, output] = String.split(line, " | ")
{String.split(wires), String.split(output)}
end)
end
end
| day08/lib/day08.ex | 0.530966 | 0.558146 | day08.ex | starcoder |
defmodule ExAws.Kinesis.Lazy do
alias ExAws.Kinesis
@moduledoc """
Kinesis has a few functions that require paging.
These functions operate just like those in Kinesis, except that they return
streams instead of lists; the streams can be iterated through and will
automatically retrieve additional pages as necessary.
"""
@doc """
Returns the normally shaped AWS response, except the Shards key is now a stream
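## Example
Illustrative stream name; shards are fetched page by page as you iterate:
{:ok, %{"StreamDescription" => %{"Shards" => shards}}} = ExAws.Kinesis.Lazy.describe_stream("my-stream")
Enum.take(shards, 10)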
"""
def describe_stream(stream, opts \\ %{}) do
request_fun = fn
{:initial, initial} -> initial
fun_opts -> Kinesis.describe_stream(stream, Map.merge(opts, fun_opts))
end
Kinesis.describe_stream(stream, opts)
|> do_describe_stream(request_fun)
end
defp do_describe_stream({:error, results}, _), do: {:error, results}
defp do_describe_stream({:ok, results}, request_fun) do
stream = build_shard_stream({:ok, results}, request_fun)
{:ok, put_in(results["StreamDescription"], %{"Shards" => stream})}
end
defp build_shard_stream(initial, request_fun) do
Stream.resource(fn -> {request_fun, {:initial, initial}} end, fn
:quit -> {:halt, nil}
{fun, args} -> case fun.(args) do
{:error, results} -> {[{:error, results}], :quit}
{:ok, %{"StreamDescription" => %{"Shards" => shards, "HasMoreShards" => true}}} ->
opts = %{ExclusiveStartShardId: shards |> List.last |> Map.get("ShardId")}
{shards, {fun, opts}}
{:ok, %{"StreamDescription" => %{"Shards" => shards}}} ->
{shards, :quit}
end
end, &pass/1)
end
defp pass(x), do: x
@doc """
Returns a stream of records read via the given shard iterator.
NOTE: This stream is basically INFINITE, in that it runs
until the shard it is reading from closes, which may be never.
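## Example
Illustrative; cap the potentially infinite stream before materializing it:
shard_iterator |> ExAws.Kinesis.Lazy.get_records() |> Stream.take(100) |> Enum.to_list()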
"""
def get_records(shard_iterator, opts \\ %{}, fun \\ &pass/1) do
sleep_time = Application.get_env(:ex_aws, :kinesis_sleep_between_req_time) || 200
request_fun = fn(fun_opts) ->
:timer.sleep(sleep_time)
req_opts = Map.merge(opts, fun_opts)
Kinesis.get_records(shard_iterator, req_opts)
end
build_record_stream(request_fun, fun)
end
defp build_record_stream(request_fun, iteration_fun) do
Stream.resource(fn -> {request_fun, %{}} end, fn
:quit -> {:halt, nil}
{fun, args} -> case fun.(args) do
{:error, results} -> {iteration_fun.([{:error, results}]), :quit}
{:ok, %{"Records" => records, "NextShardIterator" => shard_iter}} ->
{
records |> iteration_fun.(),
{fun, %{ShardIterator: shard_iter}}
}
{:ok, %{"Records" => records}} ->
{iteration_fun.(records), :quit}
end
end, &pass/1)
end
end
| lib/ex_aws/kinesis/lazy.ex | 0.717804 | 0.547343 | lazy.ex | starcoder |
defmodule Minecraft.Connection do
@moduledoc """
Maintains the state of a client's connection, and provides utilities for sending and receiving
data. It is designed to be chained in a fashion similar to [`Plug`](https://hexdocs.pm/plug/).
"""
alias Minecraft.Crypto
alias Minecraft.Packet
require Logger
@has_joined_url "https://sessionserver.mojang.com/session/minecraft/hasJoined?"
@typedoc """
The possible states a client/server can be in.
"""
@type state :: :handshake | :status | :login | :play
@typedoc """
Allowed ranch transport types.
"""
@type transport :: :ranch_tcp
@type t :: %__MODULE__{
protocol_handler: pid,
assigns: %{atom => any} | nil,
settings: %{atom => any} | nil,
current_state: state,
socket: port | nil,
transport: transport | nil,
client_ip: String.t(),
data: binary | nil,
error: any,
protocol_version: integer | nil,
secret: binary | nil,
join: boolean,
state_machine: pid | nil,
encryptor: Crypto.AES.t() | nil,
decryptor: Crypto.AES.t() | nil
}
defstruct protocol_handler: nil,
assigns: nil,
settings: nil,
current_state: nil,
encryptor: nil,
decryptor: nil,
socket: nil,
transport: nil,
client_ip: nil,
data: nil,
error: nil,
protocol_version: nil,
secret: nil,
join: false,
state_machine: nil
@doc """
Assigns a value to a key in the connection.
## Examples
iex> conn.assigns[:hello]
nil
iex> conn = assign(conn, :hello, :world)
iex> conn.assigns[:hello]
:world
"""
@spec assign(t, atom, term) :: t
def assign(%__MODULE__{assigns: assigns} = conn, key, value) when is_atom(key) do
%__MODULE__{conn | assigns: Map.put(assigns, key, value)}
end
@doc """
Closes the `Connection`.
"""
@spec close(t) :: t
def close(conn) do
:ok = conn.transport.close(conn.socket)
%__MODULE__{conn | socket: nil, transport: nil, state_machine: nil}
end
@doc """
Continues receiving messages from the client.
To prevent a client from flooding our process mailbox, we only receive one message at a time,
and explicitly `continue` to receive messages once we finish processing the ones we have.
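## Example
Illustrative receive-loop step - handle one packet, then re-arm the socket:
conn |> Connection.send_packet(response) |> Connection.continue()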
"""
@spec continue(t) :: t
def continue(conn) do
:ok = conn.transport.setopts(conn.socket, active: :once)
conn
end
@doc """
Starts encrypting messages sent/received over this `Connection`.
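## Example
Illustrative; `secret` is the shared AES key negotiated during login:
conn = Connection.encrypt(conn, secret)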
"""
@spec encrypt(t, binary) :: t
def encrypt(conn, secret) do
encryptor = %Crypto.AES{key: secret, ivec: secret}
decryptor = %Crypto.AES{key: secret, ivec: secret}
%__MODULE__{conn | encryptor: encryptor, decryptor: decryptor, secret: secret}
end
@doc """
Called when `Connection` is ready for the user to join the server.
"""
@spec join(t) :: t
def join(conn) do
%__MODULE__{conn | join: true}
end
@doc """
Initializes a `Connection`.
"""
@spec init(pid(), port(), transport()) :: t
def init(protocol_handler, socket, transport) do
{:ok, {client_ip, _port}} = :inet.peername(socket)
client_ip = IO.iodata_to_binary(:inet.ntoa(client_ip))
:ok = transport.setopts(socket, active: :once)
Logger.info(fn -> "Client #{client_ip} connected." end)
%__MODULE__{
protocol_handler: protocol_handler,
assigns: %{},
settings: %{},
current_state: :handshake,
socket: socket,
transport: transport,
client_ip: client_ip,
data: ""
}
end
@doc """
Communicates with Mojang servers to verify user login.
"""
@spec verify_login(t) :: t | {:error, :failed_login_verification}
def verify_login(%__MODULE__{} = conn) do
public_key = Crypto.get_public_key()
username = conn.assigns[:username]
hash = Crypto.SHA.sha(conn.secret <> public_key)
query_params = URI.encode_query(%{username: username, serverId: hash})
url = @has_joined_url <> query_params
with %{body: body, status_code: 200} <- HTTPoison.get!(url),
%{"id" => uuid, "name" => ^username} <- Poison.decode!(body) do
assign(conn, :uuid, normalize_uuid(uuid))
else
_ ->
{:error, :failed_login_verification}
end
end
@doc """
Stores data received from the client in this `Connection`.
"""
@spec put_data(t, binary) :: t
def put_data(conn, data) do
{data, conn} = maybe_decrypt(data, conn)
%__MODULE__{conn | data: conn.data <> data}
end
@doc """
Puts the `Connection` into the given `error` state.
"""
@spec put_error(t, any) :: t
def put_error(conn, error) do
%__MODULE__{conn | error: error}
end
@doc """
Sets the protocol for the `Connection`.
"""
@spec put_protocol(t, integer) :: t
def put_protocol(conn, protocol_version) do
%__MODULE__{conn | protocol_version: protocol_version}
end
@doc """
Replaces the `Connection`'s underlying socket.
"""
@spec put_socket(t, port()) :: t
def put_socket(conn, socket) do
%__MODULE__{conn | socket: socket}
end
@doc """
Updates the `Connection` state.
"""
@spec put_state(t, state) :: t
def put_state(conn, state) do
%__MODULE__{conn | current_state: state}
end
@doc """
Sets a setting for this `Connection`.
"""
@spec put_setting(t, key :: atom, value :: any) :: t
def put_setting(%__MODULE__{settings: settings} = conn, key, value) do
%__MODULE__{conn | settings: Map.put(settings, key, value)}
end
@doc """
Pops a packet from the `Connection`.
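## Example
Illustrative; returns the parsed packet, or nil for an invalid one:
{:ok, packet, conn} = Connection.read_packet(conn)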
"""
@spec read_packet(t) :: {:ok, struct | nil, t}
def read_packet(conn) do
case Packet.deserialize(conn.data, conn.current_state) do
{packet, rest} when is_binary(rest) ->
Logger.debug(fn -> "RECV: #{inspect(packet)}" end)
{:ok, packet, %__MODULE__{conn | data: rest}}
{{:error, :invalid_packet}, rest} ->
Logger.error(fn ->
"Received an invalid packet from client, closing connection. #{inspect(conn.data)}"
end)
{:ok, nil, %__MODULE__{conn | data: rest}}
end
end
@doc """
Sends a packet to the client.
"""
@spec send_packet(t, struct) :: t | {:error, :closed}
def send_packet(conn, packet) do
Logger.debug(fn -> "SEND: #{inspect(packet)}" end)
{:ok, raw} = Packet.serialize(packet)
{raw, conn} = maybe_encrypt(raw, conn)
:ok = conn.transport.send(conn.socket, raw)
conn
end
defp maybe_decrypt(request, %__MODULE__{decryptor: nil} = conn) do
{request, conn}
end
defp maybe_decrypt(request, conn) do
{decrypted, decryptor} = Crypto.AES.decrypt(request, conn.decryptor)
{decrypted, %__MODULE__{conn | decryptor: decryptor}}
end
defp maybe_encrypt(response, %__MODULE__{encryptor: nil} = conn) do
{response, conn}
end
defp maybe_encrypt(response, conn) do
{encrypted, encryptor} = Crypto.AES.encrypt(response, conn.encryptor)
{encrypted, %__MODULE__{conn | encryptor: encryptor}}
end
defp normalize_uuid(<<a::8-binary, b::4-binary, c::4-binary, d::4-binary, e::12-binary>>) do
"#{a}-#{b}-#{c}-#{d}-#{e}"
end
end
| lib/minecraft/connection.ex | 0.885043 | 0.423279 | connection.ex | starcoder |
defmodule Sanbase.Billing.Plan.AccessChecker do
@moduledoc """
Module that contains functions for determining access based on the subscription
plan.
Adding new queries or updating the subscription plan does not require this
module to be changed.
The subscription plan needed for a given query is given in the query definition
```
field :network_growth, list_of(:network_growth) do
meta(access: :restricted, min_plan: [sanapi: :pro, sanbase: :free])
...
end
```
This module knows how to inspect the GraphQL schema that is being built at
compile time and to build the needed data sets, also at compile time. There
are no checks for mutations.
Additionally, this module will raise a compile-time warning if there is a
query without a subscription plan defined.
The actual historical/realtime restrictions are implemented in modules:
- ApiAccessChecker
- SanbaseAccessChecker
as we have different restrictions.
"""
@doc documentation_ref: "# DOCS access-plans/index.md"
@type query_or_metric :: {:metric, String.t()} | {:query, atom()}
alias Sanbase.Billing.{Product, Subscription, GraphqlSchema}
alias Sanbase.Billing.Plan.{CustomAccess, ApiAccessChecker, SanbaseAccessChecker}
# Raise an error if there is any query without subscription plan
case GraphqlSchema.get_all_without_access_level() do
[] ->
:ok
queries ->
require Sanbase.Break, as: Break
Break.break("""
There are GraphQL queries defined without specifying their access level.
The access level could be either `free` or `restricted`.
Queries without access level: #{inspect(queries)}
""")
end
@extension_metrics GraphqlSchema.get_all_with_access_level(:extension)
def extension_metrics(), do: @extension_metrics
@free_metrics GraphqlSchema.get_all_with_access_level(:free)
@free_metrics_mapset MapSet.new(@free_metrics)
def free_metrics_mapset(), do: @free_metrics_mapset
@restricted_metrics GraphqlSchema.get_all_with_access_level(:restricted)
@restricted_metrics_mapset MapSet.new(@restricted_metrics)
def restricted_metrics_mapset(), do: @restricted_metrics_mapset
@all_metrics @free_metrics ++ @restricted_metrics
def all_metrics, do: @all_metrics
@custom_access_queries_stats CustomAccess.get()
@custom_access_queries @custom_access_queries_stats |> Map.keys() |> Enum.sort()
@custom_access_queries_mapset MapSet.new(@custom_access_queries)
@free_subscription Subscription.free_subscription()
@min_plan_map GraphqlSchema.min_plan_map()
# Raise an error if there are queries with custom access logic that are marked
# as free. If there are such queries the access restriction logic will never
# be applied
free_and_custom_intersection =
MapSet.intersection(@custom_access_queries_mapset, @free_metrics_mapset)
case Enum.empty?(free_and_custom_intersection) do
true ->
:ok
false ->
require Sanbase.Break, as: Break
Break.break("""
There are queries with access level `:free` that are defined in the
CustomAccess module. These queries custom access logic will never be
executed.
Queries defined in the CustomAccess module but do not have the `:restricted`
access level field: #{inspect(free_and_custom_intersection |> Enum.to_list())}
""")
end
@doc ~s"""
Check if a query full access is given only to users with a plan higher than free.
A query can be restricted but still accessible by not-paid users or users with
lower plans. In this case historical and/or realtime data access can be cut off
"""
@spec is_restricted?(query_or_metric) :: boolean()
def is_restricted?(query_or_metric), do: query_or_metric not in @free_metrics_mapset
@spec plan_has_access?(plan, product, query_or_metric) :: boolean()
when plan: atom(), product: binary()
def plan_has_access?(plan, product, query_or_metric) do
case min_plan(product, query_or_metric) do
:free -> true
:basic -> plan != :free
:pro -> plan not in [:free, :basic]
:premium -> plan not in [:free, :basic, :pro]
:custom -> plan == :custom
# extensions plans can be with other plan. They're handled separately
_ -> true
end
end
@spec min_plan(product, query_or_metric) :: atom() when product: binary()
def min_plan(product, query_or_metric) do
@min_plan_map[query_or_metric][product] || :free
end
@spec get_available_metrics_for_plan(product, plan, restriction_type) :: list(binary())
when plan: atom(), product: binary(), restriction_type: atom()
def get_available_metrics_for_plan(product, plan, restriction_type \\ :all) do
case restriction_type do
:free -> @free_metrics
:restricted -> @restricted_metrics
:custom -> @custom_access_queries
:all -> @all_metrics
end
|> Stream.filter(&match?({:metric, _}, &1))
|> Stream.filter(&plan_has_access?(plan, product, &1))
|> Enum.map(fn {_, name} -> name end)
end
def custom_access_queries_stats(), do: @custom_access_queries_stats
def custom_access_queries(), do: @custom_access_queries
@product_to_access_module [
{Product.product_api(), ApiAccessChecker},
{Product.product_sanbase(), SanbaseAccessChecker}
]
@spec historical_data_in_days(atom(), query_or_metric(), non_neg_integer()) ::
non_neg_integer() | nil
def historical_data_in_days(plan, query_or_metric, _product_id)
when query_or_metric in @custom_access_queries do
Map.get(@custom_access_queries_stats, query_or_metric)
|> get_in([:plan_access, plan, :historical_data_in_days])
end
for {product_id, module} <- @product_to_access_module do
def historical_data_in_days(plan, query_or_metric, unquote(product_id)) do
unquote(module).historical_data_in_days(plan, query_or_metric)
end
end
@spec realtime_data_cut_off_in_days(atom(), query_or_metric(), non_neg_integer()) ::
non_neg_integer() | nil
def realtime_data_cut_off_in_days(plan, query_or_metric, _product_id)
when query_or_metric in @custom_access_queries do
Map.get(@custom_access_queries_stats, query_or_metric)
|> get_in([:plan_access, plan, :realtime_data_cut_off_in_days])
end
for {product_id, module} <- @product_to_access_module do
def realtime_data_cut_off_in_days(plan, query_or_metric, unquote(product_id)) do
unquote(module).realtime_data_cut_off_in_days(plan, query_or_metric)
end
end
def user_can_create_signal?(user, subscription) do
subscription = subscription || @free_subscription
cond do
# If user has API subscription - he has unlimited signals
subscription.plan.product_id == Product.product_api() -> true
SanbaseAccessChecker.signals_limits_not_reached?(user, subscription) -> true
true -> false
end
end
def signals_limits_upgrade_message(), do: SanbaseAccessChecker.signals_limits_upgrade_message()
end
| lib/sanbase/billing/plan/access_checker.ex | 0.884664 | 0.680067 | access_checker.ex | starcoder |
defmodule X.Tokenizer do
@moduledoc """
X template tokenizer module.
"""
alias X.Ast
@whitespaces ' \n\r\t'
@attr_stop_chars @whitespaces ++ '/>'
@namechars '.-_:'
@singleton_tags ~w[
area base br col embed hr
img input keygen link meta
param source track wbr
]c
defguardp is_whitespace(char) when char in @whitespaces
defguardp is_capital(char) when char >= ?A and char <= ?Z
defguardp is_lowercase(char) when char >= ?a and char <= ?z
defguardp is_letter(char) when is_capital(char) or is_lowercase(char)
defguardp is_digit(char) when char >= ?0 and char <= ?9
defguardp is_literal(char) when is_letter(char) or is_digit(char)
defguardp is_namechar(char) when is_literal(char) or char in @namechars
@doc ~S"""
Parses the given string or charlist into X template tokens.
See `X.Ast` for tokens type definition.
## Example
iex> X.Tokenizer.call("<div><span class='test'>{{ a }}</span></div>")
[
{:tag_start, {1, 1}, 'div', [], nil, nil, false, false, false},
{:tag_start, {6, 1}, 'span', [{:tag_attr, {12, 1}, 'class', 'test', false}],
nil, nil, false, false, false},
{:tag_output, {25, 1}, 'a ', true},
{:tag_end, {32, 1}, 'span'},
{:tag_end, {39, 1}, 'div'}
]
"""
@spec call(charlist() | String.t()) :: [Ast.token()]
def call(html) when is_list(html) do
tokenize(html, {1, 1})
end
def call(html) when is_binary(html) do
html
|> :unicode.characters_to_list()
|> call()
end
@spec tokenize(charlist(), Ast.cursor()) :: [Ast.token()]
defp tokenize('</' ++ tail, {col, row}) do
{name, list, cur} = extract_tag_end(tail, {col + 2, row})
token = {:tag_end, {col, row}, name}
[token | tokenize(list, cur)]
end
defp tokenize('{{=' ++ tail, {col, row}) do
{list, cur} = skip_whitespace(tail, {col + 3, row})
{data, list, cur} = extract_tag_output(list, cur)
token = {:tag_output, {col, row}, data, false}
[token | tokenize(list, cur)]
end
defp tokenize('{{' ++ tail, {col, row}) do
{list, cur} = skip_whitespace(tail, {col + 2, row})
{data, list, cur} = extract_tag_output(list, cur)
token = {:tag_output, {col, row}, data, true}
[token | tokenize(list, cur)]
end
defp tokenize('<!' ++ tail, {col, row}) do
{data, list, cur} = extract_value(tail, {col + 2, row}, '>', nil, false)
token = {:tag_comment, {col, row}, data}
[token | tokenize(list, cur)]
end
defp tokenize([?<, next | tail], {col, row}) do
cond do
is_letter(next) ->
{token, list, cur} = extract_tag_start([next | tail], {col, row})
[token | tokenize(list, cur)]
true ->
throw({:unexpected_token, {col, row}, next})
end
end
defp tokenize([char | tail], cur = {col, row}) do
{text, is_blank, list, cur} = extract_tag_text(tail, next_cursor(char, cur))
token = {:tag_text, {col, row}, [char | text], is_whitespace(char), is_blank}
[token | tokenize(list, cur)]
end
defp tokenize([], _) do
[]
end
@spec extract_tag_text(charlist(), Ast.cursor()) ::
{charlist(), boolean(), charlist(), Ast.cursor()}
defp extract_tag_text(list, {col, row}) do
case list do
[char, next | tail] when char != ?< and [char, next] != '{{' and char != ?\n ->
{acc, is_blank, rest, cur} = extract_tag_text([next | tail], {col + 1, row})
{[char | acc], is_blank && is_whitespace(char), rest, cur}
[char] ->
{[char], is_whitespace(char), [], {col, row}}
_ ->
{[], true, list, {col, row}}
end
end
@spec extract_tag_output(charlist(), Ast.cursor()) ::
{charlist(), charlist(), Ast.cursor()}
defp extract_tag_output(list, {col, row}) do
case list do
'}}' ++ tail ->
{[], tail, {col + 2, row}}
[char, next | tail] ->
{acc, rest, cur} = extract_tag_output([next | tail], next_cursor(char, {col, row}))
{[char | acc], rest, cur}
[char | _] ->
throw({:unexpected_token, {col, row}, char})
end
end
@spec extract_tag_end(charlist(), Ast.cursor()) :: {charlist(), charlist(), Ast.cursor()}
defp extract_tag_end(list, {col, row}) do
{name, rest, cur} = extract_name(list, {col, row})
{false, rest, cur} = extract_tag_close(rest, cur)
{name, rest, cur}
end
@spec extract_tag_start(charlist(), Ast.cursor()) :: {Ast.tag_start(), charlist(), Ast.cursor()}
defp extract_tag_start(list, {col, row}) do
{name, list, cur} = extract_name(list, {col + 1, row})
{attrs, condition, iterator, list, cur} = extract_tag_attributes(list, cur)
{is_selfclosed, list, cur} = extract_tag_close(list, cur)
is_component =
case name do
[char | _] when is_capital(char) -> true
_ -> false
end
{
{:tag_start, {col, row}, name, attrs, condition, iterator, name in @singleton_tags,
is_selfclosed, is_component},
list,
cur
}
end
@spec extract_name(charlist(), Ast.cursor()) :: {charlist(), charlist(), Ast.cursor()}
defp extract_name(list = [char | tail], {col, row}) do
case is_namechar(char) do
true ->
{acc, rest, cur} = extract_name(tail, {col + 1, row})
{[char | acc], rest, cur}
_ ->
{[], list, {col, row}}
end
end
defp extract_name([], cur) do
throw({:unexpected_token, cur, ?\s})
end
@spec extract_tag_attributes(
charlist(),
Ast.cursor()
) ::
{[Ast.tag_attr()], Ast.tag_condition() | nil, Ast.tag_iterator() | nil, charlist(),
Ast.cursor()}
defp extract_tag_attributes(list, cur) do
{list, {col, row}} = skip_whitespace(list, cur)
case list do
[char | _] when char in '/>' ->
{[], nil, nil, list, cur}
'x-else-if' ++ tail ->
cur = {col + 8, row}
{value, list, cur} = extract_attr_value(tail, cur)
{acc, _, iterator, rest, cur} = extract_tag_attributes(list, cur)
{acc, {:elseif, cur, value}, iterator, rest, cur}
'x-unless' ++ tail ->
cur = {col + 8, row}
{value, list, cur} = extract_attr_value(tail, cur)
{acc, _, iterator, rest, cur} = extract_tag_attributes(list, cur)
{acc, {:unless, cur, value}, iterator, rest, cur}
'x-else' ++ tail ->
cur = {col + 6, row}
{value, list, cur} = extract_attr_value(tail, cur)
{acc, _, iterator, rest, cur} = extract_tag_attributes(list, cur)
{acc, {:else, cur, value}, iterator, rest, cur}
'x-for' ++ tail ->
cur = {col + 5, row}
{value, list, cur} = extract_attr_value(tail, cur)
{acc, condition, _, rest, cur} = extract_tag_attributes(list, cur)
{acc, condition, {:for, cur, value}, rest, cur}
'x-if' ++ tail ->
{value, list, cur} = extract_attr_value(tail, {col + 4, row})
{acc, _, iterator, rest, cur} = extract_tag_attributes(list, cur)
{acc, {:if, cur, value}, iterator, rest, cur}
_ ->
{attr, list, cur} = extract_attribute(list, {col, row})
{acc, condition, iterator, rest, cur} = extract_tag_attributes(list, cur)
{[attr | acc], condition, iterator, rest, cur}
end
end
@spec extract_attribute(charlist(), Ast.cursor()) :: {Ast.tag_attr(), charlist(), Ast.cursor()}
defp extract_attribute(list, {col, row}) do
{is_dynamic, {name, list, cur}} =
case list do
[?: | rest] ->
{true, extract_name(rest, {col + 1, row})}
[char | _] when is_namechar(char) ->
{false, extract_name(list, {col, row})}
[char | _] ->
throw({:unexpected_token, {col, row}, char})
end
{value, list, cur} = extract_attr_value(list, cur)
{list, cur} = skip_whitespace(list, cur)
{{:tag_attr, {col, row}, name, value, is_dynamic}, list, cur}
end
@spec extract_attr_value(charlist(), Ast.cursor()) :: {charlist(), charlist(), Ast.cursor()}
defp extract_attr_value(list, {col, row}) do
case list do
'=%{' ++ rest ->
extract_value([?%, ?{ | rest], {col + 3, row}, '}', ?{, true)
[?=, ?' | rest] ->
extract_value(rest, {col + 2, row}, [?'], nil, false)
'="' ++ rest ->
extract_value(rest, {col + 2, row}, '"', nil, false)
'=[' ++ rest ->
extract_value([?[ | rest], {col + 2, row}, ']', ?[, true)
[?=, next | rest] when is_literal(next) ->
extract_value([next | rest], {col + 1, row}, @attr_stop_chars, nil, false)
[char | _] when is_whitespace(char) or char in '/>' ->
{[], list, {col, row}}
[char | _] ->
throw({:unexpected_token, {col, row}, char})
end
end
@spec extract_value(charlist(), Ast.cursor(), charlist(), nil | integer(), boolean()) ::
{charlist(), charlist(), Ast.cursor()}
@spec extract_value(charlist(), Ast.cursor(), charlist(), nil | integer(), boolean(), integer()) ::
{charlist(), charlist(), Ast.cursor()}
defp extract_value(list, cur, terminator, continue_char, include_terminator, nesting \\ 0)
defp extract_value(
[char | rest],
{col, row},
terminator,
continue_char,
include_terminator,
nesting
) do
cur = next_cursor(char, {col, row})
cond do
char == continue_char ->
{acc, rest, cur} =
extract_value(rest, cur, terminator, continue_char, include_terminator, nesting + 1)
{[char | acc], rest, cur}
char in terminator and (nesting == 1 or is_nil(continue_char)) ->
{(include_terminator && [char]) || [], rest, cur}
char in terminator ->
{acc, rest, cur} =
extract_value(rest, cur, terminator, continue_char, include_terminator, nesting - 1)
{[char | acc], rest, cur}
true ->
{acc, rest, cur} =
extract_value(rest, cur, terminator, continue_char, include_terminator, nesting)
{[char | acc], rest, cur}
end
end
defp extract_value([], cur, _, _, _, _) do
throw({:unexpected_token, cur, ?\n})
end
@spec extract_tag_close(charlist(), Ast.cursor()) :: {boolean(), charlist(), Ast.cursor()}
defp extract_tag_close(list, {col, row}) do
case list do
'/>' ++ rest -> {true, rest, {col + 2, row}}
[?> | rest] -> {false, rest, {col + 1, row}}
[char | _] -> throw({:unexpected_token, {col, row}, char})
end
end
@spec skip_whitespace(charlist(), Ast.cursor()) :: {charlist(), Ast.cursor()}
defp skip_whitespace(list, {col, row}) do
case list do
[?\n | rest] ->
skip_whitespace(rest, {1, row + 1})
[char | rest] when char in ' \r\t' ->
skip_whitespace(rest, {col + 1, row})
_ ->
{list, {col, row}}
end
end
@spec next_cursor(integer(), Ast.cursor()) :: Ast.cursor()
defp next_cursor(char, {col, row}) do
case char do
?\n -> {1, row + 1}
_ -> {col + 1, row}
end
end
end
| lib/x/tokenizer.ex | 0.857679 | 0.497131 | tokenizer.ex | starcoder |
defmodule OMG.API.BlackBoxMe do
@moduledoc """
Generates dumb wrapper for pure library that keeps state in process dictionary.
Wrapper creates module with :"GS" attached at the end.
Example:
```
defmodule YourProject.State.Core do
use OMG.API.BlackBoxMe
...
```
would create a YourProject.State.CoreGS module, accessible in every MIX_ENV.
The pure library is presumed to have the following interface:
-spec funX(arg1, ..., argN, state) :: {:ok, side_effects(), state} | {{:error, term}, state}
The wrapper exports the same functions with the arity reduced by one (the state argument is hidden) and returns tuples shortened by one element (the state is hidden). The example above would be transformed into:
-spec funX(arg1, ..., argN) :: {:ok, side_effects()} | {:error, term}
This allows for black-box testing and more importantly - for interaction with proper_statem and proper_fsm.
The wrapper adds the following helper functions:
# initiate state with call to this:
@spec init(state()) :: {:ok, :state_managed_by_helper}
# cleanup state stored in process dictionary
@spec reset() :: state() | nil
# get state stored in process dictionary (for possible inspection)
@spec get_state() :: state() | nil
"""
defmacro __using__(_opts) do
quote do
@before_compile OMG.API.BlackBoxMe
end
end
defp insert_static(core) do
quote do
def init(state) do
Process.put(unquote(core), state)
{:ok, :state_managed_by_helper}
end
def reset do
Process.put(unquote(core), nil)
end
def get_state do
Process.get(unquote(core))
end
end
end
defp make_module_name(core) do
core
|> Atom.to_string()
|> Kernel.<>("GS")
|> String.to_atom()
end
defmacro __before_compile__(opts) do
specials = [__info__: 1, __struct__: 0, __struct__: 1, module_info: 0, module_info: 1]
core = hd(opts.context_modules)
exports = Module.definitions_in(core, :def)
exports = exports -- specials
module_static = insert_static(core)
contents = [module_static]
exports =
for {func_name, arity} <- exports do
args =
for x <- :lists.seq(1, arity - 1) do
argname = String.to_atom("arg#{inspect(x)}")
{argname, [], nil}
end
{func_name, args}
end
module_api =
Enum.map(exports, fn {func_name, args} ->
quote do
def unquote(func_name)(unquote_splicing(args)) do
state = Process.get(unquote(core))
case :erlang.apply(unquote(core), unquote(func_name), unquote(args) ++ [state]) do
{:ok, sideeffects, new_state} ->
Process.put(unquote(core), new_state)
{:ok, sideeffects}
{:ok, new_state} ->
Process.put(unquote(core), new_state)
:ok
{{:error, error}, new_state} ->
Process.put(unquote(core), new_state)
{:error, error}
            unexpected ->
              # Interpolate the call details directly instead of re-invoking the
              # wrapper function, which would recurse on every bad return.
              IO.puts(
                "unexpected output of #{inspect(unquote(core))}.#{unquote(func_name)}(#{inspect(unquote(args))}) :: #{inspect(unexpected)}"
              )

              :erlang.error({:badreturn, unexpected})
end
end
end
end)
contents = contents ++ module_api
module_name = make_module_name(core)
# generate the helper module:
{:module, _, _, _} = Module.create(module_name, contents, Macro.Env.location(__ENV__))
# but don't introduce any changes into caller module:
[]
end
end | apps/omg_api/lib/black_box_me.ex | 0.878503 | 0.700434 | black_box_me.ex | starcoder |
defmodule Cizen.Pattern.Compiler do
@moduledoc false
alias Cizen.Pattern.Code
import Code, only: [as_code: 2]
# filter style
# input: `fn %{a: a} -> a == :ok end`
def compile(pattern_or_filter, env) do
{:fn, _, fncases} = to_filter(pattern_or_filter)
# Merges cases
{codes, _guards} =
fncases
|> Enum.reduce({[], []}, fn fncase, {codes, guards_of_above_fncases} ->
{code, guard} = read_fncase(fncase, env)
code =
guards_of_above_fncases
|> Enum.reverse()
        # Negates the guards of the fncases above
|> Enum.map(fn guard -> Code.not_(guard) end)
# guard for this case
|> List.insert_at(-1, guard)
|> Code.all()
|> gen_and(code)
{[code | codes], [guard | guards_of_above_fncases]}
end)
codes
|> Enum.reverse()
|> Code.any()
end
# input `fn case1; case2 end`
def to_filter({:fn, _, _fncases} = filter), do: filter
def to_filter(pattern), do: quote(do: fn unquote(pattern) -> true end)
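
  # For example, given the AST of `%{a: 1}`, `to_filter/1` returns the AST of
  # `fn %{a: 1} -> true end`, so bare patterns and filter functions are compiled
  # through the same fncase path below.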
# Reads fncase
@spec read_fncase(Code.ast(), Macro.Env.t()) :: {Code.t(), [Code.t()]}
defp read_fncase({:->, _, [[header], {:__block__, _, [expression]}]}, env) do
# Ignores :__block__
read_fncase({:->, [], [[header], expression]}, env)
end
# input: header -> expression
defp read_fncase({:->, _, [[header], expression]}, env) do
{vars, guard_codes} = read_header(header, env)
guard =
guard_codes
|> Enum.reverse()
|> Code.all()
code =
expression
|> Code.expand_embedded_patterns(env)
|> Code.translate(vars, env)
{code, guard}
end
# Reads prefix and guard codes (reversed) from the given expression
@spec read_header(Code.ast(), Macro.Env.t()) :: {Code.vars(), [Code.t()]}
defp read_header(header, env), do: read_header(header, %{}, [], [], env)
# * vars - accessible variables
# * codes - codes generated from guard expressions (reversed order of execution)
# * prefix - prefix keys
# * env - `Macro.Env`
@spec read_header(Code.ast(), Code.vars(), [Code.t()], [term], Macro.Env.t()) ::
{Code.vars(), [Code.t()]}
# input: `%{key: atom} when atom in [:a, :b, :c]`
defp read_header({:when, _, [header, guard]}, vars, assertion_codes, prefix, env) do
# read the header
{vars, assertion_codes} = read_header(header, vars, assertion_codes, prefix, env)
# translate the guard case
assertion_codes = [Code.translate(guard, vars, env) | assertion_codes]
{vars, assertion_codes}
end
# input: `%MyStruct{key1: var, key2: 42}`
defp read_header({:%, _, [module, {:%{}, _, pairs}]}, vars, assertion_codes, prefix, env) do
context_value = Code.access_(prefix)
assertion_code =
as_code value: context_value do
is_map(value) and value.__struct__ == module
end
handle_key_value_pairs(pairs, vars, [assertion_code | assertion_codes], prefix, env)
end
# input: `%{key1: var, key2: 42}`
defp read_header({:%{}, _, pairs}, vars, assertion_codes, prefix, env) do
context_value = Code.access_(prefix)
assertion_code =
as_code value: context_value do
is_map(value)
end
assertion_code_checks_keys_exists =
pairs
|> Enum.map(fn {key, _value} ->
as_code value: context_value do
Map.has_key?(value, key)
end
end)
assertion_codes = assertion_code_checks_keys_exists ++ [assertion_code | assertion_codes]
handle_key_value_pairs(pairs, vars, assertion_codes, prefix, env)
end
# input: `%MyStruct{a: 1} = var`
defp read_header({:=, _, [struct, {var, meta, context}]}, vars, assertion_codes, prefix, env) do
# read the struct
{vars, assertion_codes} = read_header(struct, vars, assertion_codes, prefix, env)
# read the var
read_header({var, meta, context}, vars, assertion_codes, prefix, env)
end
# input: `^var`
defp read_header({:^, _, [var]}, vars, assertion_codes, prefix, _env) do
context_value = Code.access_(prefix)
assertion_code =
as_code value: context_value do
value == var
end
{vars, [assertion_code | assertion_codes]}
end
# input: `"str" <> var`
defp read_header({:<>, _, [leading, {var_name, _, _}]}, vars, assertion_codes, prefix, _env) do
context_value = Code.access_(prefix)
assertion_code =
as_code value: context_value do
is_binary(value) and String.starts_with?(value, leading)
end
assertion_codes = [assertion_code | assertion_codes]
var_code =
as_code value: context_value do
String.trim_leading(value, leading)
end
vars = Map.put(vars, var_name, var_code)
{vars, assertion_codes}
end
# input: `var`
defp read_header({var_name, _, _}, vars, assertion_codes, prefix, _env) do
case Map.get(vars, var_name) do
# bind the current value to the new variable.
nil ->
vars = Map.put(vars, var_name, Code.access_(prefix))
{vars, assertion_codes}
# variable exists.
var_code ->
context_value = Code.access_(prefix)
assertion_code =
as_code value: context_value, var: var_code do
value == var
end
{vars, [assertion_code | assertion_codes]}
end
end
# input: `42`
defp read_header(actual_value, vars, assertion_codes, prefix, _env) do
context_value = Code.access_(prefix)
assertion_code =
as_code value: context_value do
value == actual_value
end
{vars, [assertion_code | assertion_codes]}
end
# Handles `[key1: var, key2: 42]` for a map or struct
defp handle_key_value_pairs(pairs, vars, codes, prefix, env) do
pairs
|> Enum.reduce({vars, codes}, fn {key, value}, {vars, codes} ->
read_header(value, vars, codes, List.insert_at(prefix, -1, key), env)
end)
end
defp gen_and(true, arg2), do: arg2
defp gen_and(arg1, true), do: arg1
defp gen_and(arg1, arg2), do: Code.and_(arg1, arg2)
end | lib/cizen/pattern/compiler.ex | 0.793346 | 0.47025 | compiler.ex | starcoder |
defmodule Garlic.NetworkStatus.Document do
@moduledoc "Network status document"
@spec parse(binary) :: {:ok, Garlic.NetworkStatus.t()} | {:error, atom}
def parse(text) do
text
|> String.split("\n")
|> Enum.map(&String.split(&1, " "))
|> parse_tokens(%Garlic.NetworkStatus{})
end
defp parse_tokens([["params" | params] | tail], network_status) do
parse_tokens(tail, %{network_status | params: split_pairs(params)})
end
defp parse_tokens([["valid-after", date, time] | tail], network_status) do
with {:ok, valid_after, _} <- DateTime.from_iso8601("#{date}T#{time}Z") do
parse_tokens(tail, %{network_status | valid_after: DateTime.to_unix(valid_after)})
end
end
defp parse_tokens([["valid-until", date, time] | tail], network_status) do
with {:ok, valid_until, _} <- DateTime.from_iso8601("#{date}T#{time}Z") do
parse_tokens(tail, %{network_status | valid_until: DateTime.to_unix(valid_until)})
end
end
defp parse_tokens([["fresh-until", date, time] | tail], network_status) do
with {:ok, fresh_until, _} <- DateTime.from_iso8601("#{date}T#{time}Z") do
parse_tokens(tail, %{network_status | fresh_until: DateTime.to_unix(fresh_until)})
end
end
defp parse_tokens([["shared-rand-previous-value", _, value] | tail], network_status) do
parse_tokens(tail, %{network_status | previous_shared_random: Base.decode64!(value)})
end
defp parse_tokens([["shared-rand-current-value", _, value] | tail], network_status) do
parse_tokens(tail, %{network_status | current_shared_random: Base.decode64!(value)})
end
defp parse_tokens([["r" | _] | _] = data, network_status) do
{:ok, %{network_status | routers: parse_routers(data, network_status)}}
end
defp parse_tokens([_ | tail], network_status) do
parse_tokens(tail, network_status)
end
defp parse_routers(
[
["r", nickname, fingerprint, digest, _, _, ipv4, onion_port, directory_port] | tail
],
network_status
) do
%Garlic.Router{
nickname: nickname,
fingerprint: Base.decode64!(fingerprint, padding: false),
digest: Base.decode64!(digest, padding: false),
ipv4: Garlic.Util.parse_ip_address(ipv4),
onion_port: String.to_integer(onion_port),
directory_port: String.to_integer(directory_port)
}
|> parse_router_description(tail, network_status)
end
defp parse_routers([_ | tail], network_status) do
parse_routers(tail, network_status)
end
defp parse_routers([], _), do: []
defp parse_router_description(router, [["s" | flags] | tail], network_status) do
router.flags
|> put_in(MapSet.new(flags))
|> parse_router_description(tail, network_status)
end
defp parse_router_description(router, [["w" | options] | tail], network_status) do
router.bandwidth
|> put_in(split_pairs(options))
|> parse_router_description(tail, network_status)
end
defp parse_router_description(router, [["id", "ed25519", identity] | tail], network_status) do
parse_router_description(
%{router | identity: Base.decode64!(identity, padding: false)},
tail,
network_status
)
end
defp parse_router_description(router, [[s | _] | tail], network_status)
when s in ~w(a v pr p) do
parse_router_description(router, tail, network_status)
end
defp parse_router_description(router, tail, network_status) do
[router | parse_routers(tail, network_status)]
end
defp split_pairs([""]), do: %{}
defp split_pairs(pairs) do
for string <- pairs, into: %{} do
[name, value] = String.split(string, "=")
{name, String.to_integer(value)}
end
end
end | lib/garlic/network_status/document.ex | 0.751283 | 0.413181 | document.ex | starcoder |
defmodule Cog.Pipeline.InitialContext do
alias Experimental.GenStage
alias Cog.Pipeline.DoneSignal
@moduledoc ~s"""
`GenStage` producer responsible for initiating pipeline execution.
  `InitialContext` begins with its `GenStage` demand set to `:accumulate`,
  pausing the pipeline until it is fully constructed. Once the pipeline is
  ready, calling `InitialContext.unlock/1` begins processing.
`GenStage.BroadcastDispatcher` is used to dispatch output from this stage.
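
  ## Example

  A minimal sketch of the intended lifecycle (option values are placeholders):

      {:ok, stage} =
        InitialContext.start_link(
          context: [],
          pipeline: self(),
          request_id: "req-123"
        )

      # ... subscribe downstream stages, then release the accumulated demand:
      :ok = InitialContext.unlock(stage)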
"""
use GenStage
require Logger
@type t :: %__MODULE__{
request_id: String.t,
context: [] | [Cog.Pipeline.DataSignal],
done: boolean,
pipeline: pid
}
defstruct [:context, :done, :pipeline, :request_id]
@doc ~s"""
Starts a new `InitialContext` process.
## Options
* `:context` - Initial pipeline context. Either an empty list or a list of `Cog.Pipeline.DataSignal`. Required.
* `:pipeline` - Pid of the parent pipeline. Required.
* `:request_id` - Id of the originating request. Required.
"""
@spec start_link(Keyword.t) :: {:ok, pid} | {:error, any}
def start_link(opts) do
case GenStage.start_link(__MODULE__, opts) do
{:ok, pid} ->
GenStage.demand(pid, :accumulate)
{:ok, pid}
error ->
error
end
end
@doc "Initiates pipeline processing."
@spec unlock(pid) :: :ok
def unlock(pid) do
GenStage.demand(pid, :forward)
end
def init(opts) do
try do
pipeline = Keyword.fetch!(opts, :pipeline)
Process.monitor(pipeline)
{:producer, %__MODULE__{context: Keyword.fetch!(opts, :context),
done: false,
pipeline: pipeline,
request_id: Keyword.fetch!(opts, :request_id)},
[dispatcher: GenStage.BroadcastDispatcher]}
rescue
e in KeyError ->
{:stop, {:error, Exception.message(e)}}
end
end
def handle_demand(_demand, %__MODULE__{done: false}=state) do
{:noreply, state.context ++ [%DoneSignal{}], %{state | done: true}}
end
def handle_demand(_demand, %__MODULE__{done: true}=state) do
{:noreply, [%DoneSignal{}], state}
end
def handle_info({:DOWN, _mref, _, pipeline, _}, %__MODULE__{pipeline: pipeline}=state) do
{:stop, :normal, state}
end
def handle_info({:pipeline_complete, pipeline}, %__MODULE__{pipeline: pipeline}=state) do
{:stop, :normal, state}
end
def handle_info(_msg, state) do
{:noreply, state}
end
def terminate(_reason, state) do
Logger.debug("Initial context for pipeline #{state.request_id} shutting down")
end
end | lib/cog/pipeline/initial_context.ex | 0.819785 | 0.502991 | initial_context.ex | starcoder |
defmodule ExFuzzywuzzy.Algorithms.PartialMatch do
@moduledoc """
Implementation for the partial matching algorithms used by the library interface.
  The model defined here is tied to the calling ratio functions and is not meant to be used externally.
"""
alias ExFuzzywuzzy.Algorithms.LongestCommonSubstring
defstruct [:left_block, :right_block, :left_starting_index, :right_starting_index, :length]
@typedoc """
The position of a grapheme in a string
"""
@type index :: non_neg_integer()
@typedoc """
The data collected applying partial matching algorithm
"""
@type t :: %__MODULE__{
left_block: String.t(),
right_block: String.t(),
left_starting_index: index(),
right_starting_index: index(),
length: non_neg_integer()
}
@typep slice :: {index(), index(), index(), index()}
@doc """
Calculates a list of string pairs which are the best matching substrings extracted from the provided ones
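
  A hypothetical call (shape only; the exact blocks depend on the matching
  internals):

      matching_blocks("abcd", "xabcdy")
      #=> [%PartialMatch{left_block: "abcd", length: 4, ...}, ...]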
"""
@spec matching_blocks(String.t(), String.t()) :: [t()]
def matching_blocks(left, right), do: matching_blocks(left, right, String.length(left), String.length(right))
@spec matching_blocks(String.t(), String.t(), index(), index()) :: [t()]
def matching_blocks(left, right, left_length, right_length) when right_length < left_length do
    # swap so that the shorter string always comes first; doing the swap here
    # guarantees the same ratio regardless of the calling order
matching_blocks(right, left, right_length, left_length)
end
def matching_blocks(left, right, left_length, right_length) do
[{0, left_length, 0, right_length}]
|> do_matching_blocks(left, right, [])
|> Enum.concat([
%__MODULE__{
left_block: left,
right_block: String.slice(right, right_length - left_length, left_length),
left_starting_index: left_length,
right_starting_index: right_length,
length: 0
}
])
|> Enum.sort()
|> Enum.reduce([], fn
%__MODULE__{
left_block: first_left_block,
right_block: first_right_block,
left_starting_index: first_left_index,
right_starting_index: first_right_index,
length: first_length
},
[
%__MODULE__{
left_block: second_left_block,
right_block: second_right_block,
left_starting_index: second_left_index,
right_starting_index: second_right_index,
length: second_length
}
| other_matches
]
when first_left_index + first_length == second_left_index and
first_right_index + first_length == second_right_index ->
[
          # Adjacent blocks merge into a single block starting where the first began
          %__MODULE__{
            left_block: first_left_block <> second_left_block,
            right_block: first_right_block <> second_right_block,
            left_starting_index: first_left_index,
            right_starting_index: first_right_index,
            length: first_length + second_length
          }
| other_matches
]
match, matches ->
[match | matches]
end)
|> Enum.reverse()
end
@spec do_matching_blocks([slice()], String.t(), String.t(), [t()]) :: [t()]
defp do_matching_blocks(to_be_processed, left, right, matches)
defp do_matching_blocks([], _, _, acc), do: acc
defp do_matching_blocks([{left_from, left_to, right_from, right_to} | remaining], left, right, matches) do
case LongestCommonSubstring.lcs(
String.slice(left, left_from, left_to - left_from),
String.slice(right, right_from, right_to - right_from)
) do
nil ->
do_matching_blocks(remaining, left, right, matches)
lcs = %LongestCommonSubstring{
left_starting_index: left_index,
right_starting_index: right_index,
length: k
} ->
i = left_from + left_index
j = right_from + right_index
remaining
|> update_left_boundary(left_from, right_from, lcs)
|> update_right_boundary(left_from, left_to, right_from, right_to, lcs)
|> do_matching_blocks(left, right, [
%__MODULE__{
left_block: left,
right_block: String.slice(right, max(j - i, 0), String.length(left)),
left_starting_index: i,
right_starting_index: j,
length: k
}
| matches
])
end
end
@spec update_left_boundary([slice()], index(), index(), LongestCommonSubstring.t()) :: [slice()]
defp update_left_boundary(remaining, left_from, right_from, %LongestCommonSubstring{
left_starting_index: left_index,
right_starting_index: right_index
})
when left_index > 0 and right_index > 0,
do: [{left_from, left_from + left_index, right_from, right_from + right_index} | remaining]
defp update_left_boundary(remaining, _, _, _), do: remaining
@spec update_right_boundary([slice()], index(), index(), index(), index(), LongestCommonSubstring.t()) :: [slice()]
defp update_right_boundary(remaining, left_from, left_to, right_from, right_to, %LongestCommonSubstring{
left_starting_index: left_index,
right_starting_index: right_index,
length: k
})
when left_from + left_index + k < left_to and right_from + right_index + k < right_to,
do: [{left_from + left_index + k, left_to, right_from + right_index + k, right_to} | remaining]
defp update_right_boundary(remaining, _, _, _, _, _), do: remaining
end | lib/ex_fuzzywuzzy/algorithms/partial_match.ex | 0.906661 | 0.772273 | partial_match.ex | starcoder |
defmodule Scenic.Primitive.Triangle do
@moduledoc """
Draw a triangle on the screen.
## Data
`{point_a, point_b, point_c}`
  The data for a triangle is a tuple containing three points.
* `point_a` - position to start drawing from
* `point_b` - position to draw to
* `point_c` - position to draw to
## Styles
This primitive recognizes the following styles
* [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
* [`fill`](Scenic.Primitive.Style.Fill.html) - fill in the area of the primitive
  * [`stroke`](Scenic.Primitive.Style.Stroke.html) - stroke the outline of the primitive.
* [`join`](Scenic.Primitive.Style.Join.html) - control how segments are joined.
* [`miter_limit`](Scenic.Primitive.Style.MiterLimit.html) - control how segments are joined.
## Usage
You should add/modify primitives via the helper functions in
[`Scenic.Primitives`](Scenic.Primitives.html#triangle/3)
```elixir
graph
|> triangle(
    {{10, 0}, {20, 40}, {0, 20}},
stroke: {1, :yellow}
)
```
"""
use Scenic.Primitive
alias Scenic.Math
alias Scenic.Script
alias Scenic.Primitive
alias Scenic.Primitive.Style
@type t ::
{{x0 :: number, y0 :: number}, {x1 :: number, y1 :: number},
{x2 :: number, y2 :: number}}
@type styles_t :: [
:hidden | :scissor | :fill | :stroke_width | :stroke_fill | :join | :miter_limit
]
@styles [:hidden, :scissor, :fill, :stroke_width, :stroke_fill, :join, :miter_limit]
@impl Primitive
@spec validate(t()) :: {:ok, t()} | {:error, String.t()}
def validate({{x0, y0}, {x1, y1}, {x2, y2}} = data)
when is_number(x0) and is_number(y0) and
is_number(x1) and is_number(y1) and
is_number(x2) and is_number(y2) do
{:ok, data}
end
def validate(data) do
{
:error,
"""
#{IO.ANSI.red()}Invalid Triangle specification
Received: #{inspect(data)}
#{IO.ANSI.yellow()}
The data for a Triangle is {{x0, y0}, {x1, y1}, {x2, y2}}
Each x/y pair represents a corner in the Triangle.#{IO.ANSI.default_color()}
"""
}
end
# --------------------------------------------------------
@doc """
Returns a list of styles recognized by this primitive.
"""
@impl Primitive
@spec valid_styles() :: styles_t()
def valid_styles(), do: @styles
# --------------------------------------------------------
@doc """
Compile the data for this primitive into a mini script. This can be combined with others to
generate a larger script and is called when a graph is compiled.
"""
@spec compile(primitive :: Primitive.t(), styles :: Style.t()) :: Script.t()
@impl Primitive
def compile(%Primitive{module: __MODULE__, data: {{x0, y0}, {x1, y1}, {x2, y2}}}, styles) do
Script.draw_triangle([], x0, y0, x1, y1, x2, y2, Script.draw_flag(styles))
end
# --------------------------------------------------------
def default_pin(data), do: centroid(data)
# --------------------------------------------------------
@doc """
Returns the centroid of the triangle. This is used as the default pin when applying
rotate or scale transforms.
"""
def centroid(data)
def centroid({{x0, y0}, {x1, y1}, {x2, y2}}) do
{
(x0 + x1 + x2) / 3,
(y0 + y1 + y2) / 3
}
end
# http://blackpawn.com/texts/pointinpoly/
# --------------------------------------------------------
@degenerate 0.0001
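  @doc """
  Returns true if the given point lies inside the triangle, computed via
  barycentric coordinates. Degenerate (collinear) triangles contain no points.

  A quick sketch:

      contains_point?({{0, 0}, {10, 0}, {0, 10}}, {2, 2})
      #=> true
  """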
def contains_point?({{x0, y0} = p0, {x1, y1} = p1, {x2, y2} = p2}, px) do
# make sure the points are not collinear, if so the abs(area) will be very small
area = abs(x0 * (y1 - y2) + x1 * (y2 - y0) + x2 * (y0 - y1))
if area < @degenerate do
false
else
# compute vectors
v0 = Math.Vector2.sub(p2, p0)
v1 = Math.Vector2.sub(p1, p0)
v2 = Math.Vector2.sub(px, p0)
# compute dot products
dot00 = Math.Vector2.dot(v0, v0)
dot01 = Math.Vector2.dot(v0, v1)
dot02 = Math.Vector2.dot(v0, v2)
dot11 = Math.Vector2.dot(v1, v1)
dot12 = Math.Vector2.dot(v1, v2)
# Compute barycentric coordinates
inv_denom = 1.0 / (dot00 * dot11 - dot01 * dot01)
u = (dot11 * dot02 - dot01 * dot12) * inv_denom
v = (dot00 * dot12 - dot01 * dot02) * inv_denom
# Check if point is in triangle
u >= 0 && v >= 0 && u + v < 1
end
end
# --------------------------------------------------------
@doc false
def default_pin({{x0, y0}, {x1, y1}, {x2, y2}}, _styles) do
{
(x0 + x1 + x2) / 3,
(y0 + y1 + y2) / 3
}
end
end | lib/scenic/primitive/triangle.ex | 0.947186 | 0.912202 | triangle.ex | starcoder |
defmodule Cafex.Consumer.LoadBalancer do
@moduledoc """
Balance partition assignment between Cafex consumers
"""
@type layout :: [{node, [partition]}]
@type partition :: non_neg_integer
@doc """
Balance partition assignment between Cafex consumers
## Examples
iex> rebalance [], 5
[]
iex> rebalance [{:a, [0, 1, 2, 3, 4]}], 5
[{:a, [0, 1, 2, 3, 4]}]
iex> rebalance [{:a, [0, 1, 2, 3, 4]}, {:b, []}], 5
[{:a, [0, 1, 2]}, {:b, [3, 4]}]
iex> rebalance [{:a, [0, 1, 2, 3, 4]}, {:b, []}, {:c, []}], 5
[{:a, [0, 1]}, {:b, [2, 3]}, {:c, [4]}]
iex> rebalance [{:a, [0, 1, 2]}, {:b, [3, 4]}, {:c, []}], 5
[{:a, [0, 1]}, {:b, [3, 4]}, {:c, [2]}]
iex> rebalance [{:a, [0, 1]}, {:c, [2]}], 5
[{:a, [0, 1, 3]}, {:c, [2, 4]}]
iex> rebalance [{:a, []}, {:b, [0, 1, 2, 3, 4]}], 5
[{:a, [3, 4]}, {:b, [0, 1, 2]}]
  For more details, see the source of this module or its tests.
"""
@spec rebalance(layout, partitions :: non_neg_integer) :: layout
def rebalance([], _partitions), do: []
def rebalance(layout, partitions) do
consumers = Keyword.keys(layout)
    count = div(partitions, length(consumers))
remainder = rem(partitions, length(consumers))
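
    # Keep as much of each consumer's current assignment as its fair share
    # allows; the most-loaded consumers are processed first and take the
    # `remainder` extra slots, while freed partitions flow into the
    # not-yet-assigned pool for under-allocated consumers.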
all = Enum.into(0..(partitions - 1), HashSet.new)
assigned = layout |> Keyword.values
|> List.flatten
|> Enum.into(HashSet.new)
not_assigned = all |> HashSet.difference(assigned)
|> Enum.uniq
|> Enum.sort
{new_layout, [], 0} =
layout |> Enum.sort(fn {_c1, p1}, {_c2, p2} ->
length(p1) >= length(p2)
end)
|> Enum.reduce({[], not_assigned, remainder}, fn
{consumer, partitions}, {layout, not_assigned, remainder} when remainder > 0 ->
{keep, rest} = assign(partitions, count + 1, not_assigned)
{[{consumer, keep}|layout], rest, remainder - 1}
{consumer, partitions}, {layout, not_assigned, remainder} when remainder == 0 ->
{keep, rest} = assign(partitions, count, not_assigned)
{[{consumer, keep}|layout], rest, remainder}
end)
Enum.sort(new_layout)
end
defp assign(current, count, not_assigned) when length(current) > count do
{partitions, rest} = Enum.split(current, count)
{partitions, Enum.sort(rest ++ not_assigned)}
end
defp assign(current, count, not_assigned) when length(current) < count do
{partitions, rest} = Enum.split(not_assigned, count - length(current))
{Enum.sort(current ++ partitions), rest}
end
defp assign(current, count, not_assigned) when length(current) == count do
{current, not_assigned}
end
end | lib/cafex/consumer/load_balancer.ex | 0.775817 | 0.540621 | load_balancer.ex | starcoder |
defmodule JWKSURIUpdater do
@moduledoc """
JWKSURIUpdater dynamically loads jwks URIs keys (lazy-loading) and keeps it in memory for
further access.
## Options
The `get_keys/2` function can be called with the following options:
- `:refresh_interval`: the number of seconds to keep keys unchanged in cache before it is
fetched again. Defaults to `3600` seconds
- `:min_refresh_interval`: the delay before JWKSURIUpdater will try to fetch keys of a
jwks_uri again. It is intended to prevent fetching storms when the keys are unavailable.
Defaults to `10` seconds
- `:on_refresh_failure`: determines the behaviour of JWKSURIUpdater when the keys *become*
unavailable: `:keep_keys` will keep the keys in the cache, `:discard` will delete them.
Defaults to `:discard`
- `:tesla_middlewares`: `Tesla` middlewares to add to the outgoing request
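
  For example, a call overriding the defaults might look like (a sketch):

  ```elixir
  JWKSURIUpdater.get_keys("https://example.com/jwks.json",
    refresh_interval: 600,
    on_refresh_failure: :keep_keys
  )
  ```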
## Application environment configuration options
- `:tesla_middlewares`: `Tesla` middlewares to add to the outgoing request (in addition to
  those passed as a parameter to `get_keys/2`)
## Deviation from the specifications
From [RFC7517](https://tools.ietf.org/html/rfc7517#section-5):
- due to the behaviour of the `Poison.decode/1` function, the first (and not the last) `"keys"`
will be returned:
> The member names within a JWK Set MUST be unique; JWK Set parsers
> MUST either reject JWK Sets with duplicate member names or use a JSON
> parser that returns only the lexically last duplicate member name, as
> specified in Section 15.12 ("The JSON Object") of ECMAScript 5.1
> [ECMAScript].
"""
@doc """
Returns `{:ok, [map()]}` containing the keys, or `{:error, error}` if they could not be
retrieved or if validation failed.
## Examples
```elixir
iex> JWKSURIUpdater.get_keys("https://www.googleapis.com/oauth2/v3/certs")
{:ok,
[
%{
"alg" => "RS256",
"e" => "AQAB",
"kid" => "84f294c45160088d079fee68138f52133d3e228c",
"kty" => "RSA",
"n" => "<KEY>",
"use" => "sig"
},
%{
"alg" => "RS256",
"e" => "AQAB",
"kid" => "df3758908b792293ad977a0b991d98a77f4eeecd",
"kty" => "RSA",
"n" => "<KEY>",
"use" => "sig"
}
]}
iex> JWKSURIUpdater.get_keys("https://auth.login.yahoo.co.jp/yconnect/v2/jwks")
{:ok,
[
%{
"alg" => "RS256",
"e" => "AQAB",
"kid" => "<KEY>",
"kty" => "RSA",
"n" => "<KEY>",
"use" => "sig"
},
%{
"alg" => "RS256",
"e" => "AQAB",
"kid" => "<KEY>",
"kty" => "RSA",
"n" => "<KEY>",
"use" => "sig"
}
]}
```
"""
defdelegate get_keys(jwks_uri, opts \\ []), to: JWKSURIUpdater.Updater
end | lib/jwks_uri_updater.ex | 0.836187 | 0.800614 | jwks_uri_updater.ex | starcoder |
defmodule Graft do
@moduledoc """
An API of the raft consensus algorithm, allowing for custom client requests
and custom replicated state machines.
## Example
Let's create a distributed stack. The first step is to set up the state machine.
Here we will use the `Graft.Machine` behaviour.
```
defmodule MyStackMachine do
use Graft.Machine
@impl Graft.Machine
def init([]) do
{:ok, []}
end
@impl Graft.Machine
def handle_entry({:put, value}, state) do
{:ok, [value | state]}
end
def handle_entry(:pop, []) do
{:noop, []}
end
def handle_entry(:pop, [response | state]) do
{response, state}
end
def handle_entry(_, state) do
{:invalid_request, state}
end
end
```
Now that we have our state machine, we can define the servers that
will make up the raft cluster. Each server must have a unique name.
```
servers = [:server1, :server2, :server3]
```
  With both the servers and the state machine, we can now run the graft function,
which will start the servers and the consensus algorithm.
```
{:ok, supervisor} = Graft.start servers, MyStackMachine
```
`Graft.start` returns the supervisor pid from which we can terminate or restart
the servers.
We can now use `Graft.request` to make requests to our consensus cluster.
As long as we know at least one server, we can send requests, since the `Graft.Client`
module will forward the request if the server we choose is not the current leader.
```
Graft.request :server1, :pop
#=> :noop
Graft.request :server1, {:put, :foo}
#=> :ok
Graft.request :server1, :pop
#=> :foo
Graft.request :server1, :bar
#=> :invalid_request
```
That completes the distributed stack.
"""
use Application
  def start() do
    for server <- Application.fetch_env!(:graft, :cluster) do
      GenStateMachine.cast(server, :start)
    end
  end
def start(_type, _args), do: Graft.Supervisor.start_link()
def stop(), do: Supervisor.stop(Graft.Supervisor)
def leader(server), do: GenStateMachine.call(server, :leader)
def stop_server(server), do: Supervisor.terminate_child(Graft.Supervisor, server)
def restart_server(server), do: Supervisor.restart_child(Graft.Supervisor, server)
@doc """
  Returns the internal state of the `server` (via `:sys.get_state/1`).
"""
def data(server), do: :sys.get_state(server)
@doc """
Make a new client request to a server within the consensus cluster.
`server` - name of the server the request should be sent to.
`entry` - processed and applied by the replicated state machine.
"""
@spec request(atom(), any()) :: response :: any()
def request(server, entry), do: Graft.Client.request(server, entry)
end | lib/graft.ex | 0.868896 | 0.931025 | graft.ex | starcoder |
defmodule Bluetooth.HCI.PortEmulator do
@moduledoc """
Emulates the port program for accessing HCI and enables testing
on a host without accessing a real bluetooth device.
"""
use GenServer
require Logger
# Constants for HCI commands etc
@hci_command_package_type 1
@hci_event_package_type 4
def start_link(hci_device \\ 0) do
GenServer.start_link(__MODULE__, [hci_device])
end
defstruct [hci_device: -1, filter: "", from: nil]
def init([hci_device]) do
{:ok, %__MODULE__{hci_device: hci_device}}
end
def handle_info({from, {:command, msg}}, state) when is_pid(from) and is_binary(msg) do
{ref, {func, args}} = :erlang.binary_to_term(msg)
state = %__MODULE__{state | from: from}
{new_state, result} = apply(__MODULE__, func, [state | args])
return_value = :erlang.term_to_binary({ref, result})
send(from, {self(), {:data, return_value}})
{:noreply, new_state}
end
def hci_init(state) do
{state, :ok}
end
def hci_is_dev_up(state) do
{state, true}
end
def hci_dev_id_for(state, true), do: {state, state.hci_device}
def hci_dev_id_for(state, false), do: {state, nil}
def hci_bind_raw(%__MODULE{hci_device: id} = state, dev_id) when dev_id == id do
{state, dev_id}
end
def hci_bind_raw(state, _) , do: {state, -1}
def hci_set_filter(state, filter_data) do
new_state = %__MODULE__{state | filter: filter_data}
{new_state, :ok}
end
def hci_send_command(state, command) do
<<@hci_command_package_type :: integer-size(8),
opcode :: unsigned-integer-little-size(16),
len :: unsigned-integer-size(8),
params :: binary>> = command
<<
ogf :: unsigned-integer-size(6),
ocf :: unsigned-integer-size(10)
>> = <<opcode :: unsigned-integer-size(16)>>
do_command(ogf, ocf, params, state)
end
def do_command(0x04, 0x01, <<>>, state) do
do_send_event(state.from, <<4, 14, 12, 1, 1, 16, 0, 7, 25, 18, 7, 15, 0, 119, 33>>)
{state, :ok}
end
def do_command(ogf, ocf, params, state) do
Logger.error "Unknown command: ogf: #{inspect ogf}, ocf: #{inspect ocf}, params: #{inspect params}"
{:stop, {:error, :unknown_command}, state}
end
def do_send_event(pid, data) do
msg = :erlang.term_to_binary({:event, data})
send(pid, {self(), {:data, msg}})
end
def foo(state, x) do
{state, x+1}
end
end | test/support/hci_emulator.ex | 0.637708 | 0.481088 | hci_emulator.ex | starcoder |
defmodule Litelist.Auth do
@moduledoc """
The Auth context.
"""
import Ecto.Query, warn: false
alias Litelist.Repo
alias Litelist.Auth.Neighbor
alias Bcrypt
@doc """
Returns the list of neighbors.
## Examples
iex> list_neighbors()
[%Neighbor{}, ...]
"""
def list_neighbors do
Repo.all(Neighbor)
end
@doc """
Gets a single neighbor.
Raises `Ecto.NoResultsError` if the Neighbor does not exist.
## Examples
iex> get_neighbor!(123)
%Neighbor{}
iex> get_neighbor!(456)
** (Ecto.NoResultsError)
"""
def get_neighbor!(id), do: Repo.get!(Neighbor, id)
@doc """
Creates a neighbor.
## Examples
iex> create_neighbor(%{field: value})
{:ok, %Neighbor{}}
iex> create_neighbor(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_neighbor(attrs \\ %{}) do
%Neighbor{}
|> Neighbor.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a neighbor.
## Examples
iex> update_neighbor(neighbor, %{field: new_value})
{:ok, %Neighbor{}}
iex> update_neighbor(neighbor, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_neighbor(%Neighbor{} = neighbor, attrs) do
neighbor
|> Neighbor.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Neighbor.
## Examples
iex> delete_neighbor(neighbor)
{:ok, %Neighbor{}}
iex> delete_neighbor(neighbor)
{:error, %Ecto.Changeset{}}
"""
def delete_neighbor(%Neighbor{} = neighbor) do
Repo.delete(neighbor)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking neighbor changes.
## Examples
iex> change_neighbor(neighbor)
%Ecto.Changeset{source: %Neighbor{}}
"""
def change_neighbor(%Neighbor{} = neighbor) do
Neighbor.changeset(neighbor, %{})
end
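
  @doc """
  Authenticates a neighbor by username and password.

  Returns `{:ok, neighbor}` when the credentials match, or
  `{:error, "Incorrect username or password"}` otherwise.

  ## Examples

      iex> authenticate_neighbor("some username", "valid password")
      {:ok, %Neighbor{}}

      iex> authenticate_neighbor("some username", "bad password")
      {:error, "Incorrect username or password"}

  """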
def authenticate_neighbor(username, plain_text_password) do
query = from n in Neighbor, where: n.username == ^username
query
|> Repo.one()
|> check_password(plain_text_password)
end
defp check_password(nil, _), do: {:error, "Incorrect username or password"}
defp check_password(neighbor, plain_text_password) do
case Bcrypt.verify_pass(plain_text_password, neighbor.encrypted_password) do
true -> {:ok, neighbor}
false -> {:error, "Incorrect username or password"}
end
end
end | lib/litelist/auth/auth.ex | 0.884912 | 0.471284 | auth.ex | starcoder |
defmodule Phoenix.Socket do
@moduledoc ~S"""
Defines a socket and its state.
`Phoenix.Socket` is used as a module for establishing and maintaining
the socket state via the `Phoenix.Socket` struct.
Once connected to a socket, incoming and outgoing events are routed to
channels. The incoming client data is routed to channels via transports.
It is the responsibility of the socket to tie transports and channels
together.
By default, Phoenix supports both websockets and longpoll transports.
For example:
transport :websocket, Phoenix.Transports.WebSocket
The command above means incoming socket connections can be made via
the WebSocket transport. Events are routed by topic to channels:
channel "room:lobby", MyApp.LobbyChannel
See `Phoenix.Channel` for more information on channels. Check each
transport module to find the options specific to each transport.
## Socket Behaviour
Socket handlers are mounted in Endpoints and must define two callbacks:
* `connect/2` - receives the socket params and authenticates the connection.
Must return a `Phoenix.Socket` struct, often with custom assigns.
* `id/1` - receives the socket returned by `connect/2` and returns the
id of this connection as a string. The `id` is used to identify socket
connections, often to a particular user, allowing us to force disconnections.
For sockets requiring no authentication, `nil` can be returned.
## Examples
defmodule MyApp.UserSocket do
use Phoenix.Socket
transport :websocket, Phoenix.Transports.WebSocket
channel "room:*", MyApp.RoomChannel
def connect(params, socket) do
{:ok, assign(socket, :user_id, params["user_id"])}
end
def id(socket), do: "users_socket:#{socket.assigns.user_id}"
end
# Disconnect all user's socket connections and their multiplexed channels
MyApp.Endpoint.broadcast("users_socket:" <> user.id, "disconnect", %{})
## Socket Fields
* `id` - The string id of the socket
* `assigns` - The map of socket assigns, default: `%{}`
* `channel` - The current channel module
* `channel_pid` - The channel pid
* `endpoint` - The endpoint module where this socket originated, for example: `MyApp.Endpoint`
* `handler` - The socket module where this socket originated, for example: `MyApp.UserSocket`
* `joined` - If the socket has effectively joined the channel
* `pubsub_server` - The registered name of the socket's pubsub server
* `join_ref` - The ref sent by the client when joining
* `ref` - The latest ref sent by the client
* `topic` - The string topic, for example `"room:123"`
* `transport` - The socket's transport, for example: `Phoenix.Transports.WebSocket`
* `transport_pid` - The pid of the socket's transport process
* `transport_name` - The socket's transport, for example: `:websocket`
* `serializer` - The serializer for socket messages,
for example: `Phoenix.Transports.WebSocketSerializer`
* `vsn` - The protocol version of the client, for example: "2.0.0"
## Custom transports
See the `Phoenix.Socket.Transport` documentation for more information on
writing your own transports.
"""
alias Phoenix.Socket
@doc """
Receives the socket params and authenticates the connection.
## Socket params and assigns
Socket params are passed from the client and can
be used to verify and authenticate a user. After
verification, you can put default assigns into
the socket that will be set for all channels, ie
{:ok, assign(socket, :user_id, verified_user_id)}
To deny connection, return `:error`.
See `Phoenix.Token` documentation for examples in
performing token verification on connect.
"""
@callback connect(params :: map, Socket.t) :: {:ok, Socket.t} | :error
@doc ~S"""
Identifies the socket connection.
Socket IDs are topics that allow you to identify all sockets for a given user:
def id(socket), do: "users_socket:#{socket.assigns.user_id}"
Would allow you to broadcast a "disconnect" event and terminate
all active sockets and channels for a given user:
MyApp.Endpoint.broadcast("users_socket:" <> user.id, "disconnect", %{})
Returning `nil` makes this socket anonymous.
"""
@callback id(Socket.t) :: String.t | nil
defmodule InvalidMessageError do
@moduledoc """
Raised when the socket message is invalid.
"""
defexception [:message]
end
@type t :: %Socket{id: nil,
assigns: map,
channel: atom,
channel_pid: pid,
endpoint: atom,
handler: atom,
joined: boolean,
pubsub_server: atom,
ref: term,
topic: String.t,
transport: atom,
transport_name: atom,
serializer: atom,
transport_pid: pid,
private: %{}}
defstruct id: nil,
assigns: %{},
channel: nil,
channel_pid: nil,
endpoint: nil,
handler: nil,
joined: false,
pubsub_server: nil,
ref: nil,
join_ref: nil,
topic: nil,
transport: nil,
transport_pid: nil,
transport_name: nil,
serializer: nil,
private: %{},
vsn: nil
defmacro __using__(_) do
quote do
@behaviour Phoenix.Socket
import unquote(__MODULE__)
Module.register_attribute(__MODULE__, :phoenix_channels, accumulate: true)
@phoenix_transports %{}
@before_compile unquote(__MODULE__)
end
end
defmacro __before_compile__(env) do
transports = Module.get_attribute(env.module, :phoenix_transports)
channels = Module.get_attribute(env.module, :phoenix_channels)
transport_defs =
for {name, {mod, conf}} <- transports do
quote do
def __transport__(unquote(name)) do
{unquote(mod), unquote(Macro.escape(conf))}
end
end
end
channel_defs =
for {topic_pattern, module, opts} <- channels do
topic_pattern
|> to_topic_match()
|> defchannel(module, opts[:via], opts)
end
quote do
def __transports__, do: unquote(Macro.escape(transports))
unquote(transport_defs)
unquote(channel_defs)
def __channel__(_topic, _transport), do: nil
end
end
defp to_topic_match(topic_pattern) do
case String.split(topic_pattern, "*") do
[prefix, ""] -> quote do: <<unquote(prefix) <> _rest>>
[bare_topic] -> bare_topic
_ -> raise ArgumentError, "channels using splat patterns must end with *"
end
end
defp defchannel(topic_match, channel_module, nil = _transports, opts) do
quote do
def __channel__(unquote(topic_match), _transport), do: unquote({channel_module, Macro.escape(opts)})
end
end
defp defchannel(topic_match, channel_module, transports, opts) do
quote do
def __channel__(unquote(topic_match), transport)
when transport in unquote(List.wrap(transports)), do: unquote({channel_module, Macro.escape(opts)})
end
end
@doc """
Adds key/value pair to socket assigns.
## Examples
iex> socket.assigns[:token]
nil
iex> socket = assign(socket, :token, "bar")
iex> socket.assigns[:token]
"bar"
"""
def assign(socket = %Socket{}, key, value) do
put_in socket.assigns[key], value
end
@doc """
Defines a channel matching the given topic and transports.
* `topic_pattern` - The string pattern, for example "room:*", "users:*", "system"
* `module` - The channel module handler, for example `MyApp.RoomChannel`
* `opts` - The optional list of options, see below
## Options
* `:assigns` - the map of socket assigns to merge into the socket on join.
## Examples
channel "topic1:*", MyChannel
channel "topic2:*", MyChannel, via: [:websocket]
channel "topic", MyChannel, via: [:longpoll]
## Topic Patterns
The `channel` macro accepts topic patterns in two flavors. A splat argument
can be provided as the last character to indicate a "topic:subtopic" match. If
a plain string is provided, only that topic will match the channel handler.
Most use-cases will use the "topic:*" pattern to allow more versatile topic
scoping.
See `Phoenix.Channel` for more information
"""
defmacro channel(topic_pattern, module, opts \\ []) do
# Tear the alias to simply store the root in the AST.
# This will make Elixir unable to track the dependency
# between endpoint <-> socket and avoid recompiling the
# endpoint (alongside the whole project) whenever the
# socket changes.
module = tear_alias(module)
quote bind_quoted: [topic_pattern: topic_pattern, module: module, opts: opts] do
if opts[:via] do
IO.warn "the :via option in the channel/3 macro is deprecated"
end
@phoenix_channels {topic_pattern, module, opts}
end
end
defp tear_alias({:__aliases__, meta, [h|t]}) do
alias = {:__aliases__, meta, [h]}
quote do
Module.concat([unquote(alias)|unquote(t)])
end
end
defp tear_alias(other), do: other
@doc """
Defines a transport with configuration.
## Examples
transport :websocket, Phoenix.Transports.WebSocket,
timeout: 10_000
"""
defmacro transport(name, module, config \\ []) do
quote do
@phoenix_transports Phoenix.Socket.__transport__(
@phoenix_transports, unquote(name), unquote(module), unquote(config))
end
end
@doc false
def __transport__(transports, name, module, user_conf) do
defaults = module.default_config()
unless name in [:websocket, :longpoll] do
IO.warn "The transport/3 macro accepts only websocket and longpoll for transport names. " <>
"Other names are deprecated. If you want multiple websocket/longpoll endpoints, " <>
"define multiple sockets instead"
end
conf =
user_conf
|> normalize_serializer_conf(name, module, defaults[:serializer] || [])
|> merge_defaults(defaults)
Map.update(transports, name, {module, conf}, fn {dup_module, _} ->
raise ArgumentError,
"duplicate transports (#{inspect dup_module} and #{inspect module}) defined for #{inspect name}."
end)
end
defp merge_defaults(conf, defaults), do: Keyword.merge(defaults, conf)
defp normalize_serializer_conf(conf, name, transport_mod, default) do
update_in(conf, [:serializer], fn
nil ->
precompile_serializers(default)
Phoenix.Transports.LongPollSerializer = serializer ->
warn_serializer_deprecation(name, transport_mod, serializer)
precompile_serializers(default)
Phoenix.Transports.WebSocketSerializer = serializer ->
warn_serializer_deprecation(name, transport_mod, serializer)
precompile_serializers(default)
[_ | _] = serializer ->
precompile_serializers(serializer)
serializer when is_atom(serializer) ->
warn_serializer_deprecation(name, transport_mod, serializer)
precompile_serializers([{serializer, "~> 1.0.0"}])
end)
end
defp warn_serializer_deprecation(name, transport_mod, serializer) do
IO.warn """
passing a serializer module to the transport macro is deprecated.
Use a list with version requirements instead. For example:
transport :#{name}, #{inspect transport_mod},
serializer: [{#{inspect serializer}, "~> 1.0.0"}]
"""
end
defp precompile_serializers(serializers) do
for {module, requirement} <- serializers do
case Version.parse_requirement(requirement) do
{:ok, requirement} -> {module, requirement}
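        # An invalid requirement string makes Version.match?/2 raise
        # Version.InvalidRequirementError, surfacing the bad config at compile time.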
:error -> Version.match?("1.0.0", requirement)
end
end
end
end | assets/node_modules/phoenix/lib/phoenix/socket.ex | 0.891793 | 0.540196 | socket.ex | starcoder |
defmodule Stripe.Webhook do
@moduledoc """
Creates a Stripe Event from webhook's payload if signature is valid.
Use `construct_event/3` to verify the authenticity of a webhook request and
convert its payload into a `Stripe.Event` struct.
case Stripe.Webhook.construct_event(payload, signature, secret) do
{:ok, %Stripe.Event{} = event} ->
# Return 200 to Stripe and handle event
{:error, reason} ->
# Reject webhook by responding with non-2XX
end
"""
@default_tolerance 300
@expected_scheme "v1"
def construct_event(payload, signature_header, secret, tolerance \\ @default_tolerance) do
case verify_header(payload, signature_header, secret, tolerance) do
:ok ->
{:ok, convert_to_event!(payload)}
error ->
error
end
end
defp verify_header(payload, signature_header, secret, tolerance) do
case get_timestamp_and_signatures(signature_header, @expected_scheme) do
{nil, _} ->
{:error, "Unable to extract timestamp and signatures from header"}
{_, []} ->
{:error, "No signatures found with expected scheme #{@expected_scheme}"}
{timestamp, signatures} ->
with {:ok, timestamp} <- check_timestamp(timestamp, tolerance),
{:ok, _signatures} <- check_signatures(signatures, timestamp, payload, secret) do
:ok
else
{:error, error} -> {:error, error}
end
end
end
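
  # The Stripe-Signature header has the form (sketch):
  #
  #   t=1492774577,v1=5257a869e7...,v0=6ffbb59b2300...
  #
  # so we split on "," and then "=" to recover the timestamp and the v1 signatures.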
defp get_timestamp_and_signatures(signature_header, scheme) do
signature_header
|> String.split(",")
|> Enum.map(& String.split(&1, "="))
|> Enum.reduce({nil, []}, fn
["t", timestamp], {nil, signatures} ->
{to_integer(timestamp), signatures}
[^scheme, signature], {timestamp, signatures} ->
{timestamp, [signature | signatures]}
_, acc ->
acc
end)
end
defp to_integer(timestamp) do
case Integer.parse(timestamp) do
{timestamp, _} ->
timestamp
:error ->
nil
end
end
defp check_timestamp(timestamp, tolerance) do
now = System.system_time(:seconds)
if timestamp < (now - tolerance) do
{:error, "Timestamp outside the tolerance zone (#{now})"}
else
{:ok, timestamp}
end
end
defp check_signatures(signatures, timestamp, payload, secret) do
signed_payload = "#{timestamp}.#{payload}"
expected_signature = compute_signature(signed_payload, secret)
if Enum.any?(signatures, & secure_equals?(&1, expected_signature)) do
{:ok, signatures}
else
{:error, "No signatures found matching the expected signature for payload"}
end
end
defp compute_signature(payload, secret) do
:crypto.hmac(:sha256, secret, payload)
|> Base.encode16(case: :lower)
end
defp secure_equals?(input, expected) when byte_size(input) == byte_size(expected) do
input = String.to_charlist(input)
expected = String.to_charlist(expected)
secure_compare(input, expected)
end
defp secure_equals?(_, _), do: false
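
  # Constant-time comparison: fold XOR differences over every codepoint instead
  # of returning early, so response timing does not reveal how many leading
  # characters matched.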
defp secure_compare(acc \\ 0, input, expected)
defp secure_compare(acc, [], []), do: acc == 0
defp secure_compare(acc, [input_codepoint | input], [expected_codepoint | expected]) do
import Bitwise
acc
|> bor(input_codepoint ^^^ expected_codepoint)
|> secure_compare(input, expected)
end
def convert_to_event!(payload) do
payload
|> Poison.decode!()
|> Stripe.Converter.convert_result()
end
end | lib/stripe/webhook.ex | 0.890342 | 0.565899 | webhook.ex | starcoder |
defmodule ExTwilio.Parser do
@moduledoc """
A JSON parser tuned specifically for Twilio API responses. Based on Poison's
excellent JSON decoder.
"""
@type metadata :: map
@type http_status_code :: number
@type key :: String.t
@type success :: {:ok, [map]}
@type success_list :: {:ok, [map], metadata}
@type success_delete :: :ok
@type error :: {:error, String.t, http_status_code}
@type parsed_response :: success | error
@type parsed_list_response :: success_list | error
@doc """
Parse a response expected to contain a single resource. If you pass in a
module as the first argument, the JSON will be parsed into that module's
`__struct__`.
## Examples
Given you have a module named `Resource`, defined like this:
defmodule Resource do
defstruct sid: nil
end
You can parse JSON into that module's struct like so:
iex> response = %{body: "{ \\"sid\\": \\"AD34123\\" }", status_code: 200}
...> ExTwilio.Parser.parse(response, Resource)
{:ok, %Resource{sid: "AD34123"}}
You can also parse into a regular map if you want.
iex> response = %{body: "{ \\"sid\\": \\"AD34123\\" }", status_code: 200}
...> ExTwilio.Parser.parse(response, %{})
{:ok, %{"sid" => "AD34123"}}
"""
@spec parse(HTTPoison.Response.t, module) :: success | error
def parse(response, module) do
handle_errors response, fn(body) ->
Poison.decode!(body, as: target(module))
end
end
defp target(module) when is_atom(module), do: module.__struct__
defp target(other), do: other
@doc """
Parse a response expected to contain multiple resources. If you pass in a
module as the first argument, the JSON will be parsed into that module's
`__struct__`.
## Examples
Given you have a module named `Resource`, defined like this:
defmodule Resource do
defstruct sid: nil
end
And the JSON you are parsing looks like this:
{
"resources": [{
"sid": "first"
}, {
"sid": "second"
}],
"next_page": 10
}
  You can parse the JSON like this:
ExTwilio.Parser.parse_list(json, Resource, "resources")
{:ok, [%Resource{sid: "first"}, %Resource{sid: "second"}], %{"next_page" => 10}}
"""
@spec parse_list(HTTPoison.Response.t, module, key) :: success_list | error
def parse_list(response, module, key) do
result = handle_errors response, fn(body) ->
as = Map.put(%{}, key, [target(module)])
Poison.decode!(body, as: as)
end
case result do
{:ok, list} -> {:ok, list[key], Map.drop(list, [key])}
error -> error
end
end
# @spec handle_errors(response, ((String.t) -> any)) :: success | success_delete | error
defp handle_errors(response, fun) do
case response do
%{body: body, status_code: status} when status in [200, 201] ->
{:ok, fun.(body)}
%{body: _, status_code: 204} ->
:ok
%{body: body, status_code: status} ->
{:ok, json} = Poison.decode(body)
{:error, json["message"], status}
end
end
end | lib/ex_twilio/parser.ex | 0.867682 | 0.460107 | parser.ex | starcoder |
defmodule Chunky.Sequence.OEIS.Powers do
@moduledoc """
Sequences from the [Online Encyclopedia of Integer Sequences](https://oeis.org) dealing with powers
and simple polynomials.
## Available Sequences
### Powers of specific integers
- `create_sequence_a000351/1` - A000351 - Powers of 5: a(n) = 5^n.
- `create_sequence_a000400/1` - A000400 - Powers of 6: a(n) = 6^n.
- `create_sequence_a000420/1` - A000420 - Powers of 7: a(n) = 7^n.
- `create_sequence_a001018/1` - A001018 - Powers of 8: a(n) = 8^n.
- `create_sequence_a001019/1` - A001019 - Powers of 9: a(n) = 9^n.
- `create_sequence_a011557/1` - A011557 - Powers of 10: a(n) = 10^n.
- `create_sequence_a001020/1` - A001020 - Powers of 11: a(n) = 11^n.
- `create_sequence_a001021/1` - A001021 - Powers of 12.
- `create_sequence_a001022/1` - A001022 - Powers of 13.
- `create_sequence_a001023/1` - A001023 - Powers of 14.
- `create_sequence_a001024/1` - A001024 - Powers of 15.
- `create_sequence_a001025/1` - A001025 - Powers of 16: a(n) = 16^n.
- `create_sequence_a001026/1` - A001026 - Powers of 17.
- `create_sequence_a001027/1` - A001027 - Powers of 18.
- `create_sequence_a001029/1` - A001029 - Powers of 19.
- `create_sequence_a009964/1` - A009964 - Powers of 20.
- `create_sequence_a009965/1` - A009965 - Powers of 21.
- `create_sequence_a009966/1` - A009966 - Powers of 22.
- `create_sequence_a009967/1` - A009967 - Powers of 23.
- `create_sequence_a009968/1` - A009968 - Powers of 24: a(n) = 24^n.
- `create_sequence_a009969/1` - A009969 - Powers of 25.
- `create_sequence_a009970/1` - A009970 - Powers of 26.
- `create_sequence_a009971/1` - A009971 - Powers of 27.
- `create_sequence_a009972/1` - A009972 - Powers of 28.
- `create_sequence_a009973/1` - A009973 - Powers of 29.
- `create_sequence_a009974/1` - A009974 - Powers of 30.
- `create_sequence_a009975/1` - A009975 - Powers of 31.
- `create_sequence_a009976/1` - A009976 - Powers of 32.
- `create_sequence_a009977/1` - A009977 - Powers of 33.
- `create_sequence_a009978/1` - A009978 - Powers of 34.
- `create_sequence_a009979/1` - A009979 - Powers of 35.
- `create_sequence_a009980/1` - A009980 - Powers of 36.
- `create_sequence_a009981/1` - A009981 - Powers of 37.
- `create_sequence_a009982/1` - A009982 - Powers of 38.
- `create_sequence_a009983/1` - A009983 - Powers of 39.
- `create_sequence_a009984/1` - A009984 - Powers of 40.
- `create_sequence_a009985/1` - A009985 - Powers of 41.
- `create_sequence_a009986/1` - A009986 - Powers of 42.
- `create_sequence_a009987/1` - A009987 - Powers of 43.
- `create_sequence_a009988/1` - A009988 - Powers of 44.
- `create_sequence_a009989/1` - A009989 - Powers of 45.
- `create_sequence_a009990/1` - A009990 - Powers of 46.
- `create_sequence_a009991/1` - A009991 - Powers of 47.
- `create_sequence_a009992/1` - A009992 - Powers of 48: a(n) = 48^n.
- `create_sequence_a087752/1` - A087752 - Powers of 49.
- `create_sequence_a159991/1` - A159991 - Powers of 60.
### Variations of powers of 2
- `create_sequence_a000051/1` - A000051 - a(n) = 2^n + 1.
- `create_sequence_a057716/1` - A057716 - The non-powers of 2.
"""
import Chunky.Sequence, only: [sequence_for_function: 1]
alias Chunky.Math
alias Chunky.Math.Predicates
@doc """
OEIS Sequence `A000051` - a(n) = 2^n + 1.
From [OEIS A000051](https://oeis.org/A000051):
> a(n) = 2^n + 1.
> (Formerly M0717 N0266)
**Sequence IDs**: `:a000051`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a000051) |> Sequence.take!(32)
[2,3,5,9,17,33,65,129,257,513,1025,2049,4097,8193,16385,32769,65537,131073,262145,524289,1048577,2097153,4194305,8388609,16777217,33554433,67108865,134217729,268435457,536870913,1073741825,2147483649]
"""
@doc offset: 0,
sequence: "a(n) = 2^n + 1.",
references: [{:oeis, :a000051, "https://oeis.org/A000051"}]
def create_sequence_a000051(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a000051/1)
end
@doc false
@doc offset: 0
def seq_a000051(idx) do
Math.pow(2, idx) + 1
end
@doc """
OEIS Sequence `A000351` - Powers of 5: a(n) = 5^n.
From [OEIS A000351](https://oeis.org/A000351):
> Powers of 5: a(n) = 5^n.
> (Formerly M3937 N1620)
**Sequence IDs**: `:a000351`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a000351) |> Sequence.take!(24)
[1,5,25,125,625,3125,15625,78125,390625,1953125,9765625,48828125,244140625,1220703125,6103515625,30517578125,152587890625,762939453125,3814697265625,19073486328125,95367431640625,476837158203125,2384185791015625,11920928955078125]
"""
@doc offset: 0,
sequence: "Powers of 5: a(n) = 5^n.",
references: [{:oeis, :a000351, "https://oeis.org/A000351"}]
def create_sequence_a000351(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a000351/1)
end
@doc false
@doc offset: 0
def seq_a000351(idx) do
Math.pow(5, idx)
end
@doc """
OEIS Sequence `A000400` - Powers of 6: a(n) = 6^n.
From [OEIS A000400](https://oeis.org/A000400):
> Powers of 6: a(n) = 6^n.
> (Formerly M4224 N1765)
**Sequence IDs**: `:a000400`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a000400) |> Sequence.take!(23)
[1,6,36,216,1296,7776,46656,279936,1679616,10077696,60466176,362797056,2176782336,13060694016,78364164096,470184984576,2821109907456,16926659444736,101559956668416,609359740010496,3656158440062976,21936950640377856,131621703842267136]
"""
@doc offset: 0,
sequence: "Powers of 6: a(n) = 6^n.",
references: [{:oeis, :a000400, "https://oeis.org/A000400"}]
def create_sequence_a000400(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a000400/1)
end
@doc false
@doc offset: 0
def seq_a000400(idx) do
Math.pow(6, idx)
end
@doc """
OEIS Sequence `A000420` - Powers of 7: a(n) = 7^n.
From [OEIS A000420](https://oeis.org/A000420):
> Powers of 7: a(n) = 7^n.
> (Formerly M4431 N1874)
**Sequence IDs**: `:a000420`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a000420) |> Sequence.take!(22)
[1,7,49,343,2401,16807,117649,823543,5764801,40353607,282475249,1977326743,13841287201,96889010407,678223072849,4747561509943,33232930569601,232630513987207,1628413597910449,11398895185373143,79792266297612001,558545864083284007]
"""
@doc offset: 0,
sequence: "Powers of 7: a(n) = 7^n.",
references: [{:oeis, :a000420, "https://oeis.org/A000420"}]
def create_sequence_a000420(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a000420/1)
end
@doc false
@doc offset: 0
def seq_a000420(idx) do
Math.pow(7, idx)
end
@doc """
OEIS Sequence `A001018` - Powers of 8: a(n) = 8^n.
From [OEIS A001018](https://oeis.org/A001018):
> Powers of 8: a(n) = 8^n.
> (Formerly M4555 N1937)
**Sequence IDs**: `:a001018`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001018) |> Sequence.take!(25)
[1,8,64,512,4096,32768,262144,2097152,16777216,134217728,1073741824,8589934592,68719476736,549755813888,4398046511104,35184372088832,281474976710656,2251799813685248,18014398509481984,144115188075855872,1152921504606846976,9223372036854775808,73786976294838206464,590295810358705651712,4722366482869645213696]
"""
@doc offset: 0,
sequence: "Powers of 8: a(n) = 8^n.",
references: [{:oeis, :a001018, "https://oeis.org/A001018"}]
def create_sequence_a001018(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001018/1)
end
@doc false
@doc offset: 0
def seq_a001018(idx) do
Math.pow(8, idx)
end
@doc """
OEIS Sequence `A001019` - Powers of 9: a(n) = 9^n.
From [OEIS A001019](https://oeis.org/A001019):
> Powers of 9: a(n) = 9^n.
> (Formerly M4653 N1992)
**Sequence IDs**: `:a001019`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001019) |> Sequence.take!(20)
[1,9,81,729,6561,59049,531441,4782969,43046721,387420489,3486784401,31381059609,282429536481,2541865828329,22876792454961,205891132094649,1853020188851841,16677181699666569,150094635296999121,1350851717672992089]
"""
@doc offset: 0,
sequence: "Powers of 9: a(n) = 9^n.",
references: [{:oeis, :a001019, "https://oeis.org/A001019"}]
def create_sequence_a001019(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001019/1)
end
@doc false
@doc offset: 0
def seq_a001019(idx) do
Math.pow(9, idx)
end
@doc """
OEIS Sequence `A001020` - Powers of 11: a(n) = 11^n.
From [OEIS A001020](https://oeis.org/A001020):
> Powers of 11: a(n) = 11^n.
> (Formerly M4807 N2054)
**Sequence IDs**: `:a001020`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001020) |> Sequence.take!(20)
[1,11,121,1331,14641,161051,1771561,19487171,214358881,2357947691,25937424601,285311670611,3138428376721,34522712143931,379749833583241,4177248169415651,45949729863572161,505447028499293771,5559917313492231481,61159090448414546291]
"""
@doc offset: 0,
sequence: "Powers of 11: a(n) = 11^n.",
references: [{:oeis, :a001020, "https://oeis.org/A001020"}]
def create_sequence_a001020(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001020/1)
end
@doc false
@doc offset: 0
def seq_a001020(idx) do
Math.pow(11, idx)
end
@doc """
OEIS Sequence `A001021` - Powers of 12.
From [OEIS A001021](https://oeis.org/A001021):
> Powers of 12.
> (Formerly M4869 N2084)
**Sequence IDs**: `:a001021`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001021) |> Sequence.take!(18)
[1,12,144,1728,20736,248832,2985984,35831808,429981696,5159780352,61917364224,743008370688,8916100448256,106993205379072,1283918464548864,15407021574586368,184884258895036416,2218611106740436992]
"""
@doc offset: 0,
sequence: "Powers of 12.",
references: [{:oeis, :a001021, "https://oeis.org/A001021"}]
def create_sequence_a001021(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001021/1)
end
@doc false
@doc offset: 0
def seq_a001021(idx) do
Math.pow(12, idx)
end
@doc """
OEIS Sequence `A001022` - Powers of 13.
From [OEIS A001022](https://oeis.org/A001022):
> Powers of 13.
> (Formerly M4914 N2107)
**Sequence IDs**: `:a001022`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001022) |> Sequence.take!(21)
[1,13,169,2197,28561,371293,4826809,62748517,815730721,10604499373,137858491849,1792160394037,23298085122481,302875106592253,3937376385699289,51185893014090757,665416609183179841,8650415919381337933,112455406951957393129,1461920290375446110677,19004963774880799438801]
"""
@doc offset: 0,
sequence: "Powers of 13.",
references: [{:oeis, :a001022, "https://oeis.org/A001022"}]
def create_sequence_a001022(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001022/1)
end
@doc false
@doc offset: 0
def seq_a001022(idx) do
Math.pow(13, idx)
end
@doc """
OEIS Sequence `A001023` - Powers of 14.
From [OEIS A001023](https://oeis.org/A001023):
> Powers of 14.
> (Formerly M4949 N2120)
**Sequence IDs**: `:a001023`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001023) |> Sequence.take!(21)
[1,14,196,2744,38416,537824,7529536,105413504,1475789056,20661046784,289254654976,4049565169664,56693912375296,793714773254144,11112006825558016,155568095557812224,2177953337809371136,30491346729331195904,426878854210636742656,5976303958948914397184,83668255425284801560576]
"""
@doc offset: 0,
sequence: "Powers of 14.",
references: [{:oeis, :a001023, "https://oeis.org/A001023"}]
def create_sequence_a001023(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001023/1)
end
@doc false
@doc offset: 0
def seq_a001023(idx) do
Math.pow(14, idx)
end
@doc """
OEIS Sequence `A001024` - Powers of 15.
From [OEIS A001024](https://oeis.org/A001024):
> Powers of 15.
> (Formerly M4990 N2147)
**Sequence IDs**: `:a001024`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001024) |> Sequence.take!(21)
[1,15,225,3375,50625,759375,11390625,170859375,2562890625,38443359375,576650390625,8649755859375,129746337890625,1946195068359375,29192926025390625,437893890380859375,6568408355712890625,98526125335693359375,1477891880035400390625,22168378200531005859375,332525673007965087890625]
"""
@doc offset: 0,
sequence: "Powers of 15.",
references: [{:oeis, :a001024, "https://oeis.org/A001024"}]
def create_sequence_a001024(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001024/1)
end
@doc false
@doc offset: 0
def seq_a001024(idx) do
Math.pow(15, idx)
end
@doc """
OEIS Sequence `A001025` - Powers of 16: a(n) = 16^n.
From [OEIS A001025](https://oeis.org/A001025):
> Powers of 16: a(n) = 16^n.
> (Formerly M5021 N2164)
**Sequence IDs**: `:a001025`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001025) |> Sequence.take!(21)
[1,16,256,4096,65536,1048576,16777216,268435456,4294967296,68719476736,1099511627776,17592186044416,281474976710656,4503599627370496,72057594037927936,1152921504606846976,18446744073709551616,295147905179352825856,4722366482869645213696,75557863725914323419136,1208925819614629174706176]
"""
@doc offset: 0,
sequence: "Powers of 16: a(n) = 16^n.",
references: [{:oeis, :a001025, "https://oeis.org/A001025"}]
def create_sequence_a001025(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001025/1)
end
@doc false
@doc offset: 0
def seq_a001025(idx) do
Math.pow(16, idx)
end
@doc """
OEIS Sequence `A001026` - Powers of 17.
From [OEIS A001026](https://oeis.org/A001026):
> Powers of 17.
> (Formerly M5048 N2182)
**Sequence IDs**: `:a001026`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001026) |> Sequence.take!(21)
[1,17,289,4913,83521,1419857,24137569,410338673,6975757441,118587876497,2015993900449,34271896307633,582622237229761,9904578032905937,168377826559400929,2862423051509815793,48661191875666868481,827240261886336764177,14063084452067724991009,239072435685151324847153,4064231406647572522401601]
"""
@doc offset: 0,
sequence: "Powers of 17.",
references: [{:oeis, :a001026, "https://oeis.org/A001026"}]
def create_sequence_a001026(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001026/1)
end
@doc false
@doc offset: 0
def seq_a001026(idx) do
Math.pow(17, idx)
end
@doc """
OEIS Sequence `A001027` - Powers of 18.
From [OEIS A001027](https://oeis.org/A001027):
> Powers of 18.
> (Formerly M5062 N2192)
**Sequence IDs**: `:a001027`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001027) |> Sequence.take!(21)
[1,18,324,5832,104976,1889568,34012224,612220032,11019960576,198359290368,3570467226624,64268410079232,1156831381426176,20822964865671168,374813367582081024,6746640616477458432,121439531096594251776,2185911559738696531968,39346408075296537575424,708235345355337676357632,12748236216396078174437376]
"""
@doc offset: 0,
sequence: "Powers of 18.",
references: [{:oeis, :a001027, "https://oeis.org/A001027"}]
def create_sequence_a001027(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001027/1)
end
@doc false
@doc offset: 0
def seq_a001027(idx) do
Math.pow(18, idx)
end
@doc """
OEIS Sequence `A001029` - Powers of 19.
From [OEIS A001029](https://oeis.org/A001029):
> Powers of 19.
> (Formerly M5079 N2198)
**Sequence IDs**: `:a001029`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a001029) |> Sequence.take!(21)
[1,19,361,6859,130321,2476099,47045881,893871739,16983563041,322687697779,6131066257801,116490258898219,2213314919066161,42052983462257059,799006685782884121,15181127029874798299,288441413567621167681,5480386857784802185939,104127350297911241532841,1978419655660313589123979,37589973457545958193355601]
"""
@doc offset: 0,
sequence: "Powers of 19.",
references: [{:oeis, :a001029, "https://oeis.org/A001029"}]
def create_sequence_a001029(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a001029/1)
end
@doc false
@doc offset: 0
def seq_a001029(idx) do
Math.pow(19, idx)
end
@doc """
OEIS Sequence `A009964` - Powers of 20.
From [OEIS A009964](https://oeis.org/A009964):
> Powers of 20.
**Sequence IDs**: `:a009964`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009964) |> Sequence.take!(16)
[1,20,400,8000,160000,3200000,64000000,1280000000,25600000000,512000000000,10240000000000,204800000000000,4096000000000000,81920000000000000,1638400000000000000,32768000000000000000]
"""
@doc offset: 0,
sequence: "Powers of 20.",
references: [{:oeis, :a009964, "https://oeis.org/A009964"}]
def create_sequence_a009964(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009964/1)
end
@doc false
@doc offset: 0
def seq_a009964(idx) do
Math.pow(20, idx)
end
@doc """
OEIS Sequence `A009965` - Powers of 21.
From [OEIS A009965](https://oeis.org/A009965):
> Powers of 21.
**Sequence IDs**: `:a009965`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009965) |> Sequence.take!(21)
[1,21,441,9261,194481,4084101,85766121,1801088541,37822859361,794280046581,16679880978201,350277500542221,7355827511386641,154472377739119461,3243919932521508681,68122318582951682301,1430568690241985328321,30041942495081691894741,630880792396715529789561,13248496640331026125580781,278218429446951548637196401]
"""
@doc offset: 0,
sequence: "Powers of 21.",
references: [{:oeis, :a009965, "https://oeis.org/A009965"}]
def create_sequence_a009965(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009965/1)
end
@doc false
@doc offset: 0
def seq_a009965(idx) do
Math.pow(21, idx)
end
@doc """
OEIS Sequence `A009966` - Powers of 22.
From [OEIS A009966](https://oeis.org/A009966):
> Powers of 22.
**Sequence IDs**: `:a009966`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009966) |> Sequence.take!(21)
[1,22,484,10648,234256,5153632,113379904,2494357888,54875873536,1207269217792,26559922791424,584318301411328,12855002631049216,282810057883082752,6221821273427820544,136880068015412051968,3011361496339065143296,66249952919459433152512,1457498964228107529355264,32064977213018365645815808,705429498686404044207947776]
"""
@doc offset: 0,
sequence: "Powers of 22.",
references: [{:oeis, :a009966, "https://oeis.org/A009966"}]
def create_sequence_a009966(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009966/1)
end
@doc false
@doc offset: 0
def seq_a009966(idx) do
Math.pow(22, idx)
end
@doc """
OEIS Sequence `A009967` - Powers of 23.
From [OEIS A009967](https://oeis.org/A009967):
> Powers of 23.
**Sequence IDs**: `:a009967`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009967) |> Sequence.take!(21)
[1,23,529,12167,279841,6436343,148035889,3404825447,78310985281,1801152661463,41426511213649,952809757913927,21914624432020321,504036361936467383,11592836324538749809,266635235464391245607,6132610415680998648961,141050039560662968926103,3244150909895248285300369,74615470927590710561908487,1716155831334586342923895201]
"""
@doc offset: 0,
sequence: "Powers of 23.",
references: [{:oeis, :a009967, "https://oeis.org/A009967"}]
def create_sequence_a009967(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009967/1)
end
@doc false
@doc offset: 0
def seq_a009967(idx) do
Math.pow(23, idx)
end
@doc """
OEIS Sequence `A009968` - Powers of 24: a(n) = 24^n.
From [OEIS A009968](https://oeis.org/A009968):
> Powers of 24: a(n) = 24^n.
**Sequence IDs**: `:a009968`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009968) |> Sequence.take!(21)
[1,24,576,13824,331776,7962624,191102976,4586471424,110075314176,2641807540224,63403380965376,1521681143169024,36520347436056576,876488338465357824,21035720123168587776,504857282956046106624,12116574790945106558976,290797794982682557415424,6979147079584381377970176,167499529910025153071284224,4019988717840603673710821376]
"""
@doc offset: 0,
sequence: "Powers of 24: a(n) = 24^n.",
references: [{:oeis, :a009968, "https://oeis.org/A009968"}]
def create_sequence_a009968(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009968/1)
end
@doc false
@doc offset: 0
def seq_a009968(idx) do
Math.pow(24, idx)
end
@doc """
OEIS Sequence `A009969` - Powers of 25.
From [OEIS A009969](https://oeis.org/A009969):
> Powers of 25.
**Sequence IDs**: `:a009969`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009969) |> Sequence.take!(21)
[1,25,625,15625,390625,9765625,244140625,6103515625,152587890625,3814697265625,95367431640625,2384185791015625,59604644775390625,1490116119384765625,37252902984619140625,931322574615478515625,23283064365386962890625,582076609134674072265625,14551915228366851806640625,363797880709171295166015625,9094947017729282379150390625]
"""
@doc offset: 0,
sequence: "Powers of 25.",
references: [{:oeis, :a009969, "https://oeis.org/A009969"}]
def create_sequence_a009969(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009969/1)
end
@doc false
@doc offset: 0
def seq_a009969(idx) do
Math.pow(25, idx)
end
@doc """
OEIS Sequence `A009970` - Powers of 26.
From [OEIS A009970](https://oeis.org/A009970):
> Powers of 26.
**Sequence IDs**: `:a009970`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009970) |> Sequence.take!(21)
[1,26,676,17576,456976,11881376,308915776,8031810176,208827064576,5429503678976,141167095653376,3670344486987776,95428956661682176,2481152873203736576,64509974703297150976,1677259342285725925376,43608742899428874059776,1133827315385150725554176,29479510200013918864408576,766467265200361890474622976,19928148895209409152340197376]
"""
@doc offset: 0,
sequence: "Powers of 26.",
references: [{:oeis, :a009970, "https://oeis.org/A009970"}]
def create_sequence_a009970(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009970/1)
end
@doc false
@doc offset: 0
def seq_a009970(idx) do
Math.pow(26, idx)
end
@doc """
OEIS Sequence `A009971` - Powers of 27.
From [OEIS A009971](https://oeis.org/A009971):
> Powers of 27.
**Sequence IDs**: `:a009971`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009971) |> Sequence.take!(16)
[1,27,729,19683,531441,14348907,387420489,10460353203,282429536481,7625597484987,205891132094649,5559060566555523,150094635296999121,4052555153018976267,109418989131512359209,2954312706550833698643]
"""
@doc offset: 0,
sequence: "Powers of 27.",
references: [{:oeis, :a009971, "https://oeis.org/A009971"}]
def create_sequence_a009971(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009971/1)
end
@doc false
@doc offset: 0
def seq_a009971(idx) do
Math.pow(27, idx)
end
@doc """
OEIS Sequence `A009972` - Powers of 28.
From [OEIS A009972](https://oeis.org/A009972):
> Powers of 28.
**Sequence IDs**: `:a009972`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009972) |> Sequence.take!(16)
[1,28,784,21952,614656,17210368,481890304,13492928512,377801998336,10578455953408,296196766695424,8293509467471872,232218265089212416,6502111422497947648,182059119829942534144,5097655355238390956032]
"""
@doc offset: 0,
sequence: "Powers of 28.",
references: [{:oeis, :a009972, "https://oeis.org/A009972"}]
def create_sequence_a009972(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009972/1)
end
@doc false
@doc offset: 0
def seq_a009972(idx) do
Math.pow(28, idx)
end
@doc """
OEIS Sequence `A009973` - Powers of 29.
From [OEIS A009973](https://oeis.org/A009973):
> Powers of 29.
**Sequence IDs**: `:a009973`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009973) |> Sequence.take!(16)
[1,29,841,24389,707281,20511149,594823321,17249876309,500246412961,14507145975869,420707233300201,12200509765705829,353814783205469041,10260628712958602189,297558232675799463481,8629188747598184440949]
"""
@doc offset: 0,
sequence: "Powers of 29.",
references: [{:oeis, :a009973, "https://oeis.org/A009973"}]
def create_sequence_a009973(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009973/1)
end
@doc false
@doc offset: 0
def seq_a009973(idx) do
Math.pow(29, idx)
end
@doc """
OEIS Sequence `A009974` - Powers of 30.
From [OEIS A009974](https://oeis.org/A009974):
> Powers of 30.
**Sequence IDs**: `:a009974`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009974) |> Sequence.take!(16)
[1,30,900,27000,810000,24300000,729000000,21870000000,656100000000,19683000000000,590490000000000,17714700000000000,531441000000000000,15943230000000000000,478296900000000000000,14348907000000000000000]
"""
@doc offset: 0,
sequence: "Powers of 30.",
references: [{:oeis, :a009974, "https://oeis.org/A009974"}]
def create_sequence_a009974(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009974/1)
end
@doc false
@doc offset: 0
def seq_a009974(idx) do
Math.pow(30, idx)
end
@doc """
OEIS Sequence `A009975` - Powers of 31.
From [OEIS A009975](https://oeis.org/A009975):
> Powers of 31.
**Sequence IDs**: `:a009975`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009975) |> Sequence.take!(16)
[1,31,961,29791,923521,28629151,887503681,27512614111,852891037441,26439622160671,819628286980801,25408476896404831,787662783788549761,24417546297445042591,756943935220796320321,23465261991844685929951]
"""
@doc offset: 0,
sequence: "Powers of 31.",
references: [{:oeis, :a009975, "https://oeis.org/A009975"}]
def create_sequence_a009975(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009975/1)
end
@doc false
@doc offset: 0
def seq_a009975(idx) do
Math.pow(31, idx)
end
@doc """
OEIS Sequence `A009976` - Powers of 32.
From [OEIS A009976](https://oeis.org/A009976):
> Powers of 32.
**Sequence IDs**: `:a009976`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009976) |> Sequence.take!(15)
[1,32,1024,32768,1048576,33554432,1073741824,34359738368,1099511627776,35184372088832,1125899906842624,36028797018963968,1152921504606846976,36893488147419103232,1180591620717411303424]
"""
@doc offset: 0,
sequence: "Powers of 32.",
references: [{:oeis, :a009976, "https://oeis.org/A009976"}]
def create_sequence_a009976(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009976/1)
end
@doc false
@doc offset: 0
def seq_a009976(idx) do
Math.pow(32, idx)
end
@doc """
OEIS Sequence `A009977` - Powers of 33.
From [OEIS A009977](https://oeis.org/A009977):
> Powers of 33.
**Sequence IDs**: `:a009977`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009977) |> Sequence.take!(15)
[1,33,1089,35937,1185921,39135393,1291467969,42618442977,1406408618241,46411484401953,1531578985264449,50542106513726817,1667889514952984961,55040353993448503713,1816331681783800622529]
"""
@doc offset: 0,
sequence: "Powers of 33.",
references: [{:oeis, :a009977, "https://oeis.org/A009977"}]
def create_sequence_a009977(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009977/1)
end
@doc false
@doc offset: 0
def seq_a009977(idx) do
Math.pow(33, idx)
end
@doc """
OEIS Sequence `A009978` - Powers of 34.
From [OEIS A009978](https://oeis.org/A009978):
> Powers of 34.
**Sequence IDs**: `:a009978`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009978) |> Sequence.take!(15)
[1,34,1156,39304,1336336,45435424,1544804416,52523350144,1785793904896,60716992766464,2064377754059776,70188843638032384,2386420683693101056,81138303245565435904,2758702310349224820736]
"""
@doc offset: 0,
sequence: "Powers of 34.",
references: [{:oeis, :a009978, "https://oeis.org/A009978"}]
def create_sequence_a009978(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009978/1)
end
@doc false
@doc offset: 0
def seq_a009978(idx) do
Math.pow(34, idx)
end
@doc """
OEIS Sequence `A009979` - Powers of 35.
From [OEIS A009979](https://oeis.org/A009979):
> Powers of 35.
**Sequence IDs**: `:a009979`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009979) |> Sequence.take!(15)
[1,35,1225,42875,1500625,52521875,1838265625,64339296875,2251875390625,78815638671875,2758547353515625,96549157373046875,3379220508056640625,118272717781982421875,4139545122369384765625]
"""
@doc offset: 0,
sequence: "Powers of 35.",
references: [{:oeis, :a009979, "https://oeis.org/A009979"}]
def create_sequence_a009979(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009979/1)
end
@doc false
@doc offset: 0
def seq_a009979(idx) do
Math.pow(35, idx)
end
@doc """
OEIS Sequence `A009980` - Powers of 36.
From [OEIS A009980](https://oeis.org/A009980):
> Powers of 36.
**Sequence IDs**: `:a009980`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009980) |> Sequence.take!(15)
[1,36,1296,46656,1679616,60466176,2176782336,78364164096,2821109907456,101559956668416,3656158440062976,131621703842267136,4738381338321616896,170581728179578208256,6140942214464815497216]
"""
@doc offset: 0,
sequence: "Powers of 36.",
references: [{:oeis, :a009980, "https://oeis.org/A009980"}]
def create_sequence_a009980(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009980/1)
end
@doc false
@doc offset: 0
def seq_a009980(idx) do
Math.pow(36, idx)
end
@doc """
OEIS Sequence `A009981` - Powers of 37.
From [OEIS A009981](https://oeis.org/A009981):
> Powers of 37.
**Sequence IDs**: `:a009981`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009981) |> Sequence.take!(15)
[1,37,1369,50653,1874161,69343957,2565726409,94931877133,3512479453921,129961739795077,4808584372417849,177917621779460413,6582952005840035281,243569224216081305397,9012061295995008299689]
"""
@doc offset: 0,
sequence: "Powers of 37.",
references: [{:oeis, :a009981, "https://oeis.org/A009981"}]
def create_sequence_a009981(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009981/1)
end
@doc false
@doc offset: 0
def seq_a009981(idx) do
Math.pow(37, idx)
end
@doc """
OEIS Sequence `A009982` - Powers of 38.
From [OEIS A009982](https://oeis.org/A009982):
> Powers of 38.
**Sequence IDs**: `:a009982`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009982) |> Sequence.take!(16)
[1,38,1444,54872,2085136,79235168,3010936384,114415582592,4347792138496,165216101262848,6278211847988224,238572050223552512,9065737908494995456,344498040522809827328,13090925539866773438464,497455170514937390661632]
"""
@doc offset: 0,
sequence: "Powers of 38.",
references: [{:oeis, :a009982, "https://oeis.org/A009982"}]
def create_sequence_a009982(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009982/1)
end
@doc false
@doc offset: 0
def seq_a009982(idx) do
Math.pow(38, idx)
end
@doc """
OEIS Sequence `A009983` - Powers of 39.
From [OEIS A009983](https://oeis.org/A009983):
> Powers of 39.
**Sequence IDs**: `:a009983`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009983) |> Sequence.take!(15)
[1,39,1521,59319,2313441,90224199,3518743761,137231006679,5352009260481,208728361158759,8140406085191601,317475837322472439,12381557655576425121,482880748567480579719,18832349194131742609041]
"""
@doc offset: 0,
sequence: "Powers of 39.",
references: [{:oeis, :a009983, "https://oeis.org/A009983"}]
def create_sequence_a009983(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009983/1)
end
@doc false
@doc offset: 0
def seq_a009983(idx) do
Math.pow(39, idx)
end
@doc """
OEIS Sequence `A009984` - Powers of 40.
From [OEIS A009984](https://oeis.org/A009984):
> Powers of 40.
**Sequence IDs**: `:a009984`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009984) |> Sequence.take!(15)
[1,40,1600,64000,2560000,102400000,4096000000,163840000000,6553600000000,262144000000000,10485760000000000,419430400000000000,16777216000000000000,671088640000000000000,26843545600000000000000]
"""
@doc offset: 0,
sequence: "Powers of 40.",
references: [{:oeis, :a009984, "https://oeis.org/A009984"}]
def create_sequence_a009984(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009984/1)
end
@doc false
@doc offset: 0
def seq_a009984(idx) do
Math.pow(40, idx)
end
@doc """
OEIS Sequence `A009985` - Powers of 41.
From [OEIS A009985](https://oeis.org/A009985):
> Powers of 41.
**Sequence IDs**: `:a009985`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009985) |> Sequence.take!(15)
[1,41,1681,68921,2825761,115856201,4750104241,194754273881,7984925229121,327381934393961,13422659310152401,550329031716248441,22563490300366186081,925103102315013629321,37929227194915558802161]
"""
@doc offset: 0,
sequence: "Powers of 41.",
references: [{:oeis, :a009985, "https://oeis.org/A009985"}]
def create_sequence_a009985(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009985/1)
end
@doc false
@doc offset: 0
def seq_a009985(idx) do
Math.pow(41, idx)
end
@doc """
OEIS Sequence `A009986` - Powers of 42.
From [OEIS A009986](https://oeis.org/A009986):
> Powers of 42.
**Sequence IDs**: `:a009986`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009986) |> Sequence.take!(15)
[1,42,1764,74088,3111696,130691232,5489031744,230539333248,9682651996416,406671383849472,17080198121677824,717368321110468608,30129469486639681536,1265437718438866624512,53148384174432398229504]
"""
@doc offset: 0,
sequence: "Powers of 42.",
references: [{:oeis, :a009986, "https://oeis.org/A009986"}]
def create_sequence_a009986(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009986/1)
end
@doc false
@doc offset: 0
def seq_a009986(idx) do
Math.pow(42, idx)
end
@doc """
OEIS Sequence `A009987` - Powers of 43.
From [OEIS A009987](https://oeis.org/A009987):
> Powers of 43.
**Sequence IDs**: `:a009987`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009987) |> Sequence.take!(15)
[1,43,1849,79507,3418801,147008443,6321363049,271818611107,11688200277601,502592611936843,21611482313284249,929293739471222707,39959630797262576401,1718264124282290785243,73885357344138503765449]
"""
@doc offset: 0,
sequence: "Powers of 43.",
references: [{:oeis, :a009987, "https://oeis.org/A009987"}]
def create_sequence_a009987(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009987/1)
end
@doc false
@doc offset: 0
def seq_a009987(idx) do
Math.pow(43, idx)
end
@doc """
OEIS Sequence `A009988` - Powers of 44.
From [OEIS A009988](https://oeis.org/A009988):
> Powers of 44.
**Sequence IDs**: `:a009988`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009988) |> Sequence.take!(15)
[1,44,1936,85184,3748096,164916224,7256313856,319277809664,14048223625216,618121839509504,27197360938418176,1196683881290399744,52654090776777588736,2316779994178213904384,101938319743841411792896]
"""
@doc offset: 0,
sequence: "Powers of 44.",
references: [{:oeis, :a009988, "https://oeis.org/A009988"}]
def create_sequence_a009988(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009988/1)
end
@doc false
@doc offset: 0
def seq_a009988(idx) do
Math.pow(44, idx)
end
@doc """
OEIS Sequence `A009989` - Powers of 45.
From [OEIS A009989](https://oeis.org/A009989):
> Powers of 45.
**Sequence IDs**: `:a009989`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009989) |> Sequence.take!(15)
[1,45,2025,91125,4100625,184528125,8303765625,373669453125,16815125390625,756680642578125,34050628916015625,1532278301220703125,68952523554931640625,3102863559971923828125,139628860198736572265625]
"""
@doc offset: 0,
sequence: "Powers of 45.",
references: [{:oeis, :a009989, "https://oeis.org/A009989"}]
def create_sequence_a009989(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009989/1)
end
@doc false
@doc offset: 0
def seq_a009989(idx) do
Math.pow(45, idx)
end
@doc """
OEIS Sequence `A009990` - Powers of 46.
From [OEIS A009990](https://oeis.org/A009990):
> Powers of 46.
**Sequence IDs**: `:a009990`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009990) |> Sequence.take!(15)
[1,46,2116,97336,4477456,205962976,9474296896,435817657216,20047612231936,922190162669056,42420747482776576,1951354384207722496,89762301673555234816,4129065876983540801536,189937030341242876870656]
"""
@doc offset: 0,
sequence: "Powers of 46.",
references: [{:oeis, :a009990, "https://oeis.org/A009990"}]
def create_sequence_a009990(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009990/1)
end
@doc false
@doc offset: 0
def seq_a009990(idx) do
Math.pow(46, idx)
end
@doc """
OEIS Sequence `A009991` - Powers of 47.
From [OEIS A009991](https://oeis.org/A009991):
> Powers of 47.
**Sequence IDs**: `:a009991`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009991) |> Sequence.take!(14)
[1,47,2209,103823,4879681,229345007,10779215329,506623120463,23811286661761,1119130473102767,52599132235830049,2472159215084012303,116191483108948578241,5460999706120583177327]
"""
@doc offset: 0,
sequence: "Powers of 47.",
references: [{:oeis, :a009991, "https://oeis.org/A009991"}]
def create_sequence_a009991(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009991/1)
end
@doc false
@doc offset: 0
def seq_a009991(idx) do
Math.pow(47, idx)
end
@doc """
OEIS Sequence `A009992` - Powers of 48: a(n) = 48^n.
From [OEIS A009992](https://oeis.org/A009992):
> Powers of 48: a(n) = 48^n.
**Sequence IDs**: `:a009992`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a009992) |> Sequence.take!(16)
[1,48,2304,110592,5308416,254803968,12230590464,587068342272,28179280429056,1352605460594688,64925062108545024,3116402981210161152,149587343098087735296,7180192468708211294208,344649238497994142121984,16543163447903718821855232]
"""
@doc offset: 0,
sequence: "Powers of 48: a(n) = 48^n.",
references: [{:oeis, :a009992, "https://oeis.org/A009992"}]
def create_sequence_a009992(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a009992/1)
end
@doc false
@doc offset: 0
def seq_a009992(idx) do
Math.pow(48, idx)
end
@doc """
OEIS Sequence `A011557` - Powers of 10: a(n) = 10^n.
From [OEIS A011557](https://oeis.org/A011557):
> Powers of 10: a(n) = 10^n.
**Sequence IDs**: `:a011557`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a011557) |> Sequence.take!(19)
[1,10,100,1000,10000,100000,1000000,10000000,100000000,1000000000,10000000000,100000000000,1000000000000,10000000000000,100000000000000,1000000000000000,10000000000000000,100000000000000000,1000000000000000000]
"""
@doc offset: 0,
sequence: "Powers of 10: a(n) = 10^n.",
references: [{:oeis, :a011557, "https://oeis.org/A011557"}]
def create_sequence_a011557(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a011557/1)
end
@doc false
@doc offset: 0
def seq_a011557(idx) do
Math.pow(10, idx)
end
@doc """
OEIS Sequence `A087752` - Powers of 49.
From [OEIS A087752](https://oeis.org/A087752):
> Powers of 49.
**Sequence IDs**: `:a087752`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a087752) |> Sequence.take!(15)
[1,49,2401,117649,5764801,282475249,13841287201,678223072849,33232930569601,1628413597910449,79792266297612001,3909821048582988049,191581231380566414401,9387480337647754305649,459986536544739960976801]
"""
@doc offset: 0,
sequence: "Powers of 49.",
references: [{:oeis, :a087752, "https://oeis.org/A087752"}]
def create_sequence_a087752(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a087752/1)
end
@doc false
@doc offset: 0
def seq_a087752(idx) do
Math.pow(49, idx)
end
@doc """
OEIS Sequence `A159991` - Powers of 60.
From [OEIS A159991](https://oeis.org/A159991):
> Powers of 60.
**Sequence IDs**: `:a159991`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a159991) |> Sequence.take!(14)
[1,60,3600,216000,12960000,777600000,46656000000,2799360000000,167961600000000,10077696000000000,604661760000000000,36279705600000000000,2176782336000000000000,130606940160000000000000]
"""
@doc offset: 0,
sequence: "Powers of 60.",
references: [{:oeis, :a159991, "https://oeis.org/A159991"}]
def create_sequence_a159991(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a159991/1)
end
@doc false
@doc offset: 0
def seq_a159991(idx) do
Math.pow(60, idx)
end
@doc """
OEIS Sequence `A057716` - The non-powers of 2.
From [OEIS A057716](https://oeis.org/A057716):
> The non-powers of 2.
**Sequence IDs**: `:a057716`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Powers, :a057716) |> Sequence.take!(68)
[0,3,5,6,7,9,10,11,12,13,14,15,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,65,66,67,68,69,70,71,72,73,74]
"""
@doc offset: 0,
sequence: "The non-powers of 2.",
references: [{:oeis, :a057716, "https://oeis.org/A057716"}]
def create_sequence_a057716(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Powers.seq_a057716/2)
end
@doc false
@doc offset: 0
def seq_a057716(idx, last) do
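    # The polite numbers (sums of two or more consecutive positive
    # integers) are exactly the non-powers of 2, so stepping to the next
    # polite number yields the next term of this sequence.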
case idx do
0 -> 0
_ -> Math.next_number(&Predicates.is_polite_number?/1, last)
end
end
end

# source: lib/sequence/oeis/powers.ex
defmodule DataBase.Schemas.AccountMovement do
@moduledoc """
Bank account funds movements.
Every time a `t:DataBase.Schemas.Account.t/0` adds or removes
funds, it is, in other words, inserting a new `t:t/0`, which is
simply called a **Movement**. Movements can add *(inbound)* or
remove *(outbound)* funds.
Whether a **Movement** is inbound or outbound is determined by its
`:direction` (`1` for inbound, `-1` for outbound). Its *real
amount* is therefore `direction * amount`, since `:amount` is
always positive regardless of direction.
The act of registering a new movement to a
`t:DataBase.Schemas.Account.t/0` is called **moving** a movement.
This moving operation sets the required internal values and
writes/updates the respective
`t:DataBase.Schemas.AccountHistory.t/0` logbook.
See `move/1`.
Expresses information over the `account_movements` database table.
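
## Example

A hypothetical flow (the account id `1` is assumed to exist):

    alias DataBase.Schemas.AccountMovement, as: Movement

    # Build an inbound (direction 1) movement of 100.00 for account 1...
    movement = Movement.build(1, Decimal.new("100.00"), 1)

    # ...then register it, computing balances and updating the logbook.
    {:ok, %Movement{}} = Movement.move(movement)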
"""
import Map, only: [merge: 2]
import Ecto.Query, only: [last: 2, where: 2]
use Ecto.Schema
alias Decimal, as: D
alias Ecto.Association.NotLoaded
alias DataBase.Repos.AmethystRepo, as: Repo
alias DataBase.Schemas.AccountHistory, as: History
alias DataBase.Schemas.Account
@typedoc """
A `DataBase.Schemas.AccountMovement` struct.
"""
@type t :: %__MODULE__{}
@typedoc """
A standard Ecto response to `DataBase.Schemas.AccountMovement` data
insertion.
"""
@type response_t :: {:ok, t()} | {:error, any()}
@typedoc """
A map containing an `:initial_balance` key with a `t:Decimal.t/0`
value, or `nil`.
It is the representation used when querying this specific field in
the database.
"""
@type initial_balance_t :: %{initial_balance: D.t()} | nil
@typedoc """
A group of fields to be set after building a `t:t/0`, prior to its
insertion.
"""
@type missing_values_t :: %{
move_on: Date.t(),
move_at: DateTime.t(),
initial_balance: D.t(),
final_balance: D.t()
}
@typedoc """
A group of fields that associates a `t:t/0` with the
`t:DataBase.Schemas.Account.t/0`'s last
`t:DataBase.Schemas.AccountMovement.t/0`, a.k.a. the *previous
movement*. It is an empty map when there is no precedent.
"""
@type precedent_assoc_t :: %{} | %{
previous_movement_id: pos_integer(),
previous_movement: t()
}
schema "account_movements" do
field :direction, :integer
field :amount, :decimal
field :initial_balance, :decimal
field :final_balance, :decimal
field :move_at, :utc_datetime
field :move_on, :date
belongs_to(:account, Account)
belongs_to(:previous_movement, __MODULE__)
timestamps()
end
@doc """
Builds a `t:t/0` over the given arguments.
"""
@spec build(pos_integer, D.t, integer) :: t()
def build(account_id, %D{} = amount, direction) do
%__MODULE__{
account_id: account_id,
amount: amount,
direction: direction
}
end
@doc """
The only allowed way to register a `t:t/0`.
It will set its *previous movement* (if any), then compute its
required internal values and register with the respective
`t:DataBase.Schemas.AccountHistory.t/0` logbook.
Only after registering to the logbook the `t:t/0` is inserted.
"""
@spec move(t) :: response_t()
def move(%__MODULE__{} = movement) do
movement
|> set_previous_movement()
|> set_missing_values()
|> History.register()
|> Repo.insert()
end
@doc """
Determines if a given `t:Decimal.t/0` is greater than zero.
"""
@spec valid_amount?(D.t) :: boolean()
def valid_amount?(%D{} = amount) do
D.cmp(amount, 0) == :gt
end
@doc """
Returns a `t:Decimal.t/0` representing the sum of the
`t:DataBase.Schemas.AccountHistory.t/0` (logbook) `:inbounds_on`
value and the given `t:t/0` *movement inbound value*.
The *movement inbound value* is the `t:t/0` `:amount` if it is an
inbound movement. Otherwise it is zero.
"""
@spec inbounds_on(t) :: D.t()
def inbounds_on(%__MODULE__{} = m) do
with inbounds <- Account.inbounds_on(m.account_id, m.move_on),
inbound <- inbound_amount(m, m.direction),
do: D.add(inbounds, inbound)
end
@doc """
Returns a `t:Decimal.t/0` representing the sum of the
`t:DataBase.Schemas.AccountHistory.t/0` (logbook) `:outbounds_on`
value and the given `t:t/0` *movement outbound value*.
The *movement outbound value* is the `t:t/0` `:amount` if it is an
outbound movement. Otherwise it is zero.
"""
@spec outbounds_on(t) :: D.t()
def outbounds_on(%__MODULE__{} = m) do
with outbounds <- Account.outbounds_on(m.account_id, m.move_on),
outbound <- outbound_amount(m, m.direction),
do: D.add(outbounds, outbound)
end
@doc """
When a given `t:t/0` is the first of its day, it returns the
`t:t/0` `:initial_balance`. Otherwise it returns the
`DataBase.Schemas.Account.early_balance/2` `:initial_balance` for
the given `t:t/0`.
"""
@spec initial_balance_for(t) :: D.t()
def initial_balance_for(%__MODULE__{} = movement) do
movement.account_id
|> Account.early_balance(movement.move_on)
|> do_initial_balance_for(movement)
end
@spec do_initial_balance_for(initial_balance_t, t) :: D.t()
defp do_initial_balance_for(nil, %__MODULE__{} = movement) do
movement.initial_balance
end
defp do_initial_balance_for(%{} = result, _movement) do
result.initial_balance
end
@spec set_previous_movement(t) :: t()
defp set_previous_movement(%__MODULE__{} = movement) do
merge(movement, get_precedent_assoc(movement.account_id))
end
@spec set_missing_values(t) :: t()
defp set_missing_values(%__MODULE__{} = movement) do
merge(movement, missing_values(movement))
end
@spec get_precedent_assoc(pos_integer) :: precedent_assoc_t()
defp get_precedent_assoc(account_id) when is_integer(account_id) do
__MODULE__
|> last(:move_at)
|> where(account_id: ^account_id)
|> Repo.one()
|> precedent_assoc()
end
@spec missing_values(t, DateTime.t) :: missing_values_t()
defp missing_values(%__MODULE__{} = m, at \\ DateTime.utc_now()) do
%{
move_on: DateTime.to_date(at),
move_at: at,
initial_balance: final_balance(m.previous_movement),
final_balance: operate(m.previous_movement, m)
}
end
@spec precedent_assoc(nil | t) :: precedent_assoc_t()
defp precedent_assoc(nil), do: %{}
defp precedent_assoc(%__MODULE__{} = movement) do
%{
previous_movement_id: movement.id,
previous_movement: movement
}
end
@spec final_balance(%NotLoaded{} | t) :: D.t()
defp final_balance(%NotLoaded{}), do: D.new(0)
defp final_balance(%__MODULE__{} = movement) do
movement.final_balance
end
@spec operate(%NotLoaded{} | t, t) :: D.t()
defp operate(precedent, %__MODULE__{} = movement) do
D.add(final_balance(precedent), signed_amount(movement))
end
@spec signed_amount(t) :: D.t()
defp signed_amount(%__MODULE__{} = movement) do
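    # The movement's "real amount": positive for inbounds (direction 1),
    # negative for outbounds (direction -1).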
D.mult(movement.amount, movement.direction)
end
@spec inbound_amount(t, integer) :: D.t()
defp inbound_amount(_movement, -1), do: D.new(0)
defp inbound_amount(%__MODULE__{} = movement, 1) do
movement.amount
end
@spec outbound_amount(t, integer) :: D.t()
defp outbound_amount(_movement, 1), do: D.new(0)
defp outbound_amount(%__MODULE__{} = movement, -1) do
movement.amount
end
end

# source: apps/database/lib/database/schemas/account_movement.ex
defmodule Strukt.Field do
@moduledoc false
defstruct type: nil,
name: nil,
meta: nil,
value_type: nil,
options: [],
validations: [],
block: nil
@validation_opts [
:required,
:length,
:format,
:one_of,
:none_of,
:subset_of,
:range,
:number
]
@supported_field_types [
:field,
:embeds_one,
:embeds_many,
:timestamps
]
defguard is_supported(type) when type in @supported_field_types
@doc """
This function receives the AST of all field definitions provided to `defstruct`, and
converts the nodes to a more useful form for the macro internals.
The resulting struct can be converted back to an AST node with `to_ast/1`.
This is intended for use only in the internal implementation of `defstruct`.
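
For instance, a sketch of parsing a single `field/3` node (`meta` would
normally carry quoted-expression metadata such as line numbers):

    [{:field, [], [:name, :string, [required: true]]}]
    |> Strukt.Field.parse()
    #=> [%Strukt.Field{type: :field, name: :name, value_type: :string,
    #      meta: [], validations: [required: true], options: []}]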
"""
def parse(fields) do
for {type, meta, args} <- fields, do: parse(type, meta, args)
end
defp parse(type, meta, [name, value_type]) when type in @supported_field_types,
do: %__MODULE__{name: name, type: type, meta: meta, value_type: value_type}
defp parse(type, meta, [name, value_type, opts]) when type in @supported_field_types do
{block, opts} = Keyword.pop(opts, :do)
{validations, options} = Keyword.split(opts, @validation_opts)
%__MODULE__{
name: name,
type: type,
meta: meta,
value_type: value_type,
block: block,
options: options,
validations: validations
}
end
defp parse(type, meta, [name, value_type, opts, list])
when type in [:embeds_one, :embeds_many] do
block = Keyword.fetch!(list, :do)
{validations, options} = Keyword.split(opts, @validation_opts)
%__MODULE__{
name: name,
type: type,
meta: meta,
value_type: value_type,
block: block,
options: options,
validations: validations
}
end
defp parse(:timestamps, meta, args),
do: %__MODULE__{type: :timestamps, meta: meta, options: args}
defp parse(field_type, _meta, args) do
raise ArgumentError,
message:
"unsupported use of #{field_type}/#{length(args)} within `defstruct`, " <>
"only #{Enum.join(@supported_field_types, ",")} are permitted"
end
@doc """
This function converts a `Strukt.Field` struct into its AST form _without_ validations.
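
For instance, round-tripping a field with no options and no inner block
(a sketch):

    field = %Strukt.Field{type: :field, name: :name, meta: [], value_type: :string}
    Strukt.Field.to_ast(field)
    #=> {:field, [], [:name, :string, []]}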
"""
def to_ast(field)
def to_ast(%__MODULE__{type: :timestamps, meta: meta, options: options}),
do: {:timestamps, meta, options}
def to_ast(%__MODULE__{
type: type,
name: name,
meta: meta,
value_type: value_type,
options: options,
block: nil
}),
do: {type, meta, [name, value_type, options]}
def to_ast(%__MODULE__{
type: type,
name: name,
meta: meta,
value_type: value_type,
options: options,
block: block
}),
do: {type, meta, [name, value_type, options, block]}
end

# source: lib/field.ex
defmodule Crux.Rest.Util do
@moduledoc """
Collection of util functions.
"""
alias Crux.Structs.{Channel, Emoji, Guild, Member, Message, Overwrite, Reaction, Role, User}
@doc """
Resolves a string or a binary to a `t:binary/0`.
* http / https url
* local file path
* a binary itself
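
For example (the URL and bytes are illustrative):

    Crux.Rest.Util.resolve_file("https://example.com/image.png")
    #=> {:ok, <<137, 80, 78, 71, ...>>}

    Crux.Rest.Util.resolve_file(<<255, 216, 255>>)
    #=> {:ok, <<255, 216, 255>>}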
"""
@spec resolve_file(file :: String.t() | binary() | nil) :: {:ok, binary()} | {:error, term()} | nil
def resolve_file(nil), do: nil
def resolve_file(file) do
cond do
Regex.match?(~r{^https?://}, file) ->
with {:ok, response} <- HTTPoison.get(file) do
{:ok, response.body}
end
File.exists?(file) && File.stat!(file).type == :regular ->
File.read(file)
is_binary(file) ->
{:ok, file}
true ->
{:error, :no_binary}
end
end
@typedoc """
Used when sending files via `Rest.create_message/2`.
"""
@type resolved_file ::
{
String.t() | :file,
String.t() | binary(),
{String.t(), [{String.t(), binary()}]},
[{String.t(), String.t()}]
}
| {:error, term()}
@doc """
Resolves a:
* path to a file
* tuple of path to a file or binary of one, and a file name
to a `resolved_file` automatically used by `Rest.create_message/2`
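
For example (paths and file names are illustrative):

    # From a local path; the file name is inferred from the path.
    Crux.Rest.Util.map_file("priv/images/cat.png")

    # From an in-memory binary with an explicit file name.
    Crux.Rest.Util.map_file({<<137, 80, 78, 71>>, "cat.png"})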
"""
# path
@spec map_file(
path ::
String.t()
| {String.t() | binary(), String.t()}
| {String.t() | :file, String.t() | binary(), String.t()}
) :: resolved_file() | {:error, term()}
def map_file(path) when is_bitstring(path), do: map_file({path, Path.basename(path)})
# {binary | path, name}
def map_file({bin_or_path, name}) when is_binary(bin_or_path) do
cond do
Regex.match?(~r{^https?://}, bin_or_path) ->
with {:ok, %{body: file}} <- HTTPoison.get(bin_or_path) do
map_file({file, Path.basename(name)})
else
{:error, _error} = error ->
error
other ->
{:error, other}
end
File.exists?(bin_or_path) ->
with {:ok, %{type: :regular}} <- File.stat(bin_or_path) do
map_file({:file, bin_or_path, Path.basename(name)})
end
true ->
map_file({Path.basename(name), bin_or_path, Path.basename(name)})
end
end
def map_file({name_or_atom, bin_or_path, name}) do
disposition = {"form-data", [{"filename", "\"#{name}\""}, {"name", "\"#{name}\""}]}
headers = [{"content-type", :mimerl.filename(name)}]
{name_or_atom, bin_or_path, disposition, headers}
end
@typedoc """
All available types that can be resolved into a role id.
"""
@type role_id_resolvable :: Role.t() | Crux.Rest.snowflake()
@doc ~S"""
Resolves a `t:role_id_resolvable/0` into a role id.
## Examples
```elixir
# A role struct
iex> %Crux.Structs.Role{id: 376146940762783746}
...> |> Crux.Rest.Util.resolve_role_id()
376146940762783746
# A role id
iex> 376146940762783746
...> |> Crux.Rest.Util.resolve_role_id()
376146940762783746
"""
@spec resolve_role_id(role :: role_id_resolvable()) :: integer()
def resolve_role_id(%Role{id: role_id}), do: role_id
def resolve_role_id(role_id) when is_number(role_id), do: role_id
@typedoc """
All available types that can be resolved into an emoji identifier.
"""
@type emoji_identifier_resolvable :: Reaction.t() | Emoji.t() | String.t()
@typedoc """
All available types that can be resolved into an emoji id.
"""
@type emoji_id_resolvable :: Reaction.t() | Emoji.t() | Crux.Rest.snowflake()
@doc ~S"""
Resolves a `t:emoji_id_resolvable/0` into an emoji id.
## Examples
```elixir
iex> %Crux.Structs.Emoji{id: 396521773216301056}
...> |> Crux.Rest.Util.resolve_emoji_id()
396521773216301056
iex> %Crux.Structs.Reaction{emoji: %Crux.Structs.Emoji{id: 396521773216301056}}
...> |> Crux.Rest.Util.resolve_emoji_id()
396521773216301056
iex> 396521773216301056
...> |> Crux.Rest.Util.resolve_emoji_id()
396521773216301056
```
"""
@spec resolve_emoji_id(emoji :: emoji_id_resolvable()) :: Crux.Rest.snowflake()
def resolve_emoji_id(%Emoji{id: id}) when not is_nil(id), do: id
def resolve_emoji_id(%Reaction{emoji: emoji}), do: resolve_emoji_id(emoji)
def resolve_emoji_id(emoji) when is_integer(emoji), do: emoji
@typedoc """
All available types that can be resolved into a user id.
"""
@type user_id_resolvable :: Member.t() | User.t() | integer()
@doc ~S"""
Resolves a `t:user_id_resolvable/0` into a user id.
## Examples
```elixir
iex> %Crux.Structs.User{id: 218348062828003328}
...> |> Crux.Rest.Util.resolve_user_id()
218348062828003328
iex> %Crux.Structs.Member{user: 218348062828003328}
...> |> Crux.Rest.Util.resolve_user_id()
218348062828003328
iex> 218348062828003328
...> |> Crux.Rest.Util.resolve_user_id()
218348062828003328
```
"""
@spec resolve_user_id(user :: user_id_resolvable()) :: Crux.Rest.snowflake()
def resolve_user_id(%User{id: id}), do: id
def resolve_user_id(%Member{user: id}), do: id
def resolve_user_id(id) when is_number(id), do: id
@typedoc """
All available types that can be resolved into a guild id.
"""
@type guild_id_resolvable :: Guild.t() | Channel.t() | Message.t() | Crux.Rest.snowflake()
@doc ~S"""
Resolves a `t:guild_id_resolvable/0` into a guild id.
## Examples
```elixir
iex> %Crux.Structs.Guild{id: 222078108977594368}
...> |> Crux.Rest.Util.resolve_guild_id()
222078108977594368
iex> %Crux.Structs.Channel{guild_id: 222078108977594368}
...> |> Crux.Rest.Util.resolve_guild_id()
222078108977594368
iex> %Crux.Structs.Message{guild_id: 222078108977594368}
...> |> Crux.Rest.Util.resolve_guild_id()
222078108977594368
iex> 222078108977594368
...> |> Crux.Rest.Util.resolve_guild_id()
222078108977594368
```
"""
@spec resolve_guild_id(guild :: guild_id_resolvable()) :: Crux.Rest.snowflake()
def resolve_guild_id(%Guild{id: id}), do: id
def resolve_guild_id(%Channel{guild_id: id}) when not is_nil(id), do: id
def resolve_guild_id(%Message{guild_id: id}) when not is_nil(id), do: id
def resolve_guild_id(id) when is_number(id), do: id
@typedoc """
All available types that can be resolved into a channel id.
"""
@type channel_id_resolvable :: Message.t() | Channel.t() | Crux.Rest.snowflake()
@doc ~S"""
Resolves a `t:channel_id_resolvable/0` into a channel id.
## Examples
```elixir
iex> %Crux.Structs.Channel{id: 222079895583457280}
...> |> Crux.Rest.Util.resolve_channel_id()
222079895583457280
iex> %Crux.Structs.Message{channel_id: 222079895583457280}
...> |> Crux.Rest.Util.resolve_channel_id()
222079895583457280
iex> 222079895583457280
...> |> Crux.Rest.Util.resolve_channel_id()
222079895583457280
```
"""
@spec resolve_channel_id(channel :: channel_id_resolvable()) :: Crux.Rest.snowflake()
def resolve_channel_id(%Channel{id: id}), do: id
def resolve_channel_id(%Message{channel_id: channel_id}), do: channel_id
def resolve_channel_id(id) when is_number(id), do: id
@typedoc """
All available types that can be resolved into a target for a permission overwrite.
"""
@type overwrite_target_resolvable ::
Overwrite.t() | Role.t() | User.t() | Member.t() | Crux.Rest.snowflake()
@doc """
Resolves a `t:overwrite_target_resolvable/0` into an overwrite target.
## Examples
```elixir
iex> %Crux.Structs.Overwrite{type: "member", id: 218348062828003328}
...> |> Crux.Rest.Util.resolve_overwrite_target()
{"member", 218348062828003328}
iex> %Crux.Structs.Role{id: 376146940762783746}
...> |> Crux.Rest.Util.resolve_overwrite_target()
{"role", 376146940762783746}
iex> %Crux.Structs.User{id: 218348062828003328}
...> |> Crux.Rest.Util.resolve_overwrite_target()
{"member", 218348062828003328}
iex> %Crux.Structs.Member{user: 218348062828003328}
...> |> Crux.Rest.Util.resolve_overwrite_target()
{"member", 218348062828003328}
iex> 218348062828003328
...> |> Crux.Rest.Util.resolve_overwrite_target()
{:unknown, 218348062828003328}
```
"""
@spec resolve_overwrite_target(overwrite :: overwrite_target_resolvable()) ::
{String.t() | :unknown, Crux.Rest.snowflake()}
def resolve_overwrite_target(%Overwrite{id: id, type: type}), do: {type, id}
def resolve_overwrite_target(%Role{id: id}), do: {"role", id}
def resolve_overwrite_target(%User{id: id}), do: {"member", id}
def resolve_overwrite_target(%Member{user: id}), do: {"member", id}
def resolve_overwrite_target(id) when is_integer(id), do: {:unknown, id}
@typedoc """
All available types that can be resolved into a message id.
"""
@type message_id_resolvable :: Message.t() | Crux.Rest.snowflake()
@doc ~S"""
Resolves a `t:message_id_resolvable/0` into a message id.
## Examples
```elixir
iex> %Crux.Structs.Message{id: 441568727302012928}
...> |> Crux.Rest.Util.resolve_message_id()
441568727302012928
iex> 441568727302012928
...> |> Crux.Rest.Util.resolve_message_id()
441568727302012928
```
"""
@spec resolve_message_id(message :: message_id_resolvable()) :: Crux.Rest.snowflake()
def resolve_message_id(%Message{id: id}), do: id
def resolve_message_id(id) when is_number(id), do: id
@typedoc """
All available types that can be resolved into a channel position.
"""
@type channel_position_resolvable ::
        Channel.t()
        | %{channel: Channel.t(), position: integer()}
        | {Crux.Rest.snowflake(), integer()}
        | %{id: Crux.Rest.snowflake(), position: integer()}
@doc ~S"""
Resolves a `t:channel_position_resolvable/0` into a channel position.
## Examples
```elixir
iex> %Crux.Structs.Channel{id: 222079895583457280, position: 5}
...> |> Crux.Rest.Util.resolve_channel_position()
%{id: 222079895583457280, position: 5}
iex> {%Crux.Structs.Channel{id: 222079895583457280}, 5}
...> |> Crux.Rest.Util.resolve_channel_position()
%{id: 222079895583457280, position: 5}
iex> {222079895583457280, 5}
...> |> Crux.Rest.Util.resolve_channel_position()
%{id: 222079895583457280, position: 5}
iex> %{id: 222079895583457280, position: 5}
...> |> Crux.Rest.Util.resolve_channel_position()
%{id: 222079895583457280, position: 5}
```
"""
@spec resolve_channel_position(channel :: channel_position_resolvable()) :: %{
id: Crux.Rest.snowflake(),
position: integer()
}
def resolve_channel_position({%Channel{id: id}, position}), do: %{id: id, position: position}
def resolve_channel_position(%{channel: %Channel{id: id}, position: position}),
do: %{id: id, position: position}
def resolve_channel_position({id, position}), do: %{id: id, position: position}
def resolve_channel_position(%{id: id, position: position}), do: %{id: id, position: position}
@typedoc """
All available types which can be resolved into a role position.
"""
@type guild_role_position_resolvable ::
{Role.t(), integer()}
| %{id: Crux.Rest.snowflake(), position: integer()}
| {Crux.Rest.snowflake(), integer()}
| %{role: Role.t(), position: integer}
@doc """
Resolves a `t:guild_role_position_resolvable/0` into a role position.
## Examples
```elixir
iex> {%Crux.Structs.Role{id: 373405430589816834}, 5}
...> |> Crux.Rest.Util.resolve_guild_role_position()
%{id: 373405430589816834, position: 5}
iex> %{id: 373405430589816834, position: 5}
...> |> Crux.Rest.Util.resolve_guild_role_position()
%{id: 373405430589816834, position: 5}
iex> %{role: %Crux.Structs.Role{id: 373405430589816834}, position: 5}
...> |> Crux.Rest.Util.resolve_guild_role_position()
%{id: 373405430589816834, position: 5}
iex> {373405430589816834, 5}
...> |> Crux.Rest.Util.resolve_guild_role_position()
%{id: 373405430589816834, position: 5}
```
"""
@spec resolve_guild_role_position(role :: guild_role_position_resolvable()) :: %{
id: Crux.Rest.snowflake(),
position: integer()
}
def resolve_guild_role_position({%Role{id: id}, position}), do: %{id: id, position: position}
def resolve_guild_role_position(%{id: id, position: position}),
do: %{id: id, position: position}
def resolve_guild_role_position(%{role: %Role{id: id}, position: position}),
do: %{
id: id,
position: position
}
def resolve_guild_role_position({id, position}), do: %{id: id, position: position}
end

# source: lib/rest/util.ex
defmodule Asteroid.OAuth2.DeviceAuthorization do
@moduledoc """
Types and convenience functions to work with the device flow
"""
import Asteroid.Utils
defmodule ExpiredTokenError do
@moduledoc """
Error returned when a device code has expired
"""
defexception []
@type t :: %__MODULE__{}
def message(_) do
case astrenv(:api_error_response_verbosity) do
:debug ->
"The device code has expired"
:normal ->
"The device code has expired"
:minimal ->
""
end
end
end
defmodule AuthorizationPendingError do
@moduledoc """
Error returned when a device code is valid but has not been granted access yet by the user
"""
defexception []
@type t :: %__MODULE__{}
def message(_) do
case astrenv(:api_error_response_verbosity) do
:debug ->
"The device code authorization is pending"
:normal ->
"The device code authorization is pending"
:minimal ->
""
end
end
end
defmodule RateLimitedError do
@moduledoc """
Error returned when requests with a device code become rate-limited
The `:retry_after` option is in seconds.
"""
defexception [:retry_after]
@type t :: %__MODULE__{
retry_after: non_neg_integer()
}
def message(%{retry_after: retry_after}) do
case astrenv(:api_error_response_verbosity) do
:debug ->
if is_integer(retry_after) do
"Too many requests (retry after #{to_string(retry_after)} seconds)"
else
"Too many requests"
end
:normal ->
"Too many requests"
:minimal ->
""
end
end
end
alias Asteroid.Context
@type user_code :: String.t()
@type device_code :: String.t()
@doc """
  8-character user code generation function.
  The returned code is drawn from the following alphabet:
  "ABCDEFGHIJKLMNPQRSTUVWXYZ2345678", giving an entropy of 32^8.
"""
@spec user_code(Context.t()) :: String.t()
def user_code(_) do
:crypto.strong_rand_bytes(5)
|> Base.encode32(padding: false)
|> String.replace("O", "8")
end
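  # Illustrative only: the output is random. A call such as user_code(%{})
  # might return "V7Q2M8KA" (5 random bytes, Base32-encoded into 8 characters,
  # with "O" replaced by "8").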
@doc """
Returns `:ok` if the device code is not rate-limited,
`{:error, %Asteroid.OAuth2.DeviceAuthorization.RateLimitedError{}}` otherwise.
"""
@spec rate_limited?(device_code) :: :ok | {:error, RateLimitedError.t()}
def rate_limited?(device_code) do
case astrenv(:oauth2_flow_device_authorization_rate_limiter) do
{module, opts} ->
case module.check(device_code, opts) do
:ok ->
:ok
{:rate_limited, nil} ->
{:error, RateLimitedError.exception([])}
{:rate_limited, retry_after} ->
{:error, RateLimitedError.exception(retry_after: retry_after)}
end
nil ->
:ok
end
end
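  # A minimal sketch of a conforming rate limiter (module name is hypothetical;
  # the check/2 contract is inferred from the case clauses above, and astrenv/1
  # is assumed to read the :asteroid application environment):
  #
  #   defmodule MyApp.DeviceCodeRateLimiter do
  #     def check(_device_code, _opts), do: :ok
  #     # or return {:rate_limited, nil} / {:rate_limited, retry_after_seconds}
  #   end
  #
  #   # config/config.exs
  #   config :asteroid, :oauth2_flow_device_authorization_rate_limiter,
  #     {MyApp.DeviceCodeRateLimiter, []}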
end | lib/asteroid/oauth2/device_authorization.ex | 0.850562 | 0.417925 | device_authorization.ex | starcoder |
defmodule Ecto.Query do
@moduledoc """
This module is the query DSL. Queries are used to fetch data from a repository
(see `Ecto.Repo`).
## Examples
import Ecto.Query
from w in Weather,
where: w.prcp > 0,
select: w.city
The above example will create a query that can be run against a repository.
`from` will bind the variable `w` to the entity `Weather` (see `Ecto.Entity`).
  If there are multiple `from` expressions, the query will run over every
  combination of their entries (a cartesian product). `where` is used to
  filter the results; multiple `where`s can be given. `select` selects which
  results will be returned: a single variable can be given, which returns the
  full entity, or a single field. Multiple fields can also be grouped in lists
  or tuples. Only one `select` expression is allowed.
  External variables and Elixir expressions can be injected into a query
expression with `^`. Anything that isn't inside a `^` expression is treated
as a query expression.
This allows one to create dynamic queries:
def with_minimum(age, height_ft) do
from u in User,
where: u.age > ^age and u.height > ^(height_ft * 3.28)
end
In the example above, we will compare against the `age` given as argument.
  Notice the `select` clause is optional; Ecto will automatically infer and
  return the user record (similar to `select: u`) from the query above.
## Extensions
  Queries are composable and can be extended dynamically. This allows you to
create specific queries given a parameter:
query = from w in Weather, select: w.city
if filter_by_prcp do
query = extend w in query, where: w.prcp > 0
end
Repo.all(query)
Or even create functions that extend an existing query:
def paginate(query, page, size) do
extend query,
limit: size,
offset: (page-1) * size
end
query |> paginate |> Repo.all
## Query expansion
In the examples above, we have used the so-called **keywords query syntax**
to create a query. Our first example:
import Ecto.Query
from w in Weather,
where: w.prcp > 0,
select: w.city
Simply expands to the following **query expressions**:
from(w in Weather) |> where([w], w.prcp > 0) |> select([w], w.city)
Which then expands to:
select(where(from(w in Weather), [w], w.prcp > 0), [w], w.city)
This module documents each of those macros, providing examples both
in the keywords query and in the query expression formats.
"""
@type t :: Query.t
defrecord Query, sources: nil, from: nil, joins: [], wheres: [], select: nil,
order_bys: [], limit: nil, offset: nil, group_bys: [],
havings: [], preloads: []
defrecord QueryExpr, [:expr, :file, :line]
defrecord AssocJoinExpr, [:qual, :expr, :file, :line]
defrecord JoinExpr, [:qual, :source, :on, :file, :line]
alias Ecto.Query.FromBuilder
alias Ecto.Query.WhereBuilder
alias Ecto.Query.SelectBuilder
alias Ecto.Query.OrderByBuilder
alias Ecto.Query.LimitOffsetBuilder
alias Ecto.Query.GroupByBuilder
alias Ecto.Query.HavingBuilder
alias Ecto.Query.PreloadBuilder
alias Ecto.Query.JoinBuilder
alias Ecto.Query.Util
@doc false
defmacro __using__(_) do
quote do
import unquote(__MODULE__), only: [from: 1, from: 2]
end
end
@doc """
Creates a query. It can either be a keyword query or a query expression. If it
  is a keyword query, the first argument should be an `in` expression and the
  second argument a keyword query where the keys are expression types and the
  values are expressions.
If it is a query expression the first argument is the original query and the
second argument the expression.
## Keywords examples
from(City, select: c)
## Expressions examples
from(City) |> select([c], c)
  ## Extending queries
An existing query can be extended with `from` by appending the given
expressions to it.
The existing variables from the original query can be rebound by
  giving the variables on the left-hand side of `in`. The bindings
  are order-dependent: each variable will be bound to the variable
  in the original query that was defined at the same position as
  the binding in its list.
## Examples
def paginate(query, page, size) do
from query,
limit: size,
offset: (page-1) * size
end
  The example above does not rebind any variable, as none are
  required for `limit` and `offset`. However, extending a query with
  a `where` expression requires rebinding:
def published(query) do
from p in query, where: p.published_at != nil
end
Notice we have rebound the term `p`. In case the given query has
more than one `from` expression, each of them must be given in
the order they were bound:
def published_multi(query) do
from [p,o] in query,
where: p.published_at != nil and o.published_at != nil
end
"""
defmacro from(expr, kw) when is_list(kw) do
unless Keyword.keyword?(kw) do
raise Ecto.InvalidQuery, reason: "second argument to from has to be a keyword list"
end
{ binds, expr } = FromBuilder.escape(expr)
quoted = quote do
expr = unquote(expr)
Ecto.Query.check_binds(expr, unquote(length(binds)))
expr
end
build_query(quoted, binds, kw)
end
defmacro from(query, expr) do
FromBuilder.validate_query_from(expr)
{ binds, expr } = FromBuilder.escape(expr)
quote do
query = unquote(query)
Ecto.Query.check_binds(query, unquote(length(binds)))
Util.merge(query, :from, unquote(expr))
end
end
@doc """
Creates a query with a from query expression.
## Examples
from(c in City)
"""
defmacro from(kw) when is_list(kw) do
quote do
Ecto.Query.from(Ecto.Query.Query[], unquote(kw))
end
end
defmacro from(expr) do
{ binds, expr } = FromBuilder.escape(expr)
quote do
expr = unquote(expr)
Ecto.Query.check_binds(expr, unquote(length(binds)))
expr
end
end
@doc """
A join query expression. Receives an entity that is to be joined to the query
and a condition to do the joining on. The join condition can be any expression
  that evaluates to a boolean value. The join is an inner join by default; the
  qualifier can be changed by giving one of the atoms `:inner`, `:left`, `:right` or
`:full`. For a keyword query the `:join` keyword can be changed to:
`:inner_join`, `:left_join`, `:right_join` or `:full_join`.
  The join condition can be set automatically when doing an association join. An
  association join can be done on any association field (`has_many`, `has_one`,
  `belongs_to`).
## Keywords examples
from c in Comment,
join: p in Post, on: c.post_id == p.id,
select: { p.title, c.text }
from p in Post,
left_join: c in p.comments,
select: { p, c }
## Expressions examples
from(Comment)
|> join(:inner, [c], p in Post, c.post_id == p.id)
|> select([c, p], { p.title, c.text })
from(Post)
|> join(:left, [p], c in p.comments)
|> select([p, c], { p, c })
"""
defmacro join(query, qual, binding, expr, on // nil) do
binding = Util.escape_binding(binding)
{ expr_bindings, join_expr } = JoinBuilder.escape(expr, binding)
is_assoc = Ecto.Associations.assoc_join?(join_expr)
unless is_assoc == nil?(on) do
raise Ecto.InvalidQuery, reason: "`join` expression requires explicit `on` " <>
"expression unless association join expression"
end
if (bind = Enum.first(expr_bindings)) && bind in binding do
raise Ecto.InvalidQuery, reason: "variable `#{bind}` is already defined in query"
end
on_expr = if on do
binds = binding ++ expr_bindings
WhereBuilder.escape(on, binds, bind)
end
quote do
query = unquote(query)
qual = unquote(qual)
join_expr = unquote(join_expr)
Ecto.Query.check_binds(query, unquote(length(binding)))
JoinBuilder.validate_qual(qual)
var!(count_entities, Ecto.Query) = Util.count_entities(query)
if unquote(is_assoc) do
join = AssocJoinExpr[qual: qual, expr: join_expr, file: __ENV__.file, line: __ENV__.line]
else
on = QueryExpr[expr: unquote(on_expr), file: __ENV__.file, line: __ENV__.line]
join = JoinExpr[qual: qual, source: join_expr, on: on, file: __ENV__.file, line: __ENV__.line]
end
Util.merge(query, :join, join)
end
end
@doc """
A select query expression. Selects which fields will be selected from the
  entity and any transformations that should be performed on the fields; any
expression that is accepted in a query can be a select field. There can only
be one select expression in a query, if the select expression is omitted, the
query will by default select the full entity (only works when there is a
single from expression and no group by).
The sub-expressions in the query can be wrapped in lists or tuples as shown in
the examples. A full entity can also be selected if the entity variable is the
only thing in the expression.
The `assoc/2` selector can be used to load an association on a parent entity
as shown in the examples below. The first argument to `assoc` has to be a
variable bound in the `from` query expression, the second has to be a variable
bound in an association join on the `from` variable.
## Keywords examples
from(c in City, select: c) # selects the entire entity
from(c in City, select: { c.name, c.population })
from(c in City, select: [c.name, c.county])
from(c in City, select: { c.name, to_binary(40 + 2), 43 })
from(p in Post, join: c in p.comments, select: assoc(p, c))
## Expressions examples
from(c in City) |> select([c], c)
from(c in City) |> select([c], { c.name, c.country })
"""
defmacro select(query, binding, expr) do
binding = Util.escape_binding(binding)
quote do
query = unquote(query)
Ecto.Query.check_binds(query, unquote(length(binding)))
select_expr = unquote(SelectBuilder.escape(expr, binding))
select = QueryExpr[expr: select_expr, file: __ENV__.file, line: __ENV__.line]
Util.merge(query, :select, select)
end
end
@doc """
  A where query expression. Filters the rows from the entity. If more than one
  where expression is given, they will be combined in conjunction. A where
  expression has to evaluate to a boolean value.
## Keywords examples
from(c in City, where: c.state == "Sweden")
## Expressions examples
from(c in City) |> where([c], c.state == "Sweden")
"""
defmacro where(query, binding, expr) do
binding = Util.escape_binding(binding)
quote do
query = unquote(query)
Ecto.Query.check_binds(query, unquote(length(binding)))
where_expr = unquote(WhereBuilder.escape(expr, binding))
where = QueryExpr[expr: where_expr, file: __ENV__.file, line: __ENV__.line]
Util.merge(query, :where, where)
end
end
@doc """
  An order by query expression. Orders the results based on one or more entity
  fields. It accepts a single field or a list of fields; the direction can be
  specified in a keyword list as shown in the examples. There can be several
order by expressions in a query.
## Keywords examples
from(c in City, order_by: c.name, order_by: c.population)
from(c in City, order_by: [c.name, c.population])
from(c in City, order_by: [asc: c.name, desc: c.population])
## Expressions examples
from(c in City) |> order_by([c], asc: c.name, desc: c.population)
"""
defmacro order_by(query, binding, expr) do
binding = Util.escape_binding(binding)
quote do
query = unquote(query)
Ecto.Query.check_binds(query, unquote(length(binding)))
expr = unquote(OrderByBuilder.escape(expr, binding))
order_by = QueryExpr[expr: expr, file: __ENV__.file, line: __ENV__.line]
Util.merge(query, :order_by, order_by)
end
end
@doc """
A limit query expression. Limits the number of rows selected from the entity.
  Can be any expression but has to evaluate to an integer value. Can't include
entity fields.
## Keywords examples
from(u in User, where: u.id == current_user, limit: 1)
## Expressions examples
from(u in User) |> where(u.id == current_user) |> limit(1)
"""
defmacro limit(query, binding // [], expr) do
quote do
query = unquote(query)
Ecto.Query.check_binds(query, unquote(length(binding)))
expr = unquote(expr)
LimitOffsetBuilder.validate(expr)
Util.merge(query, :limit, expr)
end
end
@doc """
  An offset query expression. Skips the given number of rows from the result
  set. Can be any expression but has to evaluate to an integer value.
Can't include entity fields.
## Keywords examples
# Get all posts on page 4
from(p in Post, limit: 10, offset: 30)
## Expressions examples
from(p in Post) |> limit(10) |> offset(30)
"""
defmacro offset(query, binding // [], expr) do
quote do
query = unquote(query)
Ecto.Query.check_binds(query, unquote(length(binding)))
expr = unquote(expr)
LimitOffsetBuilder.validate(expr)
Util.merge(query, :offset, expr)
end
end
@doc """
A group by query expression. Groups together rows from the entity that have
the same values in the given fields. Using `group_by` "groups" the query
giving it different semantics in the `select` expression. If a query is
  grouped, only fields that were referenced in the `group_by` can be used in the
  `select`, unless the field is given as an argument to an aggregate function.
## Keywords examples
# Returns the number of posts in each category
from(p in Post,
group_by: p.category,
select: { p.category, count(p.id) })
# Group on all fields on the Post entity
from(p in Post,
group_by: p,
select: p)
## Expressions examples
from(Post) |> group_by([p], p.category) |> select([p], count(p.id))
"""
defmacro group_by(query, binding, expr) do
binding = Util.escape_binding(binding)
quote do
query = unquote(query)
Ecto.Query.check_binds(query, unquote(length(binding)))
expr = unquote(GroupByBuilder.escape(expr, binding))
group_by = QueryExpr[expr: expr, file: __ENV__.file, line: __ENV__.line]
Util.merge(query, :group_by, group_by)
end
end
@doc """
  A having query expression. Like `where`, `having` filters rows from the entity,
  but after the grouping is performed, giving it the same semantics as `select`
for a grouped query (see `group_by/3`). `having` groups the query even if the
query has no `group_by` expression.
## Keywords examples
# Returns the number of posts in each category where the
# average number of comments is above ten
from(p in Post,
group_by: p.category,
having: avg(p.num_comments) > 10,
select: { p.category, count(p.id) })
## Expressions examples
from(Post)
|> group_by([p], p.category)
|> having([p], avg(p.num_comments) > 10)
|> select([p], count(p.id))
"""
defmacro having(query, binding, expr) do
binding = Util.escape_binding(binding)
quote do
query = unquote(query)
Ecto.Query.check_binds(query, unquote(length(binding)))
having_expr = unquote(HavingBuilder.escape(expr, binding))
having = QueryExpr[expr: having_expr, file: __ENV__.file, line: __ENV__.line]
Util.merge(query, :having, having)
end
end
@doc """
A preload query expression. Preloads the specified fields on the entity in the
from expression. Loads all associated records for each entity in the result
set based on the association. The fields have to be association fields and the
entity has to be in the select expression.
## Keywords examples
# Returns all posts and their associated comments
from(p in Post,
preload: [:comments],
select: p)
## Expressions examples
from(Post) |> preload(:comments) |> select([p], p)
"""
defmacro preload(query, binding // [], expr) do
expr = List.wrap(expr)
PreloadBuilder.validate(expr)
quote do
query = unquote(query)
Ecto.Query.check_binds(query, unquote(length(binding)))
preload_expr = unquote(expr)
preload = QueryExpr[expr: preload_expr, file: __ENV__.file, line: __ENV__.line]
Util.merge(query, :preload, preload)
end
end
@doc false
def check_binds(queryable, count_binds) do
query = Ecto.Queryable.to_query(queryable)
if count_binds > 1 and count_binds > Util.count_entities(query) do
raise Ecto.InvalidQuery, reason: "more binds specified than there are models on query"
end
end
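  # For example, `from([p, c] in query, ...)` over a query with a single bound
  # model raises Ecto.InvalidQuery here, because two binds were given for one
  # entity.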
defrecord KwState, [:quoted, :binds]
# Builds the quoted code for creating a keyword query
defp build_query(quoted, binds, kw) do
state = KwState[quoted: quoted, binds: binds]
Enum.reduce(kw, state, &build_query_type(&1, &2)).quoted
end
defp build_query_type({ :from, expr }, KwState[] = state) do
FromBuilder.validate_query_from(expr)
{ [bind], expr } = FromBuilder.escape(expr)
if bind != :_ and bind in state.binds do
raise Ecto.InvalidQuery, reason: "variable `#{bind}` is already defined in query"
end
quoted = quote do
Util.merge(unquote(state.quoted), :from, unquote(expr))
end
state.quoted(quoted).binds(state.binds ++ [bind])
end
@joins [:join, :inner_join, :left_join, :right_join, :full_join]
defp build_query_type({ join, expr }, state) when join in @joins do
case join do
:join -> build_join(:inner, expr, state)
:inner_join -> build_join(:inner, expr, state)
:left_join -> build_join(:left, expr, state)
:right_join -> build_join(:right, expr, state)
:full_join -> build_join(:full, expr, state)
end
end
defp build_query_type({ :on, expr }, KwState[] = state) do
quoted = quote do
expr = unquote(WhereBuilder.escape(expr, state.binds))
on = QueryExpr[expr: expr, file: __ENV__.file, line: __ENV__.line]
Util.merge(unquote(state.quoted), :on, on)
end
state.quoted(quoted)
end
defp build_query_type({ type, expr }, KwState[] = state) do
quoted = quote do
Ecto.Query.unquote(type)(unquote(state.quoted), unquote(state.binds), unquote(expr))
end
state.quoted(quoted)
end
defp build_join(qual, expr, KwState[] = state) do
{ binds, expr } = JoinBuilder.escape(expr, state.binds)
if (bind = Enum.first(binds)) && bind != :_ && bind in state.binds do
raise Ecto.InvalidQuery, reason: "variable `#{bind}` is already defined in query"
end
is_assoc = Ecto.Associations.assoc_join?(expr)
quoted = quote do
qual = unquote(qual)
expr = unquote(expr)
if unquote(is_assoc) do
join = AssocJoinExpr[qual: qual, expr: expr, file: __ENV__.file, line: __ENV__.line]
else
join = JoinExpr[qual: qual, source: expr, file: __ENV__.file, line: __ENV__.line]
end
Util.merge(unquote(state.quoted), :join, join)
end
state.quoted(quoted).binds(state.binds ++ [bind])
end
end | lib/ecto/query.ex | 0.92597 | 0.812012 | query.ex | starcoder |
defmodule AWS.GlobalAccelerator do
@moduledoc """
AWS Global Accelerator
This is the *AWS Global Accelerator API Reference*.
This guide is for developers who need detailed information about AWS Global
Accelerator API actions, data types, and errors. For more information about
Global Accelerator features, see the [AWS Global Accelerator Developer Guide](https://docs.aws.amazon.com/global-accelerator/latest/dg/Welcome.html).
AWS Global Accelerator is a service in which you create accelerators to improve
availability and performance of your applications for local and global users.
You must specify the US West (Oregon) Region to create or update accelerators.
By default, Global Accelerator provides you with static IP addresses that you
associate with your accelerator. (Instead of using the IP addresses that Global
Accelerator provides, you can configure these entry points to be IPv4 addresses
from your own IP address ranges that you bring to Global Accelerator.) The
static IP addresses are anycast from the AWS edge network and distribute
incoming application traffic across multiple endpoint resources in multiple AWS
Regions, which increases the availability of your applications. Endpoints can be
Network Load Balancers, Application Load Balancers, EC2 instances, or Elastic IP
addresses that are located in one AWS Region or multiple Regions.
Global Accelerator uses the AWS global network to route traffic to the optimal
regional endpoint based on health, client location, and policies that you
configure. The service reacts instantly to changes in health or configuration to
ensure that internet traffic from clients is directed to only healthy endpoints.
Global Accelerator includes components that work together to help you improve
performance and availability for your applications:
## Definitions
### Static IP address
By default, AWS Global Accelerator provides you with a set of static IP
addresses that are anycast from the AWS edge network and serve as the single
fixed entry points for your clients. Or you can configure these entry points to
be IPv4 addresses from your own IP address ranges that you bring to Global
Accelerator (BYOIP). For more information, see [Bring Your Own IP Addresses (BYOIP)](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html)
in the *AWS Global Accelerator Developer Guide*. If you already have load
balancers, EC2 instances, or Elastic IP addresses set up for your applications,
you can easily add those to Global Accelerator to allow the resources to be
accessed by the static IP addresses.
The static IP addresses remain assigned to your accelerator for as long as it
exists, even if you disable the accelerator and it no longer accepts or routes
traffic. However, when you *delete* an accelerator, you lose the static IP
addresses that are assigned to it, so you can no longer route traffic by using
them. You can use IAM policies with Global Accelerator to limit the users who
have permissions to delete an accelerator. For more information, see
[Authentication and Access Control](https://docs.aws.amazon.com/global-accelerator/latest/dg/auth-and-access-control.html)
in the *AWS Global Accelerator Developer Guide*.
### Accelerator
An accelerator directs traffic to optimal endpoints over the AWS global network
to improve availability and performance for your internet applications that have
a global audience. Each accelerator includes one or more listeners.
### DNS name
Global Accelerator assigns each accelerator a default Domain Name System (DNS)
name, similar to `a1234567890abcdef.awsglobalaccelerator.com`, that points to
your Global Accelerator static IP addresses. Depending on the use case, you can
use your accelerator's static IP addresses or DNS name to route traffic to your
accelerator, or set up DNS records to route traffic using your own custom domain
name.
### Network zone
A network zone services the static IP addresses for your accelerator from a
unique IP subnet. Similar to an AWS Availability Zone, a network zone is an
isolated unit with its own set of physical infrastructure. When you configure an
accelerator, by default, Global Accelerator allocates two IPv4 addresses for it.
If one IP address from a network zone becomes unavailable due to IP address
blocking by certain client networks, or network disruptions, then client
applications can retry on the healthy static IP address from the other isolated
network zone.
### Listener
A listener processes inbound connections from clients to Global Accelerator,
based on the protocol and port that you configure. Each listener has one or more
endpoint groups associated with it, and traffic is forwarded to endpoints in one
of the groups. You associate endpoint groups with listeners by specifying the
Regions that you want to distribute traffic to. Traffic is distributed to
optimal endpoints within the endpoint groups associated with a listener.
### Endpoint group
Each endpoint group is associated with a specific AWS Region. Endpoint groups
include one or more endpoints in the Region. You can increase or reduce the
percentage of traffic that would be otherwise directed to an endpoint group by
adjusting a setting called a *traffic dial*. The traffic dial lets you easily do
performance testing or blue/green deployment testing for new releases across
different AWS Regions, for example.
### Endpoint
An endpoint is a Network Load Balancer, Application Load Balancer, EC2 instance,
or Elastic IP address. Traffic is routed to endpoints based on several factors,
including the geo-proximity to the user, the health of the endpoint, and the
configuration options that you choose, such as endpoint weights. For each
endpoint, you can configure weights, which are numbers that you can use to
specify the proportion of traffic to route to each one. This can be useful, for
example, to do performance testing within a Region.
"""
@doc """
Advertises an IPv4 address range that is provisioned for use with your AWS
resources through bring your own IP addresses (BYOIP).
It can take a few minutes before traffic to the specified addresses starts
routing to AWS because of propagation delays. To see an AWS CLI example of
advertising an address range, scroll down to **Example**.
To stop advertising the BYOIP address range, use [
WithdrawByoipCidr](https://docs.aws.amazon.com/global-accelerator/latest/api/WithdrawByoipCidr.html).
For more information, see [Bring Your Own IP Addresses (BYOIP)](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def advertise_byoip_cidr(client, input, options \\ []) do
request(client, "AdvertiseByoipCidr", input, options)
end
@doc """
Create an accelerator.
An accelerator includes one or more listeners that process inbound connections
and direct traffic to one or more endpoint groups, each of which includes
endpoints, such as Network Load Balancers. To see an AWS CLI example of creating
an accelerator, scroll down to **Example**.
If you bring your own IP address ranges to AWS Global Accelerator (BYOIP), you
can assign IP addresses from your own pool to your accelerator as the static IP
address entry points. Only one IP address from each of your IP address ranges
can be used for each accelerator.
You must specify the US West (Oregon) Region to create or update accelerators.
"""
def create_accelerator(client, input, options \\ []) do
request(client, "CreateAccelerator", input, options)
end
@doc """
Create an endpoint group for the specified listener.
An endpoint group is a collection of endpoints in one AWS Region. To see an AWS
CLI example of creating an endpoint group, scroll down to **Example**.
"""
def create_endpoint_group(client, input, options \\ []) do
request(client, "CreateEndpointGroup", input, options)
end
@doc """
Create a listener to process inbound connections from clients to an accelerator.
Connections arrive to assigned static IP addresses on a port, port range, or
list of port ranges that you specify. To see an AWS CLI example of creating a
listener, scroll down to **Example**.
"""
def create_listener(client, input, options \\ []) do
request(client, "CreateListener", input, options)
end
@doc """
Delete an accelerator.
Before you can delete an accelerator, you must disable it and remove all
dependent resources (listeners and endpoint groups). To disable the accelerator,
update the accelerator to set `Enabled` to false.
When you create an accelerator, by default, Global Accelerator provides you with
a set of two static IP addresses. Alternatively, you can bring your own IP
address ranges to Global Accelerator and assign IP addresses from those ranges.
The IP addresses are assigned to your accelerator for as long as it exists, even
if you disable the accelerator and it no longer accepts or routes traffic.
However, when you *delete* an accelerator, you lose the static IP addresses that
are assigned to the accelerator, so you can no longer route traffic by using
them. As a best practice, ensure that you have permissions in place to avoid
inadvertently deleting accelerators. You can use IAM policies with Global
Accelerator to limit the users who have permissions to delete an accelerator.
For more information, see [Authentication and Access Control](https://docs.aws.amazon.com/global-accelerator/latest/dg/auth-and-access-control.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def delete_accelerator(client, input, options \\ []) do
request(client, "DeleteAccelerator", input, options)
end
@doc """
Delete an endpoint group from a listener.
"""
def delete_endpoint_group(client, input, options \\ []) do
request(client, "DeleteEndpointGroup", input, options)
end
@doc """
Delete a listener from an accelerator.
"""
def delete_listener(client, input, options \\ []) do
request(client, "DeleteListener", input, options)
end
@doc """
Releases the specified address range that you provisioned to use with your AWS
resources through bring your own IP addresses (BYOIP) and deletes the
corresponding address pool.
To see an AWS CLI example of deprovisioning an address range, scroll down to
**Example**.
Before you can release an address range, you must stop advertising it by using
[WithdrawByoipCidr](https://docs.aws.amazon.com/global-accelerator/latest/api/WithdrawByoipCidr.html) and you must not have any accelerators that are using static IP addresses
allocated from its address range.
For more information, see [Bring Your Own IP Addresses
(BYOIP)](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def deprovision_byoip_cidr(client, input, options \\ []) do
request(client, "DeprovisionByoipCidr", input, options)
end
@doc """
Describe an accelerator.
To see an AWS CLI example of describing an accelerator, scroll down to
**Example**.
"""
def describe_accelerator(client, input, options \\ []) do
request(client, "DescribeAccelerator", input, options)
end
@doc """
Describe the attributes of an accelerator.
To see an AWS CLI example of describing the attributes of an accelerator, scroll
down to **Example**.
"""
def describe_accelerator_attributes(client, input, options \\ []) do
request(client, "DescribeAcceleratorAttributes", input, options)
end
@doc """
Describe an endpoint group.
To see an AWS CLI example of describing an endpoint group, scroll down to
**Example**.
"""
def describe_endpoint_group(client, input, options \\ []) do
request(client, "DescribeEndpointGroup", input, options)
end
@doc """
Describe a listener.
To see an AWS CLI example of describing a listener, scroll down to **Example**.
"""
def describe_listener(client, input, options \\ []) do
request(client, "DescribeListener", input, options)
end
@doc """
List the accelerators for an AWS account.
To see an AWS CLI example of listing the accelerators for an AWS account, scroll
down to **Example**.
"""
def list_accelerators(client, input, options \\ []) do
request(client, "ListAccelerators", input, options)
end
@doc """
Lists the IP address ranges that were specified in calls to
[ProvisionByoipCidr](https://docs.aws.amazon.com/global-accelerator/latest/api/ProvisionByoipCidr.html),
including the current state and a history of state changes.
To see an AWS CLI example of listing BYOIP CIDR addresses, scroll down to
**Example**.
"""
def list_byoip_cidrs(client, input, options \\ []) do
request(client, "ListByoipCidrs", input, options)
end
@doc """
List the endpoint groups that are associated with a listener.
To see an AWS CLI example of listing the endpoint groups for listener, scroll
down to **Example**.
"""
def list_endpoint_groups(client, input, options \\ []) do
request(client, "ListEndpointGroups", input, options)
end
@doc """
List the listeners for an accelerator.
To see an AWS CLI example of listing the listeners for an accelerator, scroll
down to **Example**.
"""
def list_listeners(client, input, options \\ []) do
request(client, "ListListeners", input, options)
end
@doc """
List all tags for an accelerator.
To see an AWS CLI example of listing tags for an accelerator, scroll down to
**Example**.
For more information, see [Tagging in AWS Global Accelerator](https://docs.aws.amazon.com/global-accelerator/latest/dg/tagging-in-global-accelerator.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Provisions an IP address range to use with your AWS resources through bring your
own IP addresses (BYOIP) and creates a corresponding address pool.
After the address range is provisioned, it is ready to be advertised using [
AdvertiseByoipCidr](https://docs.aws.amazon.com/global-accelerator/latest/api/AdvertiseByoipCidr.html).
To see an AWS CLI example of provisioning an address range for BYOIP, scroll
down to **Example**.
For more information, see [Bring Your Own IP Addresses (BYOIP)](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def provision_byoip_cidr(client, input, options \\ []) do
request(client, "ProvisionByoipCidr", input, options)
end
@doc """
Add tags to an accelerator resource.
To see an AWS CLI example of adding tags to an accelerator, scroll down to
**Example**.
For more information, see [Tagging in AWS Global Accelerator](https://docs.aws.amazon.com/global-accelerator/latest/dg/tagging-in-global-accelerator.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Remove tags from a Global Accelerator resource.
When you specify a tag key, the action removes both that key and its associated
value. To see an AWS CLI example of removing tags from an accelerator, scroll
down to **Example**. The operation succeeds even if you attempt to remove tags
from an accelerator that was already removed.
For more information, see [Tagging in AWS Global Accelerator](https://docs.aws.amazon.com/global-accelerator/latest/dg/tagging-in-global-accelerator.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Update an accelerator.
To see an AWS CLI example of updating an accelerator, scroll down to
**Example**.
You must specify the US West (Oregon) Region to create or update accelerators.
"""
def update_accelerator(client, input, options \\ []) do
request(client, "UpdateAccelerator", input, options)
end
@doc """
Update the attributes for an accelerator.
To see an AWS CLI example of updating an accelerator to enable flow logs, scroll
down to **Example**.
"""
def update_accelerator_attributes(client, input, options \\ []) do
request(client, "UpdateAcceleratorAttributes", input, options)
end
@doc """
Update an endpoint group.
To see an AWS CLI example of updating an endpoint group, scroll down to
**Example**.
"""
def update_endpoint_group(client, input, options \\ []) do
request(client, "UpdateEndpointGroup", input, options)
end
@doc """
Update a listener.
To see an AWS CLI example of updating listener, scroll down to **Example**.
"""
def update_listener(client, input, options \\ []) do
request(client, "UpdateListener", input, options)
end
@doc """
Stops advertising an address range that is provisioned as an address pool.
You can perform this operation at most once every 10 seconds, even if you
specify different address ranges each time. To see an AWS CLI example of
withdrawing an address range for BYOIP so it will no longer be advertised by
AWS, scroll down to **Example**.
It can take a few minutes before traffic to the specified addresses stops
routing to AWS because of propagation delays.
For more information, see [Bring Your Own IP Addresses (BYOIP)](https://docs.aws.amazon.com/global-accelerator/latest/dg/using-byoip.html)
in the *AWS Global Accelerator Developer Guide*.
"""
def withdraw_byoip_cidr(client, input, options \\ []) do
request(client, "WithdrawByoipCidr", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "globalaccelerator"}
host = build_host("globalaccelerator", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "GlobalAccelerator_V20180706.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
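  # Return shapes from the clauses above: a 200 response decodes the JSON body
  # (nil when the body is empty) and yields {:ok, body, response}; any other
  # status yields {:error, {:unexpected_response, response}}; transport
  # failures pass through as {:error, reason}.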
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end | lib/aws/generated/global_accelerator.ex | 0.900073 | 0.738121 | global_accelerator.ex | starcoder |
defmodule Scenic.Primitive.Style.Paint.Color do
@moduledoc false
# ============================================================================
# data verification and serialization
# --------------------------------------------------------
# verify that a color is correctly described
def verify(color) do
try do
normalize(color)
true
rescue
_ -> false
end
end
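  # For example, verify(:red) and verify({0, 128, 255}) return true, while
  # verify(:no_such_color) returns false (normalize/1 raises and is rescued).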
# --------------------------------------------------------
# single color
def normalize(color) when is_atom(color), do: to_rgba(color)
def normalize({color, alpha}) when is_atom(color) and is_integer(alpha),
do: to_rgba({color, alpha})
def normalize({r, g, b}) when is_integer(r) and is_integer(g) and is_integer(b),
do: to_rgba({r, g, b})
def normalize({r, g, b, a})
when is_integer(r) and is_integer(g) and is_integer(b) and is_integer(a),
do: to_rgba({r, g, b, a})
# ============================================================================
# https://www.w3schools.com/colors/colors_names.asp
defguard is_uint8(x) when is_integer(x) and x >= 0 and x <= 255
def to_rgba({:transparent, _}), do: to_rgba(:transparent)
def to_rgba(:transparent), do: {0x80, 0x80, 0x80, 0x00}
def to_rgba({:clear, _}), do: to_rgba(:transparent)
def to_rgba(:clear), do: to_rgba(:transparent)
def to_rgba({r, g, b}), do: {r, g, b, 0xFF}
def to_rgba({r, g, b, a})
when is_uint8(r) and is_uint8(g) and is_uint8(b) and is_uint8(a) do
{r, g, b, a}
end
def to_rgba(<<r::size(8), g::size(8), b::size(8), a::size(8)>>), do: {r, g, b, a}
def to_rgba(named_color) when is_atom(named_color) do
name_to_rgb(named_color)
|> to_rgba()
end
def to_rgba({named_color, alpha})
when is_atom(named_color) and is_integer(alpha) and alpha >= 0 and alpha <= 255 do
{r, g, b} = name_to_rgb(named_color)
{r, g, b, alpha}
end
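  # Illustrative results of the clauses above:
  #
  #   to_rgba(:red)            #=> {0xFF, 0x00, 0x00, 0xFF}
  #   to_rgba({:red, 0x80})    #=> {0xFF, 0x00, 0x00, 0x80}
  #   to_rgba({1, 2, 3})       #=> {1, 2, 3, 0xFF}
  #   to_rgba(<<1, 2, 3, 4>>)  #=> {1, 2, 3, 4}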
def name_to_rgb(:alice_blue), do: {0xF0, 0xF8, 0xFF}
def name_to_rgb(:antique_white), do: {0xFA, 0xEB, 0xD7}
def name_to_rgb(:aqua), do: {0x00, 0xFF, 0xFF}
def name_to_rgb(:aquamarine), do: {0x7F, 0xFF, 0xD4}
def name_to_rgb(:azure), do: {0xF0, 0xFF, 0xFF}
def name_to_rgb(:beige), do: {0xF5, 0xF5, 0xDC}
def name_to_rgb(:bisque), do: {0xFF, 0xE4, 0xC4}
def name_to_rgb(:black), do: {0x00, 0x00, 0x00}
def name_to_rgb(:blanched_almond), do: {0xFF, 0xEB, 0xCD}
def name_to_rgb(:blue), do: {0x00, 0x00, 0xFF}
def name_to_rgb(:blue_violet), do: {0x8A, 0x2B, 0xE2}
def name_to_rgb(:brown), do: {0xA5, 0x2A, 0x2A}
def name_to_rgb(:burly_wood), do: {0xDE, 0xB8, 0x87}
def name_to_rgb(:cadet_blue), do: {0x5F, 0x9E, 0xA0}
def name_to_rgb(:chartreuse), do: {0x7F, 0xFF, 0x00}
def name_to_rgb(:chocolate), do: {0xD2, 0x69, 0x1E}
def name_to_rgb(:coral), do: {0xFF, 0x7F, 0x50}
def name_to_rgb(:cornflower_blue), do: {0x64, 0x95, 0xED}
def name_to_rgb(:cornsilk), do: {0xFF, 0xF8, 0xDC}
def name_to_rgb(:crimson), do: {0xDC, 0x14, 0x3C}
def name_to_rgb(:cyan), do: {0x00, 0xFF, 0xFF}
def name_to_rgb(:dark_blue), do: {0x00, 0x00, 0x8B}
def name_to_rgb(:dark_cyan), do: {0x00, 0x8B, 0x8B}
def name_to_rgb(:dark_golden_rod), do: {0xB8, 0x86, 0x0B}
def name_to_rgb(:dark_gray), do: {0xA9, 0xA9, 0xA9}
def name_to_rgb(:dark_grey), do: {0xA9, 0xA9, 0xA9}
def name_to_rgb(:dark_green), do: {0x00, 0x64, 0x00}
def name_to_rgb(:dark_khaki), do: {0xBD, 0xB7, 0x6B}
def name_to_rgb(:dark_magenta), do: {0x8B, 0x00, 0x8B}
def name_to_rgb(:dark_olive_green), do: {0x55, 0x6B, 0x2F}
def name_to_rgb(:dark_orange), do: {0xFF, 0x8C, 0x00}
def name_to_rgb(:dark_orchid), do: {0x99, 0x32, 0xCC}
def name_to_rgb(:dark_red), do: {0x8B, 0x00, 0x00}
def name_to_rgb(:dark_salmon), do: {0xE9, 0x96, 0x7A}
def name_to_rgb(:dark_sea_green), do: {0x8F, 0xBC, 0x8F}
def name_to_rgb(:dark_slate_blue), do: {0x48, 0x3D, 0x8B}
def name_to_rgb(:dark_slate_gray), do: {0x2F, 0x4F, 0x4F}
def name_to_rgb(:dark_slate_grey), do: {0x2F, 0x4F, 0x4F}
def name_to_rgb(:dark_turquoise), do: {0x00, 0xCE, 0xD1}
def name_to_rgb(:dark_violet), do: {0x94, 0x00, 0xD3}
def name_to_rgb(:deep_pink), do: {0xFF, 0x14, 0x93}
def name_to_rgb(:deep_sky_blue), do: {0x00, 0xBF, 0xFF}
def name_to_rgb(:dim_gray), do: {0x69, 0x69, 0x69}
def name_to_rgb(:dim_grey), do: {0x69, 0x69, 0x69}
def name_to_rgb(:dodger_blue), do: {0x1E, 0x90, 0xFF}
def name_to_rgb(:fire_brick), do: {0xB2, 0x22, 0x22}
def name_to_rgb(:floral_white), do: {0xFF, 0xFA, 0xF0}
def name_to_rgb(:forest_green), do: {0x22, 0x8B, 0x22}
def name_to_rgb(:fuchsia), do: {0xFF, 0x00, 0xFF}
def name_to_rgb(:gainsboro), do: {0xDC, 0xDC, 0xDC}
def name_to_rgb(:ghost_white), do: {0xF8, 0xF8, 0xFF}
def name_to_rgb(:gold), do: {0xFF, 0xD7, 0x00}
def name_to_rgb(:golden_rod), do: {0xDA, 0xA5, 0x20}
def name_to_rgb(:gray), do: {0x80, 0x80, 0x80}
def name_to_rgb(:grey), do: {0x80, 0x80, 0x80}
def name_to_rgb(:green), do: {0x00, 0x80, 0x00}
def name_to_rgb(:green_yellow), do: {0xAD, 0xFF, 0x2F}
def name_to_rgb(:honey_dew), do: {0xF0, 0xFF, 0xF0}
def name_to_rgb(:hot_pink), do: {0xFF, 0x69, 0xB4}
def name_to_rgb(:indian_red), do: {0xCD, 0x5C, 0x5C}
def name_to_rgb(:indigo), do: {0x4B, 0x00, 0x82}
def name_to_rgb(:ivory), do: {0xFF, 0xFF, 0xF0}
def name_to_rgb(:khaki), do: {0xF0, 0xE6, 0x8C}
def name_to_rgb(:lavender), do: {0xE6, 0xE6, 0xFA}
def name_to_rgb(:lavender_blush), do: {0xFF, 0xF0, 0xF5}
def name_to_rgb(:lawn_green), do: {0x7C, 0xFC, 0x00}
def name_to_rgb(:lemon_chiffon), do: {0xFF, 0xFA, 0xCD}
def name_to_rgb(:light_blue), do: {0xAD, 0xD8, 0xE6}
def name_to_rgb(:light_coral), do: {0xF0, 0x80, 0x80}
def name_to_rgb(:light_cyan), do: {0xE0, 0xFF, 0xFF}
def name_to_rgb(:light_golden_rod_yellow), do: {0xFA, 0xFA, 0xD2}
def name_to_rgb(:light_gray), do: {0xD3, 0xD3, 0xD3}
def name_to_rgb(:light_grey), do: {0xD3, 0xD3, 0xD3}
def name_to_rgb(:light_green), do: {0x90, 0xEE, 0x90}
def name_to_rgb(:light_pink), do: {0xFF, 0xB6, 0xC1}
def name_to_rgb(:light_salmon), do: {0xFF, 0xA0, 0x7A}
def name_to_rgb(:light_sea_green), do: {0x20, 0xB2, 0xAA}
def name_to_rgb(:light_sky_blue), do: {0x87, 0xCE, 0xFA}
def name_to_rgb(:light_slate_gray), do: {0x77, 0x88, 0x99}
def name_to_rgb(:light_slate_grey), do: {0x77, 0x88, 0x99}
def name_to_rgb(:light_steel_blue), do: {0xB0, 0xC4, 0xDE}
def name_to_rgb(:light_yellow), do: {0xFF, 0xFF, 0xE0}
def name_to_rgb(:lime), do: {0x00, 0xFF, 0x00}
def name_to_rgb(:lime_green), do: {0x32, 0xCD, 0x32}
def name_to_rgb(:linen), do: {0xFA, 0xF0, 0xE6}
def name_to_rgb(:magenta), do: {0xFF, 0x00, 0xFF}
def name_to_rgb(:maroon), do: {0x80, 0x00, 0x00}
def name_to_rgb(:medium_aqua_marine), do: {0x66, 0xCD, 0xAA}
def name_to_rgb(:medium_blue), do: {0x00, 0x00, 0xCD}
def name_to_rgb(:medium_orchid), do: {0xBA, 0x55, 0xD3}
def name_to_rgb(:medium_purple), do: {0x93, 0x70, 0xDB}
def name_to_rgb(:medium_sea_green), do: {0x3C, 0xB3, 0x71}
def name_to_rgb(:medium_slate_blue), do: {0x7B, 0x68, 0xEE}
def name_to_rgb(:medium_spring_green), do: {0x00, 0xFA, 0x9A}
def name_to_rgb(:medium_turquoise), do: {0x48, 0xD1, 0xCC}
def name_to_rgb(:medium_violet_red), do: {0xC7, 0x15, 0x85}
def name_to_rgb(:midnight_blue), do: {0x19, 0x19, 0x70}
def name_to_rgb(:mint_cream), do: {0xF5, 0xFF, 0xFA}
def name_to_rgb(:misty_rose), do: {0xFF, 0xE4, 0xE1}
def name_to_rgb(:moccasin), do: {0xFF, 0xE4, 0xB5}
def name_to_rgb(:navajo_white), do: {0xFF, 0xDE, 0xAD}
def name_to_rgb(:navy), do: {0x00, 0x00, 0x80}
def name_to_rgb(:old_lace), do: {0xFD, 0xF5, 0xE6}
def name_to_rgb(:olive), do: {0x80, 0x80, 0x00}
def name_to_rgb(:olive_drab), do: {0x6B, 0x8E, 0x23}
def name_to_rgb(:orange), do: {0xFF, 0xA5, 0x00}
def name_to_rgb(:orange_red), do: {0xFF, 0x45, 0x00}
def name_to_rgb(:orchid), do: {0xDA, 0x70, 0xD6}
def name_to_rgb(:pale_golden_rod), do: {0xEE, 0xE8, 0xAA}
def name_to_rgb(:pale_green), do: {0x98, 0xFB, 0x98}
def name_to_rgb(:pale_turquoise), do: {0xAF, 0xEE, 0xEE}
def name_to_rgb(:pale_violet_red), do: {0xDB, 0x70, 0x93}
def name_to_rgb(:papaya_whip), do: {0xFF, 0xEF, 0xD5}
def name_to_rgb(:peach_puff), do: {0xFF, 0xDA, 0xB9}
def name_to_rgb(:peru), do: {0xCD, 0x85, 0x3F}
def name_to_rgb(:pink), do: {0xFF, 0xC0, 0xCB}
def name_to_rgb(:plum), do: {0xDD, 0xA0, 0xDD}
def name_to_rgb(:powder_blue), do: {0xB0, 0xE0, 0xE6}
def name_to_rgb(:purple), do: {0x80, 0x00, 0x80}
def name_to_rgb(:rebecca_purple), do: {0x66, 0x33, 0x99}
def name_to_rgb(:red), do: {0xFF, 0x00, 0x00}
def name_to_rgb(:rosy_brown), do: {0xBC, 0x8F, 0x8F}
def name_to_rgb(:royal_blue), do: {0x41, 0x69, 0xE1}
def name_to_rgb(:saddle_brown), do: {0x8B, 0x45, 0x13}
def name_to_rgb(:salmon), do: {0xFA, 0x80, 0x72}
def name_to_rgb(:sandy_brown), do: {0xF4, 0xA4, 0x60}
def name_to_rgb(:sea_green), do: {0x2E, 0x8B, 0x57}
def name_to_rgb(:sea_shell), do: {0xFF, 0xF5, 0xEE}
def name_to_rgb(:sienna), do: {0xA0, 0x52, 0x2D}
def name_to_rgb(:silver), do: {0xC0, 0xC0, 0xC0}
def name_to_rgb(:sky_blue), do: {0x87, 0xCE, 0xEB}
def name_to_rgb(:slate_blue), do: {0x6A, 0x5A, 0xCD}
def name_to_rgb(:slate_gray), do: {0x70, 0x80, 0x90}
def name_to_rgb(:slate_grey), do: {0x70, 0x80, 0x90}
def name_to_rgb(:snow), do: {0xFF, 0xFA, 0xFA}
def name_to_rgb(:spring_green), do: {0x00, 0xFF, 0x7F}
def name_to_rgb(:steel_blue), do: {0x46, 0x82, 0xB4}
def name_to_rgb(:tan), do: {0xD2, 0xB4, 0x8C}
def name_to_rgb(:teal), do: {0x00, 0x80, 0x80}
def name_to_rgb(:thistle), do: {0xD8, 0xBF, 0xD8}
def name_to_rgb(:tomato), do: {0xFF, 0x63, 0x47}
def name_to_rgb(:turquoise), do: {0x40, 0xE0, 0xD0}
def name_to_rgb(:violet), do: {0xEE, 0x82, 0xEE}
def name_to_rgb(:wheat), do: {0xF5, 0xDE, 0xB3}
def name_to_rgb(:white), do: {0xFF, 0xFF, 0xFF}
def name_to_rgb(:white_smoke), do: {0xF5, 0xF5, 0xF5}
def name_to_rgb(:yellow), do: {0xFF, 0xFF, 0x00}
def name_to_rgb(:yellow_green), do: {0x9A, 0xCD, 0x32}
end | lib/scenic/primitive/style/paint/color.ex | 0.581422 | 0.482856 | color.ex | starcoder |
defmodule Stripe.Price do
@moduledoc """
Work with Stripe price objects.
The Prices API adds more flexibility to how you charge customers.
It also replaces the Plans API, so Stripe recommends migrating your existing
integration to work with prices.
To migrate, you need to identify how you use plans, products, and payment
flows and then update these parts of your integration to use the Prices API.
Migrating to Prices guide: https://stripe.com/docs/billing/migration/migrating-prices
You can:
- Create a price
- Retrieve a price
- Update a price
- List all prices
Stripe API reference: https://stripe.com/docs/api/prices
Example:
```
{
"id": "plan_HJ8MK9HTYgniMM",
"object": "price",
"active": true,
"billing_scheme": "per_unit",
"created": 1589897226,
"currency": "usd",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"product": "prod_HJ8MOtuM1vD2jd",
"recurring": {
"aggregate_usage": null,
"interval": "month",
"interval_count": 1,
"trial_period_days": null,
"usage_type": "licensed"
},
"tax_behavior": "unspecified",
"tiers": null,
"tiers_mode": null,
"transform_lookup_key": false,
"transform_quantity": null,
"type": "recurring",
"unit_amount": 999,
"unit_amount_decimal": "999"
}
```
"""
use Stripe.Entity
import Stripe.Request
@type recurring :: %{
optional(:aggregate_usage) => String.t(),
optional(:interval) => String.t(),
optional(:interval_count) => pos_integer,
optional(:trial_period_days) => pos_integer,
optional(:usage_type) => String.t()
}
@type price_tier :: %{
flat_amount: integer,
flat_amount_decimal: String.t(),
unit_amount: integer,
unit_amount_decimal: String.t(),
up_to: integer
}
@type transform_quantity :: %{
divide_by: pos_integer,
round: String.t()
}
@type product_data :: %{
:name => String.t(),
optional(:active) => boolean,
optional(:metadata) => map,
optional(:statement_descriptor) => String.t(),
optional(:tax_code) => String.t(),
optional(:unit_label) => String.t()
}
@type t :: %__MODULE__{
id: Stripe.id(),
object: String.t(),
active: boolean,
billing_scheme: String.t(),
created: Stripe.timestamp(),
currency: String.t(),
livemode: boolean,
lookup_key: String.t(),
metadata: Stripe.Types.metadata(),
nickname: String.t(),
product: Stripe.id() | Stripe.Product.t(),
recurring: recurring(),
tax_behavior: String.t(),
tiers: [price_tier()],
tiers_mode: String.t(),
transform_lookup_key: boolean(),
transform_quantity: transform_quantity(),
type: String.t(),
unit_amount: pos_integer,
unit_amount_decimal: String.t()
}
defstruct [
:id,
:object,
:active,
:billing_scheme,
:created,
:currency,
:livemode,
:lookup_key,
:metadata,
:nickname,
:product,
:recurring,
:tax_behavior,
:tiers,
:tiers_mode,
:transform_lookup_key,
:transform_quantity,
:type,
:unit_amount,
:unit_amount_decimal
]
@plural_endpoint "prices"
@doc """
Create a price.
"""
@spec create(params, Stripe.options()) ::
{:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
:currency => String.t(),
optional(:unit_amount) => pos_integer,
optional(:active) => boolean,
optional(:metadata) => Stripe.Types.metadata(),
optional(:nickname) => String.t(),
optional(:product) => Stripe.id() | Stripe.Product.t(),
optional(:product_data) => product_data,
optional(:recurring) => recurring(),
optional(:tax_behavior) => String.t(),
optional(:tiers) => [price_tier()],
optional(:tiers_mode) => String.t(),
optional(:billing_scheme) => String.t(),
optional(:lookup_key) => String.t(),
optional(:transfer_lookup_key) => boolean,
optional(:transform_quantity) => transform_quantity(),
optional(:unit_amount_decimal) => String.t()
}
| %{}
def create(params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_params(params)
|> put_method(:post)
|> cast_to_id([:product])
|> make_request()
end
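  # A hedged usage sketch (field values are illustrative; the product id echoes
  # the moduledoc example above):
  #
  #   {:ok, price} =
  #     Stripe.Price.create(%{
  #       currency: "usd",
  #       unit_amount: 999,
  #       recurring: %{interval: "month"},
  #       product: "prod_HJ8MOtuM1vD2jd"
  #     })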
@doc """
Retrieve a price.
"""
@spec retrieve(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
def retrieve(id, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:get)
|> make_request()
end
@doc """
Update a price.
Takes the `id` and a map of changes.
"""
@spec update(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params:
%{
optional(:active) => boolean,
optional(:metadata) => Stripe.Types.metadata(),
optional(:nickname) => String.t(),
optional(:recurring) => recurring(),
optional(:lookup_key) => String.t(),
optional(:transfer_lookup_key) => boolean
}
| %{}
def update(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:post)
|> put_params(params)
|> make_request()
end
@doc """
List all prices.
"""
@spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
when params:
%{
optional(:active) => boolean,
optional(:currency) => String.t(),
optional(:product) => Stripe.Product.t() | Stripe.id(),
optional(:type) => String.t(),
optional(:created) => Stripe.timestamp(),
optional(:ending_before) => t | Stripe.id(),
optional(:limit) => 1..100,
optional(:lookup_keys) => list(String.t()),
optional(:recurring) => recurring() | nil,
optional(:starting_after) => t | Stripe.id()
}
| %{}
def list(params \\ %{}, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_method(:get)
|> put_params(params)
|> cast_to_id([:product, :ending_before, :starting_after])
|> make_request()
end
end | lib/stripe/subscriptions/price.ex | 0.863406 | 0.789842 | price.ex | starcoder |
defmodule Oban.Plugins.Cron do
@moduledoc """
Periodically enqueue jobs through CRON based scheduling.
## Using the Plugin
Schedule various jobs using `{expr, worker}` and `{expr, worker, opts}` syntaxes:
config :my_app, Oban,
plugins: [
{Oban.Plugins.Cron,
crontab: [
{"* * * * *", MyApp.MinuteWorker},
{"0 * * * *", MyApp.HourlyWorker, args: %{custom: "arg"}},
{"0 0 * * *", MyApp.DailyWorker, max_attempts: 1},
{"0 12 * * MON", MyApp.MondayWorker, queue: :scheduled, tags: ["mondays"]},
{"@daily", MyApp.AnotherDailyWorker}
]}
]
## Options
* `:crontab` — a list of cron expressions that enqueue jobs on a periodic basis. See [Periodic
Jobs][perjob] in the Oban module docs for syntax and details.
* `:timezone` — which timezone to use when scheduling cron jobs. To use a timezone other than
the default of "Etc/UTC" you *must* have a timezone database like [tzdata][tzdata] installed
and configured.
[tzdata]: https://hexdocs.pm/tzdata
[perjob]: Oban.html#module-periodic-jobs
## Instrumenting with Telemetry
The `Oban.Plugins.Cron` plugin adds the following metadata to the `[:oban, :plugin, :stop]` event:
* :jobs - a list of jobs that were inserted into the database
"""
use GenServer
alias Oban.Cron.Expression
alias Oban.{Config, Job, Query, Worker}
@type cron_input :: {binary(), module()} | {binary(), module(), [Job.option()]}
@type option ::
{:conf, Config.t()}
| {:name, GenServer.name()}
| {:crontab, [cron_input()]}
| {:timezone, Calendar.time_zone()}
defmodule State do
@moduledoc false
defstruct [
:conf,
:name,
:timer,
crontab: [],
lock_key: 1_149_979_440_242_868_001,
timezone: "Etc/UTC"
]
end
@spec start_link([option()]) :: GenServer.on_start()
def start_link(opts) do
validate!(opts)
GenServer.start_link(__MODULE__, opts, name: opts[:name])
end
@doc false
@spec validate!(Keyword.t()) :: :ok
def validate!(opts) when is_list(opts) do
Enum.each(opts, &validate_opt!/1)
end
@doc false
@spec interval_to_next_minute(Time.t()) :: pos_integer()
def interval_to_next_minute(time \\ Time.utc_now()) do
time
|> Time.add(60)
|> Map.put(:second, 0)
|> Time.diff(time)
|> Integer.mod(86_400)
|> :timer.seconds()
end
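  # For example, at 12:30:45 this returns 15_000: the milliseconds remaining
  # until 12:31:00 (add a minute, zero the seconds, diff against now, wrap at
  # midnight, convert to milliseconds).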
@impl GenServer
def init(opts) do
Process.flag(:trap_exit, true)
state =
State
|> struct!(opts)
|> parse_crontab()
{:ok, state, {:continue, :start}}
end
@impl GenServer
def handle_continue(:start, %State{} = state) do
handle_info(:evaluate, state)
end
@impl GenServer
def terminate(_reason, %State{timer: timer}) do
if is_reference(timer), do: Process.cancel_timer(timer)
:ok
end
@impl GenServer
def handle_info(:evaluate, %State{} = state) do
state = schedule_evaluate(state)
meta = %{conf: state.conf, plugin: __MODULE__}
:telemetry.span([:oban, :plugin], meta, fn ->
case lock_and_insert_jobs(state) do
{:ok, inserted_jobs} when is_list(inserted_jobs) ->
{:ok, Map.put(meta, :jobs, inserted_jobs)}
{:ok, false} ->
{:ok, Map.put(meta, :jobs, [])}
error ->
{:error, Map.put(meta, :error, error)}
end
end)
{:noreply, state}
end
# Scheduling Helpers
defp schedule_evaluate(state) do
timer = Process.send_after(self(), :evaluate, interval_to_next_minute())
%{state | timer: timer}
end
# Parsing & Validation Helpers
defp parse_crontab(%State{crontab: crontab} = state) do
parsed =
Enum.map(crontab, fn
{expression, worker} -> {Expression.parse!(expression), worker, []}
{expression, worker, opts} -> {Expression.parse!(expression), worker, opts}
end)
%{state | crontab: parsed}
end
defp validate_opt!({:crontab, crontab}) do
unless is_list(crontab) do
raise ArgumentError, "expected :crontab to be a list, got: #{inspect(crontab)}"
end
Enum.each(crontab, &validate_crontab!/1)
end
defp validate_opt!({:timezone, timezone}) do
unless is_binary(timezone) and match?({:ok, _}, DateTime.now(timezone)) do
raise ArgumentError, "expected :timezone to be a known timezone"
end
end
defp validate_opt!(_opt), do: :ok
defp validate_crontab!({expression, worker, opts}) do
%Expression{} = Expression.parse!(expression)
unless Code.ensure_loaded?(worker) do
raise ArgumentError, "#{inspect(worker)} not found or can't be loaded"
end
unless function_exported?(worker, :perform, 1) do
raise ArgumentError, "#{inspect(worker)} does not implement `perform/1` callback"
end
unless Keyword.keyword?(opts) do
raise ArgumentError, "options must be a keyword list, got: #{inspect(opts)}"
end
unless build_changeset(worker, opts).valid? do
raise ArgumentError, "expected valid job options, got: #{inspect(opts)}"
end
end
defp validate_crontab!({expression, worker}) do
validate_crontab!({expression, worker, []})
end
defp validate_crontab!(invalid) do
raise ArgumentError,
"expected crontab entry to be an {expression, worker} or " <>
"{expression, worker, options} tuple, got: #{inspect(invalid)}"
end
# Inserting Helpers
defp lock_and_insert_jobs(state) do
Query.with_xact_lock(state.conf, state.lock_key, fn ->
insert_jobs(state.conf, state.crontab, state.timezone)
end)
end
defp insert_jobs(conf, crontab, timezone) do
{:ok, datetime} = DateTime.now(timezone)
for {expr, worker, opts} <- crontab, Expression.now?(expr, datetime) do
{:ok, job} = Query.fetch_or_insert_job(conf, build_changeset(worker, opts))
job
end
end
defp build_changeset(worker, opts) do
{args, opts} = Keyword.pop(opts, :args, %{})
opts = unique_opts(worker.__opts__(), opts)
worker.new(args, opts)
end
# Make each job unique for 59 seconds to prevent double-enqueue if the node or scheduler
# crashes. The minimum resolution for our cron jobs is 1 minute, so there is potentially
# a one second window where a double enqueue can happen.
defp unique_opts(worker_opts, crontab_opts) do
[unique: [period: 59]]
|> Keyword.merge(worker_opts, &Worker.resolve_opts/3)
|> Keyword.merge(crontab_opts, &Worker.resolve_opts/3)
end
end | lib/oban/plugins/cron.ex | 0.863262 | 0.455865 | cron.ex | starcoder |
defmodule Nebulex.Adapters.Local.Generation do
@moduledoc """
Generations Handler. This GenServer acts as a garbage collector: every time
it runs, a new cache generation is created and the oldest one is deleted.
The only way to create new generations is through this module (this
server is the metadata owner) by calling the `new/2` function. When a Cache
is created, a generations handler associated with that Cache is started
at the same time; therefore, this server MUST NOT be started directly.
## Options
These options are configured via the built-in local adapter
(`Nebulex.Adapters.Local`):
* `:gc_interval` - Interval time in seconds for garbage collection to run:
the oldest generation is deleted and a new one is created. If this option
is not set, garbage collection is never executed, so new generations
must be created explicitly, e.g.: `new(cache, [])`.
* `:allocated_memory` - Max size in bytes allocated for a cache generation.
If this option is set and the configured value is reached, a new generation
is created and the oldest one is deleted, forcing the release of memory
space. If it is not set (`nil`), the cleanup check to release memory is not
performed (the default).
* `:gc_cleanup_interval` - The number of writes needed to run the cleanup
check. Once this value is reached and only if `allocated_memory` option
is set, the cleanup check is performed. Defaults to `10`, so after 10
write operations the cleanup check is performed.
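For example (a sketch, assuming a cache defined with the local adapter):

    defmodule MyApp.Cache do
      use Nebulex.Cache, otp_app: :my_app, adapter: Nebulex.Adapters.Local
    end

    # config/config.exs
    config :my_app, MyApp.Cache,
      gc_interval: 3_600,
      allocated_memory: 100_000_000,
      gc_cleanup_interval: 10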
"""
defmodule State do
@moduledoc false
defstruct [
:cache,
:gc_interval,
:time_ref,
:gen_name,
:gen_index,
:allocated_memory,
:gc_cleanup_interval,
:gc_cleanup_counts,
:memory
]
@type t :: %__MODULE__{}
end
use GenServer
alias Nebulex.Adapters.Local.Generation.State
alias Nebulex.Adapters.Local.Metadata
alias :shards_local, as: Local
## API
@doc """
Starts the garbage collector for the built-in local cache adapter.
"""
@spec start_link(Nebulex.Cache.t(), Nebulex.Cache.opts()) :: GenServer.on_start()
def start_link(cache, opts \\ []) do
GenServer.start_link(__MODULE__, {cache, opts}, name: server_name(cache))
end
@doc """
Creates a new cache generation. Once the max number of generations
is reached, creating a new generation also deletes the oldest one.
## Options
* `:reset_timeout` - Indicates if the poll frequency time-out should
be reset or not (default: true).
## Example
Nebulex.Adapters.Local.Generation.new(MyCache, reset_timeout: false)
"""
@spec new(Nebulex.Cache.t(), Nebulex.Cache.opts()) :: [atom]
def new(cache, opts \\ []) do
cache
|> server_name()
|> GenServer.call({:new_generation, opts})
end
@doc """
Flushes the cache (including all its generations).
## Example
Nebulex.Adapters.Local.Generation.flush(MyCache)
"""
@spec flush(Nebulex.Cache.t()) :: :ok
def flush(cache) do
cache
|> server_name()
|> GenServer.call(:flush)
end
@doc """
Triggers the cleanup process to check whether or not the max generation size
has been reached. If so, a new generation is pushed in order to release memory
and keep it within the configured limit.
## Example
Nebulex.Adapters.Local.Generation.cleanup(MyCache)
"""
@spec cleanup(Nebulex.Cache.t()) :: :ok
def cleanup(cache) do
cache
|> server_name()
|> GenServer.cast(:cleanup)
end
@doc """
Reallocates the block of memory that was previously allocated for the given
`cache` with the new `size`. In other words, reallocates the max memory size
for a cache generation.
## Example
Nebulex.Adapters.Local.Generation.realloc(MyCache, 1_000_000)
"""
@spec realloc(Nebulex.Cache.t(), pos_integer) :: :ok
def realloc(cache, size) do
cache
|> server_name()
|> GenServer.call({:realloc, size})
end
@doc """
Returns the `GenServer` state (mostly for testing purposes).
## Example
Nebulex.Adapters.Local.Generation.get_state(MyCache)
"""
@spec get_state(Nebulex.Cache.t()) :: State.t()
def get_state(cache) do
cache
|> server_name()
|> GenServer.call(:get_state)
end
## GenServer Callbacks
@impl true
def init({cache, opts}) do
_ = init_metadata(cache, opts)
{{_, gen_name, gen_index}, ref} =
if gc_interval = opts[:gc_interval],
do: {new_gen(cache, 0), start_timer(gc_interval)},
else: {new_gen(cache, 0), nil}
init_state = %State{
cache: cache,
gc_interval: gc_interval,
time_ref: ref,
gen_name: gen_name,
gen_index: gen_index,
allocated_memory: Keyword.get(opts, :allocated_memory),
gc_cleanup_interval: Keyword.get(opts, :gc_cleanup_interval, 10),
gc_cleanup_counts: 1
}
{:ok, init_state}
end
@impl true
def handle_call(
{:new_generation, opts},
_from,
%State{cache: cache, gen_index: gen_index} = state
) do
{generations, gen_name, gen_index} = new_gen(cache, gen_index)
state =
opts
|> Keyword.get(:reset_timeout, true)
|> maybe_reset_timeout(state)
{:reply, generations, %{state | gen_name: gen_name, gen_index: gen_index}}
end
def handle_call(:flush, _from, %State{cache: cache} = state) do
:ok =
Enum.each(
cache.__metadata__.generations,
&Local.delete_all_objects(&1, cache.__state__)
)
{:reply, :ok, state}
end
def handle_call({:realloc, mem_size}, _from, %State{} = state) do
{:reply, :ok, %{state | allocated_memory: mem_size}}
end
def handle_call(:get_state, _from, %State{gen_name: name, cache: cache} = state) do
{:reply, %{state | memory: memory_info(name, cache.__state__)}, state}
end
@impl true
def handle_cast(
:cleanup,
%State{
gen_name: name,
gen_index: index,
cache: cache,
allocated_memory: max_size,
gc_cleanup_interval: cleanup_interval,
gc_cleanup_counts: cleanup_counts
} = state
)
when cleanup_counts >= cleanup_interval do
if memory_info(name, cache.__state__) >= max_size do
{_, name, index} = new_gen(cache, index)
{:noreply, %{reset_timeout(state) | gc_cleanup_counts: 1, gen_name: name, gen_index: index}}
else
{:noreply, %{state | gc_cleanup_counts: 1}}
end
end
def handle_cast(:cleanup, %{gc_cleanup_counts: counts} = state) do
{:noreply, %{state | gc_cleanup_counts: counts + 1}}
end
@impl true
def handle_info(
:timeout,
%State{cache: cache, gc_interval: time_interval, gen_index: gen_index} = state
) do
{_, gen_name, gen_index} = new_gen(cache, gen_index)
state = %{
state
| gen_name: gen_name,
gen_index: gen_index,
time_ref: start_timer(time_interval)
}
{:noreply, state}
end
## Private Functions
defp server_name(cache), do: Module.concat([cache, Generation])
defp init_metadata(cache, opts) do
n_gens = Keyword.get(opts, :n_generations, 2)
cache
|> Metadata.create(%Metadata{n_generations: n_gens})
|> init_indexes(cache)
end
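# Pre-creates the atoms used as generation table names so that `new_gen/2`
# can later resolve them with `String.to_existing_atom/1`.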
defp init_indexes(metadata, cache) do
:ok = Enum.each(0..metadata.n_generations, &String.to_atom("#{cache}.#{&1}"))
metadata
end
defp new_gen(cache, gen_index) do
gen_name = String.to_existing_atom("#{cache}.#{gen_index}")
gens =
gen_name
|> Local.new(cache.__tab_opts__)
|> Metadata.new_generation(cache)
|> maybe_delete_gen()
{gens, gen_name, incr_gen_index(cache, gen_index)}
end
defp maybe_delete_gen({generations, nil}), do: generations
defp maybe_delete_gen({generations, dropped_gen}) do
_ = Local.delete(dropped_gen)
generations
end
defp incr_gen_index(cache, gen_index) do
if gen_index < cache.__metadata__.n_generations, do: gen_index + 1, else: 0
end
defp start_timer(time) do
{:ok, ref} = :timer.send_after(time * 1000, :timeout)
ref
end
defp maybe_reset_timeout(_, %State{gc_interval: nil} = state), do: state
defp maybe_reset_timeout(false, state), do: state
defp maybe_reset_timeout(true, state), do: reset_timeout(state)
defp reset_timeout(%State{gc_interval: time, time_ref: ref} = state) do
{:ok, :cancel} = :timer.cancel(ref)
%{state | time_ref: start_timer(time)}
end
defp memory_info(name, state) do
Local.info(name, :memory, state) * :erlang.system_info(:wordsize)
end
end | lib/nebulex/adapters/local/generation.ex | 0.922543 | 0.429908 | generation.ex | starcoder |
defmodule MasteringBitcoin.ProofOfWorkExample do
@moduledoc """
Example 10-11. Simplified Proof-of-Work implementation
Port over of some code contained in the `proof-of-work-example.py` file.
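## Example

A usage sketch; any range of difficulty bits works, and small ranges finish
quickly:

    MasteringBitcoin.ProofOfWorkExample.run(0..4)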
"""
import MasteringBitcoin, only: [pow: 2]
@starting_nonce 0
# max nonce: 4 billion
@max_nonce pow(2, 32)
@test_block "test block with transactions"
@nonce_increment 1
# check if this is a valid result, below the target
defguardp valid_nonce?(nonce) when nonce <= @max_nonce
def run(difficulty \\ 0..32) do
# difficulty from 0 to 32 bits
Enum.reduce(difficulty, "", fn difficulty_bits, previous_block_hash ->
difficulty = pow(2, difficulty_bits)
IO.puts("Difficulty: #{difficulty} (#{difficulty_bits} bits)")
IO.puts("Starting search...")
# checkpoint the current time
start_time = :os.system_time(:seconds)
# make a new block which includes the hash from the previous block
# we fake a block of transactions - just a string.
# find a valid nonce for the new block
{hash_result, nonce} =
@test_block
|> Kernel.<>(previous_block_hash)
|> proof_of_work(difficulty_bits)
start_time
|> display_elapsed_time()
|> display_hashing_power(nonce)
IO.puts("")
hash_result
end)
end
defp proof_of_work(nonce \\ @starting_nonce, header, difficulty_bits)
defp proof_of_work(nonce, header, difficulty_bits)
when valid_nonce?(nonce) do
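# hashes below this target count as valid proofs for the given difficulty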
target = pow(2, 256 - difficulty_bits)
hash_result =
:sha256
|> :crypto.hash(to_string(header) <> to_string(nonce))
|> Base.encode16(case: :lower)
case String.to_integer(hash_result, 16) do
result when result < target ->
IO.puts("Success with nonce #{nonce}")
IO.puts("Hash is #{hash_result}")
{hash_result, nonce}
_more_than_target ->
proof_of_work(nonce + @nonce_increment, header, difficulty_bits)
end
end
defp proof_of_work(nonce, _header, _difficulty_bits) do
IO.puts("Failed after #{nonce} (max_nonce) tries")
{nil, nonce}
end
defp display_elapsed_time(start_time) do
# checkpoint how long it took to find a result
elapsed_time =
:seconds
|> :os.system_time()
|> Kernel.-(start_time)
IO.puts("Elapsed Time: #{elapsed_time} seconds")
elapsed_time
end
defp display_hashing_power(elapsed_time, nonce) when elapsed_time > 0 do
# estimate the hashes per second
nonce
|> Kernel./(elapsed_time)
|> :erlang.float_to_binary(decimals: 4)
|> (&IO.puts("Hashing Power: #{&1} hashes per second")).()
end
defp display_hashing_power(_elapsed_time, _nonce), do: nil
end | lib/mastering_bitcoin/proof_of_work_example.ex | 0.778313 | 0.612223 | proof_of_work_example.ex | starcoder |
defmodule Absinthe.Middleware do
@moduledoc """
Middleware enables custom resolution behaviour on a field.
All resolution happens through middleware. Even `resolve` functions are
middleware, as the `resolve` macro is just
```
quote do
middleware Absinthe.Resolution, unquote(function_ast)
end
```
Resolution happens by reducing a list of middleware spec onto an
`%Absinthe.Resolution{}` struct.
## Example
```
defmodule MyApp.Web.Authentication do
@behaviour Absinthe.Middleware
def call(resolution, _config) do
case resolution.context do
%{current_user: _} ->
resolution
_ ->
resolution
|> Absinthe.Resolution.put_result({:error, "unauthenticated"})
end
end
end
```
By specifying `@behaviour Absinthe.Middleware` the compiler will ensure that
we provide a `def call` callback. This function takes an
`%Absinthe.Resolution{}` struct and will also need to return one such struct.
On that struct there is a `context` key which holds the absinthe context. This
is generally where things like the current user are placed. For more
information on how the current user ends up in the context please see our full
authentication guide on the website.
Our `call/2` function simply checks the context to see if there is a current
user. If there is, we pass the resolution onward. If there is not, we update
the resolution state to `:resolved` and place an error result.
Middleware can be placed on a field in three different ways:
1. Using the `Absinthe.Schema.Notation.middleware/2`
macro used inside a field definition.
2. Using the `middleware/3` callback in your schema.
3. Returning a `{:middleware, middleware_spec, config}`
tuple from a resolution function.
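For example, a resolver can hand resolution off to another middleware by
returning such a tuple (a sketch; `MyApp.Web.LookupUser` is a hypothetical
middleware module):

```
field :lookup_user, :user do
  resolve fn _, args, _ ->
    {:middleware, MyApp.Web.LookupUser, args}
  end
end
```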
## The `middleware/2` macro
For placing middleware on a particular field, it's handy to use
the `middleware/2` macro.
Middleware will be run in the order in which they are specified.
The `middleware/3` callback has final say on what middleware get
set.
Examples
`MyApp.Web.Authentication` would run before resolution, and `HandleError` would run after.
```
field :hello, :string do
middleware MyApp.Web.Authentication
resolve &get_the_string/2
middleware HandleError, :foo
end
```
Anonymous functions are a valid middleware spec. A nice use case
is altering the context in a logout mutation. Mutations are the
only time the context should be altered. This is not enforced.
```
field :logout, :query do
middleware fn res, _ ->
%{res |
context: Map.delete(res.context, :current_user),
value: "logged out",
state: :resolved
}
end
end
```
`middleware/2` even accepts local public function names. Note
that `middleware/2` is the only thing that can take local function
names without an associated module. If not using macros, use
`{{__MODULE__, :function_name}, []}`
```
def auth(res, _config) do
# auth logic here
end
query do
field :hello, :string do
middleware :auth
resolve &get_the_string/2
end
end
```
## The `middleware/3` callback
`middleware/3` is a function callback on a schema. When you `use
Absinthe.Schema` a default implementation of this function is placed in your
schema. It is passed the existing middleware for a field, the field itself,
and the object that the field is a part of.
So for example if your schema contained:
```
object :user do
field :name, :string
field :age, :integer
end
query do
field :lookup_user, :user do
resolve fn _, _ ->
{:ok, %{name: "Bob"}}
end
end
end
def middleware(middleware, field, object) do
middleware |> IO.inspect
field |> IO.inspect
object |> IO.inspect
middleware
end
```
Given a document like:
```graphql
{ lookupUser { name }}
```
`object` is each object that is accessed while executing the document. In our
case that is the `:user` object and the `:query` object. `field` is every
field on that object, and middleware is a list of whatever middleware
specs have been configured by the schema on that field. Concretely
then, the function will be called three times, with the following arguments:
```
YourSchema.middleware([{Absinthe.Resolution, #Function<20.52032458/0>}], lookup_user_field_of_root_query_object, root_query_object)
YourSchema.middleware([{Absinthe.Middleware.MapGet, :name}], name_field_of_user, user_object)
YourSchema.middleware([{Absinthe.Middleware.MapGet, :age}], age_field_of_user, user_object)
```
In the latter two cases we see that the middleware list is empty. In the first
case we see one middleware spec, which is placed by the `resolve` macro used in the
`:lookup_user` field.
### Default Middleware
One use of `middleware/3` is setting the default middleware on a field.
By default, the default middleware placed on a field looks up the field's
value by its snake case identifier, i.e. `:resource_name`.
Here is an example of how to change the default to use a camel cased string,
IE, "resourceName".
```
def middleware(middleware, %{identifier: identifier} = field, object) do
camelized =
identifier
|> Atom.to_string
|> Macro.camelize
new_middleware_spec = {{__MODULE__, :get_camelized_key}, camelized}
Absinthe.Schema.replace_default(middleware, new_middleware_spec, field, object)
end
def get_camelized_key(%{source: source} = res, key) do
%{res | state: :resolved, value: Map.get(source, key)}
end
```
There's a lot going on here so let's unpack it. We need to define a
specification to tell Absinthe what middleware to run. The form we're using is
`{{MODULE, :function_to_call}, options_of_middleware}`. For our purposes we're
simply going to use a function in the schema module itself
`get_camelized_key`.
We then use the `Absinthe.Schema.replace_default/4` function to swap out the
default middleware already present in the middleware list with the new one we
want to use. It handles going through the existing list of middleware and
seeing if it's using the default or if it has custom resolvers on it. If it's
using the default, the function applies our newly defined middleware spec.
Like all middleware functions, `:get_camelized_key` takes a resolution struct,
and options. The options is the camelized key we generated. We get the
camelized string from the parent map, and set it as the value of the
resolution struct. Finally we mark the resolution state `:resolved`.
Side note: This `middleware/3` function is called whenever we pull the type
out of the schema. The middleware itself is run every time we get a field on
an object. If we have 1000 objects and we were doing the camelization logic
INSIDE the middleware, we would compute the camelized string 1000 times. By
doing it in the `def middleware` callback we do it just once.
### Changes Since 1.3
In Absinthe 1.3, fields without any `middleware/2` or `resolve/1` calls would
show up with an empty list `[]` as its middleware in the `middleware/3`
function. If no middleware was applied in the function and it also returned `[]`,
THEN Absinthe would apply the default.
This made it very easy to accidentally break your schema if you weren't
particularly careful with your pattern matching. Now the defaults are applied
FIRST by absinthe, and THEN passed to `middleware/3`. Consequently, the
middleware list argument should always have at least one value. This is also
why there is now the `replace_default/4` function, because it handles telling
the difference between a field with a resolver and a field with the default.
### Object Wide Authentication
Let's use our authentication middleware from earlier, and place it on every
field in the query object.
```
defmodule MyApp.Web.Schema do
use Absinthe.Schema
query do
field :private_field, :string do
resolve fn _, _ ->
{:ok, "this can only be viewed if authenticated"}
end
end
end
def middleware(middleware, _field, %Absinthe.Type.Object{identifier: identifier})
when identifier in [:query, :subscription, :mutation] do
[MyApp.Web.Authentication | middleware]
end
def middleware(middleware, _field, _object) do
middleware
end
end
```
It is important to note that we are matching for the `:query`, `:subscription`
or `:mutation` identifier types. We do this because the middleware function
will be called for each field in the schema. If we didn't limit it to those
types, we would be applying authentication to every field in the entire
schema, even stuff like `:name` or `:age`. This generally isn't necessary
provided you authenticate at the entrypoints.
## Main Points
- Middleware functions take a `%Absinthe.Resolution{}` struct, and return one.
- All middleware on a field are always run, make sure to pattern match on the
state if you care.
"""
@type function_name :: atom
@type spec ::
module
| {module, term}
| {{module, function_name}, term}
| (Absinthe.Resolution.t(), term -> Absinthe.Resolution.t())
@doc """
This is the main middleware callback.
It receives an `%Absinthe.Resolution{}` struct and it needs to return an
`%Absinthe.Resolution{}` struct. The second argument will be whatever value
was passed to the `middleware` call that setup the middleware.
"""
@callback call(Absinthe.Resolution.t(), term) :: Absinthe.Resolution.t()
@doc false
def shim(res, {object, field, middleware}) do
schema = res.schema
object = Absinthe.Schema.lookup_type(schema, object)
field = Map.fetch!(object.fields, field)
middleware = expand(schema, middleware, field, object)
%{res | middleware: middleware}
end
@doc "For testing and inspection purposes"
def unshim([{{__MODULE__, :shim}, {object, field, middleware}}], schema) do
object = Absinthe.Schema.lookup_type(schema, object)
field = Map.fetch!(object.fields, field)
expand(schema, middleware, field, object)
end
@doc false
def expand(schema, middleware, field, object) do
expanded =
middleware
|> Enum.flat_map(&get_functions/1)
|> Absinthe.Schema.Notation.__ensure_middleware__(field, object)
case middleware do
[{:ref, Absinthe.Phase.Schema.Introspection, _}] ->
expanded
[{:ref, Absinthe.Type.BuiltIns.Introspection, _}] ->
expanded
[{:ref, Absinthe.Phase.Schema.DeprecatedDirectiveFields, _}] ->
expanded
_ ->
schema.middleware(expanded, field, object)
end
end
defp get_functions({:ref, module, identifier}) do
module.__absinthe_function__(identifier, :middleware)
end
defp get_functions(val) do
List.wrap(val)
end
end | lib/absinthe/middleware.ex | 0.944408 | 0.89289 | middleware.ex | starcoder |
defmodule Honeylixir.Event do
@moduledoc """
Used for managing Events and holding their data. It also has a send function
that really just kicks off the sending process which happens asynchronously.
"""
@moduledoc since: "0.1.0"
# This one is mainly for testing only so we can force a timestamp for comparing
@typedoc """
An RFC3339 formatted timestamp
`"2020-09-29 04:36:15Z"`
"""
@type rfc_timestamp :: String.t()
@typedoc """
A struct containing all the data of an event.
By default, an event is constructed with the values in the configuration defined
in `Honeylixir`. Any field can be overwritten via regular struct assigning of values.
```
event = Honeylixir.Event.create()
event = %{event | api_host: "something-else.com"}
```
An Event also includes two other fields:
* `fields` - The set of fields which will be sent to Honeycomb as the event body
* `metadata` - A map of extra data that may be provided as part of a response.
"""
@type t :: %__MODULE__{
api_host: String.t(),
dataset: String.t() | atom(),
fields: map(),
metadata: map(),
sample_rate: integer(),
team_writekey: String.t(),
timestamp: rfc_timestamp()
}
defstruct [
:api_host,
:dataset,
:sample_rate,
:team_writekey,
:timestamp,
fields: %{},
metadata: %{}
]
@doc """
Creates an event using the current timestamp, configured values for sending,
and no initial fields other than `service_name` if configured.
```
event = Honeylixir.Event.create()
```
"""
@doc since: "0.1.0"
@spec create() :: t()
def create(), do: base_event()
@doc """
`create/1` accepts either a timestamp or a set of fields to initialize the
`Honeylixir.Event`.
```
event = Honeylixir.Event.create("2020-09-29 04:36:15Z")
event = Honeylixir.Event.create(%{"field1" => "value1"})
```
"""
@doc since: "0.1.0"
@spec create(rfc_timestamp() | map()) :: t()
def create(fields_or_timestamp)
def create(%{} = fields) do
create(utc_timestamp(), fields)
end
def create(timestamp) when is_binary(timestamp) do
%{base_event() | timestamp: timestamp}
end
@doc """
Accepts both a timestamp in RFC3339 format and a map of key/values to
initialize the Event struct with.
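## Examples

    iex> Honeylixir.Event.create("2020-09-29 04:36:15Z", %{"a" => 1}).timestamp
    "2020-09-29 04:36:15Z"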
"""
@doc since: "0.1.0"
@spec create(rfc_timestamp(), map()) :: t()
def create(timestamp, %{} = fields) when is_binary(timestamp) do
event = base_event()
event = %{event | timestamp: timestamp}
Honeylixir.Event.add(event, fields)
end
@doc """
Add a single key/value pair to the event.
## Examples
iex> event = Honeylixir.Event.create()
iex> Honeylixir.Event.add_field(event, "key", "other").fields
%{"service_name" => "honeylixir-tests", "key" => "other"}
"""
@doc since: "0.1.0"
@spec add_field(t(), String.t(), any()) :: t()
def add_field(%Honeylixir.Event{} = event, field, value) when is_binary(field) do
new_fields = Map.put(event.fields, field, value)
%{event | fields: new_fields}
end
@doc """
Adds a map of fields into the existing set.
## Examples
iex> event = Honeylixir.Event.create()
iex> Honeylixir.Event.add(event, %{"another" => "field", "service_name" => "foobar"}).fields
%{"service_name" => "foobar", "another" => "field"}
"""
@spec add(Honeylixir.Event.t(), map()) :: t()
def add(%Honeylixir.Event{} = event, %{} = fieldset) do
Enum.reduce(fieldset, event, fn {k, v}, acc_event ->
add_field(acc_event, k, v)
end)
end
@doc """
Adds to the metadata of the event.
This information is NOT passed along to Honeycomb and should only be used
by the consuming application to keep track of an event.
## Examples
iex> event = Honeylixir.Event.create()
iex> Honeylixir.Event.add_metadata(event, %{"some_key" => "some_value"}).metadata
%{"some_key" => "some_value"}
"""
@spec add_metadata(t(), map()) :: t()
def add_metadata(%Honeylixir.Event{} = event, %{} = metadata) do
new_metadata = Map.merge(event.metadata, metadata)
%{event | metadata: new_metadata}
end
@doc """
Used for acknowledging the event is ready for sending, passing it off to
be sent asynchronously. Currently nothing stops a user from sending the same
event twice.
If the event is sampled, a `Honeylixir.Response` is added to the `Honeylixir.ResponseQueue`
with the `err` attribute set to `:sampled`.
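## Examples

A sketch; it assumes the `:honeylixir` application is running so the
transmission queue process is available:

    event = Honeylixir.Event.create(%{"field" => "value"})
    Honeylixir.Event.send(event)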
"""
@spec send(t()) :: :ok
def send(%Honeylixir.Event{sample_rate: 1} = event),
do: Honeylixir.TransmissionQueue.enqueue_event(event)
def send(%Honeylixir.Event{} = event) do
case Enum.random(1..event.sample_rate) do
1 ->
Honeylixir.TransmissionQueue.enqueue_event(event)
:ok
_ ->
Honeylixir.ResponseQueue.add(%Honeylixir.Response{
metadata: event.metadata,
duration: 0,
status_code: nil,
body: nil,
err: :sampled
})
:ok
end
end
defp base_event() do
%Honeylixir.Event{
api_host: api_host(),
sample_rate: sample_rate(),
team_writekey: team_writekey(),
dataset: dataset(),
timestamp: utc_timestamp()
}
|> add_service_name()
end
defp utc_timestamp(), do: DateTime.to_string(datetime_module().utc_now())
defp add_service_name(event) do
if service_name() != nil do
add_field(event, "service_name", service_name())
else
event
end
end
# Intended mainly for unit testing dependency injection.
defp datetime_module do
Application.get_env(:honeylixir, :datetime_module, DateTime)
end
defp api_host do
Application.get_env(:honeylixir, :api_host, "https://api.honeycomb.io")
end
defp sample_rate do
Application.get_env(:honeylixir, :sample_rate, 1)
end
defp team_writekey do
Application.get_env(:honeylixir, :team_writekey)
end
defp dataset do
Application.get_env(:honeylixir, :dataset)
end
defp service_name do
Application.get_env(:honeylixir, :service_name)
end
end | lib/event.ex | 0.893632 | 0.797951 | event.ex | starcoder |
defmodule Hunter.Relationship do
@moduledoc """
Relationship entity
This module defines a `Hunter.Relationship` struct and the main functions
for working with Relationship.
## Fields
* `id` - target account id
* `following` - whether the user is currently following the account
* `followed_by` - whether the user is currently being followed by the account
* `blocking` - whether the user is currently blocking the account
* `muting` - whether the user is currently muting the account
* `requested` - whether the user has requested to follow the account
* `domain_blocking` - whether the user is currently blocking the user's domain
"""
alias Hunter.Config
@type t :: %__MODULE__{
id: non_neg_integer,
following: boolean,
followed_by: boolean,
blocking: boolean,
muting: boolean,
requested: boolean,
domain_blocking: boolean
}
@derive [Poison.Encoder]
defstruct [:id, :following, :followed_by, :blocking, :muting, :requested, :domain_blocking]
@doc """
Get the relationships of authenticated user towards given other users
## Parameters
* `conn` - connection credentials
* `id` - list of relationship IDs
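## Examples

A sketch, assuming `conn` holds previously built `Hunter.Client` credentials:

    Hunter.Relationship.relationships(conn, [42, 43])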
"""
@spec relationships(Hunter.Client.t(), [non_neg_integer]) :: [Hunter.Relationship.t()]
def relationships(conn, ids) do
Config.hunter_api().relationships(conn, ids)
end
@doc """
Follow a user
## Parameters
* `conn` - Connection credentials
* `id` - user id
"""
@spec follow(Hunter.Client.t(), non_neg_integer) :: Hunter.Relationship.t()
def follow(conn, id) do
Config.hunter_api().follow(conn, id)
end
@doc """
Unfollow a user
## Parameters
* `conn` - Connection credentials
* `id` - user id
"""
@spec unfollow(Hunter.Client.t(), non_neg_integer) :: Hunter.Relationship.t()
def unfollow(conn, id) do
Config.hunter_api().unfollow(conn, id)
end
@doc """
Block a user
## Parameters
* `conn` - Connection credentials
* `id` - user id
"""
@spec block(Hunter.Client.t(), non_neg_integer) :: Hunter.Relationship.t()
def block(conn, id) do
Config.hunter_api().block(conn, id)
end
@doc """
Unblock a user
## Parameters
* `conn` - Connection credentials
* `id` - user id
"""
@spec unblock(Hunter.Client.t(), non_neg_integer) :: Hunter.Relationship.t()
def unblock(conn, id) do
Config.hunter_api().unblock(conn, id)
end
@doc """
Mute a user
## Parameters
* `conn` - Connection credentials
* `id` - user id
"""
@spec mute(Hunter.Client.t(), non_neg_integer) :: Hunter.Relationship.t()
def mute(conn, id) do
Config.hunter_api().mute(conn, id)
end
@doc """
Unmute a user
## Parameters
* `conn` - Connection credentials
* `id` - user id
"""
@spec unmute(Hunter.Client.t(), non_neg_integer) :: Hunter.Relationship.t()
def unmute(conn, id) do
Config.hunter_api().unmute(conn, id)
end
end | lib/hunter/relationship.ex | 0.843943 | 0.521654 | relationship.ex | starcoder |
defmodule Eden do
import Eden.Parser
@moduledoc """
Provides functions to `encode/1` and `decode/2` between *Elixir* and
*edn* data format.
"""
alias Eden.Encode
alias Eden.Decode
alias Eden.Exception, as: Ex
@default_handlers %{"inst" => &Eden.Tag.inst/1,
"uuid" => &Eden.Tag.uuid/1}
@doc """
Encodes an *Elixir* term that implements the `Eden.Encode` protocol.
When the term is a nested data structure (e.g. `List`, `Map`, etc.),
all children should also implement `Eden.Encode` protocol for the
encoding to be successful.
There is an implementation for the most common *Elixir* data types:
- `Atom`
- `BitString` (binary)
- `Integer`
- `Float`
- `Map`
- `List`
- `MapSet`
There are also implementations for the following custom *Elixir* data
types in order to support native *edn* types:
- `Eden.Symbol`
- `Eden.Character`
- `Array` (vector)
- `Eden.Tag` (tagged value)
Since the *edn* specification requires every implementation to
provide handlers for tags `uuid` and `inst`, the following data
types also have an implementation for `Eden.Encode`:
- `Eden.UUID` (`#uuid`)
- `Timex.DatetTime` (`#inst`)
## Examples
iex> Eden.encode([1, 2])
{:ok, "(1, 2)"}
iex> Eden.encode(%{a: 1, b: 2, c: 3})
{:ok, "{:a 1, :b 2, :c 3}"}
iex> Eden.encode({:a, 1})
{:error, Protocol.UndefinedError}
"""
@spec encode(Encode.t) :: {:ok, String.t} | {:error, atom}
def encode(data) do
try do
{:ok, encode!(data)}
rescue
e -> {:error, e.__struct__}
end
end
@doc """
Same as `encode/1` but raises an error if the term could not
be encoded.
Returns the function result otherwise.
"""
@spec encode!(Encode.t) :: String.t
def encode!(data) do
Encode.encode(data)
end
@doc """
Decodes a string containing *edn* data into *Elixir* data
structures. For a detailed list on the mapping between
*edn* and *Elixir* check the documentation in the project's
[page](https://github.com/jfacorro/Eden).
When the string contains a single expression it is decoded
and returned. Otherwise, if there are multiple expressions,
then a list with all parsed expressions is returned.
## Examples
iex> Eden.decode("{:a 1 :b 2}")
{:ok, %{a: 1, b: 2}}
iex> Eden.decode("(hello :world \\!)")
{:ok, [%Eden.Symbol{name: "hello"}, :world, %Eden.Character{char: "!"}]}
iex> Eden.decode("[1 2 3 4]")
{:ok, #Array<[1, 2, 3, 4], fixed=false, default=nil>}
iex> Eden.decode("nil true false")
{:ok, [nil, true, false]}
iex> Eden.decode("nil true false .")
{:error, Eden.Exception.UnexpectedInputError}
"""
@spec decode(String.t, Keyword.t) :: {:ok, any} | {:error, atom}
def decode(input, opts \\ []) do
try do
{:ok, decode!(input, opts)}
rescue
e -> {:error, e.__struct__}
end
end
@doc """
Same as `decode/1` but raises an error if the input could not
be decoded.
Returns the function result otherwise.
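A custom tag handler can be supplied through the `:handlers` option (a
sketch; the `#temp` tag and its handler are hypothetical):

    Eden.decode!("#temp 36.5", handlers: %{"temp" => fn v -> {:celsius, v} end})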
"""
@spec decode!(String.t, Keyword.t) :: any
def decode!(input, opts \\ []) do
tree = parse(input, location: true)
handlers = Map.merge(@default_handlers, opts[:handlers] || %{})
opts = [handlers: handlers]
case Decode.decode(tree, opts) do
[] -> raise Ex.EmptyInputError, input
[data] -> data
data -> data
end
end
end | lib/eden.ex | 0.874961 | 0.689671 | eden.ex | starcoder |
defmodule Cognixir.FaceApi.DetectOptions do
@moduledoc """
Options for function detect_face. See official api doc for supported options.
## Keys
- returnFaceId: boolean, return detected face ids
- returnFaceLandmarks: boolean, return face landmarks
- returnFaceAttributes: comma separated strings, analyze specific face attributes in detail
"""
defstruct returnFaceId: true, returnFaceLandmarks: false, returnFaceAttributes: ""
end
defmodule Cognixir.FaceApi do
@moduledoc """
Provides functions for face detection, verification and grouping
"""
alias Cognixir.FaceApi
defp api_base do
"https://api.projectoxford.ai/face/v1.0/"
end
defp api_key do
Application.get_env(:cognixir, :fa_api_key)
end
@doc """
Detects a face and returns various detail information like face position, landmarks and attributes.
See official api doc for supported options.
## Parameters
- image: A string containing valid image url or binary file content of an image
- options: DetectOptions with additional parameters (optional)
## Examples
iex> FaceApi.detect_face("http://example.com/images/test.jpg", %FaceApi.DetectOptions{returnFaceLandmarks: true, returnFaceAttributes: "age,gender"})
{ :ok, response_map }
"""
def detect_face(image, options \\ %FaceApi.DetectOptions{}) do
body = if String.valid?(image), do: %{"url" => image}, else: image
Cognixir.post(body, api_base() <> "detect", api_key(), Map.from_struct(options))
end
@doc """
Checks if two provided faces are identical. First, you need to run detect_face on each face to get a face id; then you can compare both faces.
## Parameters
- face_id_1: face id of first face
- face_id_2: face id of second face
## Examples
iex> FaceApi.verify_faces("id_1", "id_2")
{ :ok, %{"isIdentical" => false, "confidence" => 0.0} }
"""
def verify_faces(face_id_1, face_id_2) do
body = %{"faceId1" => face_id_1, "faceId2" => face_id_2}
Cognixir.post(body, api_base() <> "verify", api_key())
end
end | lib/face_api.ex | 0.883381 | 0.4112 | face_api.ex | starcoder |
defmodule AdaptableCostsEvaluator.Computations do
@moduledoc """
The Computations context.
"""
import Ecto.Query, warn: false
alias AdaptableCostsEvaluator.Repo
alias AdaptableCostsEvaluator.Computations.Computation
alias AdaptableCostsEvaluator.Users.User
alias AdaptableCostsEvaluator.{Users, Organizations}
@doc """
Returns the list of computations belonging to the particular user defined by
`creator_id` or computations in the organization defined by `organization_id`.
"""
def list_computations(creator_id: creator_id) do
user = Users.get_user!(creator_id)
Repo.preload(user, :computations).computations
end
def list_computations(organization_id: organization_id) do
organization = Organizations.get_organization!(organization_id)
Repo.preload(organization, :computations).computations
end
@doc """
Gets a single computation.
Raises `Ecto.NoResultsError` if the Computation does not exist.
## Examples
iex> get_computation!(123)
%Computation{}
iex> get_computation!(456)
** (Ecto.NoResultsError)
"""
def get_computation!(id), do: Repo.get!(Computation, id)
@doc """
Gets a single computation defined by the given `attrs`.
"""
def get_computation_by!(attrs) do
Repo.get_by!(Computation, attrs)
end
@doc """
Creates a computation belonging to the given user.
## Examples
iex> create_computation(user, %{field: value})
{:ok, %Computation{}}
iex> create_computation(user, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_computation(%User{} = user, attrs \\ %{}) do
attrs = Map.put(attrs, "creator_id", user.id)
%Computation{}
|> change_computation(attrs)
|> Repo.insert()
end
@doc """
Shares the given computation with the users within the organization.
It sets the `organization_id` attribute of the given computation.
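## Examples

    iex> add_computation_to_organization(computation, organization_id)
    {:ok, %Computation{}}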
"""
def add_computation_to_organization(%Computation{} = computation, organization_id) do
organization = Organizations.get_organization!(organization_id)
computation
|> change_computation(%{organization_id: organization.id})
|> Repo.update()
end
@doc """
Updates a computation.
## Examples
iex> update_computation(computation, %{field: new_value})
{:ok, %Computation{}}
iex> update_computation(computation, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_computation(%Computation{} = computation, attrs) do
attrs = %{name: Map.get(attrs, "name")}
computation
|> change_computation(attrs)
|> Repo.update()
end
@doc """
Deletes a computation. If `from_org` keyword is set to `true`, it deletes the
computation only from the organization.
## Examples
iex> delete_computation(computation)
{:ok, %Computation{}}
iex> delete_computation(computation, from_org: true)
{:ok, %Computation{}}
iex> delete_computation(computation)
{:error, %Ecto.Changeset{}}
"""
def delete_computation(
%Computation{} = computation,
[from_org: fo] \\ [from_org: false]
) do
if fo do
computation
|> change_computation(%{organization_id: nil})
|> Repo.update()
else
Repo.delete(computation)
end
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking computation changes.
## Examples
iex> change_computation(computation)
%Ecto.Changeset{data: %Computation{}}
"""
def change_computation(%Computation{} = computation, attrs \\ %{}) do
Computation.changeset(computation, attrs)
end
end | lib/adaptable_costs_evaluator/computations.ex | 0.895878 | 0.571916 | computations.ex | starcoder |
defmodule InteropProxy.Session do
@moduledoc """
Keeps an internal state with the url and cookie for requests.
It also re-checks every 5 seconds that the login is still valid. If
the cookie no longer works (for example, because a new server was
created), the cookie is refreshed.
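A sketch of starting it under a supervisor (URL and credentials are
placeholders):

    {InteropProxy.Session,
     url: "http://localhost:8000",
     username: "user",
     password: "pass",
     name: InteropProxy.Session}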
"""
use Agent
alias InteropProxy.Request
@doc """
Log in and then start the Session Agent.
If the `:respond_to` keyword argument is supplied in `opts`, we'll
send an `:ok` message when we've connected successfully.
"""
def start_link(opts) do
url = Keyword.fetch! opts, :url
username = Keyword.fetch! opts, :username
password = Keyword.fetch! opts, :password
resp = Keyword.get opts, :respond_to, nil
other_opts = Keyword.drop opts, [:url, :username, :password, :respond_to]
{^url, cookie} = do_login url, username, password
if resp !== nil, do: send resp, :ok
{:ok, pid} = Agent.start_link fn -> %{
url: url, cookie: cookie, username: username, password: password
} end, other_opts
spawn_link fn -> monitor_cookie pid, url, username, password end
{:ok, pid}
end
# Log in, if we couldn't log in, just keep trying.
defp do_login(url, username, password) do
case Request.login url, username, password do
{:ok, _, cookie} ->
{url, cookie}
_ ->
IO.puts "Interop server could not be reached... trying again."
Process.sleep 500
do_login url, username, password
end
end
# Keep checking if the cookie is still valid, if it's not, make a
# new one for the session.
defp monitor_cookie(session, url, username, password) do
Process.sleep 5000
case Request.get_obstacles url, cookie(session) do
{:error, :forbidden} ->
{^url, cookie} = do_login url, username, password
Agent.update session, &Map.put(&1, :cookie, cookie)
_ ->
nil
end
monitor_cookie session, url, username, password
end
@doc """
Get the url used in the login.
"""
def url(session \\ __MODULE__) do
Agent.get session, &Map.get(&1, :url)
end
@doc """
Get the cookie from the login.
"""
def cookie(session \\ __MODULE__) do
Agent.get session, &Map.get(&1, :cookie)
end
end | services/interop-proxy/lib/interop_proxy/session.ex | 0.560854 | 0.416144 | session.ex | starcoder |
defmodule ReWeb.Types.SellerLead do
@moduledoc """
GraphQL types for seller leads
"""
use Absinthe.Schema.Notation
import Absinthe.Resolution.Helpers, only: [dataloader: 1]
alias ReWeb.Resolvers
object :site_seller_lead do
field :uuid, :uuid
field :complement, :string
field :type, :string
field :price, :integer
field :maintenance_fee, :float
field :suites, :integer
end
object :broker_seller_lead do
field :uuid, :uuid
end
input_object :site_seller_lead_input do
field :complement, :string
field :type, :string
field :price, :integer
field :maintenance_fee, :float
field :suites, :integer
field :price_request_id, non_null(:id)
end
input_object :broker_seller_lead_input do
field :complement, :string
field :type, non_null(:string)
field :additional_information, :string
field :owner, non_null(:owner_contact_input)
field :address, non_null(:address_input)
field :utm, :input_utm
end
object :price_request do
field :id, :id
field :name, :string
field :email, :string
field :area, :integer
field :rooms, :integer
field :suites, :integer
field :type, :string
field :maintenance_fee, :float
field :bathrooms, :integer
field :garage_spots, :integer
field :is_covered, :boolean
field :suggested_price, :float
field :listing_price_rounded, :float
field :listing_price_error_q90_min, :float
field :listing_price_error_q90_max, :float
field :listing_price_per_sqr_meter, :float
field :listing_average_price_per_sqr_meter, :float
field :address, :address, resolve: dataloader(Re.Addresses)
field :user, :user, resolve: dataloader(Re.Accounts)
end
input_object :price_suggestion_input do
field :area, non_null(:integer)
field :rooms, non_null(:integer)
field :bathrooms, non_null(:integer)
field :garage_spots, non_null(:integer)
field :suites, non_null(:integer)
field :type, non_null(:string)
field :maintenance_fee, non_null(:float)
field :address, non_null(:address_input)
end
object :seller_lead_mutations do
@desc "Insert seller lead"
field :site_seller_lead_create, type: :site_seller_lead do
arg :input, non_null(:site_seller_lead_input)
resolve &Resolvers.SellerLeads.create_site/2
end
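# Example GraphQL document for the mutation above (a sketch; the input
# fields follow :site_seller_lead_input):
#
#   mutation {
#     siteSellerLeadCreate(input: {priceRequestId: "1", type: "apartment"}) {
#       uuid
#     }
#   }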
@desc "Insert broker seller lead"
field :broker_seller_lead_create, type: :broker_seller_lead do
arg :input, non_null(:broker_seller_lead_input)
resolve &Resolvers.SellerLeads.create_broker/2
end
@desc "Request price suggestion"
field :request_price_suggestion, type: :price_request do
arg :name, :string
arg :email, :string
arg :area, non_null(:integer)
arg :rooms, non_null(:integer)
arg :bathrooms, non_null(:integer)
arg :garage_spots, non_null(:integer)
arg :suites, :integer
arg :type, :string
arg :maintenance_fee, :float
arg :is_covered, non_null(:boolean)
arg :address, non_null(:address_input)
resolve &Resolvers.SellerLeads.create_price_suggestion/2
end
@desc "Request notification when covered"
field :notify_when_covered, type: :contact do
arg :name, :string
arg :phone, :string
arg :email, :string
arg :message, :string
arg :state, non_null(:string)
arg :city, non_null(:string)
arg :neighborhood, non_null(:string)
resolve &Resolvers.SellerLeads.create_out_of_coverage/2
end
end
object :seller_lead_subscriptions do
@desc "Subscribe to price suggestion requests"
field :price_suggestion_requested, :price_request do
config(fn _args, %{context: %{current_user: current_user}} ->
case current_user do
:system -> {:ok, topic: "price_suggestion_requested"}
_ -> {:error, :unauthorized}
end
end)
trigger :request_price_suggestion,
topic: fn _ ->
"price_suggestion_requested"
end
end
@desc "Subscribe to price suggestion requests"
field :notification_coverage_asked, :contact do
config(fn _args, %{context: %{current_user: current_user}} ->
case current_user do
:system -> {:ok, topic: "notification_coverage_asked"}
_ -> {:error, :unauthorized}
end
end)
trigger :notify_when_covered,
topic: fn _ ->
"notification_coverage_asked"
end
end
end
end | apps/re_web/lib/graphql/types/seller_lead.ex | 0.652906 | 0.514827 | seller_lead.ex | starcoder |
defimpl Timex.Protocol, for: DateTime do
@moduledoc """
A type which represents a date and time with timezone information (optional, UTC will
be assumed for date/times with no timezone information provided).
Functions that produce time intervals use UNIX epoch (or simly Epoch) as the
default reference date. Epoch is defined as UTC midnight of January 1, 1970.
Time intervals in this module don't account for leap seconds.
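For example, converting a UTC datetime to Unix time (a sketch using the
public `Timex` API):

    {{2015, 1, 10}, {14, 46, 18}}
    |> Timex.to_datetime("Etc/UTC")
    |> Timex.to_unix()
    # => 1420901178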
"""
import Timex.Macros
use Timex.Constants
alias Timex.{Duration, AmbiguousDateTime}
alias Timex.{Timezone, TimezoneInfo}
alias Timex.Types
@epoch_seconds :calendar.datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}})
@spec to_julian(DateTime.t) :: float
def to_julian(%DateTime{:year => y, :month => m, :day => d}) do
Timex.Calendar.Julian.julian_date(y, m, d)
end
@spec to_gregorian_seconds(DateTime.t) :: non_neg_integer
def to_gregorian_seconds(date), do: to_seconds(date, :zero)
@spec to_gregorian_microseconds(DateTime.t) :: non_neg_integer
def to_gregorian_microseconds(%DateTime{microsecond: {us,_}} = date) do
s = to_seconds(date, :zero)
(s*(1_000*1_000))+us
end
@spec to_unix(DateTime.t) :: non_neg_integer
def to_unix(date), do: trunc(to_seconds(date, :epoch))
@spec to_date(DateTime.t) :: Date.t
def to_date(date), do: DateTime.to_date(date)
@spec to_datetime(DateTime.t, timezone :: Types.valid_timezone) :: DateTime.t | AmbiguousDateTime.t | {:error, term}
def to_datetime(%DateTime{time_zone: timezone} = d, timezone), do: d
def to_datetime(%DateTime{} = d, timezone), do: Timezone.convert(d, timezone)
@spec to_naive_datetime(DateTime.t) :: NaiveDateTime.t
def to_naive_datetime(%DateTime{time_zone: nil} = d) do
%NaiveDateTime{
year: d.year, month: d.month, day: d.day,
hour: d.hour, minute: d.minute, second: d.second,
microsecond: d.microsecond
}
end
def to_naive_datetime(%DateTime{} = d) do
nd = %NaiveDateTime{
year: d.year, month: d.month, day: d.day,
hour: d.hour, minute: d.minute, second: d.second,
microsecond: d.microsecond
}
Timex.shift(nd, [seconds: -1 * Timex.Timezone.total_offset(d.std_offset, d.utc_offset)])
end
@spec to_erl(DateTime.t) :: Types.datetime
def to_erl(%DateTime{} = d) do
{{d.year,d.month,d.day},{d.hour,d.minute,d.second}}
end
@spec century(DateTime.t) :: non_neg_integer
def century(%DateTime{:year => year}), do: Timex.century(year)
@spec is_leap?(DateTime.t) :: boolean
def is_leap?(%DateTime{year: year}), do: :calendar.is_leap_year(year)
@spec beginning_of_day(DateTime.t) :: DateTime.t
def beginning_of_day(%DateTime{} = datetime) do
Timex.Timezone.beginning_of_day(datetime)
end
@spec end_of_day(DateTime.t) :: DateTime.t
def end_of_day(%DateTime{} = datetime) do
Timex.Timezone.end_of_day(datetime)
end
@spec beginning_of_week(DateTime.t, Types.weekstart) :: DateTime.t | AmbiguousDateTime.t | {:error, term}
def beginning_of_week(%DateTime{} = date, weekstart) do
case Timex.days_to_beginning_of_week(date, weekstart) do
{:error, _} = err -> err
days -> beginning_of_day(shift(date, [days: -days]))
end
end
@spec end_of_week(DateTime.t, Types.weekstart) :: DateTime.t | AmbiguousDateTime.t | {:error, term}
def end_of_week(%DateTime{} = date, weekstart) do
case Timex.days_to_end_of_week(date, weekstart) do
{:error, _} = err -> err
days_to_end ->
end_of_day(shift(date, [days: days_to_end]))
end
end
@spec beginning_of_year(DateTime.t) :: DateTime.t
def beginning_of_year(%DateTime{year: year, time_zone: tz}) do
Timex.to_datetime({year, 1, 1}, tz)
end
@spec end_of_year(DateTime.t) :: DateTime.t
def end_of_year(%DateTime{year: year, time_zone: tz}),
do: %{Timex.to_datetime({{year, 12, 31}, {23, 59, 59}}, tz) | :microsecond => {999_999, 6}}
@spec beginning_of_quarter(DateTime.t) :: DateTime.t
def beginning_of_quarter(%DateTime{year: year, month: month, time_zone: tz}) do
month = 1 + (3 * (Timex.quarter(month) - 1))
Timex.DateTime.Helpers.construct({year, month, 1}, tz)
end
@spec end_of_quarter(DateTime.t) :: DateTime.t | AmbiguousDateTime.t
def end_of_quarter(%DateTime{year: year, month: month, time_zone: tz}) do
month = 3 * Timex.quarter(month)
case Timex.DateTime.Helpers.construct({year,month,1}, tz) do
{:error, _} = err -> err
%DateTime{} = d -> end_of_month(d)
%AmbiguousDateTime{:before => b, :after => a} ->
%AmbiguousDateTime{:before => end_of_month(b),
:after => end_of_month(a)}
end
end
@spec beginning_of_month(DateTime.t) :: DateTime.t
def beginning_of_month(%DateTime{year: year, month: month, time_zone: tz}),
do: Timex.DateTime.Helpers.construct({{year, month, 1}, {0, 0, 0, 0}}, tz)
@spec end_of_month(DateTime.t) :: DateTime.t
def end_of_month(%DateTime{year: year, month: month, time_zone: tz} = date),
do: Timex.DateTime.Helpers.construct({{year, month, days_in_month(date)},{23,59,59,999_999}}, tz)
@spec quarter(DateTime.t) :: 1..4
def quarter(%DateTime{month: month}), do: Timex.quarter(month)
def days_in_month(%DateTime{:year => y, :month => m}), do: Timex.days_in_month(y, m)
def week_of_month(%DateTime{:year => y, :month => m, :day => d}), do: Timex.week_of_month(y,m,d)
def weekday(%DateTime{:year => y, :month => m, :day => d}), do: :calendar.day_of_the_week({y, m, d})
def day(%DateTime{} = date) do
ref = beginning_of_year(date)
1 + Timex.diff(date, ref, :days)
end
def is_valid?(%DateTime{:year => y, :month => m, :day => d,
:hour => h, :minute => min, :second => sec}) do
:calendar.valid_date({y,m,d}) and Timex.is_valid_time?({h,min,sec})
end
def iso_week(%DateTime{:year => y, :month => m, :day => d}),
do: Timex.iso_week(y, m, d)
def from_iso_day(%DateTime{year: year} = date, day) when is_day_of_year(day) do
{year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(year, day)
%{date | :year => year, :month => month, :day => day_of_month}
end
@spec set(DateTime.t, list({atom(), term})) :: DateTime.t | {:error, term}
def set(%DateTime{} = date, options) do
validate? = Keyword.get(options, :validate, true)
Enum.reduce(options, date, fn
_option, {:error, _} = err ->
err
option, result ->
case option do
{:validate, _} -> result
{:datetime, {{y, m, d}, {h, min, sec}}} ->
if validate? do
%{result |
:year => Timex.normalize(:year, y),
:month => Timex.normalize(:month, m),
:day => Timex.normalize(:day, {y,m,d}),
:hour => Timex.normalize(:hour, h),
:minute => Timex.normalize(:minute, min),
:second => Timex.normalize(:second, sec)
}
else
%{result | :year => y, :month => m, :day => d, :hour => h, :minute => min, :second => sec}
end
{:date, {y, m, d}} ->
if validate? do
{yn,mn,dn} = Timex.normalize(:date, {y,m,d})
%{result | :year => yn, :month => mn, :day => dn}
else
%{result | :year => y, :month => m, :day => d}
end
{:date, %Date{} = d} ->
Timex.set(result, [date: {d.year, d.month, d.day}])
{:time, {h, m, s}} ->
if validate? do
%{result | :hour => Timex.normalize(:hour, h), :minute => Timex.normalize(:minute, m), :second => Timex.normalize(:second, s)}
else
%{result | :hour => h, :minute => m, :second => s}
end
{:time, %Time{} = t} ->
Timex.set(result, [time: {t.hour, t.minute, t.second}])
{:day, d} ->
if validate? do
%{result | :day => Timex.normalize(:day, {result.year, result.month, d})}
else
%{result | :day => d}
end
{:timezone, tz} ->
tz = case tz do
%TimezoneInfo{} -> tz
_ -> Timezone.get(tz, result)
end
%{result | :time_zone => tz.full_name, :zone_abbr => tz.abbreviation,
:utc_offset => tz.offset_utc, :std_offset => tz.offset_std}
{name, val} when name in [:year, :month, :hour, :minute, :second, :microsecond] ->
if validate? do
Map.put(result, name, Timex.normalize(name, val))
else
Map.put(result, name, val)
end
{option_name, _} ->
{:error, {:bad_option, option_name}}
end
end)
end
@doc """
Shifts the given DateTime based on a series of options.
See docs for Timex.shift/2 for details.
"""
@spec shift(DateTime.t, list({atom(), term})) :: DateTime.t | {:error, term}
def shift(%DateTime{time_zone: tz, microsecond: {_us, precision}} = datetime, shifts) when is_list(shifts) do
{logical_shifts, shifts} = Keyword.split(shifts, [:years, :months, :weeks, :days])
datetime =
datetime
|> Timezone.convert("Etc/UTC")
|> logical_shift(logical_shifts)
us = to_gregorian_microseconds(datetime)
shift = calculate_shift(shifts)
shifted_us = us + shift
shifted_secs = div(shifted_us, 1_000*1_000)
rem_us = rem(shifted_us, 1_000*1_000)
# Convert back to DateTime in UTC
shifted = raw_convert(shifted_secs, {rem_us, precision})
# Convert to original timezone
case Timezone.convert(shifted, tz) do
{:error, {:could_not_resolve_timezone, _, _, _}} ->
# This occurs when the shifted date/time doesn't exist because of a leap forward
# This doesn't mean the shift is invalid, simply that we need to ask for the right wall time
# Which in these cases means asking for the time + 1h
shifted = raw_convert(shifted_secs + 60, {rem_us, precision})
Timezone.convert(shifted, tz)
result ->
result
end
catch
:throw, {:error, _} = err ->
err
end
defp raw_convert(secs, {us, precision}) do
{date,{h,mm,s}} = :calendar.gregorian_seconds_to_datetime(secs)
if precision == 0 do
Timex.DateTime.Helpers.construct({date, {h,mm,s,us}}, "Etc/UTC")
else
%DateTime{microsecond: {us, _}} = dt = Timex.DateTime.Helpers.construct({date, {h,mm,s,us}}, "Etc/UTC")
%DateTime{dt | microsecond: {us, precision}}
end
end
defp logical_shift(datetime, []), do: datetime
defp logical_shift(datetime, shifts) do
sorted = Enum.sort_by(shifts, &elem(&1, 0), &compare_unit/2)
do_logical_shift(datetime, sorted)
end
defp do_logical_shift(datetime, []), do: datetime
defp do_logical_shift(datetime, [{unit, value} | rest]) do
do_logical_shift(shift_by(datetime, value, unit), rest)
end
# Consider compare_unit/2 an analog of Kernel.<=/2
defp compare_unit(:years, _), do: false
defp compare_unit(_, :years), do: true
defp compare_unit(:months, _), do: false
defp compare_unit(_, :months), do: true
defp compare_unit(:weeks, _), do: false
defp compare_unit(_, :weeks), do: true
defp compare_unit(:days, _), do: false
defp compare_unit(_, :days), do: true
defp calculate_shift(shifts), do: calculate_shift(shifts, 0)
defp calculate_shift([], acc), do: acc
defp calculate_shift([{:duration, %Duration{} = duration} | rest], acc) do
total_microseconds = Duration.to_microseconds(duration)
calculate_shift(rest, acc + total_microseconds)
end
defp calculate_shift([{:hours, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + (value * 60 * 60 * 1_000 * 1_000))
end
defp calculate_shift([{:minutes, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + (value * 60 * 1_000 * 1_000))
end
defp calculate_shift([{:seconds, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + (value * 1_000 * 1_000))
end
defp calculate_shift([{:milliseconds, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + (value * 1_000))
end
defp calculate_shift([{:microseconds, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + value)
end
defp calculate_shift([other | _], _acc),
do: throw({:error, {:invalid_shift, other}})
defp shift_by(%DateTime{year: y} = datetime, value, :years) do
shifted = %DateTime{datetime | year: y + value}
# If a plain shift of the year fails, then it likely falls on a leap day,
# so set the day to the last day of that month
case :calendar.valid_date({shifted.year,shifted.month,shifted.day}) do
false ->
last_day = :calendar.last_day_of_the_month(shifted.year, shifted.month)
%DateTime{shifted | day: last_day}
true ->
shifted
end
end
defp shift_by(%DateTime{} = datetime, 0, :months),
do: datetime
# Positive shifts
defp shift_by(%DateTime{year: year, month: month, day: day} = datetime, value, :months) when value > 0 do
if (month + value) <= 12 do
ldom = :calendar.last_day_of_the_month(year, month + value)
if day > ldom do
%DateTime{datetime | month: month + value, day: ldom}
else
%DateTime{datetime | month: month + value}
end
else
diff = (12 - month) + 1
shift_by(%DateTime{datetime | year: year + 1, month: 1}, value - diff, :months)
end
end
# Negative shifts
defp shift_by(%DateTime{year: year, month: month, day: day} = datetime, value, :months) do
cond do
(month + value) >= 1 ->
ldom = :calendar.last_day_of_the_month(year, month + value)
if day > ldom do
%DateTime{datetime | month: month + value, day: ldom}
else
%DateTime{datetime | month: month + value}
end
:else ->
shift_by(%DateTime{datetime | year: year - 1, month: 12}, value + month, :months)
end
end
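# For illustration (hypothetical value): when the target month is shorter,
# the day clamps to that month's last day:
#
#     shift_by(~U[2021-01-31 00:00:00Z], 1, :months)
#     #=> ~U[2021-02-28 00:00:00Z]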
defp shift_by(datetime, value, :weeks),
do: shift_by(datetime, value * 7, :days)
defp shift_by(%DateTime{} = datetime, 0, :days),
do: datetime
# Positive shifts
defp shift_by(%DateTime{year: year, month: month, day: day} = datetime, value, :days) when value > 0 do
ldom = :calendar.last_day_of_the_month(year, month)
cond do
(day + value) <= ldom ->
%DateTime{datetime | day: day + value}
(month + 1) <= 12 ->
diff = (ldom - day) + 1
shift_by(%DateTime{datetime | month: month + 1, day: 1}, value - diff, :days)
:else ->
diff = (ldom - day) + 1
shift_by(%DateTime{datetime | year: year + 1, month: 1, day: 1}, value - diff, :days)
end
end
# Negative shifts
defp shift_by(%DateTime{year: year, month: month, day: day} = datetime, value, :days) do
cond do
(day + value) >= 1 ->
%DateTime{datetime | day: day + value}
(month - 1) >= 1 ->
ldom = :calendar.last_day_of_the_month(year, month - 1)
shift_by(%DateTime{datetime | month: month - 1, day: ldom}, value + day, :days)
:else ->
ldom = :calendar.last_day_of_the_month(year - 1, 12)
shift_by(%DateTime{datetime | year: year - 1, month: 12, day: ldom}, value + day, :days)
end
end
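# For illustration (hypothetical value): a negative shift across a month
# boundary recurses via the last day of the previous month:
#
#     shift_by(~U[2021-03-03 00:00:00Z], -5, :days)
#     # day + value = -2, so recurse from Feb 28 with value + day = -2
#     #=> ~U[2021-02-26 00:00:00Z]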
@spec to_seconds(DateTime.t, :epoch | :zero) :: integer | {:error, atom}
defp to_seconds(%DateTime{} = date, :epoch) do
case to_seconds(date, :zero) do
{:error, _} = err -> err
secs -> secs - @epoch_seconds
end
end
defp to_seconds(%DateTime{} = dt, :zero) do
total_offset = Timezone.total_offset(dt.std_offset, dt.utc_offset) * -1
date = {dt.year, dt.month, dt.day}
time = {dt.hour, dt.minute, dt.second}
:calendar.datetime_to_gregorian_seconds({date, time}) + total_offset
end
defp to_seconds(_, _), do: {:error, :badarg}
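# For illustration (hypothetical bindings): the total offset is subtracted so
# wall times in different zones map to the same absolute instant; a wall time
# of 01:00 at a +01:00 offset yields the same :zero seconds as 00:00 UTC:
#
#     to_seconds(one_am_at_plus_one, :zero) == to_seconds(midnight_utc, :zero)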
end
defmodule Plug.AccessLog.DefaultFormatter do
@moduledoc """
Default log message formatter.
"""
alias Plug.AccessLog.DefaultFormatter
alias Plug.AccessLog.Formatter
@behaviour Formatter
@doc """
Formats a log message.
The following formatting directives are available:
- `%%` - Percentage sign
- `%a` - Remote IP-address
- `%b` - Size of response in bytes. Outputs "-" when no bytes are sent.
- `%B` - Size of response in bytes. Outputs "0" when no bytes are sent.
- `%{VARNAME}C` - Cookie sent by the client
- `%D` - Time taken to serve the request (microseconds)
- `%{VARNAME}e` - Environment variable contents
- `%h` - Remote hostname
- `%{VARNAME}i` - Header line sent by the client
- `%l` - Remote logname
- `%m` - Request method
- `%M` - Time taken to serve the request (milliseconds)
- `%{VARNAME}o` - Header line sent by the server
- `%P` - The process ID that serviced the request
- `%q` - Query string (prepended with "?" or empty string)
- `%r` - First line of HTTP request
- `%>s` - Response status code
- `%t` - Time the request was received in the format `[10/Jan/2015:14:46:18 +0100]`
- `%T` - Time taken to serve the request (full seconds)
- `%{UNIT}T` - Time taken to serve the request in the given UNIT
- `%u` - Remote user
- `%U` - URL path requested (without query string)
- `%v` - Server name
- `%V` - Server name (canonical)
**Note for %b and %B**: To determine the size of the response the
"Content-Length" will be inspected and, if available, returned
unverified. If the header is not present the response body will be
inspected using `byte_size/1`.
**Note for %h**: The hostname will always be the ip of the client (same as `%a`).
**Note for %l**: Always a dash ("-").
**Note for %T**: Rounding happens, so "0.6 seconds" will be reported as "1 second".
**Note for %{UNIT}T**: Available units are `s` for seconds (same as `%T`),
`ms` for milliseconds (same as `%M`) and `us` for microseconds (same as `%D`).
**Note for %V**: Alias for `%v`.
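## Example
For illustration, the Apache Common Log Format (the output shown is
hypothetical and depends on the actual `conn`):
    format("%h %l %u %t \"%r\" %>s %b", conn)
    #=> "127.0.0.1 - - [10/Jan/2015:14:46:18 +0100] \"GET / HTTP/1.1\" 200 31"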
"""
@impl Formatter
def format(format, conn), do: log([], conn, format)
defp log(message, _conn, ""), do: message |> Enum.reverse() |> IO.iodata_to_binary()
defp log(message, conn, <<"%%", rest::binary>>) do
["%" | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%a", rest::binary>>) do
[DefaultFormatter.RemoteIPAddress.format(conn) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%b", rest::binary>>) do
[DefaultFormatter.ResponseBytes.format(conn, "-") | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%B", rest::binary>>) do
[DefaultFormatter.ResponseBytes.format(conn, "0") | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%D", rest::binary>>) do
[DefaultFormatter.RequestServingTime.format(conn, :microseconds) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%h", rest::binary>>) do
[DefaultFormatter.RemoteIPAddress.format(conn) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%l", rest::binary>>) do
["-" | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%m", rest::binary>>) do
[conn.method | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%M", rest::binary>>) do
[DefaultFormatter.RequestServingTime.format(conn, :milliseconds) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%P", rest::binary>>) do
[inspect(conn.owner) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%q", rest::binary>>) do
[DefaultFormatter.QueryString.format(conn) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%r", rest::binary>>) do
[DefaultFormatter.RequestLine.format(conn) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%>s", rest::binary>>) do
[to_string(conn.status) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%t", rest::binary>>) do
[DefaultFormatter.RequestTime.format(conn) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%T", rest::binary>>) do
[DefaultFormatter.RequestServingTime.format(conn, :seconds) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%u", rest::binary>>) do
[DefaultFormatter.RemoteUser.format(conn) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%U", rest::binary>>) do
[DefaultFormatter.RequestPath.format(conn) | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%v", rest::binary>>) do
[conn.host | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%V", rest::binary>>) do
[conn.host | message]
|> log(conn, rest)
end
defp log(message, conn, <<"%{", rest::binary>>) do
[var, rest] = String.split(rest, "}", parts: 2)
<<vartype::binary-1, rest::binary>> = rest
append =
case vartype do
"C" -> DefaultFormatter.RequestCookie.format(conn, var)
"e" -> DefaultFormatter.Environment.format(conn, var)
"i" -> DefaultFormatter.RequestHeader.format(conn, var)
"o" -> DefaultFormatter.ResponseHeader.format(conn, var)
"T" -> DefaultFormatter.RequestServingTime.format(conn, var)
_ -> "-"
end
[append | message]
|> log(conn, rest)
end
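# For illustration (hypothetical directive): "%{user-agent}i" splits into
# var = "user-agent" and vartype = "i", which dispatches to
# DefaultFormatter.RequestHeader.format(conn, "user-agent").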
defp log(message, conn, <<char, rest::binary>>) do
[<<char>> | message]
|> log(conn, rest)
end
end
defmodule AtomTweaksWeb.FormHelpers do
@moduledoc """
Functions for building HTML forms, specifically designed to work with
[GitHub Primer](https://primer.style).
"""
use Phoenix.HTML
alias Phoenix.HTML.Form
alias AtomTweaks.Markdown
alias AtomTweaksWeb.ErrorHelpers
@doc """
Displays the appropriate input control for the given field.
## Options
* `:using` -- override the built-in selection of input field based on data type. Can be any of the
`Phoenix.HTML.Form` input function names or the following special values:
* `:markdown` -- displays a specially-formatted `textarea` suitable for entering or editing
Markdown text
* `:tweak_type` -- displays a drop-down list of available `AtomTweaks.Tweaks.Tweak` types
See:
[Dynamic forms with Phoenix](http://blog.plataformatec.com.br/2016/09/dynamic-forms-with-phoenix/)
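## Examples
Illustrative template usage (assumes a form `f` built with `form_for`):
    <%= input f, :title %>
    <%= input f, :description, using: :markdown %>
    <%= input f, :type, using: :tweak_type %>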
"""
@spec input(Phoenix.HTML.FormData.t(), atom, keyword) :: Phoenix.HTML.safe()
def input(form, field, options \\ []) do
type = options[:using] || Form.input_type(form, field)
wrapper_opts = [class: "form-group #{error_class(form, field)}"]
label_opts = []
input_opts =
options
|> Keyword.split([:class, :placeholder])
|> elem(0)
|> Keyword.update(:class, "form-control", &"form-control #{&1}")
content_tag :dl, wrapper_opts do
label =
content_tag :dt do
label(form, humanize(field), label_opts)
end
input =
content_tag :dd do
input(type, form, field, input_opts)
end
error = error_tag(form, field) || ""
[label, input, error]
end
end
defp input(:markdown, form, field, input_opts) do
content =
case Form.input_value(form, field) do
nil -> nil
%Markdown{} = markdown -> markdown.text
value -> value
end
opts =
input_opts
|> Keyword.merge(id: Form.input_id(form, field), name: Form.input_name(form, field))
|> Keyword.put(:class, (input_opts[:class] || "") <> " image-drop")
content_tag(:textarea, "#{content}\n", opts)
end
defp input(:tweak_type, form, field, _input_opts) do
selected = Form.input_value(form, field)
select(
form,
field,
[Init: "init", Style: "style"],
prompt: "Select tweak type",
selected: selected
)
end
defp input(type, form, field, input_opts) do
apply(Form, type, [form, field, input_opts])
end
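# For illustration: the fallback clause delegates to the matching
# Phoenix.HTML.Form helper, e.g. input(:text_input, form, :title, opts)
# calls Form.text_input(form, :title, opts).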
defp error_class(form, field) do
cond do
!form.source.action -> ""
form.errors[field] -> "errored"
true -> ""
end
end
defp error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag :dd, class: "error" do
ErrorHelpers.translate_error(error)
end
end)
end
end
defmodule Gringotts.Gateways.Trexle do
@moduledoc """
[Trexle][home] Payment Gateway implementation.
> For further details, please refer [Trexle API documentation][docs].
Following are the features that have been implemented for the Trexle Gateway:
| Action | Method |
| ------ | ------ |
| Authorize | `authorize/3` |
| Purchase | `purchase/3` |
| Capture | `capture/3` |
| Refund | `refund/3` |
| Store | `store/2` |
## PCI compliance is mandatory!
_You, the merchant, need to be PCI-DSS compliant if you wish to use this
module! Your server will receive sensitive card and customer information._
## The `opts` argument
Most `Gringotts` API calls accept an optional `keyword` list `opts` to supply
optional arguments for transactions with Trexle. The following keys are
supported:
* `email`
* `ip_address`
* `description`
[docs]: https://docs.trexle.com/
[home]: https://trexle.com/
## Registering your Trexle account at `Gringotts`
After [creating your account][dashboard] successfully on Trexle, head to the dashboard and find
your account "secrets" in the [`API keys`][keys] section.
Here's how the secrets map to the required configuration parameters for Trexle:
| Config parameter | Trexle secret |
| ------- | ---- |
| `:api_key` | **API key** |
Your Application config must look something like this:
config :gringotts, Gringotts.Gateways.Trexle,
api_key: "your-secret-API-key"
[dashboard]: https://trexle.com/dashboard/
[keys]: https://trexle.com/dashboard/api-keys
## Scope of this module
* Trexle processes money in cents. **citation-needed**
## Supported Gateways
Find the official [list here][gateways].
[gateways]: https://trexle.com/payment-gateway
## Following the examples
1. First, set up a sample application and configure it to work with Trexle.
- You could do that from scratch by following our [Getting Started][gs] guide.
- To save you time, we recommend [cloning our example repo][example-repo]
that gives you a pre-configured sample app ready-to-go.
+ You could use the same config or update it the with your "secrets"
that as described
[above](#module-registering-your-trexle-account-at-gringotts).
2. To save a lot of time, create a [`.iex.exs`][iex-docs] file as shown in
[this gist][trexle.iex.exs] to introduce a set of handy bindings and
aliases.
We'll be using these bindings in the examples below.
[example-repo]: https://github.com/aviabird/gringotts_example
[iex-docs]: https://hexdocs.pm/iex/IEx.html#module-the-iex-exs-file
[trexle.iex.exs]: https://gist.github.com/oyeb/055f40e9ad4102f5480febd2cfa00787
[gs]: https://github.com/aviabird/gringotts/wiki
"""
@base_url "https://core.trexle.com/api/v1/"
use Gringotts.Gateways.Base
use Gringotts.Adapter, required_config: [:api_key]
import Poison, only: [decode: 1]
alias Gringotts.{Address, CreditCard, Money, Response}
@doc """
Performs a (pre) Authorize operation.
The authorization validates the `card` details with the banking network,
places a hold on the transaction `amount` in the customer’s issuing bank and
also triggers risk management. Funds are not transferred.
Trexle returns a "charge token", avaliable in the `Response.id`
field, which can be used in future to perform a `capture/3`.
### Example
The following session shows how one would (pre) authorize a payment of $100 on
a sample `card`.
```
iex> amount = Money.new(100, :USD)
iex> card = %CreditCard{
first_name: "Harry",
last_name: "Potter",
number: "5200828282828210",
year: 2099, month: 12,
verification_code: "123",
brand: "VISA"}
iex> address = %Address{
street1: "301, Gryffindor",
street2: "Hogwarts School of Witchcraft and Wizardry, Hogwarts Castle",
city: "Highlands",
region: "SL",
country: "GB",
postal_code: "11111",
phone: "(555)555-5555"}
iex> options = [email: "<EMAIL>",
ip_address: "127.0.0.1",
billing_address: address,
description: "For our valued customer, Mr. Potter"]
iex> Gringotts.authorize(Gringotts.Gateways.Trexle, amount, card, options)
```
"""
@spec authorize(Money.t(), CreditCard.t(), keyword) :: {:ok | :error, Response}
def authorize(amount, payment, opts \\ []) do
params = create_params_for_auth_or_purchase(amount, payment, opts, false)
commit(:post, "charges", params, opts)
end
@doc """
Captures a pre-authorized `amount`.
`amount` is transferred to the merchant account by Trexle when it is smaller than or
equal to the amount used in the pre-authorization referenced by `charge_token`.
Trexle returns a "charge token", avaliable in the `Response.id`
field, which can be used in future to perform a `refund/2`.
## Note
Multiple captures cannot be performed on the same "charge token". If the
captured amount is smaller than the (pre) authorized amount, the "un-captured"
amount is released. **citation-needed**
## Example
The following example shows how one would (partially) capture a previously
authorized a payment worth $10 by referencing the obtained `charge_token`.
```
iex> amount = Money.new(10, :USD)
iex> token = "some-<PASSWORD>"
iex> Gringotts.capture(Gringotts.Gateways.Trexle, token, amount)
```
"""
@spec capture(String.t(), Money.t(), keyword) :: {:ok | :error, Response}
def capture(charge_token, amount, opts \\ []) do
{_, int_value, _} = Money.to_integer(amount)
params = [amount: int_value]
commit(:put, "charges/#{charge_token}/capture", params, opts)
end
@doc """
Transfers `amount` from the customer to the merchant.
Trexle attempts to process a purchase on behalf of the customer, by debiting
`amount` from the customer's account by charging the customer's `card`.
## Example
The following session shows how one would process a payment worth $100 in
one-shot, without (pre) authorization.
```
iex> amount = Money.new(100, :USD)
iex> card = %CreditCard{
first_name: "Harry",
last_name: "Potter",
number: "5200828282828210",
year: 2099, month: 12,
verification_code: "123",
brand: "VISA"}
iex> address = %Address{
street1: "301, Gryffindor",
street2: "Hogwarts School of Witchcraft and Wizardry, Hogwarts Castle",
city: "Highlands",
region: "SL",
country: "GB",
postal_code: "11111",
phone: "(555)555-5555"}
iex> options = [email: "<EMAIL>",
ip_address: "127.0.0.1",
billing_address: address,
description: "For our valued customer, Mr. Potter"]
iex> Gringotts.purchase(Gringotts.Gateways.Trexle, amount, card, options)
```
"""
@spec purchase(Money.t(), CreditCard.t(), keyword) :: {:ok | :error, Response}
def purchase(amount, payment, opts \\ []) do
params = create_params_for_auth_or_purchase(amount, payment, opts)
commit(:post, "charges", params, opts)
end
@doc """
Refunds the amount to the customer's card with reference to a prior transfer.
Trexle processes a full or partial refund worth `amount`, referencing a
previous `purchase/3` or `capture/3`.
Trexle returns a "refund token", avaliable in the `Response.id`
field.
Multiple, partial refunds can be performed on the same "charge token"
referencing a previous `purchase/3` or `capture/3` till the cumulative refunds
equals the `capture/3`d or `purchase/3`d amount.
## Example
The following session shows how one would refund $100 of a previous
`purchase/3` (and similarily for `capture/3`s).
```
iex> amount = Money.new(100, :USD)
iex> token = "some-real-token"
iex> Gringotts.refund(Gringotts.Gateways.Trexle, amount, token)
```
"""
@spec refund(Money.t(), String.t(), keyword) :: {:ok | :error, Response}
def refund(amount, charge_token, opts \\ []) do
{_, int_value, _} = Money.to_integer(amount)
params = [amount: int_value]
commit(:post, "charges/#{charge_token}/refunds", params, opts)
end
@doc """
Stores the card information for future use.
## Example
The following session shows how one would store a card (a payment-source) for
future use.
```
iex> card = %CreditCard{
first_name: "Harry",
last_name: "Potter",
number: "5200828282828210",
year: 2099, month: 12,
verification_code: "123",
brand: "VISA"}
iex> address = %Address{
street1: "301, Gryffindor",
street2: "Hogwarts School of Witchcraft and Wizardry, Hogwarts Castle",
city: "Highlands",
region: "SL",
country: "GB",
postal_code: "11111",
phone: "(555)555-5555"}
iex> options = [email: "<EMAIL>",
ip_address: "127.0.0.1",
billing_address: address,
description: "For our valued customer, Mr. Potter"]
iex> Gringotts.store(Gringotts.Gateways.Trexle, card, options)
```
"""
@spec store(CreditCard.t(), keyword) :: {:ok | :error, Response}
def store(payment, opts \\ []) do
params =
[email: opts[:email]] ++ card_params(payment) ++ address_params(opts[:billing_address])
commit(:post, "customers", params, opts)
end
defp create_params_for_auth_or_purchase(amount, payment, opts, capture \\ true) do
{currency, int_value, _} = Money.to_integer(amount)
[
capture: capture,
amount: int_value,
currency: currency,
email: opts[:email],
ip_address: opts[:ip_address],
description: opts[:description]
] ++ card_params(payment) ++ address_params(opts[:billing_address])
end
defp card_params(%CreditCard{} = card) do
[
"card[name]": CreditCard.full_name(card),
"card[number]": card.number,
"card[expiry_year]": card.year,
"card[expiry_month]": card.month,
"card[cvc]": card.verification_code
]
end
defp address_params(%Address{} = address) do
[
"card[address_line1]": address.street1,
"card[address_line2]": address.street2,
"card[address_city]": address.city,
"card[address_postcode]": address.postal_code,
"card[address_state]": address.region,
"card[address_country]": address.country
]
end
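# For illustration (values taken from the doc examples above): these helpers
# flatten card and address details into the form-encoded field names Trexle
# expects:
#
#     card_params(card)
#     #=> ["card[name]": "Harry Potter", "card[number]": "5200828282828210",
#     #    "card[expiry_year]": 2099, "card[expiry_month]": 12, "card[cvc]": "123"]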
defp commit(method, path, params, opts) do
auth_token = "Basic #{Base.encode64(opts[:config][:api_key])}"
headers = [
{"Content-Type", "application/x-www-form-urlencoded"},
{"Authorization", auth_token}
]
options = [basic_auth: {opts[:config][:api_key], "password"}]
url = "#{base_url(opts)}#{path}"
method
|> HTTPoison.request(url, {:form, params}, headers, options)
|> respond()
end
@spec respond(term) :: {:ok | :error, Response}
defp respond(response)
defp respond({:ok, %{status_code: code, body: body}}) when code in [200, 201] do
{:ok, results} = decode(body)
token = results["response"]["token"]
message = results["response"]["status_message"]
{
:ok,
%Response{id: token, message: message, raw: body, status_code: code}
}
end
defp respond({:ok, %{status_code: code, body: body}}) when code in [401] do
{
:error,
%Response{reason: "Unauthorized access.", message: "Unauthorized access", raw: body}
}
end
defp respond({:ok, %{status_code: status_code, body: body}}) do
{:ok, results} = decode(body)
detail = results["detail"]
{:error, %Response{status_code: status_code, message: detail, reason: detail, raw: body}}
end
defp respond({:error, %HTTPoison.Error{} = error}) do
{
:error,
%Response{
reason: "network related failure",
message: "HTTPoison says '#{error.reason}' [ID: #{error.id || "nil"}]"
}
}
end
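# For illustration (hypothetical payload): a 201 reply whose body decodes to
# %{"response" => %{"token" => "charge_abc", "status_message" => "Transaction approved"}}
# would yield {:ok, %Response{id: "charge_abc", message: "Transaction approved", status_code: 201, raw: body}}.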
defp base_url(opts), do: opts[:test_url] || @base_url
end