defmodule Getaways.Vacation do
@moduledoc """
The Vacation context: public interface for finding, booking,
and reviewing vacation places.
"""
import Ecto.Query, warn: false
alias Getaways.Repo
alias Getaways.Vacation.{Place, Booking, Review}
alias Getaways.Accounts.User
@doc """
Returns the place with the given `slug`.
Raises `Ecto.NoResultsError` if no place was found.
"""
def get_place_by_slug!(slug) do
Repo.get_by!(Place, slug: slug)
end
@doc """
Returns a list of all places.
"""
def list_places do
Repo.all(Place)
end
@doc """
Returns a list of places matching the given `criteria`.
Example Criteria:
[{:limit, 15}, {:order, :asc}, {:filter, [{:matching, "lake"}, {:wifi, true}, {:guest_count, 3}]}]
"""
def list_places(criteria) do
query = from p in Place
Enum.reduce(criteria, query, fn
{:limit, limit}, query ->
from p in query, limit: ^limit
{:filter, filters}, query ->
filter_with(filters, query)
{:order, order}, query ->
from p in query, order_by: [{^order, :id}]
end)
|> Repo.all()
end
defp filter_with(filters, query) do
Enum.reduce(filters, query, fn
{:matching, term}, query ->
pattern = "%#{term}%"
from q in query,
where:
ilike(q.name, ^pattern) or
ilike(q.description, ^pattern) or
ilike(q.location, ^pattern)
{:pet_friendly, value}, query ->
from q in query, where: q.pet_friendly == ^value
{:pool, value}, query ->
from q in query, where: q.pool == ^value
{:wifi, value}, query ->
from q in query, where: q.wifi == ^value
{:guest_count, count}, query ->
from q in query, where: q.max_guests >= ^count
{:available_between, %{start_date: start_date, end_date: end_date}}, query ->
available_between(query, start_date, end_date)
end)
end
# Returns a query for places available between the given
# start_date and end_date using the Postgres-specific
# OVERLAPS function.
defp available_between(query, start_date, end_date) do
from place in query,
left_join: booking in Booking,
on:
booking.place_id == place.id and
fragment(
"(?, ?) OVERLAPS (?, ? + INTERVAL '1' DAY)",
booking.start_date,
booking.end_date,
type(^start_date, :date),
type(^end_date, :date)
),
where: is_nil(booking.place_id)
end
@doc """
Returns the booking with the given `id`.
Raises `Ecto.NoResultsError` if no booking was found.
"""
def get_booking!(id) do
Repo.get!(Booking, id)
end
@doc """
Creates a booking for the given user.
"""
def create_booking(%User{} = user, attrs) do
%Booking{}
|> Booking.changeset(attrs)
|> Ecto.Changeset.put_assoc(:user, user)
|> Repo.insert()
end
@doc """
Cancels the given booking.
"""
def cancel_booking(%Booking{} = booking) do
booking
|> Booking.cancel_changeset(%{state: "canceled"})
|> Repo.update()
end
@doc """
Creates a review for the given user.
"""
def create_review(%User{} = user, attrs) do
%Review{}
|> Review.changeset(attrs)
|> Ecto.Changeset.put_assoc(:user, user)
|> Repo.insert()
end
# Dataloader
def datasource() do
Dataloader.Ecto.new(Repo, query: &query/2)
end
def query(Booking, %{scope: :place, limit: limit}) do
Booking
|> where(state: "reserved")
|> order_by([desc: :start_date])
|> limit(^limit)
end
def query(Booking, %{scope: :user}) do
Booking
|> order_by([asc: :start_date])
end
def query(queryable, _) do
queryable
end
end

# source: backend/lib/getaways/vacation.ex
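
# A minimal usage sketch for the context above; the criteria mirror the
# example in the `list_places/1` docs, and the concrete values are hypothetical.
#
#     Getaways.Vacation.list_places(
#       limit: 15,
#       order: :asc,
#       filter: [matching: "lake", wifi: true, guest_count: 3]
#     )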
defmodule Asteroid.Subject do
use AttributeRepository.Resource, otp_app: :asteroid
@moduledoc """
`AttributeRepository.Resource` for subjects
Subject resources are real-world physical persons, such as the reader of this documentation. The
term follows the OAuth2 definition of a subject.
## Field naming
The following fields have standardised meaning:
- `"sub"`: the subject identifier (`"sub"` in OAuth2) (`String.t()`)
- `"consented_scopes"`: a map whose keys are the `client_id`s and the values the string
representation of the already consented scopes (such as `"email profile address"`). Note that
although this is the format used in the demo application, other ways to store consented scopes
are also possible (but it still has to remain per client)
## Configuration
This modules uses the default configuration of `AttributeRepository.Resource` (see `config/1`).
## Security considerations
- When storing subject passwords, you shall take into account the specifics of such password
(reuse, non-randomness...) and use the relevant algorithms. If you don't know about this
topic, you should probably not try to implement it by yourself.
## Example
```elixir
iex> alias Asteroid.Subject
Asteroid.Subject
iex> {:ok, s} = Subject.load("uid=john,ou=People,dc=example,dc=org")
{:ok,
%Asteroid.Subject{
attrs: %{
"cn" => ["<NAME>"],
"displayName" => "<NAME>",
"givenName" => ["John"],
"mail" => ["<EMAIL>"],
"manager" => ["uid=toto,ou=People,dc=example,dc=org"],
"sn" => ["Doe"]
},
id: "uid=john,ou=People,dc=example,dc=org",
modifications: [],
newly_created: false
}}
iex> s = s
...> |> Subject.add("initials", "JD")
...> |> Subject.add("mail", "<EMAIL>")
...> |> Subject.remove("manager")
%Asteroid.Subject{
attrs: %{
"cn" => ["<NAME>"],
"displayName" => "<NAME>",
"givenName" => ["John"],
"initials" => "JD",
"mail" => ["<EMAIL>", "<EMAIL>"],
"sn" => ["Doe"]
},
id: "uid=john,ou=People,dc=example,dc=org",
modifications: [
{:add, "initials", "JD"},
{:add, "mail", "<EMAIL>"},
{:delete, "manager"}
],
newly_created: false
}
iex> Subject.store(s)
:ok
```
"""
def gen_new_id(opts) do
"sub-" <> super(opts)
end
end

# source: lib/asteroid/subject.ex
defmodule CatalogApi.Address.Iso3166 do
@moduledoc false
@doc false
@spec validate(any()) :: :ok | :error
def validate("AB"), do: :ok
def validate("AD"), do: :ok
def validate("AE"), do: :ok
def validate("AF"), do: :ok
def validate("AG"), do: :ok
def validate("AI"), do: :ok
def validate("AL"), do: :ok
def validate("AM"), do: :ok
def validate("AO"), do: :ok
def validate("AQ"), do: :ok
def validate("AR"), do: :ok
def validate("AS"), do: :ok
def validate("AT"), do: :ok
def validate("AU"), do: :ok
def validate("AW"), do: :ok
def validate("AX"), do: :ok
def validate("AZ"), do: :ok
def validate("BA"), do: :ok
def validate("BB"), do: :ok
def validate("BD"), do: :ok
def validate("BE"), do: :ok
def validate("BF"), do: :ok
def validate("BG"), do: :ok
def validate("BH"), do: :ok
def validate("BI"), do: :ok
def validate("BJ"), do: :ok
def validate("BL"), do: :ok
def validate("BM"), do: :ok
def validate("BN"), do: :ok
def validate("BO"), do: :ok
def validate("BR"), do: :ok
def validate("BS"), do: :ok
def validate("BT"), do: :ok
def validate("BV"), do: :ok
def validate("BW"), do: :ok
def validate("BY"), do: :ok
def validate("BZ"), do: :ok
def validate("CA"), do: :ok
def validate("CC"), do: :ok
def validate("CD"), do: :ok
def validate("CF"), do: :ok
def validate("CG"), do: :ok
def validate("CH"), do: :ok
def validate("CI"), do: :ok
def validate("CK"), do: :ok
def validate("CL"), do: :ok
def validate("CM"), do: :ok
def validate("CN"), do: :ok
def validate("CO"), do: :ok
def validate("CR"), do: :ok
def validate("CU"), do: :ok
def validate("CV"), do: :ok
def validate("CW"), do: :ok
def validate("CX"), do: :ok
def validate("CY"), do: :ok
def validate("CZ"), do: :ok
def validate("DE"), do: :ok
def validate("DJ"), do: :ok
def validate("DK"), do: :ok
def validate("DM"), do: :ok
def validate("DO"), do: :ok
def validate("DZ"), do: :ok
def validate("EC"), do: :ok
def validate("EE"), do: :ok
def validate("EG"), do: :ok
def validate("EH"), do: :ok
def validate("ER"), do: :ok
def validate("ES"), do: :ok
def validate("ET"), do: :ok
def validate("FI"), do: :ok
def validate("FJ"), do: :ok
def validate("FK"), do: :ok
def validate("FM"), do: :ok
def validate("FO"), do: :ok
def validate("FR"), do: :ok
def validate("GA"), do: :ok
def validate("GB"), do: :ok
def validate("GD"), do: :ok
def validate("GE"), do: :ok
def validate("GF"), do: :ok
def validate("GG"), do: :ok
def validate("GH"), do: :ok
def validate("GI"), do: :ok
def validate("GL"), do: :ok
def validate("GM"), do: :ok
def validate("GN"), do: :ok
def validate("GP"), do: :ok
def validate("GQ"), do: :ok
def validate("GR"), do: :ok
def validate("GS"), do: :ok
def validate("GT"), do: :ok
def validate("GU"), do: :ok
def validate("GW"), do: :ok
def validate("GY"), do: :ok
def validate("HK"), do: :ok
def validate("HM"), do: :ok
def validate("HN"), do: :ok
def validate("HR"), do: :ok
def validate("HT"), do: :ok
def validate("HU"), do: :ok
def validate("ID"), do: :ok
def validate("IE"), do: :ok
def validate("IL"), do: :ok
def validate("IM"), do: :ok
def validate("IN"), do: :ok
def validate("IO"), do: :ok
def validate("IQ"), do: :ok
def validate("IR"), do: :ok
def validate("IS"), do: :ok
def validate("IT"), do: :ok
def validate("JE"), do: :ok
def validate("JM"), do: :ok
def validate("JO"), do: :ok
def validate("JP"), do: :ok
def validate("KE"), do: :ok
def validate("KG"), do: :ok
def validate("KH"), do: :ok
def validate("KI"), do: :ok
def validate("KM"), do: :ok
def validate("KN"), do: :ok
def validate("KP"), do: :ok
def validate("KR"), do: :ok
def validate("KW"), do: :ok
def validate("KY"), do: :ok
def validate("KZ"), do: :ok
def validate("LA"), do: :ok
def validate("LB"), do: :ok
def validate("LC"), do: :ok
def validate("LI"), do: :ok
def validate("LK"), do: :ok
def validate("LR"), do: :ok
def validate("LS"), do: :ok
def validate("LT"), do: :ok
def validate("LU"), do: :ok
def validate("LV"), do: :ok
def validate("LY"), do: :ok
def validate("MA"), do: :ok
def validate("MC"), do: :ok
def validate("MD"), do: :ok
def validate("ME"), do: :ok
def validate("MF"), do: :ok
def validate("MG"), do: :ok
def validate("MH"), do: :ok
def validate("MK"), do: :ok
def validate("ML"), do: :ok
def validate("MM"), do: :ok
def validate("MN"), do: :ok
def validate("MO"), do: :ok
def validate("MP"), do: :ok
def validate("MQ"), do: :ok
def validate("MR"), do: :ok
def validate("MS"), do: :ok
def validate("MT"), do: :ok
def validate("MU"), do: :ok
def validate("MV"), do: :ok
def validate("MW"), do: :ok
def validate("MX"), do: :ok
def validate("MY"), do: :ok
def validate("MZ"), do: :ok
def validate("NA"), do: :ok
def validate("NC"), do: :ok
def validate("NE"), do: :ok
def validate("NF"), do: :ok
def validate("NG"), do: :ok
def validate("NI"), do: :ok
def validate("NL"), do: :ok
def validate("NO"), do: :ok
def validate("NP"), do: :ok
def validate("NR"), do: :ok
def validate("NU"), do: :ok
def validate("NZ"), do: :ok
def validate("OM"), do: :ok
def validate("PA"), do: :ok
def validate("PE"), do: :ok
def validate("PF"), do: :ok
def validate("PG"), do: :ok
def validate("PH"), do: :ok
def validate("PK"), do: :ok
def validate("PL"), do: :ok
def validate("PM"), do: :ok
def validate("PN"), do: :ok
def validate("PR"), do: :ok
def validate("PS"), do: :ok
def validate("PT"), do: :ok
def validate("PW"), do: :ok
def validate("PY"), do: :ok
def validate("QA"), do: :ok
def validate("RE"), do: :ok
def validate("RO"), do: :ok
def validate("RS"), do: :ok
def validate("RU"), do: :ok
def validate("RW"), do: :ok
def validate("SA"), do: :ok
def validate("SB"), do: :ok
def validate("SC"), do: :ok
def validate("SD"), do: :ok
def validate("SE"), do: :ok
def validate("SG"), do: :ok
def validate("SH"), do: :ok
def validate("SI"), do: :ok
def validate("SJ"), do: :ok
def validate("SK"), do: :ok
def validate("SL"), do: :ok
def validate("SM"), do: :ok
def validate("SN"), do: :ok
def validate("SO"), do: :ok
def validate("SR"), do: :ok
def validate("SS"), do: :ok
def validate("ST"), do: :ok
def validate("SV"), do: :ok
def validate("SX"), do: :ok
def validate("SY"), do: :ok
def validate("SZ"), do: :ok
def validate("TC"), do: :ok
def validate("TD"), do: :ok
def validate("TF"), do: :ok
def validate("TG"), do: :ok
def validate("TH"), do: :ok
def validate("TJ"), do: :ok
def validate("TK"), do: :ok
def validate("TL"), do: :ok
def validate("TM"), do: :ok
def validate("TN"), do: :ok
def validate("TO"), do: :ok
def validate("TR"), do: :ok
def validate("TT"), do: :ok
def validate("TV"), do: :ok
def validate("TW"), do: :ok
def validate("TZ"), do: :ok
def validate("UA"), do: :ok
def validate("UG"), do: :ok
def validate("UM"), do: :ok
def validate("US"), do: :ok
def validate("UY"), do: :ok
def validate("UZ"), do: :ok
def validate("VA"), do: :ok
def validate("VC"), do: :ok
def validate("VE"), do: :ok
def validate("VG"), do: :ok
def validate("VI"), do: :ok
def validate("VN"), do: :ok
def validate("VU"), do: :ok
def validate("WF"), do: :ok
def validate("WS"), do: :ok
def validate("YE"), do: :ok
def validate("YT"), do: :ok
def validate("ZA"), do: :ok
def validate("ZM"), do: :ok
def validate("ZW"), do: :ok
def validate(_), do: :error
end

# source: lib/catalog_api/address/iso_3166.ex
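
# Usage sketch: any two-letter code with a dedicated clause returns :ok;
# everything else falls through to the catch-all clause and returns :error.
#
#     iex> CatalogApi.Address.Iso3166.validate("US")
#     :ok
#     iex> CatalogApi.Address.Iso3166.validate("XX")
#     :error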
defmodule Plenario.QueryUtils do
import Ecto.Query
@doc """
Generically applies ordering. Query modules should delegate to this function.
"""
@spec order(Ecto.Queryable.t(), {:asc | :desc, atom()}) :: Ecto.Queryable.t()
def order(query, {dir, fname}) do
case Enum.empty?(query.order_bys) do
true -> do_order(query, dir, fname)
false -> query
end
end
defp do_order(query, :asc, fname) do
case Enum.empty?(query.group_bys) do
true ->
order_by(query, [q], asc: ^fname)
false ->
order_by(query, [q], asc: ^fname)
|> group_by(^fname)
end
end
defp do_order(query, :desc, fname) do
case Enum.empty?(query.group_bys) do
true ->
order_by(query, [q], desc: ^fname)
false ->
order_by(query, [q], desc: ^fname)
|> group_by(^fname)
end
end
@doc """
Generically applies pagination. Query modules should delegate to this function.
"""
@spec paginate(Ecto.Queryable.t(), {pos_integer(), pos_integer()}) :: Ecto.Queryable.t() | no_return()
def paginate(query, {page, size}) do
cond do
!is_integer(page) or page < 1 -> raise "page must be a positive integer"
!is_integer(size) or size < 1 -> raise "size must be a positive integer"
true -> :ok
end
starting_at = (page - 1) * size
query
|> offset(^starting_at)
|> limit(^size)
end
@doc """
Applies the given `module.func` to the query if the flag is true, otherwise it
simply returns the query unmodified.
"""
@spec boolean_compose(Ecto.Queryable.t(), boolean(), module(), atom()) :: Ecto.Queryable.t()
def boolean_compose(query, false, _module, _func), do: query
def boolean_compose(query, true, module, func), do: apply(module, func, [query])
@doc """
Applies the given `module.func` to the query with the given `value` as the parameter
to the function if the value is not :empty, otherwise it returns the query unmodified.
"""
@spec filter_compose(Ecto.Queryable.t(), :empty | any(), module(), atom()) :: Ecto.Queryable.t()
def filter_compose(query, :empty, _module, _func), do: query
def filter_compose(query, value, module, func), do: apply(module, func, [query, value])
end

# source: lib/plenario/queries/query_utils.ex
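
# A sketch of how these helpers compose inside a query module. `Place` and
# the `PlaceQueries` module with its functions are hypothetical stand-ins.
#
#     import Ecto.Query
#     alias Plenario.QueryUtils
#
#     from(p in Place)
#     |> QueryUtils.filter_compose(:empty, PlaceQueries, :by_name)    # no-op
#     |> QueryUtils.boolean_compose(true, PlaceQueries, :active_only) # applied
#     |> QueryUtils.order({:asc, :name})
#     |> QueryUtils.paginate({1, 25})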
defmodule Nebulex.Object do
@moduledoc """
Defines a Cache Object.
This is the struct used by the caches to store and retrieve data.
"""
defstruct [:key, :value, :version, :expire_at]
@type t :: %__MODULE__{
key: any,
value: any,
version: any,
expire_at: integer | nil
}
@doc """
Returns the UNIX timestamp (in seconds) at which an entry created now with the given `ttl` expires.
## Example
iex> expire_at = Nebulex.Object.expire_at(10)
iex> expire_at - Nebulex.Object.ts()
10
"""
@spec expire_at(ttl :: timeout | nil) :: integer | nil
def expire_at(nil), do: nil
def expire_at(:infinity), do: nil
def expire_at(ttl) when is_integer(ttl), do: ts() + ttl
@doc """
Returns the remaining time to live for the given timestamp.
## Example
iex> expire_at = Nebulex.Object.expire_at(10)
iex> Nebulex.Object.remaining_ttl(expire_at)
10
"""
@spec remaining_ttl(object_or_ttl :: Nebulex.Object.t() | integer | nil) :: timeout
def remaining_ttl(nil), do: :infinity
def remaining_ttl(%Nebulex.Object{expire_at: expire_at}), do: remaining_ttl(expire_at)
def remaining_ttl(expire_at) when is_integer(expire_at) do
remaining = expire_at - ts()
if remaining > 0, do: remaining, else: 0
end
@doc """
Wrapper for `DateTime.to_unix/2`.
## Example
iex> 1_464_096_368 |> DateTime.from_unix!() |> Nebulex.Object.ts()
1464096368
"""
@spec ts(datetime :: Calendar.datetime()) :: integer()
def ts(datetime \\ DateTime.utc_now()) do
DateTime.to_unix(datetime)
end
@doc """
Returns whether the given `object` has expired or not.
## Example
iex> Nebulex.Object.expired?(%Nebulex.Object{})
false
"""
@spec expired?(Nebulex.Object.t()) :: boolean
def expired?(%Nebulex.Object{expire_at: expire_at}) do
remaining_ttl(expire_at) <= 0
end
def encode(data, opts \\ []) do
data
|> :erlang.term_to_binary(opts)
|> Base.url_encode64()
end
def decode(data, opts \\ []) when is_binary(data) do
data
|> Base.url_decode64!()
|> :erlang.binary_to_term(opts)
end
end

# source: lib/nebulex/object.ex
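
# The encode/decode pair above is a term_to_binary/Base.url_encode64 roundtrip;
# a quick sketch:
#
#     iex> obj = %Nebulex.Object{key: :a, value: 1}
#     iex> obj |> Nebulex.Object.encode() |> Nebulex.Object.decode()
#     %Nebulex.Object{key: :a, value: 1, version: nil, expire_at: nil}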
defmodule CoursePlanner.Terms do
@moduledoc """
Handles all interactions with Terms: create, list, fetch, edit, and delete.
"""
import Ecto.Query
alias CoursePlanner.{Repo, Courses.OfferedCourses, Notifications.Notifier,
Accounts.Coordinators, Notifications}
alias CoursePlanner.Terms.{Holiday, Term}
alias Ecto.Changeset
@notifier Application.get_env(:course_planner, :notifier, Notifier)
def all do
query = from t in Term, order_by: [desc: t.start_date, desc: t.end_date]
Repo.all(query)
end
def all_for_classes do
Repo.all(from t in Term,
join: oc in assoc(t, :offered_courses),
join: co in assoc(oc, :course),
join: c in assoc(oc, :classes),
preload: [offered_courses: {oc, classes: c, course: co}],
order_by: [asc: t.start_date, asc: co.name, asc: c.date,
asc: c.starting_at, asc: c.finishes_at])
end
def new do
Term.changeset(%Term{holidays: [], courses: []})
end
def create(params) do
%Term{}
|> term_changeset_with_holidays(params)
|> Repo.insert
end
def get(id) do
case Repo.get(Term, id) do
nil -> {:error, :not_found}
term -> {:ok, Repo.preload(term, [:courses])}
end
end
def edit(id) do
case get(id) do
{:ok, term} -> {:ok, term, Term.changeset(term)}
error -> error
end
end
def update(id, params) do
case get(id) do
{:ok, term} ->
term
|> term_changeset_with_holidays(params)
|> Repo.update
|> format_update_error(term)
error -> error
end
end
defp format_update_error({:ok, _} = result, _), do: result
defp format_update_error({:error, changeset}, term), do: {:error, term, changeset}
defp term_changeset_with_holidays(term, params) do
changeset = Term.changeset(term, params)
start_date = Changeset.get_field(changeset, :start_date)
end_date = Changeset.get_field(changeset, :end_date)
holidays = get_holiday_changesets(params, start_date, end_date)
changeset
|> Changeset.put_embed(:holidays, holidays)
|> Term.validate_minimum_teaching_days(holidays)
end
defp get_holiday_changesets(params, start_date, end_date) do
params
|> Map.get("holidays", %{})
|> Map.values()
|> Enum.map(&Holiday.changeset(%Holiday{}, start_date, end_date, &1))
end
def delete(id) do
case get(id) do
{:ok, term} -> Repo.delete(term)
error -> error
end
end
def notify_term_users(term, current_user, notification_type, path \\ "/") do
term
|> get_subscribed_users()
|> Enum.reject(fn %{id: id} -> id == current_user.id end)
|> Enum.each(&(notify_user(&1, notification_type, path)))
end
def notify_user(user, type, path) do
Notifications.new()
|> Notifications.type(type)
|> Notifications.resource_path(path)
|> Notifications.to(user)
|> @notifier.notify_later()
end
def get_subscribed_users(term) do
offered_courses = term
|> Repo.preload([:offered_courses, offered_courses: :students, offered_courses: :teachers])
|> Map.get(:offered_courses)
students_and_teachers = OfferedCourses.get_subscribed_users(offered_courses)
students_and_teachers ++ Coordinators.all()
end
def find_all_by_user(%{role: role}) when role in ["Coordinator", "Supervisor"] do
Repo.all(from t in Term,
join: oc in assoc(t, :offered_courses),
join: co in assoc(oc, :course),
preload: [offered_courses: {oc, course: co}],
order_by: [desc: t.start_date, asc: co.name])
end
def find_all_by_user(%{role: "Teacher", id: user_id}) do
Repo.all(from t in Term,
join: oc in assoc(t, :offered_courses),
join: co in assoc(oc, :course),
join: te in assoc(oc, :teachers),
preload: [offered_courses: {oc, course: co, teachers: te}],
where: te.id == ^user_id,
order_by: [desc: t.start_date, asc: co.name]
)
end
def find_all_by_user(%{role: "Student", id: user_id}) do
Repo.all(from t in Term,
join: oc in assoc(t, :offered_courses),
join: co in assoc(oc, :course),
join: s in assoc(oc, :students),
preload: [offered_courses: {oc, course: co, students: s}],
where: s.id == ^user_id,
order_by: [desc: t.start_date, asc: co.name]
)
end
def student_attendances(student_id) do
Repo.all(from t in Term,
join: oc in assoc(t, :offered_courses),
join: co in assoc(oc, :course),
join: c in assoc(oc, :classes),
join: a in assoc(c, :attendances),
join: as in assoc(a, :student),
preload: [offered_courses: {oc, course: co, classes: {c, attendances: {a, student: as}}}],
where: as.id == ^student_id,
order_by: [desc: t.start_date, desc: t.end_date,
asc: co.name, asc: c.date, asc: c.starting_at])
end
end

# source: lib/course_planner/terms/terms.ex
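
# A sketch of the params shape `create/1` and `update/2` expect. The Term and
# Holiday field names here are assumptions (their schemas are not in this
# file), but the indexed "holidays" map mirrors what
# `get_holiday_changesets/3` reads.
#
#     CoursePlanner.Terms.create(%{
#       "start_date" => "2020-01-06",
#       "end_date" => "2020-03-27",
#       "holidays" => %{
#         "0" => %{"name" => "Some holiday", "date" => "2020-02-14"}
#       }
#     })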
defmodule PhoenixMTM.Mappers do
@moduledoc ~S"""
A collection of commonly used mappers for the `collection_checkboxes` helper.
To use, pass a capture of the mapping function you wish to use to the
`collection_checkboxes` helper.
## Example
<%= PhoenixMTM.Helpers.collection_checkboxes f, :tags,
Enum.map(@tags, &({&1.name, &1.id})),
selected: Enum.map(f.data.tags, &(&1.id)),
mapper: &PhoenixMTM.Mappers.nested/6
## Using Custom Mappers
If you want to make your own custom mapper, you can optionally
`use PhoenixMTM.Mappers` and bring in some of the Phoenix tag helpers.
This is not required, as you can manually include which ever imports you want.
"""
import Phoenix.HTML.Form
import Phoenix.HTML.Tag
import Phoenix.HTML
@doc ~S"""
Checkbox input and label returned as a 2 element list - the default.
### Example Output
```html
<input type="checkbox" value="1" name="checkbox_1">
<label for="checkbox_1">1</label>
```
"""
def unwrapped(form, field, input_opts, label_content, label_opts, _opts) do
[
tag(:input, input_opts),
label(form, field, "#{label_content}", label_opts)
]
end
@doc ~S"""
Checkbox input and label returned as a label with the checkbox and label text
nested within.
### Example Output
```html
<label for="checkbox_1">
<input type="checkbox" value="1" name="checkbox_1">
1
</label>
```
"""
def nested(form, field, input_opts, label_content, label_opts, _opts) do
label(form, field, label_opts) do
[
tag(:input, input_opts),
html_escape(label_content)
]
end
end
@doc ~S"""
Checkbox input and label returned as a label with the checkbox and label text
nested within. The label text is not escaped in any way.
If you are displaying labels that might be provided by untrusted users, you
absolutely *do not* want to use this mapper.
This mapper will be deprecated at a later date. If you wish to keep this
functionality, copy it to your own custom mapper module.
### Example Output
```html
<label for="checkbox_1">
<input type="checkbox" value="1" name="checkbox_1">
1
</label>
```
"""
def unsafe_nested(form, field, input_opts, label_content, label_opts, _opts) do
label(form, field, label_opts) do
[
tag(:input, input_opts),
{:safe, "#{label_content}"}
]
end
end
defmacro __using__(_) do
quote do
import Phoenix.HTML
import Phoenix.HTML.Form
import Phoenix.HTML.Tag
end
end
end

# source: lib/phoenix_mtm/mappers.ex
defmodule Parser do
use Platform.Parsing.Behaviour
## test payloads
# 0208c900038009812b8014810880027fe8800880040bf5
# 0208c900020bf5
def fields do
[
%{field: "wind_speed", display: "Wind speed", unit: "m⋅s⁻¹"},
%{field: "wind_direction", display: "Wind direction", unit: "°"},
%{field: "maximum_wind_speed", display: "Maximum wind speed", unit: "m⋅s⁻¹"},
%{field: "air_temperature", display: "Air temperature", unit: "°C"},
%{field: "x_orientation_angle", display: "X orientation angle", unit: "°"},
%{field: "y_orientation_angle", display: "Y orientation angle", unit: "°"},
%{field: "north_wind_speed", display: "North wind speed", unit: "m⋅s⁻¹"},
%{field: "east_wind_speed", display: "East wind speed", unit: "m⋅s⁻¹"},
%{field: "battery_voltage", display: "Battery voltage", unit: "V"}
]
end
def parse(<<2, device_id::size(16), flags::binary-size(2), words::binary>>, _meta) do
{_remaining, result} =
{words, %{:device_id => device_id, :protocol_version => 2}}
|> sensor0(flags)
|> sensor1(flags)
result
end
defp sensor0({<<x0::size(16), x1::size(16), x2::size(16), x3::size(16), x4::size(16), x5::size(16), x6::size(16), x7::size(16), remaining::binary>>, result},
<<_::size(15), 1::size(1), _::size(0)>>) do
{remaining,
Map.merge(result,
%{
:wind_speed => (x0 - 32768) / 100,
:wind_direction => (x1 - 32768) / 10,
:maximum_wind_speed => (x2 - 32768) / 100,
:air_temperature => (x3 - 32768) / 10,
:x_orientation_angle => (x4 - 32768) / 10,
:y_orientation_angle => (x5 - 32768) / 10,
:north_wind_speed => (x6 - 32768) / 100,
:east_wind_speed => (x7 - 32768) / 100
})}
end
defp sensor0(result, _flags), do: result
defp sensor1({<<x0::size(16), remaining::binary>>, result},
<<_::size(14), 1::size(1), _::size(1)>>) do
{remaining,
Map.merge(result,
%{
:battery_voltage => x0 / 1000
})}
end
defp sensor1(result, _flags), do: result
end

# source: DL-ATM22/DL-ATM22.ELEMENT-IoT.ex
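
# Worked example using the second test payload listed at the top of the module
# (0208c900020bf5): only the sensor1 flag bit is set, so just the battery
# voltage word is decoded.
#
#     iex> Parser.parse(Base.decode16!("0208c900020bf5", case: :lower), %{})
#     %{device_id: 2249, protocol_version: 2, battery_voltage: 3.061}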
defmodule NPRx.StationFinder do
@moduledoc """
Find stations and station information. This can be stations close to your current geographic location or any number of other criteria. For more detailed information see [the NPR docs](https://dev.npr.org/api/#!/stationfinder/searchStations)
"""
import NPRx.HTTP
@typedoc """
Allowed parameters for stations endpoint
"""
@type station_query_params :: [q: String.t, city: String.t, state: String.t, lat: String.t, lon: String.t]
@doc """
Get a list of stations.
If no query parameters are passed in, it returns a list of stations that are geographically closest to the calling client (based on GeoIP information)
If one or more query parameters are passed in, it performs a search of NPR stations that match those search criteria (not taking into account the client's physical location)
Available parameters are:
* `q` - Search terms to search on; can be a station name, network name, call letters, or zipcode
* `city` - A city to look for stations from; intended to be paired with `state`
* `state` - A state to look for stations from (using the 2-letter abbreviation); intended to be paired with `city`
* `lat` - A latitude value from a geographic coordinate system; only works if paired with `lon`
* `lon` - A longitude value from a geographic coordinate system; only works if paired with `lat`
"""
@spec stations(String.t, station_query_params) :: {:ok, list()} | {:error, map() | list()}
def stations(token, query_params \\ []) do
get("/stationfinder/v3/stations", token, query_params)
|> case do
{:ok, result} -> {:ok, Map.get(result, "items")}
error -> error
end
end
@doc """
This endpoint retrieves information about a given station, based on its numeric ID, which is consistent across all of NPR's APIs.
A typical use case for this data is for clients who want to create a dropdown menu, modal/pop-up or dedicated page displaying more information about the station the client is localized to, including, for example, links to the station's homepage and donation (pledge) page.
"""
@spec station_info(String.t, String.t) :: {:ok, map()} | {:error, map() | list()}
def station_info(station_id, token) do
get("/stationfinder/v3/stations/#{station_id}", token)
end
end

# source: lib/nprx/station_finder.ex
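
# A usage sketch; `token` is a bearer token obtained elsewhere, and the
# station ID is hypothetical.
#
#     {:ok, stations} = NPRx.StationFinder.stations(token, q: "WBEZ")
#     NPRx.StationFinder.station_info("305", token)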
defmodule Pathex.Lenses.Some do
@moduledoc """
Private module for `some()` lens
> see `Pathex.Lenses.some/0` documentation
"""
def some do
fn
:view, {%{} = map, func} ->
Enum.find_value(map, :error, fn {_k, v} ->
with :error <- func.(v) do
false
end
end)
:view, {[{a, _} | _] = kwd, func} when is_atom(a) ->
Enum.find_value(kwd, :error, fn {_k, v} ->
with :error <- func.(v) do
false
end
end)
:view, {l, func} when is_list(l) ->
Enum.find_value(l, :error, fn v ->
with :error <- func.(v) do
false
end
end)
:view, {t, func} when is_tuple(t) ->
Enum.find_value(Tuple.to_list(t), :error, fn v ->
with :error <- func.(v) do
false
end
end)
:update, {%{} = map, func} ->
found =
Enum.find_value(map, :error, fn {k, v} ->
case func.(v) do
{:ok, v} -> {k, v}
:error -> false
end
end)
with {k, v} <- found do
{:ok, Map.put(map, k, v)}
end
# TODO: optimize through reduce and prepend
:update, {[{a, _} | _] = kwd, func} when is_atom(a) ->
found =
Enum.find_value(kwd, :error, fn {k, v} ->
case func.(v) do
{:ok, v} -> {k, v}
:error -> false
end
end)
with {k, v} <- found do
{:ok, Keyword.put(kwd, k, v)}
end
:update, {l, func} when is_list(l) ->
Enum.reduce(l, {:error, []}, fn
v, {:error, acc} ->
case func.(v) do
{:ok, v} -> {:ok, [v | acc]}
:error -> {:error, [v | acc]}
end
v, {:ok, acc} ->
{:ok, [v | acc]}
end)
|> case do
{:error, _} -> :error
{:ok, list} -> {:ok, :lists.reverse(list)}
end
:update, {t, func} when is_tuple(t) ->
t
|> Tuple.to_list()
|> Enum.reduce_while(1, fn v, index ->
case func.(v) do
{:ok, v} -> {:halt, {index, v}}
:error -> {:cont, index + 1}
end
end)
|> case do
{index, v} ->
{:ok, :erlang.setelement(index, t, v)}
_ ->
:error
end
:force_update, {%{} = map, func, default} ->
map
|> Enum.find_value(:error, fn {k, v} ->
case func.(v) do
{:ok, v} -> {k, v}
:error -> false
end
end)
|> case do
{k, v} ->
{:ok, Map.put(map, k, v)}
:error ->
map
|> :maps.iterator()
|> :maps.next()
|> case do
{k, _, _} ->
{:ok, Map.put(map, k, default)}
:none ->
:error
end
end
:force_update, {[{a, _} | _] = kwd, func, default} when is_atom(a) ->
kwd
|> Enum.find_value(:error, fn {k, v} ->
case func.(v) do
{:ok, v} -> {k, v}
:error -> false
end
end)
|> case do
{k, v} ->
{:ok, Keyword.put(kwd, k, v)}
:error ->
{:ok, Keyword.put(kwd, a, default)}
end
:force_update, {list, func, default} when is_list(list) ->
list
|> Enum.reduce({:error, []}, fn
v, {:error, acc} ->
case func.(v) do
{:ok, v} -> {:ok, [v | acc]}
:error -> {:error, [v | acc]}
end
v, {:ok, acc} ->
{:ok, [v | acc]}
end)
|> case do
{:error, list} ->
[_first | list] = :lists.reverse(list)
{:ok, [default | list]}
{:ok, list} ->
{:ok, :lists.reverse(list)}
end
:force_update, {t, func, default} when is_tuple(t) and tuple_size(t) > 0 ->
t
|> Tuple.to_list()
|> Enum.reduce_while(1, fn v, index ->
case func.(v) do
{:ok, v} -> {:halt, {index, v}}
:error -> {:cont, index + 1}
end
end)
|> case do
{index, v} ->
{:ok, :erlang.setelement(index, t, v)}
_ ->
# :erlang.setelement/3 is 1-indexed; fall back to replacing the first element
{:ok, :erlang.setelement(1, t, default)}
end
op, _ when op in ~w[view update force_update]a ->
:error
end
end
end

# source: lib/pathex/lenses/some.ex
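
# A minimal sketch of the lens closure at work (normally obtained through
# `Pathex.Lenses.some/0`): it applies the function to the first element for
# which it returns `{:ok, _}`.
#
#     iex> lens = Pathex.Lenses.Some.some()
#     iex> lens.(:view, {%{a: 1, b: 2}, fn v -> {:ok, v * 10} end})
#     {:ok, 10}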
defmodule Ash.Dsl do
@using_schema [
single_extension_kinds: [
type: {:list, :atom},
default: [],
doc:
"The extension kinds that are allowed to have a single value. For example: `[:data_layer]`"
],
many_extension_kinds: [
type: {:list, :atom},
default: [],
doc:
"The extension kinds that can have multiple values. e.g `[notifiers: [Notifier1, Notifier2]]`"
],
untyped_extensions?: [
type: :boolean,
default: true,
doc: "Whether or not to support an `extensions` key which contains untyped extensions"
],
default_extensions: [
type: :keyword_list,
default: [],
doc: """
The extensions that are included by default. e.g `[data_layer: Default, notifiers: [Notifier1]]`
Default values for single extension kinds are overwritten if specified by the implementor, while many extension
kinds are appended to if specified by the implementor.
"""
]
]
@type entity :: %Ash.Dsl.Entity{}
@type section :: %Ash.Dsl.Section{}
@moduledoc """
The primary entry point for adding a DSL to a module.
To add a DSL to a module, add `use Ash.Dsl, ...options`. The options supported with `use Ash.Dsl` are:
#{Ash.OptionsHelpers.docs(@using_schema)}
See the callbacks defined in this module to augment the behavior/compilation of the module getting a Dsl.
"""
@type opts :: Keyword.t()
@doc """
Validate/add options. Those options will be passed to `handle_opts` and `handle_before_compile`
"""
@callback init(opts) :: {:ok, opts} | {:error, String.t() | term}
@doc """
Handle options in the context of the module. Must return a `quote` block.
If you want to persist anything in the DSL persistence layer,
use `@persist {:key, value}`. It can be called multiple times to
persist multiple times.
"""
@callback handle_opts(Keyword.t()) :: Macro.t()
@doc """
Handle options in the context of the module, after all extensions have been processed. Must return a `quote` block.
"""
@callback handle_before_compile(Keyword.t()) :: Macro.t()
defmacro __using__(opts) do
opts = Ash.OptionsHelpers.validate!(opts, @using_schema)
their_opt_schema =
Enum.map(opts[:single_extension_kinds], fn extension_kind ->
{extension_kind, type: :atom, default: opts[:default_extensions][extension_kind]}
end) ++
Enum.map(opts[:many_extension_kinds], fn extension_kind ->
{extension_kind, type: {:list, :atom}, default: []}
end)
their_opt_schema =
if opts[:untyped_extensions?] do
Keyword.put(their_opt_schema, :extensions, type: {:list, :atom})
else
their_opt_schema
end
quote bind_quoted: [
their_opt_schema: their_opt_schema,
parent_opts: opts,
parent: __CALLER__.module
],
generated: true do
@dialyzer {:nowarn_function, handle_opts: 1, handle_before_compile: 1}
def init(opts), do: {:ok, opts}
def handle_opts(opts) do
quote do
end
end
def handle_before_compile(opts) do
quote do
end
end
defoverridable init: 1, handle_opts: 1, handle_before_compile: 1
defmacro __using__(opts) do
parent = unquote(parent)
parent_opts = unquote(parent_opts)
their_opt_schema = unquote(their_opt_schema)
{opts, extensions} =
parent_opts[:default_extensions]
|> Enum.reduce(opts, fn {key, defaults}, opts ->
Keyword.update(opts, key, defaults, fn current_value ->
cond do
key in parent_opts[:single_extension_kinds] ->
current_value || defaults
key in parent_opts[:many_extension_kinds] || key == :extensions ->
List.wrap(current_value) ++ List.wrap(defaults)
true ->
opts
end
end)
end)
|> Ash.Dsl.expand_modules(parent_opts, __CALLER__)
opts =
opts
|> Ash.OptionsHelpers.validate!(their_opt_schema)
|> init()
|> Ash.Dsl.unwrap()
body =
quote generated: true do
parent = unquote(parent)
opts = unquote(opts)
parent_opts = unquote(parent_opts)
their_opt_schema = unquote(their_opt_schema)
@opts opts
@before_compile Ash.Dsl
@ash_is parent
@ash_parent parent
Module.register_attribute(__MODULE__, :persist, accumulate: true)
opts
|> @ash_parent.handle_opts()
|> Code.eval_quoted([], __ENV__)
for single_extension_kind <- parent_opts[:single_extension_kinds] do
@persist {single_extension_kind, opts[single_extension_kind]}
Module.put_attribute(__MODULE__, single_extension_kind, opts[single_extension_kind])
end
for many_extension_kind <- parent_opts[:many_extension_kinds] do
@persist {many_extension_kind, opts[many_extension_kind] || []}
Module.put_attribute(
__MODULE__,
many_extension_kind,
opts[many_extension_kind] || []
)
end
end
preparations = Ash.Dsl.Extension.prepare(extensions)
[body | preparations]
end
end
end
@doc false
def unwrap({:ok, value}), do: value
def unwrap({:error, error}), do: raise(error)
@doc false
def expand_modules(opts, their_opt_schema, env) do
Enum.reduce(opts, {[], []}, fn {key, value}, {opts, extensions} ->
cond do
key in their_opt_schema[:single_extension_kinds] ->
mod = Macro.expand(value, %{env | lexical_tracker: nil})
extensions =
if Ash.Helpers.implements_behaviour?(mod, Ash.Dsl.Extension) do
[mod | extensions]
else
extensions
end
{Keyword.put(opts, key, mod), extensions}
key in their_opt_schema[:many_extension_kinds] || key == :extensions ->
mods =
value |> List.wrap() |> Enum.map(&Macro.expand(&1, %{env | lexical_tracker: nil}))
extensions =
extensions ++
Enum.filter(mods, &Ash.Helpers.implements_behaviour?(&1, Ash.Dsl.Extension))
{Keyword.put(opts, key, mods), extensions}
true ->
{key, value}
end
end)
end
defmacro __before_compile__(_env) do
quote unquote: false, generated: true do
@type t :: __MODULE__
Module.register_attribute(__MODULE__, :ash_is, persist: true)
Module.put_attribute(__MODULE__, :ash_is, @ash_is)
@on_load :on_load
ash_dsl_config = Macro.escape(Ash.Dsl.Extension.set_state(@persist))
def ash_dsl_config do
unquote(ash_dsl_config)
end
def on_load do
Ash.Dsl.Extension.load()
end
@opts
|> @ash_parent.handle_before_compile()
|> Code.eval_quoted([], __ENV__)
end
end
def is?(module, type) do
Ash.Helpers.try_compile(module)
type in List.wrap(module.module_info(:attributes)[:ash_is])
rescue
_ ->
false
end
end

# source: lib/ash/dsl/dsl.ex
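
# A sketch of defining a DSL entry point with the options documented in
# @using_schema above; the `MyLibrary.*` names are hypothetical.
#
#     defmodule MyLibrary.Dsl do
#       use Ash.Dsl,
#         single_extension_kinds: [:data_layer],
#         many_extension_kinds: [:notifiers],
#         default_extensions: [data_layer: MyLibrary.DefaultDataLayer]
#     end
#
#     # Consumers then write `use MyLibrary.Dsl, data_layer: SomeDataLayer`.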
defmodule Teiserver.Telemetry.Tasks.PersistServerMonthTask do
use Oban.Worker, queue: :teiserver
alias Teiserver.Telemetry
alias Central.NestedMaps
import Ecto.Query, warn: false
# []  List means 1-day segments
# %{} Map means total for the month of that key
# 0   Integer means sum or average
@empty_log %{
# Average battle counts per segment
battles: %{
total: [],
},
# Used to make calculating the end of month stats easier, this will not appear in the final result
tmp_reduction: %{
unique_users: [],
unique_players: [],
accounts_created: 0,
peak_users: 0,
peak_players: 0,
},
# Monthly totals
aggregates: %{
stats: %{
accounts_created: 0,
unique_users: 0,
unique_players: 0,
battles: 0
},
# Total number of minutes spent doing that across all players that month
minutes: %{
player: 0,
spectator: 0,
lobby: 0,
menu: 0,
total: 0
}
}
}
@impl Oban.Worker
@spec perform(any) :: :ok
def perform(_) do
log = case Telemetry.get_last_server_month_log() do
nil ->
perform_first_time()
{year, month} ->
{y, m} = next_month({year, month})
perform_standard(y, m)
end
if log != nil do
%{}
|> Teiserver.Telemetry.Tasks.PersistServerMonthTask.new()
|> Oban.insert()
end
:ok
end
# For when there are no existing logs
# we need to ensure the earliest log is from last month, not this month
defp perform_first_time() do
first_logs = Telemetry.list_server_day_logs(
order: "Oldest first",
limit: 1
)
case first_logs do
[log] ->
today = Timex.today()
if log.date.year < today.year or log.date.month < today.month do
run(log.date.year, log.date.month)
end
_ ->
nil
end
end
# For when we have an existing log
defp perform_standard(year, month) do
today = Timex.today()
if year < today.year or month < today.month do
run(year, month)
else
nil
end
end
@spec run(integer(), integer()) :: :ok
def run(year, month) do
now = Timex.Date.new!(year, month, 1)
data = Telemetry.list_server_day_logs(search: [
start_date: Timex.beginning_of_month(now),
end_date: Timex.end_of_month(now)
])
|> Enum.reduce(@empty_log, fn (log, acc) ->
extend_segment(acc, log)
end)
|> calculate_month_statistics()
Telemetry.create_server_month_log(%{
year: year,
month: month,
data: data
})
:ok
end
def month_so_far() do
now = Timex.now()
Telemetry.list_server_day_logs(search: [
start_date: Timex.beginning_of_month(now)
])
|> Enum.reduce(@empty_log, fn (log, acc) ->
extend_segment(acc, log)
end)
|> calculate_month_statistics()
|> Jason.encode!
|> Jason.decode!
# We encode and decode so it's the same format as in the database
end
# Given an existing segment and a batch of logs, calculate the segment and add them together
defp extend_segment(existing, %{data: data} = _log) do
%{
# Average battle counts per segment
battles: %{
total: existing.battles.total ++ [data["aggregates"]["stats"]["battles"]],
},
# Used to make calculating the end of day stats easier, this will not appear in the final result
tmp_reduction: %{
unique_users: existing.tmp_reduction.unique_users ++ Map.keys(data["minutes_per_user"]["total"]),
unique_players: existing.tmp_reduction.unique_players ++ Map.keys(data["minutes_per_user"]["player"]),
accounts_created: existing.tmp_reduction.accounts_created + data["aggregates"]["stats"]["accounts_created"],
peak_users: max(existing.tmp_reduction.peak_users, data["aggregates"]["stats"]["unique_users"]),
peak_players: max(existing.tmp_reduction.peak_players, data["aggregates"]["stats"]["unique_players"]),
},
# Monthly totals
aggregates: %{
stats: %{
accounts_created: 0,
unique_users: 0,
unique_players: 0,
battles: 0
},
# Total number of minutes spent doing that across all players that month
minutes: %{
player: existing.aggregates.minutes.player + data["aggregates"]["minutes"]["player"],
spectator: existing.aggregates.minutes.spectator + data["aggregates"]["minutes"]["spectator"],
lobby: existing.aggregates.minutes.lobby + data["aggregates"]["minutes"]["lobby"],
menu: existing.aggregates.minutes.menu + data["aggregates"]["minutes"]["menu"],
total: existing.aggregates.minutes.total + data["aggregates"]["minutes"]["total"]
},
}
}
end
# Given a day log, calculate the end of day stats
defp calculate_month_statistics(data) do
# TODO: Calculate number of battles that took place
battles = 0
aggregate_stats = %{
accounts_created: data.tmp_reduction.accounts_created,
unique_users: data.tmp_reduction.unique_users |> Enum.uniq |> Enum.count,
unique_players: data.tmp_reduction.unique_players |> Enum.uniq |> Enum.count,
peak_users: data.tmp_reduction.peak_users,
peak_players: data.tmp_reduction.peak_players,
battles: battles
}
NestedMaps.put(data, ~w(aggregates stats)a, aggregate_stats)
|> Map.delete(:tmp_reduction)
end
defp next_month({year, 12}), do: {year+1, 1}
defp next_month({year, month}), do: {year, month+1}
end

# source: lib/teiserver/telemetry/tasks/persist_server_month_task.ex
defmodule PhoenixBricks do
@moduledoc ~S"""
An opinionated set of proposed patterns that helps you write reusable,
non-repetitive code for `Contexts`.
## Motivation
After several years in [Ruby on Rails](https://rubyonrails.org) developing I've
got used to structure code folllowing the Single Responsibility Principle.
[Phoenix](https://www.phoenixframework.org/) comes with the
[Context](https://hexdocs.pm/phoenix/contexts.html) concept, a module that cares
about expose an API of an application section to other sections.
In a `Context` we usually have at least 6 actions for each defined `schema`
(`list_records/0`, `get_record!/1`, `create_record/1`, `update_record/2`,
`delete_record/1`, `change_record/2`).
Since all the business logic could go inside the `Context`, it's possible
to end up with a module of hundreds of lines of code, making maintenance
very hard.
The idea is to identify common portions of code that can be extracted and moved
into a separate module with only one responsibility, reusable
in different contexts.
## List records
The method `list_*` has a default implementation that returns the list of
associated record:
```elixir
def list_records do
MyApp.Context.RecordSchema
|> MyApp.Repo.all()
end
```
Let's add now to the context the capability of filtering the collection according
to an arbitrary set of `scopes`, calling the function in this way:
```elixir
iex> Context.list_records(title_matches: "value")
```
A possible solution is to delegate the query building to a separate
`RecordQuery` module
```elixir
defmodule RecordQuery do
def scope(list_of_filters) do
RecordSchema
|> improve_query_with_filters(list_of_filters)
end
defp improve_query_with_filters(start_query, list_of_filters) do
list_of_filters
|> Enum.reduce(start_query, fn scope, query -> apply_scope(query, scope) end)
end
def apply_scope(query, {:title_matches, "value"}) do
query
|> where([q], ...)
end
def apply_scope(query, {:price_lte, value}) do
query
|> where([q], ...)
end
end
```
and use it into the `Context`
```elixir
def list_records(scopes \\ []) do
RecordQuery.scope(scopes)
|> Repo.all()
end
iex> Context.list_records(title_matches: "value", price_lte: 42)
```
### `PhoenixBricks.Query`
Using `PhoenixBricks.Query` it's possible to extend a module with all scope
behaviours:
```elixir
defmodule RecordQuery do
use PhoenixBricks.Query, schema: RecordSchema
def apply_scope(query, {:title_matches, "value"}) do
query
|> where([q], ...)
end
end
```
## Filter
Another common feature is to filter records according to params provided through
url params (for example after a submit in a search form).
```elixir
def index(conn, params)
filters = Map.get(params, "filters", %{})
colletion = Context.list_records_based_on_filters(filters)
conn
|> assign(:collection, collection)
...
end
```
ensuring that only the specified filters are allowed
A possible implementation could be:
```elixir
defmodule RecordFilter do
@search_filters ["title_matches", "price_lte"]
def convert_filters_to_scopes(filters) do
filters
|> Enum.map(fn {name, value} ->
convert_filter_to_scope(name, value)
end)
end
def convert_filter_to_scope(name, value) when name in @search_filters do
{String.to_atom(name), value}
end
end
```
This way parameters are filtered and converted to a `Keyword` list, which is the common
format for the `RecordQuery` described above.
```elixir
iex> RecordFilter.convert_filters_to_scopes(%{"title_matches" => "value", "invalid_scope" => "value"})
iex> [title_matches: "value"]
```
and we can rewrite the previous action, emphasizing the params conversion and
the collection filtering
```elixir
def index(conn, params) do
filters = Map.get(params, "filters", %{})
collection =
filters
|> RecordFilter.convert_filters_to_scopes()
|> Context.list_records()
conn
|> assign(:collection, collection)
....
end
```
The last part is to build a search form. In order to achieve this, we can add
schema functionality to the `RecordFilter` module:
```elixir
defmodule RecordFilter do
use Ecto.Schema
embedded_schema do
field :title_matches, :string
end
def changeset(filter, params) do
filter
|> cast(params, [:title_matches])
end
end
def index(conn, params) do
filters = Map.get(params, "filters", %{})
filter_changeset = RecordFilter.changeset(%RecordFilter{}, filters)
collection =
filters
|> RecordFilter.convert_filters_to_scopes()
|> Context.list_records()
conn
|> assign(:collection, collection)
|> assign(:filter_changeset, filter_changeset)
end
```
```html
<%= f = form_for @filter_changeset, .... %>
<%= label f, :title_matches %>
<%= text_input f, :title_matches %>
<%= submit "Filter results" %>
<% end %>
```
### `PhoenixBricks.Filter`
Using the `PhoenixBricks.Filter` module it's possible to extend a module with all
filtering behaviours (defining a `changeset` and the filter conversion)
```elixir
defmodule RecordFilter do
use PhoenixBricks.Filter,
filters: [
title_matches: :string
]
end
```
making available `changeset/2` defined above and `convert_filters_to_scopes/1`
"""
end

# source: lib/phoenix_bricks.ex
defmodule SipHash do
@moduledoc """
This module provides a simple but performant interface for hashing values using
the SipHash hash family.
The `SipHash.hash/3` function allows for flags specifying things such as the
number of rounds of compression, allowing use of SipHash-C-D, where `C` and `D`
are customizable. Values can be converted to hexidecimal strings as required,
but by default this module deals with numbers (as that's the optimal way to
work with these hashes).
_**Note**: This module makes use of NIFs for better performance and throughput,
but this can be disabled by setting the `SIPHASH_IMPL` environment variable
to the value "embedded". Please note that the use of NIFs brings a significant
performance improvement, and so you should only disable them with good reason._
"""
use Bitwise
alias SipHash.Digest
# store key error message
@kerr "Key must be exactly 16 bytes!"
# store input error message
@ierr "Hash input must be a binary!"
# passes error message
@perr "Passes C and D must be valid numbers greater than 0!"
@doc """
Based on the algorithm as described in https://131002.net/siphash/siphash.pdf,
and therefore requires a key alongside the input to use as a seed. This key
should consist of 16 bytes, and is measured by `byte_size/1`. An error
will be returned if this is not the case. The default implementation is a 2-4
hash, but this can be controlled through the options provided.
This function returns output as a tuple, either `{ :ok, value }`
or `{ :error, message }`. By default, all values are returned as numbers
(i.e. the result of the hash), but you can set `:hex` to true as an option to
get a hex string output. The reason for this is that converting to hex typically
takes an extra couple of microseconds, and the default is intended to be the
optimal use case. Please note that any of the options related to hex string
formatting will be ignored if `:hex` is not set to true (e.g. `:case`).
## Options
* `:case` - either of `:upper` or `:lower` (defaults to using `:upper`)
* `:c` and `:d` - the number of compression rounds (default to 2 and 4)
* `:hex` - when true returns the output as a hex string (defaults to false)
## Examples
iex> SipHash.hash("0123456789ABCDEF", "hello")
{ :ok, 4402678656023170274 }
iex> SipHash.hash("0123456789ABCDEF", "hello", hex: true)
{ :ok, "3D1974E948748CE2" }
iex> SipHash.hash("0123456789ABCDEF", "abcdefgh", hex: true)
{ :ok, "1AE57886F899E65F" }
iex> SipHash.hash("0123456789ABCDEF", "my long strings", hex: true)
{ :ok, "1323400B0804036D" }
iex> SipHash.hash("0123456789ABCDEF", "hello", hex: true, case: :lower)
{ :ok, "3d1974e948748ce2" }
iex> SipHash.hash("0123456789ABCDEF", "hello", c: 4, d: 8)
{ :ok, 14986662229302055855 }
iex> SipHash.hash("invalid_bytes", "hello")
{ :error, "Key must be exactly 16 bytes!" }
iex> SipHash.hash("0123456789ABCDEF", "hello", c: 0, d: 0)
{ :error, "Passes C and D must be valid numbers greater than 0!" }
iex> SipHash.hash("0123456789ABCDEF", %{ "test" => "one" })
{ :error, "Hash input must be a binary!" }
"""
@spec hash(binary, binary, [ { atom, atom } ]) :: { atom, binary }
def hash(key, input, opts \\ [])
def hash(key, input, opts)
when byte_size(key) == 16 and is_binary(input) and is_list(opts)
do
c_pass = Keyword.get(opts, :c, 2)
d_pass = Keyword.get(opts, :d, 4)
case (c_pass > 0 and d_pass > 0) do
false ->
{ :error, @perr }
true ->
case !!Keyword.get(opts, :hex) do
false ->
{ :ok, Digest.hash(key, input, c_pass, d_pass) }
true ->
format =
case Keyword.get(opts, :case, :upper) do
:lower -> "%016lx"
_upper -> "%016lX"
end
{ :ok, Digest.hash(key, input, c_pass, d_pass, format) }
end
end
end
def hash(key, _input, _opts) when byte_size(key) != 16,
do: { :error, @kerr }
def hash(_key, input, _opts) when not is_binary(input),
do: { :error, @ierr }
@doc """
A functional equivalent of `SipHash.hash/3`, but rather than returning the
value inside a tuple the value is returned instead. Any errors will be raised
as exceptions. There are typically very few cases causing errors which aren't
due to programmer error, but caution is advised all the same.
## Examples
iex> SipHash.hash!("0123456789ABCDEF", "hello")
4402678656023170274
iex> SipHash.hash!("0123456789ABCDEF", "hello", hex: true)
"3D1974E948748CE2"
iex> SipHash.hash!("0123456789ABCDEF", "abcdefgh", hex: true)
"1AE57886F899E65F"
iex> SipHash.hash!("0123456789ABCDEF", "my long strings", hex: true)
"1323400B0804036D"
iex> SipHash.hash!("0123456789ABCDEF", "hello", hex: true, case: :lower)
"3d1974e948748ce2"
iex> SipHash.hash!("0123456789ABCDEF", "hello", c: 4, d: 8)
14986662229302055855
iex> SipHash.hash!("invalid_bytes", "hello")
** (ArgumentError) Key must be exactly 16 bytes!
iex> SipHash.hash!("0123456789ABCDEF", "hello", c: 0, d: 0)
** (ArgumentError) Passes C and D must be valid numbers greater than 0!
iex> SipHash.hash!("0123456789ABCDEF", %{ "test" => "one" })
** (ArgumentError) Hash input must be a binary!
"""
@spec hash!(binary, binary, [ { atom, atom } ]) :: binary
def hash!(key, input, opts \\ []) do
case hash(key, input, opts) do
{ :ok, hash } -> hash
{ :error, msg } -> raise ArgumentError, message: msg
end
end
@doc """
Used to quickly determine if NIFs have been loaded for this module. Returns
true if it has, false if it hasn't. This will only return false if either the
`SIPHASH_IMPL` environment variable is set to "embedded", or there was an error
when compiling the C implementation.
"""
@spec nif_loaded? :: boolean
defdelegate nif_loaded?, to: SipHash.Digest
end

# source: lib/siphash.ex
defmodule Day8 do
@moduledoc """
Documentation for `Day8`.
"""
def run() do
get_input()
|> process(:first)
|> present()
get_input()
|> process(:second)
|> present()
end
def present({c2, c3, c4, c7} = _answer) do
IO.puts("#{c2} occurrences of 1")
IO.puts("#{c3} occurrences of 7")
IO.puts("#{c4} occurrences of 4")
IO.puts("#{c7} occurrences of 8")
IO.puts("#{c2 + c3 + c4 + c7} occurences of 1, 4, 7, and 8")
IO.puts("")
end
def present(v) when is_integer(v) do
IO.puts("The sum of the readings is #{v}")
end
def get_input() do
File.read!("input.txt")
|> transform_input()
end
def get_input(s) do
s
|> transform_input()
end
def transform_input(s) do
# Handle lines like:
#   febacg aecb bgfedca bfagde cdgfb fgbce ebg be efcga dcegaf | dgeafb ceba cfeabg cbae
#   aegbd gdafbc dae gadcb fadgcbe fagedc adbgce ea ceba fgbed | gdfeac cbae ceab dcbfeag
# note ceba and cbae are the same 4 segments, so put segments into canonical order.
s
|> String.trim()
|> String.split("\n")
|> Enum.map(fn line -> String.split(line, "|") end)
|> Enum.map(fn [c10, c4] = _line -> [String.split(c10, " ", trim: true), String.split(c4, " ", trim: true)] end)
|> Enum.map(fn [a,b] = _line -> [Enum.map(a, fn p -> make_canonical(p) end), Enum.map(b, fn p -> make_canonical(p) end)] end )
|> Enum.sort()
end
def make_canonical(segment_codes), do: List.to_string(Enum.sort(String.to_charlist(segment_codes)))
def process(data, :first) do
frequencies =
Enum.map(data, fn [_a, b] = _n -> b end)
|> List.flatten()
|> Enum.map(fn x -> String.length(x) end)
|> Enum.frequencies() |> IO.inspect(label: "frequencies")
{frequencies[2], frequencies[3], frequencies[4], frequencies[7]}
end
def process(data, :second) do
make_deductions(data)
|> apply_deductions(data)
# |> IO.inspect(label: "deductions applied")
|> Enum.map(fn [th, h, te, o] = _l -> (1000*th + 100*h + 10*te + o) end)
|> IO.inspect(label: "the 4 digit values")
|> Enum.sum()
|> IO.inspect(label: "the sum")
end
def make_deductions(data) do
data
|> Enum.map(fn [a, _b] = _ -> a end) # we don't need the values, just the 10 patterns
|> Enum.map(fn a -> make_deduction(a) end)
end
def make_deduction(ten_patterns) do
Enum.map(["a","b","c","d","e","f","g"], fn let -> make_signature(let, ten_patterns) end)
|> Enum.reduce(%{}, fn signature, map -> add_mapping(signature, map) end)
end
def make_signature(let, ten_patterns) do
Enum.reduce(2..7, {let}, fn len, tupl -> Tuple.append(tupl, length(find_letter_in_pattern_by_size(let, ten_patterns, len))) end)
end
def find_letter_in_pattern_by_size(letter, patterns, size) do
Enum.filter(patterns, fn pat -> (size == String.length(pat) and String.contains?(pat, letter)) end)
end
def add_mapping({original_letter,_,_,_,_,_,_} = signature, map) do
translated_letter =
case signature do
{_,0,1,0,3,3,1} -> "a"
{_,0,0,1,1,3,1} -> "b"
{_,1,1,1,2,2,1} -> "c"
{_,0,0,1,3,2,1} -> "d"
{_,0,0,0,1,2,1} -> "e"
{_,1,1,1,2,3,1} -> "f"
{_,0,0,0,3,3,1} -> "g"
end
Map.put(map, original_letter, translated_letter)
end
def apply_deductions(map, data) do
Enum.zip(map, Enum.map(data, fn [_, digit_codes] = _datum -> digit_codes end))
|> Enum.map(fn {map, digit_codes} -> translate_using_map(digit_codes, map) end)
|> Enum.map(fn digit_codes -> translate_codes_to_digits(digit_codes) end)
end
def translate_using_map(digit_codes, map) do
# IO.inspect({digit_codes, map}, label: "translate_using_map")
# translate_one_code_letter = fn code_letter -> (IO.inspect(List.to_string([code_letter]), label: "translate_using_map"); Map.get(map, List.to_string([code_letter]), "z")) end
translate_one_code_letter = fn code_letter -> Map.get(map, List.to_string([code_letter]), "z") end
translate_code_letters = fn letters -> List.to_string(Enum.map(String.to_charlist(letters), fn c -> List.to_string([translate_one_code_letter.(c)]) end)) end
Enum.map(digit_codes, fn one_code -> translate_code_letters.(one_code) end)
end
def translate_codes_to_digits(digit_codes) do
# IO.inspect(digit_codes, label: "translate_codes_to_digits")
Enum.map(digit_codes, fn digit_code -> translate_code_to_digit(digit_code) end)
# |> IO.inspect(label: "rv from trans_codes_to_digits")
end
def translate_code_to_digit(single_code) do
# IO.inspect(single_code, label: "translate_code_to_digit")
case make_canonical(single_code) do
"abcefg" -> 0
"cf" -> 1
"acdeg" -> 2
"acdfg" -> 3
"bcdf" -> 4
"abdfg" -> 5
"abdefg" -> 6
"acf" -> 7
"abcdefg" -> 8
"abcdfg" -> 9
end
end
def example() do
inhalt = """
be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb |fdgacbe cefdb cefbgd gcbe
edbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec |fcgedb cgb dgebacf gc
fgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef |cg cg fdcagb cbg
fbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega |efabcd cedba gadfec cb
aecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga |gecf egdcabf bgf bfgea
fgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf |gebdcfa ecba ca fadegcb
dbcfg fgd bdegcaf fgec aegbdf ecdfab fbedc dacgb gdcebf gf |cefg dcbef fcge gbcadfe
bdfegc cbegaf gecbf dfcage bdacg ed bedf ced adcbefg gebcd |ed bcgafe cdgba cbgef
egadfb cdbfeg cegd fecab cgb gbdefca cg fgcdab egfdb bfceg |gbdfcae bgc cg cgb
gcafb gcf dcaebfg ecagb gf abcdeg gaef cafbge fdbac fegbdc |fgae cfgab fg bagce
"""
get_input(inhalt)
|> process(:first)
|> present()
get_input(inhalt)
|> process(:second)
|> present()
end
end | apps/day8/lib/day8.ex | 0.621196 | 0.408513 | day8.ex | starcoder |
# Mnesia will keep its schema in memory, and it will vanish if and when Mnesia stops.
# :mnesia.start()
# :mnesia.stop()
:mnesia.create_schema([node()])
# Creating Tables
defmodule Drop do
require Planemo
def drop do
setup()
handle_drops()
end
def handle_drops do
receive do
{from, planemo, distance} ->
send(from, {planemo, distance, fall_velocity(planemo, distance)})
handle_drops()
end
end
def fall_velocity(planemo, distance) when distance >= 0 do
{:atomic, [p | _]} = :mnesia.transaction(fn() ->
:mnesia.read(PlanemoTable, planemo) end)
:math.sqrt(2 * Planemo.planemo(p, :gravity) * distance)
end
def setup do
:mnesia.create_schema([node()])
:mnesia.start()
:mnesia.create_table(PlanemoTable, [{:attributes, [:name, :gravity, :diameter, :distance_from_sun]}, {:record_name, :planemo}])
f = fn ->
:mnesia.write(PlanemoTable, Planemo.planemo(name: :mercury, gravity: 3.7, diameter: 4878, distance_from_sun: 57.9), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :venus, gravity: 8.9, diameter: 12104, distance_from_sun: 108.2), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :earth, gravity: 9.8, diameter: 12756, distance_from_sun: 149.6), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :moon, gravity: 1.6, diameter: 3475, distance_from_sun: 149.6), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :mars, gravity: 3.7, diameter: 6787, distance_from_sun: 227.9), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :ceres, gravity: 0.27, diameter: 950, distance_from_sun: 413.7), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :jupiter, gravity: 23.1, diameter: 142796, distance_from_sun: 778.3), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :saturn, gravity: 9.0, diameter: 120660, distance_from_sun: 1427.0), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :uranus, gravity: 8.7, diameter: 51118, distance_from_sun: 2871.0), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :neptune, gravity: 11.0, diameter: 30200, distance_from_sun: 4497.1), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :pluto, gravity: 0.6, diameter: 2300, distance_from_sun: 5913.0), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :haumea, gravity: 0.44, diameter: 1150, distance_from_sun: 6484.0), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :makemake, gravity: 0.5, diameter: 1500, distance_from_sun: 6850.0), :write)
:mnesia.write(PlanemoTable, Planemo.planemo(name: :eris, gravity: 0.8, diameter: 2400, distance_from_sun: 10210.0), :write)
end
:mnesia.transaction(f)
end
end
# Mnesia will restart the transaction if there is other activity blocking it, so the code may get executed repeatedly before the transaction happens.
# Because of this, do not include any calls that create side effects in the function you’ll be passing to :mnesia.transaction, and don’t try to catch exceptions on Mnesia functions within a transaction.
# If your function calls :mnesia.abort/1 (probably because some condition for executing it wasn’t met), the transaction will be rolled back, returning a tuple beginning with aborted instead of atomic.
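# For example (illustrative), aborting from within the transaction fun
# rolls everything back:
# :mnesia.transaction(fn -> :mnesia.abort(:bad_input) end)
# #=> {:aborted, :bad_input}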
# iex(1)> c("drop.ex")
# iex(2)> Drop.setup
# iex(3)> :mnesia.table_info(PlanemoTable, :all)
# Reading Data
# iex(5)> :mnesia.transaction(fn()->:mnesia.read(PlanemoTable, :neptune) end)
def fall_velocity(planemo, distance) when distance >= 0 do
{:atomic, [p | _]} = :mnesia.transaction(fn() ->
:mnesia.read(PlanemoTable, planemo) end)
:math.sqrt(2 * Planemo.planemo(p, :gravity) * distance)
end
# iex(7)> Drop.fall_velocity(:earth, 20)
# iex(8)> pid1 = spawn(MphDrop, :mph_drop, [])
# iex(9)> send(pid1, {:earth, 20})
defmodule BioMonitor.ReadingView do
use BioMonitor.Web, :view
def render("index.json", %{readings: readings}) do
%{data: render_many(readings, BioMonitor.ReadingView, "reading.json")}
end
def render("calculations.json", %{values: values}) do
%{
data: %{
biomass_performance: render_many(values.biomass_performance, BioMonitor.ReadingView, "result.json", as: :result),
product_performance: render_many(values.product_performance, BioMonitor.ReadingView, "result.json", as: :result),
product_biomass_performance: render_many(values.product_biomass_performance, BioMonitor.ReadingView, "result.json", as: :result),
product_volumetric_performance: render_many(values.product_volumetric_performance, BioMonitor.ReadingView, "result.json", as: :result),
biomass_volumetric_performance: render_many(values.biomass_volumetric_performance, BioMonitor.ReadingView, "result.json", as: :result),
max_product_volumetric_performance: render("result.json", %{result: values.max_product_volumetric_performance}),
max_biomass_volumetric_performance: render("result.json", %{result: values.max_biomass_volumetric_performance}),
specific_ph_velocity: render_many(values.specific_ph_velocity, BioMonitor.ReadingView, "result.json", as: :result),
specific_biomass_velocity: render_many(values.specific_biomass_velocity, BioMonitor.ReadingView, "result.json", as: :result),
specific_product_velocity: render_many(values.specific_product_velocity, BioMonitor.ReadingView, "result.json", as: :result),
max_ph_velocity: render("result.json", %{result: values.max_ph_velocity}),
max_biomass_velocity: render("result.json", %{result: values.max_biomass_velocity}),
max_product_velocity: render("result.json", %{result: values.max_product_velocity}),
}
}
end
def render("created_reading.json", %{reading: reading}) do
%{
data: render("reading.json", %{reading: reading})
}
end
def render("reading.json", %{reading: reading}) do
%{
id: reading.id,
temp: reading.temp,
ph: reading.ph,
substratum: reading.substratum,
product: reading.product,
biomass: reading.biomass,
inserted_at: reading.inserted_at,
routine_id: reading.routine_id
}
end
def render("result.json", %{result: result}) do
case result do
nil -> nil
result ->
%{
x: result.x,
y: result.y
}
end
end
end | web/views/reading_view.ex | 0.55254 | 0.428443 | reading_view.ex | starcoder |
defmodule VintageNetWiFi.AccessPoint do
alias VintageNetWiFi.Utils
@moduledoc """
Information about a WiFi access point
* `:bssid` - a unique address for the access point
* `:flags` - a list of flags describing properties on the access point
* `:frequency` - the access point's frequency in MHz
* `:signal_dbm` - the signal strength in dBm
* `:ssid` - the access point's name
"""
@type flag ::
:wpa2_psk_ccmp
| :wpa2_eap_ccmp
| :wpa2_eap_ccmp_tkip
| :wpa2_psk_ccmp_tkip
| :wpa2_psk_sae_ccmp
| :wpa2_sae_ccmp
| :wpa2_ccmp
| :wpa_psk_ccmp
| :wpa_psk_ccmp_tkip
| :wpa_eap_ccmp
| :wpa_eap_ccmp_tkip
| :wep
| :ibss
| :mesh
| :ess
| :p2p
| :wps
| :rsn_ccmp
@type band :: :wifi_2_4_ghz | :wifi_5_ghz | :unknown
defstruct [:bssid, :frequency, :band, :channel, :signal_dbm, :signal_percent, :flags, :ssid]
@type t :: %__MODULE__{
bssid: String.t(),
frequency: non_neg_integer(),
band: band(),
channel: non_neg_integer(),
signal_dbm: integer(),
signal_percent: 0..100,
flags: [flag()],
ssid: String.t()
}
@doc """
Create an AccessPoint when only the BSSID is known
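## Example

A minimal sketch (the BSSID below is illustrative); all other fields get
their documented defaults:

    ap = VintageNetWiFi.AccessPoint.new("00:11:22:33:44:55")
    ap.signal_percent
    #=> 0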
"""
@spec new(any) :: VintageNetWiFi.AccessPoint.t()
def new(bssid) do
%__MODULE__{
bssid: bssid,
frequency: 0,
band: :unknown,
channel: 0,
signal_dbm: -99,
signal_percent: 0,
flags: [],
ssid: ""
}
end
@doc """
Create a new AccessPoint with all of the information
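## Example

An illustrative call (2437 MHz corresponds to 2.4 GHz channel 6):

    VintageNetWiFi.AccessPoint.new("00:11:22:33:44:55", "MySSID", 2437, -60, [:wpa2_psk_ccmp])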
"""
@spec new(String.t(), String.t(), non_neg_integer(), integer(), [flag()]) ::
VintageNetWiFi.AccessPoint.t()
def new(bssid, ssid, frequency, signal_dbm, flags) do
info = Utils.frequency_info(frequency)
%__MODULE__{
bssid: bssid,
frequency: frequency,
band: info.band,
channel: info.channel,
signal_dbm: signal_dbm,
signal_percent: info.dbm_to_percent.(signal_dbm),
flags: flags,
ssid: ssid
}
end
end | lib/vintage_net_wifi/access_point.ex | 0.844489 | 0.537952 | access_point.ex | starcoder |
defmodule RestorationOfErathia do
@moduledoc """
Documentation for `RestorationOfErathia`.
The module is designed to assist with restoration of deleted files from hdd.
It is assumed that a tool similar to photorec is used to restore the deleted files.
The documentation for using photorec can be found here: https://www.cgsecurity.org/testdisk.pdf
Two things need to be configured: folder path to folder where restored data is and
formats that should be separated out of the rest of the restored data.
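## Example

After pointing `@path` at your restored-data folder and adjusting
`@formats`, the whole pipeline can be run with:

    RestorationOfErathia.run()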
"""
@path "/home/andriy/Code/IdeaProjects/restoration_of_erathia/tmp"
@formats [
"txt",
"jpg",
"pdf",
"exe",
"docx",
"xlsx",
"xls",
"doc",
"pptx",
"ppt",
"ods",
"odt",
"tif",
"png",
"dat"
]
defp extract_all_files_having(format) do
File.cd(@path)
{:ok, dirnames} = File.ls()
dirnames
|> Enum.map(fn dirname -> {File.dir?("#{@path}/#{dirname}"), "#{@path}/#{dirname}"} end)
|> Enum.map(fn {dir?, dirname} ->
if dir? do
extract_dir(dirname, format)
end
end)
end
defp extract_dir(dirname, format) do
Path.wildcard("#{dirname}/*.#{format}")
|> Enum.map(fn filepath -> move_to_dir(filepath, format) end)
Path.wildcard("#{dirname}/*")
|> Enum.map(fn filepath -> move_to_dir(filepath, "*") end)
end
@doc ~S"""
Finds all the files that have the specified extension format in a directory and moves them to a directory
named after the extension format. Moves all files with unspecified extensions to a merged folder.
Test requires specific path setting in module constants.
## Examples
iex> r = RestorationOfErathia
iex> {:ok, wd} = File.cwd()
iex> File.mkdir("#{wd}/tmp")
iex> File.touch("#{wd}/tmp/test")
iex> r.move_to_dir("#{wd}/tmp/test", "*")
iex> File.rm("#{wd}/tmp/merged/test")
iex> File.rmdir("#{wd}/tmp/merged")
iex> File.rmdir("#{wd}/tmp")
:ok
"""
def move_to_dir(filepath, format) do
folder =
case format do
"*" -> "#{@path}/merged/"
_ -> "#{@path}/#{format}/"
end
if File.exists?(folder) do
File.rename!(filepath, "#{folder}#{Path.basename(filepath)}")
else
File.mkdir!(folder)
File.rename!(filepath, "#{folder}#{Path.basename(filepath)}")
end
end
defp merge_all(formats) do
File.cd(@path)
{:ok, dirnames} = File.ls()
dirnames
|> Enum.reject(fn dirname -> Path.basename(dirname) in formats end)
|> Enum.map(fn dirname -> extract_dir(dirname, "*") end)
end
defp compute_hash(file_path) do
hash =
File.stream!(file_path, [], 2_048)
|> Enum.reduce(:crypto.hash_init(:sha256), &:crypto.hash_update(&2, &1))
|> :crypto.hash_final()
|> Base.encode16()
|> String.downcase()
{file_path, hash}
end
@doc ~S"""
Given 2 folder paths, unique and duplicate, finds all files in duplicate folder that already exist in unique folder,
based on hashes. Removes all the files with matching hashes from duplicate folder.
## Examples
iex> r = RestorationOfErathia
iex> {:ok, wd} = File.cwd()
iex> File.mkdir("#{wd}/tmp")
iex> File.touch("#{wd}/tmp/test.txt")
iex> File.mkdir("#{wd}/tmp2")
iex> File.cp("#{wd}/tmp/test.txt", "#{wd}/tmp2/test.txt")
iex> r.deduplicate_between_folders("#{wd}/tmp", "#{wd}/tmp2")
iex> File.rm("#{wd}/tmp/test.txt")
iex> File.rmdir("#{wd}/tmp")
iex> File.rmdir("#{wd}/tmp2")
:ok
"""
def deduplicate_between_folders(unique_folder_path, duplicate_folder_path) do
uniq_hashes =
find_uniques_with_hashes_in_folder(unique_folder_path)
|> Enum.map(fn {_fname, hash} -> hash end)
find_uniques_with_hashes_in_folder(duplicate_folder_path)
|> Enum.filter(fn {_fname, hash} -> hash in uniq_hashes end)
|> Enum.map(fn {fname, _hash} -> fname end)
|> Enum.map(fn file_name -> File.rm!(file_name) end)
end
defp ls_r(path \\ ".") do
cond do
File.regular?(path) ->
[path]
File.dir?(path) ->
File.ls!(path)
|> Enum.map(&Path.join(path, &1))
|> Enum.map(&ls_r/1)
|> Enum.concat()
true ->
[]
end
end
defp find_uniques_with_hashes_in_folder(folder_path) do
ls_r(folder_path)
|> Enum.map(fn file_path -> compute_hash(file_path) end)
|> Enum.uniq_by(fn {_, hash} -> hash end)
end
defp remove_duplicates_from_folder(folder_path, uniques_list) do
Path.wildcard("#{folder_path}/*.*")
|> Enum.reject(fn file_name -> file_name in uniques_list end)
|> Enum.map(fn file_name -> File.rm!(file_name) end)
end
@doc ~S"""
Removes duplicated files inside a folder (using file hash)
## Examples
iex> r = RestorationOfErathia
iex> {:ok, wd} = File.cwd()
iex> File.mkdir("#{wd}/tmp")
iex> File.touch("#{wd}/tmp/test.txt")
iex> File.cp("#{wd}/tmp/test.txt", "#{wd}/tmp/test2.txt")
iex> r.deduplicate_folder("#{wd}/tmp")
iex> File.rm("#{wd}/tmp/test2.txt")
iex> File.rmdir("#{wd}/tmp")
:ok
"""
def deduplicate_folder(folder_path) do
uniques =
find_uniques_with_hashes_in_folder(folder_path)
|> Enum.map(fn {fname, _hash} -> fname end)
remove_duplicates_from_folder(folder_path, uniques)
end
@doc """
Removes duplicated files from all the folders present in a given path
"""
def deduplicate_all_folders(path) do
File.cd(path)
{:ok, dirnames} = File.ls()
dirnames
|> Enum.map(fn dirname -> deduplicate_folder(dirname) end)
end
@doc ~S"""
Removes all empty folders from a given path
## Examples
iex> r = RestorationOfErathia
iex> {:ok, wd} = File.cwd()
iex> File.mkdir("#{wd}/tmp")
iex> File.mkdir("#{wd}/tmp2")
iex> r.remove_empty_folders("#{wd}")
"""
def remove_empty_folders(path) do
{:ok, files_and_folders} = File.ls(path)
files_and_folders
|> Enum.map(fn endfile_or_folder ->
if File.dir?("#{path}/#{endfile_or_folder}") do
File.rmdir("#{path}/#{endfile_or_folder}")
end
end)
end
@doc """
Runs the entire helper pipeline:
* Separates data according to folders according to formats.
* Merges all unset formats to a single folder.
* Deduplicates files within the folders.
* Removes empty folders
"""
def run() do
@formats
|> Enum.map(fn format -> extract_all_files_having(format) end)
merge_all(@formats)
deduplicate_all_folders(@path)
remove_empty_folders(@path)
end
end | lib/restoration_of_erathia.ex | 0.59561 | 0.421492 | restoration_of_erathia.ex | starcoder |
defmodule Freddy.Core.Exchange do
@moduledoc """
Exchange configuration.
# Fields
* `:name` - Exchange name. If left empty, default exchange will be used.
* `:type` - Exchange type. Can be `:direct`, `:topic`, `:fanout` or
an arbitrary string, such as `"x-delayed-message"`. Default is `:direct`.
* `:opts` - Exchange options. See below.
## Exchange options
* `:durable` - If set, keeps the Exchange between restarts of the broker.
* `:auto_delete` - If set, deletes the Exchange once all queues unbind from it.
* `:passive` - If set, returns an error if the Exchange does not already exist.
* `:internal` - If set, the exchange may not be used directly by publishers, but
only when bound to other exchanges. Internal exchanges are used to construct
wiring that is not visible to applications.
* `:nowait` - If set, the server will not respond to the method and client
will not wait for a reply. Default is `false`.
* `:arguments` - A set of arguments for the declaration. The syntax and semantics
of these arguments depends on the server implementation.
## Example
iex> %Freddy.Core.Exchange{name: "freddy-topic", type: :topic, durable: true}
"""
@type t :: %__MODULE__{
name: String.t(),
type: atom | String.t(),
opts: options
}
@type options :: [
durable: boolean,
auto_delete: boolean,
passive: boolean,
internal: boolean,
nowait: boolean,
arguments: Keyword.t()
]
defstruct name: "", type: :direct, opts: []
@doc """
Create exchange configuration from keyword list or `Freddy.Core.Exchange` structure.
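## Example

    iex> Freddy.Core.Exchange.new(name: "events", type: :topic, opts: [durable: true])
    %Freddy.Core.Exchange{name: "events", type: :topic, opts: [durable: true]}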
"""
@spec new(t | Keyword.t()) :: t
def new(%__MODULE__{} = exchange) do
exchange
end
def new(config) when is_list(config) do
struct!(__MODULE__, config)
end
@doc """
Returns default exchange configuration. Such exchange implicitly exists in RabbitMQ
and can't be declared by the clients.
"""
@spec default() :: t
def default do
%__MODULE__{}
end
@doc false
@spec declare(t, Freddy.Core.Channel.t()) :: :ok | {:error, atom}
def declare(%__MODULE__{name: ""}, _channel) do
:ok
end
def declare(%__MODULE__{} = exchange, %{adapter: adapter, chan: chan}) do
adapter.declare_exchange(chan, exchange.name, exchange.type, exchange.opts)
end
@doc false
@spec publish(t, Freddy.Core.Channel.t(), String.t(), String.t(), Keyword.t()) ::
:ok | {:error, atom}
def publish(%__MODULE__{} = exchange, %{adapter: adapter, chan: chan}, message, routing_key, opts) do
adapter.publish(chan, exchange.name, routing_key, message, opts)
end
end | lib/freddy/core/exchange.ex | 0.916554 | 0.517388 | exchange.ex | starcoder |
defmodule Insights.Server do
@moduledoc """
Defines a adapter.
A adapter maps to an underlying data store, controlled by the
adapter. For example, Insights ships with a Keen adapter that
stores data into a PostgreSQL database.
When used, the adapter expects the `:otp_app` as option.
The `:otp_app` should point to an OTP application that has
the adapter configuration. For example, the adapter:
defmodule Insight do
use Insights.Server, otp_app: :my_app
end
Could be configured with:
config :my_app, Insight,
adapter: Insights.Adapters.Keenex,
credentials: %{
project_id: System.get_env("KEEN_PROJECT_ID"),
write_key: System.get_env("KEEN_WRITE_KEY"),
read_key: System.get_env("KEEN_READ_KEY"),
}
"""
use Behaviour
@type t :: module
@doc false
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@behaviour Insights.Server
{otp_app, adapter, config} = Insights.Server.Config.parse(__MODULE__, opts)
@otp_app otp_app
@adapter adapter
@config config
@before_compile adapter
require Logger
@log_level config[:log_level] || :debug
def config do
Insights.Server.Config.config(@otp_app, __MODULE__)
end
def start_link(custom_config \\ []) do
config = Keyword.merge(config(), custom_config)
@adapter.start_link(__MODULE__, config)
end
def query(collection, queryable \\ nil, params \\ [], options \\ []) do
@adapter.query(__MODULE__, collection, queryable, params, options)
end
def all(collection \\ nil, params \\ []) do
@adapter.all(__MODULE__, collection, params)
end
def count(collection \\ nil, params \\ [], _options \\ []) do
@adapter.count(__MODULE__, collection, params)
end
def get(queryable, id, params \\ [], _options \\ []) do
@adapter.get(__MODULE__, queryable, id, params)
end
def get!(queryable, id, params \\ [], _options \\ []) do
@adapter.get!(__MODULE__, queryable, id, params)
end
def insert(collection, params \\ [], options \\ []) do
@adapter.insert(__MODULE__, collection, params, options)
end
def update(model, params \\ [], _options \\ []) do
@adapter.update(__MODULE__, @adapter, model, params)
end
def delete(model, params \\ [], _options \\ []) do
@adapter.delete(__MODULE__, @adapter, model, params)
end
def insert!(model, params \\ [], _options \\ []) do
@adapter.insert!(__MODULE__, @adapter, model, params)
end
def update!(model, params \\ [], _options \\ []) do
@adapter.update!(__MODULE__, @adapter, model, params)
end
def delete!(model, params \\ [], _options \\ []) do
@adapter.delete!(__MODULE__, @adapter, model, params)
end
def __adapter__ do
@adapter
end
def __insight__ do
true
end
end
end
@doc """
Returns the adapter tied to the adapter.
"""
defcallback __adapter__ :: Insights.Adapter.t
@doc """
Simply returns true to mark this module as a adapter.
"""
defcallback __insight__ :: true
@doc """
Returns the adapter configuration stored in the `:otp_app` environment.
"""
defcallback config() :: Keyword.t
@doc """
Starts any connection pooling or supervision and return `{:ok, pid}`
or just `:ok` if nothing needs to be done.
Returns `{:error, {:already_started, pid}}` if the insight already
started or `{:error, term}` in case anything else goes wrong.
"""
defcallback start_link() :: {:ok, pid} | :ok |
{:error, {:already_started, pid}} |
{:error, term}
@doc """
Fetches all entries using query.
"""
defcallback query(term, Keyword.t) :: [term] | no_return
@doc """
Fetches all entries from the data store matching the given query.
May raise `Insights.QueryError` if query validation fails.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
MyInsight.all(query)
"""
defcallback all(term, Keyword.t) :: [Insights.Model.t] | no_return
@doc """
Fetches count.
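## Example (illustrative)

    count = Insight.count("events")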
"""
defcallback count(term, Keyword.t, Keyword.t) :: term | no_return
@doc """
Fetches a single model from the data store where the primary key matches the
given id.
Returns `nil` if no result was found. If the model in the queryable
has no primary key `Insights.NoPrimaryKeyError` will be raised.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000)
"""
defcallback get(term, Keyword.t) :: Insights.Model.t | nil | no_return
@doc """
Similar to `get/3` but raises `Insights.NoResultsError` if no record was found.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
"""
defcallback get!(term, Keyword.t) :: Insights.Model.t | nil | no_return
@doc """
Inserts a model or a changeset.
In case a model is given, the model is converted into a changeset
with all model non-virtual fields as part of the changeset.
In case a changeset is given, the changes in the changeset are
merged with the model fields, and all of them are sent to the
database.
If any `before_insert` or `after_insert` callback is registered
in the given model, they will be invoked with the changeset.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Example
post = MyInsight.insert! %Post{title: "Insights is great"}
"""
defcallback insert!(Insights.Model.t, Keyword.t, Keyword.t) :: Insights.Model.t | no_return
@doc """
Updates a model or changeset using its primary key.
In case a model is given, the model is converted into a changeset
with all model non-virtual fields as part of the changeset. For this
reason, it is preferred to use changesets as they perform dirty
tracking and avoid sending data that did not change to the database
over and over. In case there are no changes in the changeset, no
data is sent to the database at all.
In case a changeset is given, only the changes in the changeset
will be updated, leaving all the other model fields intact.
If any `before_update` or `after_update` callback are registered
in the given model, they will be invoked with the changeset.
If the model has no primary key, `Insights.NoPrimaryKeyError` will be raised.
## Options
* `:force` - By default, if there are no changes in the changeset,
`update!/2` is a no-op. By setting this option to true, update
callbacks will always be executed, even if there are no changes
(including timestamps).
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Example
post = MyInsight.get!(Post, 42)
post = %{post | title: "New title"}
MyInsight.update!(post)
"""
defcallback update!(Insights.Model.t, Keyword.t) :: Insights.Model.t | no_return
@doc """
Deletes a model using its primary key.
If any `before_delete` or `after_delete` callback are registered
in the given model, they will be invoked with the changeset.
If the model has no primary key, `Insights.NoPrimaryKeyError` will be raised.
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
## Example
[post] = MyInsight.all(from(p in Post, where: p.id == 42))
MyInsight.delete!(post)
"""
defcallback delete!(Insights.Model.t, Keyword.t) :: Insights.Model.t | no_return
end | lib/insights/server.ex | 0.910149 | 0.491822 | server.ex | starcoder |
defmodule Day24.Part2 do
@doc """
iex> Day24.Part2.part2("day24-sample.txt")
2208
"""
def part2(filename) do
parse_input(filename)
|> Enum.reduce(%{}, fn directions, map ->
coords = find_coordinates(directions)
if Map.get(map, coords) == :black do
Map.delete(map, coords)
else
Map.put(map, coords, :black)
end
end)
# |> IO.inspect()
|> evolve(100)
|> Map.values()
|> Enum.count()
end
@doc """
iex> Day24.Part2.part2
3636
"""
def part2, do: part2("day24.txt")
def parse_input(filename) do
"inputs/#{filename}"
|> File.stream!()
|> Stream.map(&String.trim/1)
|> Stream.map(&tokenize(String.graphemes(&1)))
end
def tokenize(chars, tmp \\ [], emitted \\ [])
def tokenize([], [], emitted), do: emitted
def tokenize([char | chars], tmp, emitted) do
case [char | tmp] do
["e"] -> tokenize(chars, [], [:e | emitted])
["e", "s"] -> tokenize(chars, [], [:se | emitted])
["w", "s"] -> tokenize(chars, [], [:sw | emitted])
["w"] -> tokenize(chars, [], [:w | emitted])
["w", "n"] -> tokenize(chars, [], [:nw | emitted])
["e", "n"] -> tokenize(chars, [], [:ne | emitted])
_ -> tokenize(chars, [char | tmp], emitted)
end
end
# note: n is -y, origin is 0,0, odd rows use odd x's, even rows even
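# e.g. starting from {0, 0}: :e -> {2, 0}, :se -> {1, 1}, :ne -> {1, -1}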
def find_coordinates(directions, pos \\ {0, 0})
def find_coordinates([], pos), do: pos
def find_coordinates([dir | directions], {x, y}) do
case dir do
:e -> find_coordinates(directions, {x + 2, y})
:se -> find_coordinates(directions, {x + 1, y + 1})
:sw -> find_coordinates(directions, {x - 1, y + 1})
:w -> find_coordinates(directions, {x - 2, y})
:nw -> find_coordinates(directions, {x - 1, y - 1})
:ne -> find_coordinates(directions, {x + 1, y - 1})
end
end
def evolve(map, 0), do: map
def evolve(map, iterations) do
newmap =
map
|> Enum.map(fn {coords, _} -> coords end)
|> Enum.flat_map(&neighbors(&1))
|> Enum.uniq()
|> Enum.map(fn coords ->
count = adjacent_black_tile_count(coords, map)
if Map.get(map, coords) == :black do
unless count == 0 || count > 2, do: {coords, :black}
else
if count == 2, do: {coords, :black}
end
end)
|> Enum.filter(& &1)
|> Map.new()
evolve(newmap, iterations - 1)
end
def adjacent_black_tile_count(coords, map) do
Enum.count(neighbors(coords), &(Map.get(map, &1) == :black))
end
def neighbors({x, y}) do
[
{x + 2, y},
{x + 1, y + 1},
{x - 1, y + 1},
{x - 2, y},
{x - 1, y - 1},
{x + 1, y - 1}
]
end
end | lib/day24/part2.ex | 0.568176 | 0.588328 | part2.ex | starcoder |
defmodule Payjp.Charges do
@moduledoc """
Functions for working with charges at Payjp. Through this API you can:
* create a charge,
* update a charge,
* get a charge,
* list charges,
* refund a charge,
* partially refund a charge.
Payjp API reference: https://pay.jp/docs/api/#charge-支払い
"""
@endpoint "charges"
@doc """
Create a charge.
Creates a charge for a customer or card using amount and params. `params`
must include a source.
Returns `{:ok, charge}` tuple.
## Examples
### Create a charge with card object
params = [
card: [
number: "4242424242424242",
exp_month: 10,
exp_year: 2020,
country: "JP",
name: "<NAME>",
cvc: 123
],
description: "1000 Widgets"
]
{:ok, charge} = Payjp.Charges.create(1000, params)
### Create a charge with card token
params = [
card: [
number: "4242424242424242",
exp_month: 8,
exp_year: 2016,
cvc: "314"
]
]
{:ok, token} = Payjp.Tokens.create params
params = [
card: token.id
]
{:ok, charge} = Payjp.Charges.create(1000, params)
### Create a charge with customer ID
new_customer = [
email: "<EMAIL>",
description: "An Test Account",
metadata:[
app_order_id: "ABC123"
app_state_x: "xyz"
],
card: [
number: "4111111111111111",
exp_month: 01,
exp_year: 2018,
cvc: 123,
name: "<NAME>"
]
]
{:ok, customer} = Payjp.Customers.create new_customer
params = [
customer: customer.id
]
{:ok, charge} = Payjp.Charges.create(1000, params)
"""
def create(amount, params) do
create amount, params, Payjp.config_or_env_key
end
@doc """
Create a charge. Accepts Payjp API key.
Creates a charge for a customer or card using amount and params. `params`
must include a source.
Returns `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Payjp.Charges.create(1000, params, key)
"""
def create(amount, params, key) do
# default currency
params = Keyword.put_new params, :currency, "JPY"
# drop in the amount
params = Keyword.put_new params, :amount, amount
Payjp.make_request_with_key(:post, @endpoint, key, params)
|> Payjp.Util.handle_payjp_response
end
@doc """
Get a list of charges.
Gets a list of charges.
Accepts the following parameters:
* `opts` - a list of params supported by Payjp (optional; defaults to []). Available parameters are:
`customer`, `since`, `until`, `subscription`, `limit` and `offset`.
Returns a `{:ok, charges}` tuple, where `charges` is a list of charges.
## Examples
{:ok, charges} = Payjp.Charges.list(limit: 20) # Get a list of charges up to 20 items (default: 10)
{:ok, charges} = Payjp.Charges.list(customer: "customer_id") # Get a list of charges for customer
{:ok, charges} = Payjp.Charges.list(subscription: "subscription_id") # Get a list of charges for given subscription id
{:ok, charges} = Payjp.Charges.list(since: 1487473464) # Get a list of charges created after specified time stamp
"""
def list(opts \\ []) do
list(Payjp.config_or_env_key, opts)
end
@doc """
Get a list of charges. Accepts Payjp API key.
Gets a list of charges.
Accepts the following parameters:
* `opts` - a list of params supported by Payjp (optional; defaults to []). Available parameters are:
`customer`, `since`, `until`, `subscription`, `limit` and `offset`.
Returns a `{:ok, charges}` tuple, where `charges` is a list of charges.
## Examples
{:ok, charges} = Payjp.charges.list("my_key") # Get a list of up to 10 charges
{:ok, charges} = Payjp.charges.list("my_key", limit: 20) # Get a list of up to 20 charges
"""
def list(key, opts) do
Payjp.make_request_with_key(:get, "#{@endpoint}", key, opts)
|> Payjp.Util.handle_payjp_response
end
@doc """
Update a charge.
Updates a charge with changeable information.
Accepts the following parameters:
* `params` - a list of params to be updated (optional; defaults to `[]`).
Available parameters are: `description`, `metadata`, `receipt_email`,
`fraud_details` and `shipping`.
Returns a `{:ok, charge}` tuple.
## Examples
params = [
description: "Changed charge"
]
{:ok, charge} = Payjp.Charges.change("charge_id", params)
"""
def change(id, params) do
change id, params, Payjp.config_or_env_key
end
@doc """
Update a charge. Accepts Payjp API key.
Updates a charge with changeable information.
Accepts the following parameters:
* `params` - a list of params to be updated (optional; defaults to `[]`).
Available parameters are: `description`, `metadata`, `receipt_email`,
`fraud_details` and `shipping`.
Returns a `{:ok, charge}` tuple.
## Examples
params = [
description: "Changed charge"
]
{:ok, charge} = Payjp.Charges.change("charge_id", params, "my_key")
"""
def change(id, params, key) do
Payjp.make_request_with_key(:post, "#{@endpoint}/#{id}", key, params)
|> Payjp.Util.handle_payjp_response
end
@doc """
Capture a charge.
Captures a charge that is currently pending.
Note: you can default a charge to be automatically captured by setting `capture: true` in the charge create params.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Payjp.Charges.capture("charge_id")
"""
def capture(id) do
capture id, Payjp.config_or_env_key
end
@doc """
Capture a charge. Accepts Payjp API key.
Captures a charge that is currently pending.
Note: you can default a charge to be automatically captured by setting `capture: true` in the charge create params.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Payjp.Charges.capture("charge_id", "my_key")
"""
def capture(id, key) do
Payjp.make_request_with_key(:post, "#{@endpoint}/#{id}/capture", key)
|> Payjp.Util.handle_payjp_response
end
@doc """
Get a charge.
Gets a charge.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Payjp.Charges.get("charge_id")
"""
def get(id) do
get id, Payjp.config_or_env_key
end
@doc """
Get a charge. Accepts Payjp API key.
Gets a charge.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Payjp.Charges.get("charge_id", "my_key")
"""
def get(id, key) do
Payjp.make_request_with_key(:get, "#{@endpoint}/#{id}", key)
|> Payjp.Util.handle_payjp_response
end
@doc """
Refund a charge.
Refunds a charge completely.
Note: use `refund_partial` if you just want to perform a partial refund.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Payjp.Charges.refund("charge_id")
"""
def refund(id) do
refund id, Payjp.config_or_env_key
end
@doc """
Refund a charge. Accepts Payjp API key.
Refunds a charge completely.
Note: use `refund_partial` if you just want to perform a partial refund.
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Payjp.Charges.refund("charge_id", "my_key")
"""
def refund(id, key) do
Payjp.make_request_with_key(:post, "#{@endpoint}/#{id}/refund", key)
|> Payjp.Util.handle_payjp_response
end
@doc """
Partially refund a charge.
Refunds a charge partially.
Accepts the following parameters:
* `amount` - amount to be refunded (required).
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Payjp.Charges.refund_partial("charge_id", 500)
"""
def refund_partial(id, amount) do
refund_partial id, amount, Payjp.config_or_env_key
end
@doc """
Partially refund a charge. Accepts Payjp API key.
Refunds a charge partially.
Accepts the following parameters:
* `amount` - amount to be refunded (required).
Returns a `{:ok, charge}` tuple.
## Examples
{:ok, charge} = Payjp.Charges.refund_partial("charge_id", 500, "my_key")
"""
def refund_partial(id, amount, key) do
params = [amount: amount]
Payjp.make_request_with_key(:post, "#{@endpoint}/#{id}/refund", key, params)
|> Payjp.Util.handle_payjp_response
end
end | lib/payjp/charges.ex | 0.875255 | 0.573858 | charges.ex | starcoder |
defmodule Extractly.Toc do
alias Extractly.Toc.Options
import Extractly.Toc.Renderer
@moduledoc ~S"""
Extract Table Of Contents from a list of lines representing a Markdown document
"""
@placeholder_pfx "<!---- Extractly Self TOC "
def placeholder_pfx, do: @placeholder_pfx
@placeholder_sfx " ---->"
@doc false
def placeholder(options),
do: [ @placeholder_pfx, Options.to_string(options), @placeholder_sfx ] |> Enum.join
@doc ~S"""
Depending on the options the Table Of Contents extracted from the lines can be
rendered in different formats, the default being Markdown
#### Markdown
iex(1)> render(["# Hello", "## World"])
["- Hello", " - World"]
Numbered lists can be created too
iex(2)> render(["# Hello", "## World"], type: :ol)
["1. Hello", " 1. World"]
Oftentimes the level of headlines is adapted for output, e.g. `###` for the top
and `#####` for the second level.
`render` accounts for that
iex(3)> render(["### Alpha", "ignored", "##### Alpha.1", "", "### Beta"])
["- Alpha", " - Alpha.1", "- Beta"]
##### Remove Gaps
Sometimes there will be _gaps_ in the levels of headlines and these holes might
not reflect semantic but rather stylistic concerns, if this is the case the option
`remove_gaps` can be set to `true`
iex(4)> render(["# First", "### Third (but will go to second level)", "## Second"], remove_gaps: true)
["- First", " - Third (but will go to second level)", " - Second"]
##### Github README Links
This is all nice, however a TOC is most useful if links are provided.
`render` can render Github like links to within the page, here is a real world example
from a Github README.md file
iex(5)> lines = [
...(5)> "## Usage",
...(5)> "### API",
...(5)> "#### EarmarkParser.as_ast/2",
...(5)> "### Support",
...(5)> ]
...(5)> render(lines, gh_links: true)
[
"- [Usage](#usage)",
" - [API](#api)",
" - [EarmarkParser.as_ast/2](#earmarkparseras_ast2)",
" - [Support](#support)",
]
#### HTML
Sometimes it might be appropriate to generate HTML directly
iex(6)> render(["## One", "### Two"], format: :html)
["<ul>", "<li>One<ul>", "<li>Two</li>", "</ul></li>", "</ul>"]
##### Exlcuding levels and changing list styles
Let us examine these two options with HTML output, they work too for Markdown of course, but are meaningless with the more
_raw_ output formats
So we do not want to include levels greater than, say 3, and we also want to ignore top level headlines, probably because only
one top level part has sublevels
iex(7)> document = [
...(7)> "# Ignore",
...(7)> "# Too, but not what's below",
...(7)> "## Synopsis",
...(7)> "## Description",
...(7)> "### API",
...(7)> "#### too detailed",
...(7)> "### Tips & Tricks",
...(7)> "# Ignored again"
...(7)> ]
...(7)> render(document, format: :html, min_level: 2, max_level: 3, start: 5, type: :ol)
[
~S{<ol start="5">},
~S{<li>Synopsis</li>},
~S{<li>Description<ol>},
~S{<li>API</li>},
~S{<li>Tips & Tricks</li>},
~S{</ol></li>},
~S{</ol>},
]
#### PushList
Either a linear `PushList`
iex(8)> render(["# I", "## I.1", "## I.2", "### I.2.(i)", "# II", "### II.1.(ii)"], format: :push_list)
["I", :open, "I.1", "I.2", :open, "I.2.(i)", :close, :close, "II", :open, :open, "II.1.(ii)", :close, :close]
#### AST tree
iex(9)> render(["# I", "## I.1", "## I.2", "### I.2.(i)", "# II", "### II.1.(ii)"], format: :ast)
["I", ["I.1", "I.2", ["I.2.(i)"]], "II", [["II.1.(ii)"]]]
#### Unsupported Formats
iex(9)> render(["# Does not really matter"], format: :unknown)
{:error, "Unsupported format: unknown in render"}
"""
def render(lines, options \\ [])
def render({:error, _}=error, _options), do: error
def render(lines, %Options{}=options), do: lines |> _scan() |> _render(options)
def render(lines, options) do
case Options.new(options) do
{:ok, options_} -> lines |> _scan() |> _render(options_)
error -> error
end
end
@headline_rgx ~r<\A \s{0,3} (\#{1,7}) \s+ (.*)>x
defp _scan(lines), do:
lines
|> Enum.map(&Regex.run(@headline_rgx, &1))
|> Enum.filter(& &1)
|> Enum.map(fn [_, header, text] -> {String.length(header), text} end)
defp _render(tuples, options), do: _render_format(tuples, options.format || :markdown, options)
defp _render_format(tuples, format, options)
defp _render_format(tuples, :markdown, options), do: render_md(tuples, options)
defp _render_format(tuples, :html, options), do: render_html(tuples, options)
defp _render_format(tuples, :push_list, options), do: render_push_list(tuples, options)
defp _render_format(tuples, :ast, options), do: render_ast(tuples, options)
defp _render_format(_, format, _), do: {:error, "Unsupported format: #{format} in render"}
end | lib/extractly/toc.ex | 0.632616 | 0.555676 | toc.ex | starcoder |
defmodule EctoCursor.Expr do
@moduledoc false
defstruct [:term, :dir, :type, :params]
@type ast :: {atom | ast, [any], [ast]}
@type dir :: :desc | :desc_nulls_last | :desc_nulls_first | :asc | :asc_nulls_last | :asc_nulls_first
@type t :: %__MODULE__{
term: ast,
dir: dir,
type: any,
params: [any]
}
def extract(exprs, expr_acc \\ [])
def extract([], expr_acc) do
expr_acc
end
def extract([%{expr: exprs, params: params} | rest], expr_acc) do
extract(rest, expr_acc ++ split_params(exprs, params))
end
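# Builds the keyset (cursor) WHERE clause. For ordered expressions e1..en and
# cursor values v1..vn it produces the lexicographic comparison
#   (e1 OP v1) or (e1 == v1 and e2 OP v2) or ...
# where OP is > for ascending and < for descending sort directions.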
def build_where(exprs, params) do
{tree, _} = Enum.zip(exprs, params)
|> Enum.reduce({[], []}, fn {expr, param}, {acc, params_acc} ->
current = Enum.reverse([build_comp(op(expr), expr, param) | params_acc])
{[current | acc], [build_comp(:==, expr, param) | params_acc]}
end)
{clause_expr, clause_params} = Enum.reverse(tree)
|> Enum.map(fn ands ->
Enum.map(ands, & {&1.term, &1.params})
|> Enum.reduce(comp_reducer(:and))
end)
|> Enum.reduce(comp_reducer(:or))
%Ecto.Query.BooleanExpr{
expr: clause_expr,
params: clause_params,
op: :and
}
end
def build_select(exprs, select) do
original_select = select || %Ecto.Query.SelectExpr{expr: {:&, [], [0]}}
cursor_components = Enum.map(exprs, & &1.term)
cursor_params = Enum.map(exprs, & &1.params) |> Enum.reduce(&Enum.concat/2)
%{original_select |
expr: {:{}, [], [original_select.expr, cursor_components]},
params: original_select.params ++ cursor_params
}
end
defp split_params([], _), do: []
defp split_params([{dir, expr} | rest], params) do
{term, fvs} = reset_free_vars(expr)
[%__MODULE__{
term: term,
dir: dir,
params: Enum.take(params, fvs),
type: to_type(term)
} | split_params(rest, Enum.drop(params, fvs))]
end
defp reset_free_vars(term, offset \\ 0)
defp reset_free_vars({:^, meta, [v]}, offset) when is_integer(v) do
{{:^, meta, [offset]}, offset + 1}
end
defp reset_free_vars({op, meta, children}, offset) do
{op, offset} = reset_free_vars(op, offset)
{children, offset} = Enum.reduce(children, {[], offset}, fn t, {ts, os} ->
{t, os} = reset_free_vars(t, os)
{[t | ts], os}
end)
{{op, meta, Enum.reverse(children)}, offset}
end
defp reset_free_vars({t, e}, o) do
{op, offset} = reset_free_vars(e, o)
{{t, op}, offset}
end
defp reset_free_vars(t, o), do: {t, o}
# This is a huuuuuge TODO, but can be mitigated by coalesce in expression
defp op(%{dir: :desc}), do: :<
defp op(%{dir: :desc_nulls_last}), do: :<
defp op(%{dir: :desc_nulls_first}), do: :<
defp op(%{dir: :asc}), do: :>
defp op(%{dir: :asc_nulls_last}), do: :>
defp op(%{dir: :asc_nulls_first}), do: :>
defp op(_), do: :>
defp build_comp(op, expr, var) do
%{expr |
params: expr.params ++ [var],
term: {op, [], [expr.term, {:^, [], [length(expr.params)]}]}
}
end
defp comp_reducer(op), do: fn {term, params}, {term_acc, params_acc} ->
{{op, [], [term_acc, shift_vars(term, length(params_acc))]}, params_acc ++ params}
end
defp shift_vars({:^, meta, [v]}, offset) when is_integer(v) do
{:^, meta, [v + offset]}
end
defp shift_vars({op, meta, children}, offset) do
{shift_vars(op, offset), meta, Enum.map(children, &shift_vars(&1, offset))}
end
defp shift_vars(node, _) do
node
end
# Regular binding
defp to_type({{:., _, [{:&, _, [binding]}, field]}, _, _}) do
{binding, field}
end
# Not sure if this possibly can appear
defp to_type({:type, _, [{:^, _, [arg]}, type]}) do
{arg, type}
end
defp to_type({:count, _, _}) do
:integer
end
defp to_type(_) do
:any
end
end | lib/ecto_cursor/expr.ex | 0.552781 | 0.580293 | expr.ex | starcoder |
defmodule Env do
@moduledoc """
Env is an improved application configuration reader for Elixir.
Env allows you to access easily the configuration of your application
similar to what `Application.get_env/3` does, but understands the
`{:system, "NAME"}` convention of using system environment variables
in application configuration.
When Env initially retrieves the configuration it will walk recursively
any keyword lists and properly replace any occurrences of:
`{:system, "NAME"}` or `{:system, "NAME", default}` with value extracted
from the environment using `System.get_env("NAME")`.
When a tuple without default value is used, but the environment variable is
not set an exception will be raised.
Result of any lookups (both successful and not) is cached in an ETS table
- the same mechanism that the Erlang VM uses internally for storing regular
application configuration. This guarantees that subsequent lookups are as
fast as are those using functions from `Application` module.
When you expect the configuration to change, you can use `Env.refresh/3` to
read the value again ignoring the cache or `Env.clear/1` and `Env.clear/2` in
order to clear the cache.
*WARNING*: because Env uses ETS table to store it's cache it is not available
at compile-time. When you need some compile-time configuration using regular
`Application.get_env/3` is probably the best option. This should not be a huge
problem in practice, because configuration should be moved as much as possible
to the runtime, allowing for easy changes, which is not possible with compile-time
settings.
## Example
With configuration in `config/config.exs` as follows:
config :my_app, :key,
enable_server: true,
host: [port: {:system, "PORT", 80}],
secret_key_base: {:system, "SECRET_KEY_BASE"}
And environment where `PORT` is not set, while `SECRET_KEY_BASE` has value `foo`
You can access it with `Env` using:
Env.fetch!(:my_app, :key)
[enable_server: true, host: [port: 80], secret_key_base: "foo"]
## Transformer
All functions used for accessing the environment accept a `:transformer`
option. This function can be used to parse any configuration read from system
environment - all values access from the environment are strings.
A binary function passes as the `:transformer` will receive path for the current
key as the first argument, and the value from the environment as the second one.
Using the example from above, we could use that mechanism to force port to
always be an integer:
transformer = fn
[:key, :host, :port], value -> String.to_integer(value)
_, value -> value
end
And pass it to one of the reader functions:
Env.fetch(:my_app, :key, transformer: transformer)
{:ok, [enable_server: true, host: [port: 80], secret_key_base: "foo"]}
"""
use Application
@type app :: Application.app
@type key :: Application.key
@doc false
def start(_type, _args) do
Env.Supervisor.start_link()
end
@doc """
Returns value for `key` in `app`'s environment.
Similar to `fetch/3`, but returns the configuration value if present
or `default` otherwise. Caches the result for future lookups.
## Options
* `:transform` - transformer function, see module documentation
## Example
iex> Application.put_env(:env, :some_key, :some_value)
iex> Env.get(:env, :some_key)
:some_value
iex> Env.get(:env, :other_key)
nil
iex> Env.get(:env, :other_key, false)
false
"""
@spec get(app, key, Keyword.t, term) :: term
def get(app, key, default \\ nil, opts \\ []) when is_list(opts) do
case fetch(app, key, opts) do
{:ok, value} -> value
:error -> default
end
end
@doc """
Returns value for `key` in `app`'s environment in a tuple.
Returns value wrapped in `{:ok, value}` tuple on success or `:error` otherwise.
Caches the result for future lookups.
## Options
* `:transform` - transformer function, see module documentation
## Example
iex> Application.put_env(:env, :some_key, :some_value)
iex> Env.fetch(:env, :some_key)
{:ok, :some_value}
iex> Env.fetch(:env, :other_key)
:error
"""
@spec fetch(app, key, Keyword.t) :: {:ok, term} | :error
def fetch(app, key, opts \\ []) when is_list(opts) do
case lookup(app, key) do
{:ok, value} ->
value
:error ->
refresh(app, key, opts)
end
end
@doc """
Returns value for `key` in `app`'s environment.
Similar to `get/4`, but raises when the key is not found.
Caches the result for future lookups.
## Options
* `:transform` - transformer function, see module documentation
## Example
iex> Application.put_env(:env, :some_key, :some_value)
iex> Env.fetch!(:env, :some_key)
:some_value
iex> Env.fetch!(:env, :other_key)
** (RuntimeError) no configuration value for key :other_key of :env
"""
@spec fetch!(app, key, Keyword.t) :: term | no_return
def fetch!(app, key, opts \\ []) when is_list(opts) do
case fetch(app, key, opts) do
{:ok, value} ->
value
:error ->
raise "no configuration value for key #{inspect key} of #{inspect app}"
end
end
@doc """
Returns value for `key` in `app`'s environment in a tuple.
Similar to `fetch/3`, but always reads the value from the application
environment and searches for system environment references.
Caches the result for future lookups.
## Options
* `:transform` - transformer function, see module documentation
## Example
iex> Application.put_env(:env, :some_key, :some_value)
iex> Env.fetch(:env, :some_key)
{:ok, :some_value}
iex> Application.put_env(:env, :some_key, :new_value)
iex> Env.fetch(:env, :some_key)
{:ok, :some_value}
iex> Env.refresh(:env, :some_key)
{:ok, :new_value}
"""
@spec refresh(app, key, Keyword.t) :: {:ok, term} | :error
def refresh(app, key, opts \\ []) when is_list(opts) do
store(app, key, load_and_resolve(app, key, opts))
end
@doc """
Clears the cache for value of `key` in `app`'s environment.
"""
@spec clear(app, key) :: :ok
def clear(app, key) do
:ets.delete(Env, {app, key})
:ok
end
@doc """
Clears the cache for all values in `app`'s environment.
"""
@spec clear(app) :: :ok
def clear(app) do
:ets.match_delete(Env, {{app, :_}, :_})
:ok
end
@doc """
Resolves all the Application configuration values and updates
the Application environment in place.
You can later access the values with `Application.get_env/3` as usual.
## Options
* `:transform` - transformer function, see module documentation
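## Example

    :ok = Env.resolve_inplace(:my_app, :key)
    Application.get_env(:my_app, :key) # now free of {:system, _} tuples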
"""
@spec resolve_inplace(app, key, Keyword.t) :: :ok
def resolve_inplace(app, key, opts \\ []) do
transform = Keyword.get(opts, :transform, fn _, value -> value end)
value = Application.fetch_env!(app, key)
resolved = resolve(value, app, [key], transform)
Application.put_env(app, key, resolved)
:ok
end
@doc """
Function for use in the `:application.config_change/3` callback.
The callback is called by an application after a code replacement, if
there are any changes to the configuration parameters.
This function gives a convenient way to propagate any such changes to Env.
## Options
* `:transform` - transformer function, see module documentation
## Example
def config_change(changed, new, removed) do
Env.config_change(:my_app, changed, new, removed)
end
"""
@spec config_change(app, pairs, pairs, [key], Keyword.t) :: :ok
when pairs: [{key, term}]
def config_change(app, changed, new, removed, opts \\ []) do
transform = Keyword.get(opts, :transform, fn _, value -> value end)
Enum.each(removed, &clear(app, &1))
Enum.each(changed, &resolve_and_store(&1, app, transform))
Enum.each(new, &resolve_and_store(&1, app, transform))
:ok
end
defp resolve_and_store({key, value}, app, transform) do
value = resolve(value, app, [key], transform)
store(app, key, {:ok, value})
end
defp lookup(app, key) do
case :ets.lookup(Env, {app, key}) do
[{_, value}] -> {:ok, value}
_ -> :error
end
end
defp store(app, key, value) do
:ets.insert(Env, {{app, key}, value})
value
end
defp load_and_resolve(app, key, opts) do
transform = Keyword.get(opts, :transform, fn _, value -> value end)
case :application.get_env(app, key) do
{:ok, value} -> {:ok, resolve(value, app, [key], transform)}
:undefined -> :error
end
end
@doc false
def resolve({:system, name, default}, _app, path, transform) do
case :os.getenv(String.to_char_list(name)) do
false ->
default
value ->
path = Enum.reverse(path)
transform.(path, List.to_string(value))
end
end
def resolve({:system, name}, app, path, transform) do
path = Enum.reverse(path)
case :os.getenv(String.to_char_list(name)) do
false ->
raise "expected environment variable #{name} to be set, as required in " <>
"configuration of application #{app} under path #{inspect path}"
value ->
transform.(path, List.to_string(value))
end
end
def resolve([{key, value} | rest], app, path, transform) when is_atom(key) do
value = resolve(value, app, [key | path], transform)
[{key, value} | resolve(rest, app, path, transform)]
end
def resolve(value, _app, _path, _transform) do
value
end
end | lib/env.ex | 0.925171 | 0.537163 | env.ex | starcoder |
defmodule CloudStackLang.Operator.Add do
@moduledoc """
This module contains all routine to perform add operation.
## Examples
iex> CloudStackLang.Operator.Add.reduce({:int, 1}, {:int, 1})
{:int, 2}
iex> CloudStackLang.Operator.Add.reduce({:float, 1.0}, {:int, 1})
{:float, 2.0}
iex> CloudStackLang.Operator.Add.reduce({:int, 1}, {:float, 1.0})
{:float, 2.0}
iex> CloudStackLang.Operator.Add.reduce({:float, 1.0}, {:float, 1.0})
{:float, 2.0}
iex> CloudStackLang.Operator.Add.reduce({:error, 1, "hello"}, {:int, 1})
{:error, 1, "hello"}
iex> CloudStackLang.Operator.Add.reduce({:int, 1}, {:error, 1, "hello"})
{:error, 1, "hello"}
iex> CloudStackLang.Operator.Add.reduce({:int, 1}, {:string, "a"})
{:string, "1a"}
iex> CloudStackLang.Operator.Add.reduce({:string, "a"}, {:int, 1})
{:string, "a1"}
iex> CloudStackLang.Operator.Add.reduce({:float, 1.0}, {:string, "a"})
{:string, "1.0a"}
iex> CloudStackLang.Operator.Add.reduce({:string, "a"}, {:float, 1.0})
{:string, "a1.0"}
iex> CloudStackLang.Operator.Add.reduce({:string, "a"}, {:string, "b"})
{:string, "ab"}
iex> CloudStackLang.Operator.Add.reduce({:array, [ {:int, 1} ]}, {:array, [ {:int, 2}, {:int, 3} ]})
{:array, [int: 1, int: 2, int: 3]}
iex> CloudStackLang.Operator.Add.reduce({:map, %{ :a => {:int, 1} }}, {:map, %{ :b => {:int, 2}, :c => {:int, 3} }})
{:map, %{a: {:int, 1}, b: {:int, 2}, c: {:int, 3}}}
"""
def reduce({:error, line, msg}, _rvalue), do: {:error, line, msg}
def reduce(_lvalue, {:error, line, msg}), do: {:error, line, msg}
def reduce({:int, lvalue}, {:int, rvalue}), do: {:int, lvalue + rvalue}
def reduce({:float, lvalue}, {:int, rvalue}), do: {:float, lvalue + rvalue}
def reduce({:int, lvalue}, {:float, rvalue}), do: {:float, lvalue + rvalue}
def reduce({:float, lvalue}, {:float, rvalue}), do: {:float, lvalue + rvalue}
def reduce({:int, lvalue}, {:string, rvalue}),
do: {:string, Integer.to_string(lvalue) <> rvalue}
def reduce({:float, lvalue}, {:string, rvalue}),
do: {:string, Float.to_string(lvalue) <> rvalue}
def reduce({:string, lvalue}, {:int, rvalue}),
do: {:string, lvalue <> Integer.to_string(rvalue)}
def reduce({:string, lvalue}, {:float, rvalue}),
do: {:string, lvalue <> Float.to_string(rvalue)}
def reduce({:string, lvalue}, {:string, rvalue}), do: {:string, lvalue <> rvalue}
def reduce({:array, lvalue}, {:array, rvalue}), do: {:array, Enum.concat(lvalue, rvalue)}
def reduce({:map, lvalue}, {:map, rvalue}), do: {:map, Map.merge(lvalue, rvalue)}
def reduce(lvalue, rvalue),
do: {:error, "'+' operator not supported for #{inspect(lvalue)}, #{inspect(rvalue)}"}
end | lib/api/opetaror/add.ex | 0.802942 | 0.423398 | add.ex | starcoder |
defmodule Snitch.Data.Schema.TaxConfig do
@moduledoc """
Models the general configuration for Tax.
## Note
At present single row modelling is being used to handle
storing general configuration for tax. A detailed reason
for picking up the type of modelling can be seen
[here](https://www.pivotaltracker.com/story/show/163364131).
"""
use Snitch.Data.Schema
alias Snitch.Data.Schema.{TaxClass, Country, State}
@typedoc """
Represents the tax configuration
- `label`: A label for the tax; the same label is shown on the
frontend, e.g. SalesTax.
- `included_in_price?`: A boolean indicating whether tax is already included
in the product selling price.
- `calculation_address_type`: The address used for tax calculation;
it can be set to `shipping`, `billing`, or the store address.
- `shipping_tax`: The tax class used while calculating shipping
tax.
- `gift_tax`: The tax class used while calculating gift tax.
- `default_country`: Sets the default tax country. It is used
to calculate taxes if prices are inclusive of tax.
- `default_state`: Sets the default tax state. If set, a zone containing
the state is used for calculating tax when prices are inclusive of tax.
- `preferences`: A JSON field to store all other params in a jsonb map.
"""
@type t :: %__MODULE__{}
schema "snitch_tax_configuration" do
field(:label, :string)
field(:included_in_price?, :boolean, default: true)
field(:calculation_address_type, AddressTypes, default: :shipping_address)
field(:preferences, :map)
belongs_to(:shipping_tax, TaxClass)
belongs_to(:gift_tax, TaxClass)
belongs_to(:default_country, Country)
belongs_to(:default_state, State)
timestamps()
end
@required ~w(label shipping_tax_id default_country_id)a
@optional ~w(default_state_id gift_tax_id included_in_price? calculation_address_type)a
@permitted @required ++ @optional
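@doc """
Returns a changeset for creating a tax configuration.

## Example (illustrative IDs)

    TaxConfig.create_changeset(%TaxConfig{}, %{
      label: "SalesTax",
      shipping_tax_id: 1,
      default_country_id: 1
    })
"""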
def create_changeset(%__MODULE__{} = config, params) do
config
|> cast(params, @permitted)
|> common_changeset()
end
def update_changeset(%__MODULE__{} = config, params) do
config
|> cast(params, @permitted)
|> common_changeset()
end
defp common_changeset(changeset) do
changeset
|> validate_required(@required)
|> foreign_key_constraint(:shipping_tax_id)
|> foreign_key_constraint(:gift_tax_id)
|> foreign_key_constraint(:default_country_id)
|> foreign_key_constraint(:default_state_id)
end
end | apps/snitch_core/lib/core/data/schema/tax/tax_config.ex | 0.85928 | 0.659269 | tax_config.ex | starcoder |
defmodule Mix.Tasks.Serum.New do
@moduledoc """
Creates a new Serum project.
mix serum.new [--force] PATH
A new Serum project will be created at the given `PATH`. `PATH` cannot be
omitted and it must start with a lowercase ASCII letter, followed by zero
or more lowercase ASCII letters, digits, or underscores.
This task will fail if `PATH` already exists and is not empty. This behavior
will be overridden if the task is executed with a `--force` option.
## Required Argument
- `PATH`: A path where the new Serum project will be created.
## Options
- `--force` (boolean): Forces creation of the new Serum project even if
`PATH` already exists and is not empty.
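## Example
Creating a new project under ./my_blog (the path name is illustrative):
mix serum.new my_blog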
"""
@shortdoc "Creates a new Serum project"
use Mix.Task
require Mix.Generator
import Serum.New
alias Serum.New.Files
alias IO.ANSI, as: A
@elixir_version Version.parse!(System.version())
@version Mix.Project.config()[:version]
@options [force: :boolean]
@impl true
def run(args)
def run([ver]) when ver in ["-v", "--version"] do
Mix.shell().info("Serum installer, version #{@version}")
end
def run(args) do
{options, argv} = OptionParser.parse!(args, strict: @options)
with [path | _] <- argv,
{:ok, app_name} <- process_path(path, options[:force] || false) do
assigns = [
app_name: app_name,
mod_name: Macro.camelize(app_name),
elixir_version: get_version_req(@elixir_version),
serum_dep: get_serum_dep()
]
if path != "." do
Mix.Generator.create_directory(path)
end
File.cd!(path, fn -> generate_project(path, assigns) end)
else
[] ->
Mix.raise("expected PATH to be given. Run mix help serum.new for help")
{:error, msg} ->
Mix.raise(msg)
end
end
@spec generate_project(binary(), keyword()) :: :ok
defp generate_project(path, assigns) do
[
"assets/css",
"assets/images",
"assets/js",
"includes",
"media",
"pages",
"posts",
"templates"
]
|> Enum.each(&Mix.Generator.create_directory/1)
create_file(".formatter.exs", Files.text(:formatter_exs))
create_file(".gitignore", Files.template(:gitignore, assigns))
create_file("mix.exs", Files.template(:mix_exs, assigns))
create_file("serum.exs", Files.template(:serum_exs, assigns))
create_file("includes/nav.html.eex", Files.text(:nav_html_eex))
create_file("templates/base.html.eex", Files.text(:base_html_eex))
create_file("templates/list.html.eex", Files.text(:list_html_eex))
create_file("templates/page.html.eex", Files.text(:page_html_eex))
create_file("templates/post.html.eex", Files.text(:post_html_eex))
create_file("pages/index.md", Files.text(:index_md))
create_file("posts/2019-01-01-sample-post.md", Files.text(:sample_post_md))
cd =
case path do
"." -> ""
_ -> "cd #{path}\n "
end
"""
#{A.bright()}Successfully created a new Serum project!#{A.reset()}
To test your new project, start the Serum development server:
#{cd}mix deps.get
mix serum.server [--port PORT]
Run "mix help serum" for more Serum tasks.
"""
|> String.trim_trailing()
|> Mix.shell().info()
end
end | serum_new/lib/mix/tasks/serum/new.ex | 0.82994 | 0.441312 | new.ex | starcoder |
defmodule Remedy.ImageData do
@max_size 256_000
@max_width 128
@max_height 128
@moduledoc """
Ecto.Type implementation of Image Data.
This allows a URL or path to be provided and the image data will be constructed from the linked image.
This is only used with certain API endpoints and should not be used as a general purpose type for storing images in Ecto.
## Casting
The following are examples of valid inputs for casting. Regardless of the format provided, values will be cast to a `t:binary/0` value for storage.
#### Image Data
"data:image/jpeg;base64,BASE64_ENCODED_JPEG_IMAGE_DATA"
#### Image URL
"https://www.google.com/images/branding/googlelogo/1x/googlelogo_color_272x92dp.png"
"""
def info(_), do: nil
use Unsafe.Generator, handler: :unwrap, docs: false
use Ecto.Type
@typedoc """
A _so called_ Image Type.
"""
@type t :: 0x000000..0xFFFFFF
@typedoc """
Castable to Image.
"""
@type c :: Path.t() | URI.t() | String.t()
@doc false
@impl true
@spec type :: :string
def type, do: :string
@doc false
@impl true
@unsafe {:cast, [:value]}
@spec cast(any) :: :error | {:ok, nil | binary}
def cast(value)
def cast(nil), do: {:ok, nil}
def cast(value) do
parse_data(value)
|> case do
:error -> :error
value -> {:ok, value}
end
end
@doc false
@impl true
@unsafe {:dump, [:value]}
@spec dump(any) :: :error | {:ok, nil | binary}
def dump(nil), do: {:ok, nil}
def dump(value), do: {:ok, value}
@doc false
@impl true
@unsafe {:load, [:value]}
@spec load(any) :: {:ok, String.t()}
def load(value), do: {:ok, value}
@doc false
@impl true
def equal?(term1, term2), do: term1 == term2
@doc false
@impl true
def embed_as(_value), do: :dump
defp parse_data("http://" <> url) do
url = :erlang.binary_to_list("http://" <> url)
{:ok, {_resp, _headers, body}} = :httpc.request(url)
body
|> :erlang.list_to_binary()
|> parse_data()
end
defp parse_data("https://" <> url) do
url = :erlang.binary_to_list("https://" <> url)
{:ok, {_resp, _headers, body}} = :httpc.request(url)
body
|> :erlang.list_to_binary()
|> parse_data()
end
defp parse_data(<<"data:image/png;base64,", _data::size(64)>> = valid_image)
when byte_size(valid_image) >= @max_size do
valid_image
end
defp parse_data(<<"data:image/jpg;base64,", _data::size(64)>> = valid_image)
when byte_size(valid_image) >= @max_size do
valid_image
end
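# Raw PNG binary: the magic header is followed by the IHDR chunk, which
# carries the 32-bit width and height.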
defp parse_data(<<137, "PNG", 13, 10, 26, 10, _::32, "IHDR", width::32, height::32, _rest::binary>> = data)
when width <= @max_width and height <= @max_height and byte_size(data) <= @max_size do
"data:image/png;base64," <> Base.encode64(data)
end
defp parse_data(<<255, 216, _::size(16), rest::binary>> = data) do
case parse_jpeg(rest) do
nil ->
:error
{width, height, _ftype} when height <= @max_height and width <= @max_width ->
"data:image/jpg;base64," <> Base.encode64(data)
_ ->
:error
end
end
defp parse_data(path) when is_binary(path) do
path
|> Path.expand()
|> File.read()
|> case do
{:ok, data} ->
parse_data(data)
_ ->
:error
end
end
defp parse_data(_value), do: :error
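# Walks the JPEG segment stream looking for a start-of-frame (SOF) marker,
# which carries the image dimensions. Each segment begins with a two-byte
# length that includes the length field itself.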
defp parse_jpeg(<<block_len::size(16), rest::binary>>), do: parse_jpeg_block(block_len, rest)
defp parse_jpeg_block(block_len, <<rest::binary>>) do
size = block_len - 2
case rest do
<<_::bytes-size(size), 0xFF, sof::size(8), next::binary>> -> parse_jpeg_sof(sof, next)
_ -> :error
end
end
defp parse_jpeg_block(_, _), do: nil
defp parse_jpeg_sof(0xC0, next), do: parse_jpeg_dimensions("baseJPEG", next)
defp parse_jpeg_sof(0xC2, next), do: parse_jpeg_dimensions("progJPEG", next)
defp parse_jpeg_sof(_, next), do: parse_jpeg(next)
defp parse_jpeg_dimensions(ftype, <<_skip::size(24), height::size(16), width::size(16), _::binary>>) do
{width, height, ftype}
end
defp parse_jpeg_dimensions(_, _), do: nil
defp unwrap({:ok, body}), do: body
defp unwrap(:error), do: raise(ArgumentError)
end | lib/remedy/types/image_data.ex | 0.735831 | 0.583619 | image_data.ex | starcoder |
defmodule Adventofcode.Day06ChronalCoordinates do
use Adventofcode
def largest_area_size(input) do
input
|> parse_coordinates
|> build_grid
|> finite_area_sizes(grid_locations(-99..599), grid_locations(-100..600))
|> hd()
|> elem(0)
end
def safe_area_size(input, distance) do
input
|> parse_coordinates
|> build_grid
|> do_safe_area_size
|> Enum.sort_by(fn {_, dist} -> dist end)
|> Enum.filter(fn {_, dist} -> dist < distance end)
|> length
end
defp do_safe_area_size(grid) do
0..360
|> grid_locations
|> Enum.map(&{&1, distance_to_all_coordinates(&1, grid)})
|> Enum.into(%{})
end
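# Areas that touch the edge of the grid are infinite: they keep growing as
# the grid expands. Computing area sizes over two different grid ranges and
# keeping only those whose size is unchanged filters out the infinite ones.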
def finite_area_sizes(grid, range1, range2) do
area_sizes(grid, range1)
|> Enum.zip(area_sizes(grid, range2))
|> Enum.filter(fn {{_, n1}, {_, n2}} -> n1 == n2 end)
|> Enum.map(fn {{name, size}, _} -> {size, name} end)
|> Enum.sort()
|> Enum.reverse()
end
def area_sizes(grid, range) do
grid
|> closest_coordinates(range)
|> Map.values()
|> Enum.reduce(%{}, &do_sum_area_sizes/2)
end
defp do_sum_area_sizes(name, acc), do: Map.update(acc, name, 1, &(&1 + 1))
def manhattan_distance({x1, y1}, {x2, y2}), do: abs(x1 - x2) + abs(y1 - y2)
def parse_coordinates(input) do
input
|> String.trim("\n")
|> String.split("\n")
|> Enum.map(&parse_coordinate/1)
end
defp parse_coordinate(coordinate) do
coordinate
|> String.split(", ")
|> Enum.map(&String.to_integer/1)
|> List.to_tuple()
end
def build_grid(coordinates) do
coordinates
|> Enum.zip(names())
|> Enum.into(%{})
end
def names do
?A..?Z
|> Enum.map(&to_string([&1]))
|> Enum.flat_map(fn a -> Enum.map(0..1, &"#{&1}#{a}") end)
|> Enum.sort()
end
def grid_locations(range \\ 0..400) do
Enum.flat_map(range, fn n -> Enum.map(range, &{n, &1}) end)
end
def distance_to_all_coordinates(coordinate, grid) do
grid
|> Map.keys()
|> Enum.map(&manhattan_distance(&1, coordinate))
|> Enum.sum()
end
def closest_coordinates(grid, locations \\ grid_locations()) do
locations
|> Enum.map(&{&1, closest_coordinate(&1, grid)})
|> Enum.into(%{})
end
def closest_coordinate(coordinate, grid) do
case do_closest(coordinate, grid) |> Enum.sort() do
[{distance, _}, {distance, _} | _] -> ".."
[{_, other_coordinate_name} | _] -> String.downcase(other_coordinate_name)
end
end
defp do_closest(coordinate, grid) do
Enum.map(grid, fn {other_coordinate, other_coordinate_name} ->
{manhattan_distance(coordinate, other_coordinate), other_coordinate_name}
end)
end
end | lib/day_06_chronal_coordinates.ex | 0.780328 | 0.624279 | day_06_chronal_coordinates.ex | starcoder |
defmodule ExActor.Operations do
@moduledoc """
Macros that can be used for simpler definition of `GenServer` operations
such as casts or calls.
For example:
defcall request(x, y), state: state do
set_and_reply(state + x + y, :ok)
end
will generate two functions:
def request(server, x, y) do
GenServer.call(server, {:request, x, y})
end
def handle_call({:request, x, y}, _, state) do
{:reply, :ok, state + x + y}
end
There are various helper macros available for specifying responses. For more details
see `ExActor.Responders`.
## Request format (passed to `handle_call/3` and `handle_cast/2`)
- no arguments -> `:my_request`
- one arguments -> `{:my_request, x}`
- more arguments -> `{:my_request, x, y, ...}`
## Common options
- `:when` - specifies guards (see __Pattern matching__ below for details)
- `:export` - applicable in `defcall/3` and `defcast/3`. If provided, specifies
the server alias. In this case, interface functions will not accept the server
as the first argument, and will insted use the provided alias. The alias
can be an atom (for locally registered processes), `{:global, global_alias}` or
a via tuple (`{:via, registration_module, alias}`).
## Pattern matching
defcall a(1), do: ...
defcall a(x), when: x > 1, do: ...
defcall a(x), when: [interface: x > 1, handler: x < state], do: ...
defcall a(x), state: 1, do: ...
defcall a(_), state: state, do: ...
### Details
`defcall` and other similar constructs usually define a clause for two
functions: the interface function and the handler function. If you're writing
multi-clauses, the following rules apply:
- Arguments are pattern-matched in the interface and in the handler function.
- The `:state` pattern is used in the handler function.
- The `:when` option by default applies to both, the interface and the handler function.
You can however specify separate guards with `when: [interface: ..., handler: ...]`.
It's not necessary to provide both options to `when`.
`ExActor` will try to be smart to some extent, and defer from generating the
interface clause if it's not needed.
For example:
defcall foo(_, _), state: nil, do: ...
defcall foo(x, y), state: state, do: ...
will generate only a single interface function that always matches its arguments
and sends them to the server process. There will be of course two `handle_call`
clauses.
The same holds for more elaborate pattern-matches:
defcall foo(1, 2), ...
defcall foo(x, y), when: x > y, ...
defcall foo(_, _), state: nil, do: ...
defcall foo(x, y), state: state, do: ...
The example above will generate three interface clauses:
- `def foo(1, 2)`
- `def foo(x, y) when x > y`
- `def foo(x, y)`
Of course, there will be four `handle_call` clauses, each with the corresponding
body provided via `do` option.
### Separating interface and handler clauses
If you want to be more explicit about pattern matching, you can use a body-less
construct:
defcall foo(x, y)
This will generate only the interface clause that issues a call (or a cast in
the case of `defcast`) to the server process.
You can freely use multiple `defcall` body-less clauses if you need to pattern
match arguments.
To generate handler clauses you can use `defhandlecall/3`:
defhandlecall foo(_, _), state: nil, do: ...
defhandlecall foo(x, y), state: state, do: ...
This approach requires some more typing, but it's more explicit. If you need to
perform a complex combination of pattern matches on arguments and the state, it's
probably better to use this technique as it gives you more control over what is
matched at which point.
"""
@doc """
Defines the starter function and initializer body.
# defines and export start/2
defstart start(x, y) do
# runs in init/1 callback
initial_state(x + y)
end
# defines and export start_link/2
defstart start_link(x, y) do
# runs in init/1 callback
initial_state(x + y)
end
You can also provide additional `GenServer` options via the `:gen_server_opts` option.
defstart start(x, y), gen_server_opts: [spawn_opts: [min_heap_size: 10000]], do: ...
If you need to set `GenServer` options at runtime, use `gen_server_opts: :runtime` and
then the starter function will receive one more argument where you can pass options:
defstart start(x, y), gen_server_opts: :runtime do
...
end
...
MyServer.start(x, y, name: :foo, spawn_opts: [min_heap_size: 10000])
Body can be omitted. In this case, just the interface function is generated.
This can be useful if you want to define both `start` and `start_link`:
defstart start(x, y)
defstart start_link(x, y) do
# runs for both cases
end
Keep in mind that the generated `init/1` matches on the number of arguments, so this won't work:
defstart start_link(x)
defstart start_link(x, y) do
# doesn't handle start_link(x)
end
If you want to handle various versions, you can just define start heads without the body,
and then use `definit/2` or just implement `init/1`.
## Other notes
- If the `export` option is set while using `ExActor`, it will be used in starters, and
the server process will be registered under a given alias.
- For each specified clause, there will be one corresponding interface function clause.
### Request format (arg passed to `init/1`)
- no arguments -> `nil`
- one arguments -> `{x}`
- more arguments -> `{x, y, ...}`
"""
defmacro defstart(definition, opts \\ [], body \\ []) do
{fun, args} = Macro.decompose_call(definition)
define_starter(false, fun, args, opts ++ body)
end
@doc """
Same as `defstart/2` but the interface function is private.
Can be useful when you need to do pre/post processing in the caller process.
defmodule MyServer do
def start_link(x, y) do
...
do_start_link(x, y)
...
end
defstartp do_start_link(x, y), link: true do
...
end
end
"""
defmacro defstartp(definition, options \\ [], body \\ []) do
{fun, args} = Macro.decompose_call(definition)
define_starter(true, fun, args, options ++ body)
end
defp define_starter(private, fun, args, options) do
quote bind_quoted: [
private: private,
fun: Macro.escape(fun, unquote: true),
args: Macro.escape(args || [], unquote: true),
options: escape_options(options)
] do
{interface_matches, payload, match_pattern} = ExActor.Operations.start_args(args)
{arity, interface_matches, gen_server_fun, gen_server_opts} =
ExActor.Operations.prepare_start_interface(fun, interface_matches, options, @exactor_global_options)
unless private do
case ExActor.Operations.guard(options, :interface) do
nil ->
def unquote(fun)(unquote_splicing(interface_matches)) do
GenServer.unquote(gen_server_fun)(__MODULE__, unquote(payload), unquote(gen_server_opts))
end
guard ->
def unquote(fun)(unquote_splicing(interface_matches)) when unquote(guard) do
GenServer.unquote(gen_server_fun)(__MODULE__, unquote(payload), unquote(gen_server_opts))
end
end
else
case ExActor.Operations.guard(options, :interface) do
nil ->
defp unquote(fun)(unquote_splicing(interface_matches)) do
GenServer.unquote(gen_server_fun)(__MODULE__, unquote(payload), unquote(gen_server_opts))
end
guard ->
defp unquote(fun)(unquote_splicing(interface_matches)) when unquote(guard) do
GenServer.unquote(gen_server_fun)(__MODULE__, unquote(payload), unquote(gen_server_opts))
end
end
end
if options[:do] do
definit(
unquote(match_pattern),
unquote(Keyword.take(options, [:when]) ++ [do: options[:do]])
)
end
end
end
@doc false
def extract_args(args) do
arg_names =
for {arg, index} <- Enum.with_index(args), do: extract_arg(arg, index)
interface_matches = for {arg, arg_name} <- Enum.zip(args, arg_names) do
case arg do
{:\\, context, [match, default]} ->
{:\\, context, [quote(do: unquote(match) = unquote(arg_name)), default]}
match -> quote(do: unquote(match) = unquote(arg_name))
end
end
args = for arg <- args do
case arg do
{:\\, _, [match, _]} -> match
_ -> arg
end
end
{arg_names, interface_matches, args}
end
defmacrop var_name?(arg_name) do
quote do
is_atom(unquote(arg_name)) and not (unquote(arg_name) in [:_, :\\, :=, :%, :%{}, :{}, :<<>>])
end
end
defp extract_arg({:\\, _, [inner_arg, _]}, index),
do: extract_arg(inner_arg, index)
defp extract_arg({:=, _, [{arg_name, _, _} = arg, _]}, _index) when var_name?(arg_name),
do: arg
defp extract_arg({:=, _, [_, {arg_name, _, _} = arg]}, _index) when var_name?(arg_name),
do: arg
defp extract_arg({:=, _, [_, {:=, _, _} = submatch]}, index),
do: extract_arg(submatch, index)
defp extract_arg({arg_name, _, _} = arg, _index) when var_name?(arg_name),
do: arg
defp extract_arg(_, index),
do: Macro.var(:"arg#{index}", __MODULE__)
@doc false
def start_args(args) do
{arg_names, interface_matches, args} = extract_args(args)
{payload, match_pattern} =
case args do
[] -> {nil, nil}
[_|_] ->
{
quote(do: {unquote_splicing(arg_names)}),
quote(do: {unquote_splicing(args)})
}
end
{interface_matches, payload, match_pattern}
end
@doc false
def prepare_start_interface(fun, interface_matches, options, global_options) do
interface_matches =
unless options[:gen_server_opts] == :runtime do
interface_matches
else
interface_matches ++ [quote(do: unquote(Macro.var(:gen_server_opts, __MODULE__)) \\ [])]
end
arity = length(interface_matches)
gen_server_fun = case (options[:link]) do
true -> :start_link
false -> :start
nil ->
if fun in [:start, :start_link] do
fun
else
raise "Function name must be either start or start_link. If you need another name, provide explicit :link option."
end
end
gen_server_opts =
unless options[:gen_server_opts] == :runtime do
case global_options[:export] do
default when default in [nil, false] -> []
name -> [name: Macro.escape(name)]
end ++ (options[:gen_server_opts] || [])
else
Macro.var(:gen_server_opts, __MODULE__)
end
{arity, interface_matches, gen_server_fun, gen_server_opts}
end
@doc """
Similar to `defstart/3` but generates just the `init` clause.
Note: keep in mind that `defstart` wraps arguments in a tuple. If you want to
handle `defstart start(x)`, you need to define `definit {x}`
"""
defmacro definit(arg \\ quote(do: _), opts), do: do_definit([{:arg, arg} | opts])
defp do_definit(opts) do
quote bind_quoted: [opts: Macro.escape(opts, unquote: true)] do
case ExActor.Operations.guard(opts, :handler) do
nil ->
def init(unquote_splicing([opts[:arg]])), do: unquote(opts[:do])
guard ->
def init(unquote_splicing([opts[:arg]])) when unquote(guard), do: unquote(opts[:do])
end
end
end
@doc """
Defines the cast callback clause and the corresponding interface fun.
"""
defmacro defcast(req_def, options \\ [], body \\ []) do
generate_funs(:defcast, req_def, options ++ body)
end
@doc """
Same as `defcast/3` but the interface function is private.
Can be useful when you need to do pre/post processing in the caller process.
def exported_interface(...) do
# do some client side preprocessing here
my_request(...)
# do some client side post processing here
end
# Not available outside of this module
defcastp my_request(...), do: ...
"""
defmacro defcastp(req_def, options \\ [], body \\ []) do
generate_funs(:defcast, req_def, [{:private, true} | options] ++ body)
end
@doc """
Defines the call callback clause and the corresponding interface fun.
Call-specific options:
- `:timeout` - specifies the timeout used in `GenServer.call` (see below for
details)
- `:from` - matches the caller in `handle_call`.
## Timeout
defcall long_call, state: state, timeout: :timer.seconds(10), do: ...
You can also make the timeout parameterizable
defcall long_call(...), timeout: some_variable, do: ...
This will generate the interface function as:
def long_call(..., some_variable)
where `some_variable` will be used as the timeout in `GenServer.call`. You
won't have the access to this variable in your body though, since the body
specifies the handler function. Default timeout value can also be provided via
standard `\\\\` syntax.
"""
defmacro defcall(req_def, options \\ [], body \\ []) do
generate_funs(:defcall, req_def, options ++ body)
end
@doc """
Same as `defcall/3` but the interface function is private.
Can be useful when you need to do pre/post processing in the caller process.
def exported_interface(...) do
# do some client side preprocessing here
my_request(...)
# do some client side post processing here
end
# Not available outside of this module
defcallp my_request(...), do: ...
"""
defmacro defcallp(req_def, options \\ [], body \\ []) do
generate_funs(:defcall, req_def, [{:private, true} | options] ++ body)
end
@doc """
Similar to `defcall/3`, but generates just the `handle_call` clause,
without creating the interface function.
"""
defmacro defhandlecall(req_def, options \\ [], body \\ []) do
generate_request_def(:defcall, req_def, options ++ body)
end
@doc """
Similar to `defcast/3`, but generates just the `handle_call` clause,
without creating the interface function.
"""
defmacro defhandlecast(req_def, options \\ [], body \\ []) do
generate_request_def(:defcast, req_def, options ++ body)
end
# Generation of call/cast functions. Essentially, this is just
# deferred to be evaluated in the module context.
defp generate_funs(type, req_def, options) do
quote bind_quoted: [
type: type,
req_def: Macro.escape(req_def, unquote: true),
options: escape_options(options)
] do
ExActor.Operations.def_request(type, req_def, Keyword.merge(options, @exactor_global_options))
|> ExActor.Helper.inject_to_module(__MODULE__, __ENV__)
end
end
@doc false
def guard(options, type) do
case options[:when] do
nil -> nil
list when is_list(list) -> list[type]
other -> other
end
end
@doc false
def def_request(type, req_def, options) do
{req_name, interface_matches, payload, _} = req_args(req_def)
quote do
req_id = unquote(Macro.escape(req_id(req_def, options)))
unless MapSet.member?(@generated_funs, req_id) do
unquote(define_interface(type, req_name, interface_matches, payload, options))
@generated_funs MapSet.put(@generated_funs, req_id)
end
unquote(if options[:do] do
implement_request(type, req_def, options)
end)
end
end
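# Builds a normalized identifier for a request clause so duplicate interface
# functions are not generated: variables are replaced with :matchall, since
# differently named variables produce the same match-all interface clause.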
defp req_id({_, _, _} = definition, options) do
{req_name, args} = Macro.decompose_call(definition)
{
req_name,
Enum.map(
strip_context(args || []),
fn
{var_name, _, scope} when is_atom(var_name) and is_atom(scope) -> :matchall
other -> other
end
),
strip_context(guard(options, :interface))
}
end
defp req_id(req_name, options) when is_atom(req_name) do
req_id({req_name, [], []}, options)
end
defp strip_context(ast) do
Macro.prewalk(ast,
fn
{a, _context, b} -> {a, [], b}
other -> other
end
)
end
defp generate_request_def(type, req_def, options) do
quote bind_quoted: [
type: type,
req_def: Macro.escape(req_def, unquote: true),
options: escape_options(options)
] do
ExActor.Operations.implement_request(type, req_def, Keyword.merge(options, @exactor_global_options))
|> ExActor.Helper.inject_to_module(__MODULE__, __ENV__)
end
end
@doc false
def implement_request(type, req_def, options) do
{_, _, _, match_pattern} = req_args(req_def)
quote do
unquote(implement_handler(type, options, match_pattern))
end
end
defp req_args(req_def) do
{req_name, args} = parse_req_def(req_def)
{arg_names, interface_matches, args} = extract_args(args)
{payload, match_pattern} =
case args do
[] -> {req_name, req_name}
[_|_] ->
{
quote(do: {unquote_splicing([req_name | arg_names])}),
quote(do: {unquote_splicing([req_name | args])})
}
end
{req_name, interface_matches, payload, match_pattern}
end
defp parse_req_def(req_name) when is_atom(req_name), do: {req_name, []}
defp parse_req_def({_, _, _} = definition) do
Macro.decompose_call(definition)
end
# Defines the interface function to call/cast
defp define_interface(type, req_name, interface_matches, payload, options) do
quote bind_quoted: [
private: options[:private],
type: type,
req_name: req_name,
server_fun: server_fun(type),
interface_args: Macro.escape(interface_args(interface_matches, options), unquote: true),
gen_server_args: Macro.escape(gen_server_args(options, type, payload), unquote: true),
guard: Macro.escape(guard(options, :interface), unquote: true)
] do
{interface_args, gen_server_args} =
unless type in [:multicall, :abcast] do
{interface_args, gen_server_args}
else
{
[quote(do: nodes \\ [node() | :erlang.nodes()]) | interface_args],
[quote(do: nodes) | gen_server_args]
}
end
arity = length(interface_args)
unless private do
if guard do
def unquote(req_name)(unquote_splicing(interface_args))
when unquote(guard)
do
GenServer.unquote(server_fun)(unquote_splicing(gen_server_args))
end
else
def unquote(req_name)(unquote_splicing(interface_args)) do
GenServer.unquote(server_fun)(unquote_splicing(gen_server_args))
end
end
else
if guard do
defp unquote(req_name)(unquote_splicing(interface_args))
when unquote(guard)
do
GenServer.unquote(server_fun)(unquote_splicing(gen_server_args))
end
else
defp unquote(req_name)(unquote_splicing(interface_args)) do
GenServer.unquote(server_fun)(unquote_splicing(gen_server_args))
end
end
end
end
end
defp server_fun(:defcast), do: :cast
defp server_fun(:defcall), do: :call
defp server_fun(:multicall), do: :multi_call
defp server_fun(:abcast), do: :abcast
defp interface_args(args, options) do
server_match(options[:export]) ++ args ++ timeout_match(options[:timeout])
end
defp server_match(export) when export == nil or export == true, do: [quote(do: server)]
defp server_match(_), do: []
defp timeout_match(nil), do: []
defp timeout_match(:infinity), do: []
defp timeout_match(timeout) when is_integer(timeout), do: []
defp timeout_match(pattern), do: [pattern]
defp gen_server_args(options, type, msg) do
[server_ref(options, type), msg] ++ timeout_arg(options, type)
end
defp server_ref(options, op) when op in [:multicall, :abcast] do
case options[:export] do
local when is_atom(local) and local != nil and local != false -> local
{:local, local} -> local
_ -> quote(do: server)
end
end
defp server_ref(options, _) do
case options[:export] do
default when default in [nil, false, true] -> quote(do: server)
local when is_atom(local) -> local
{:local, local} -> local
{:global, _} = global -> global
{:via, _, _} = via -> Macro.escape(via)
end
end
defp timeout_arg(options, type) when type in [:defcall, :multicall] do
case options[:timeout] do
{:\\, _, [var, _default]} ->
[var]
timeout when timeout != nil ->
[timeout]
_ -> []
end
end
defp timeout_arg(_, _), do: []
@doc false
# Implements the handler function (handle_call, handle_cast, handle_timeout)
def implement_handler(type, options, msg) do
state_arg = get_state_identifier(Keyword.fetch(options, :state))
{handler_name, handler_args} = handler_sig(type, options, msg, state_arg)
quote bind_quoted: [
type: type,
handler_name: handler_name,
handler_args: Macro.escape(handler_args, unquote: true),
guard: Macro.escape(guard(options, :handler), unquote: true),
body: Macro.escape(options[:do], unquote: true)
] do
if guard do
def unquote(handler_name)(unquote_splicing(handler_args))
when unquote(guard),
do: unquote(body)
else
def unquote(handler_name)(unquote_splicing(handler_args)),
do: unquote(body)
end
end
end
defp get_state_identifier({:ok, match}),
do: quote(do: unquote(match) = unquote(ExActor.Helper.state_var))
defp get_state_identifier(:error), do: get_state_identifier({:ok, quote(do: _)})
defp handler_sig(:defcall, options, msg, state_arg),
do: {:handle_call, [msg, options[:from] || quote(do: _from), state_arg]}
defp handler_sig(:defcast, _, msg, state_arg),
do: {:handle_cast, [msg, state_arg]}
defp handler_sig(:definfo, _, msg, state_arg),
do: {:handle_info, [msg, state_arg]}
@doc """
Defines the info callback clause. Responses work just like with casts.
defhandleinfo :some_message, do: ...
defhandleinfo :another_message, state: ..., do: ...
"""
defmacro defhandleinfo(msg, opts \\ [], body) do
impl_defhandleinfo(msg, opts ++ body)
end
# Implements handle_info
defp impl_defhandleinfo(msg, options) do
quote bind_quoted: [
msg: Macro.escape(msg, unquote: true),
options: escape_options(options)
] do
options = Keyword.merge(options, @exactor_global_options)
ExActor.Operations.implement_handler(:definfo, options, msg)
|> ExActor.Helper.inject_to_module(__MODULE__, __ENV__)
end
end
@doc """
Defines a multicall operation.
defmulticall my_request(x, y), do: ...
...
# If the process is locally registered via `:export` option
MyServer.my_request(2, 3)
MyServer.my_request(nodes, 2, 3)
# The process is not locally registered via `:export` option
MyServer.my_request(:local_alias, 2, 3)
MyServer.my_request(nodes, :local_alias, 2, 3)
Request format is the same as in `defcall/3`. Timeout option works just like
with `defcall/3`.
"""
defmacro defmulticall(req_def, options \\ [], body \\ []) do
do_defmulticall(req_def, options ++ body)
end
@doc """
Same as `defmulticall/3` but the interface function is private.
"""
defmacro defmulticallp(req_def, options \\ [], body \\ []) do
do_defmulticall(req_def, [{:private, true} | options] ++ body)
end
defp do_defmulticall(req_def, options) do
quote bind_quoted: [
req_def: Macro.escape(req_def, unquote: true),
options: escape_options(options)
] do
options = Keyword.merge(options, @exactor_global_options)
ExActor.Operations.implement_request(:defcall, req_def, options)
|> ExActor.Helper.inject_to_module(__MODULE__, __ENV__)
ExActor.Operations.def_request(:multicall, req_def, Keyword.drop(options, [:do]))
|> ExActor.Helper.inject_to_module(__MODULE__, __ENV__)
end
end
@doc """
Defines an abcast operation.
defabcast my_request(x, y), do: ...
...
# If the process is locally registered via `:export` option
MyServer.my_request(2, 3)
MyServer.my_request(nodes, 2, 3)
# The process is not locally registered via `:export` option
MyServer.my_request(:local_alias, 2, 3)
MyServer.my_request(nodes, :local_alias, 2, 3)
"""
defmacro defabcast(req_def, options \\ [], body \\ []) do
do_defabcast(req_def, options ++ body)
end
@doc """
Same as `defabcast/3` but the interface function is private.
"""
defmacro defabcastp(req_def, options \\ [], body \\ []) do
do_defabcast(req_def, [{:private, true} | options] ++ body)
end
defp do_defabcast(req_def, options) do
quote bind_quoted: [
req_def: Macro.escape(req_def, unquote: true),
options: escape_options(options)
] do
options = Keyword.merge(options, @exactor_global_options)
ExActor.Operations.implement_request(:defcast, req_def, options)
|> ExActor.Helper.inject_to_module(__MODULE__, __ENV__)
ExActor.Operations.def_request(:abcast, req_def, Keyword.drop(options, [:do]))
|> ExActor.Helper.inject_to_module(__MODULE__, __ENV__)
end
end
defp escape_options(options) do
Enum.map(options,
fn
{:export, export} -> {:export, export}
other -> Macro.escape(other, unquote: true)
end
)
end
end | deps/exactor/lib/exactor/operations.ex | 0.874533 | 0.707528 | operations.ex | starcoder |
defmodule Harald.AssignedNumbers.GenericAccessProfile do
@moduledoc """
> Assigned numbers are used in GAP for inquiry response, EIR data type values,
> manufacturer-specific data, advertising data, low energy UUIDs and appearance characteristics,
> and class of device.
Reference: https://www.bluetooth.com/specifications/assigned-numbers/generic-access-profile
"""
@definitions %{
0x01 => "Flags",
0x02 => "Incomplete List of 16-bit Service Class UUIDs",
0x03 => "Complete List of 16-bit Service Class UUIDs",
0x04 => "Incomplete List of 32-bit Service Class UUIDs",
0x05 => "Complete List of 32-bit Service Class UUIDs",
0x06 => "Incomplete List of 128-bit Service Class UUIDs",
0x07 => "Complete List of 128-bit Service Class UUIDs",
0x08 => "Shortened Local Name",
0x09 => "Complete Local Name",
0x0A => "Tx Power Level",
0x0D => "Class of Device",
0x0E => "Simple Pairing Hash C-192",
0x0F => "Simple Pairing Randomizer R-192",
0x10 => "Device ID",
0x11 => "Security Manager Out of Band Flags",
0x12 => "Slave Connection Interval Range",
0x14 => "List of 16-bit Service Solicitation UUIDs",
0x15 => "List of 128-bit Service Solicitation UUIDs",
0x16 => "Service Data - 16-bit UUID",
0x17 => "Public Target Address",
0x18 => "Random Target Address",
0x19 => "Appearance",
0x1A => "Advertising Interval",
0x1B => "LE Bluetooth Device Address",
0x1C => "LE Role",
0x1D => "Simple Pairing Hash C-256",
0x1E => "Simple Pairing Randomizer R-256",
0x1F => "List of 32-bit Service Solicitation UUIDs",
0x20 => "Service Data - 32-bit UUID",
0x21 => "Service Data - 128-bit UUID",
0x22 => "LE Secure Connections Confirmation Value",
0x23 => "LE Secure Connections Random Value",
0x24 => "URI",
0x25 => "Indoor Positioning",
0x26 => "Transport Discovery Data",
0x27 => "LE Supported Features",
0x28 => "Channel Map Update Indication",
0x29 => "PB-ADV",
0x2A => "Mesh Message",
0x2B => "Mesh Beacon",
0x3D => "3D Information Data",
0xFF => "Manufacturer Specific Data"
}
@doc """
Returns the description associated with `id`.
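## Example
Illustrative usage (the macro must be required first):
require Harald.AssignedNumbers.GenericAccessProfile
Harald.AssignedNumbers.GenericAccessProfile.description(0x01)
#=> "Flags"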
"""
defmacro description(id)
@doc """
Returns the ID associated with `description`.
"""
defmacro id(description)
# handle a redundant GAP definition
defmacro id("Simple Pairing Hash C"), do: 0x0E
Enum.each(@definitions, fn
{id, description} ->
defmacro description(unquote(id)), do: unquote(description)
defmacro id(unquote(description)), do: unquote(id)
end)
@doc """
Returns a list of all Generic Access Profile Data Type Values.
"""
defmacro ids, do: unquote(for {id, _} <- @definitions, do: id)
end | lib/harald/assigned_numbers/generic_access_profile.ex | 0.600305 | 0.456591 | generic_access_profile.ex | starcoder |
defmodule CodeRunner.Worker do
@moduledoc """
Worker module responsible for actually running code. Each worker process spawns a Docker container in an external process, executes the code, returns the result or timeout message.
## Configuration
A few settings can be configured under the `:code_runner` app in `config.exs` to change the Docker image or adjust the resource consumption of each worker.
* `:timeout` - determines how long the worker will wait for the code to terminate, in milliseconds. Default is 5000.
* `:docker_memory` - assigns how much memory a sandbox Docker container should have. Default is "50m".
* `:docker_image` - designates which Docker image to use for a sandbox container. Default is "harfangk/elixir:latest".
"""
use GenServer
def start_link(args) do
GenServer.start_link(__MODULE__, args)
end
def init(state) do
{:ok, state}
end
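# Runs the given code inside a one-off Docker container by passing it to
# `elixir -e`, appending `|> IO.inspect()` so the result is printed to
# stdout. If the container does not finish within the configured timeout,
# the external process is stopped and a timeout message is returned.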
def handle_call({:run_code, code}, _from, state) do
process = Porcelain.spawn("docker", docker_args() ++ ["#{code} |> IO.inspect()"], err: :out)
result = Porcelain.Process.await(process, timeout())
case result do
{:ok, %Porcelain.Result{out: output}} ->
{:reply, output, state}
{:error, :timeout} ->
Porcelain.Process.stop(process)
{:reply, "Code took longer than #{timeout()}ms to run, resulting in timeout.", state}
end
end
defp timeout do
case Application.fetch_env(:code_runner, :timeout) do
{:ok, timeout} -> timeout
_ -> 5000
end
end
defp docker_image do
case Application.fetch_env(:code_runner, :docker_image) do
{:ok, docker_image} -> docker_image
_ -> "harfangk/elixir:latest"
end
end
defp docker_memory do
case Application.fetch_env(:code_runner, :docker_memory) do
{:ok, docker_memory} -> docker_memory
_ -> "50m"
end
end
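# Sandboxing flags for `docker run`: interactive stdin, automatic removal
# of the container, a memory cap, no network access, and all Linux
# capabilities dropped.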
defp docker_args do
[
"run",
"-i",
"--rm",
"-m", docker_memory(),
"--memory-swap=-1",
"--net=none",
"--cap-drop=all",
"--privileged=false",
docker_image(),
"elixir", "-e"
]
end
end | lib/code_runner/worker.ex | 0.707809 | 0.422415 | worker.ex | starcoder |
defmodule Mix.Tasks.Bench.Cmp do
use Mix.Task
@shortdoc "Compare benchmark snapshots"
@moduledoc """
## Usage
mix bench.cmp [options] <snapshot>...
A snapshot is the output of a single run of `mix bench`.
If no arguments are given, bench.cmp will try to read one or two latest
snapshots from the bench/snapshots directory.
When given one snapshot, `mix bench.cmp` will pretty-print the results.
Giving `-` instead of a file name will make bench.cmp read from standard
input.
When given two or more snapshots, it will pretty-print the comparison between
the first and the last one.
## Options
-d <fmt>, --diff=<fmt>
Which format to use for the deltas when pretty-printing.
One of: ratio, percent.
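## Examples
Comparing two specific snapshots (the file names are illustrative):
mix bench.cmp snapshots/old.snapshot snapshots/new.snapshot
mix bench.cmp -d percent snapshots/old.snapshot snapshots/new.snapshot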
"""
alias Benchfella.Snapshot
alias Benchfella.CLI.Util
@switches [diff: :string]
@aliases [d: :diff]
def run(args) do
{snapshots, options} =
case OptionParser.parse(args, strict: @switches, aliases: @aliases) do
{opts, [], []} ->
{Util.locate_snapshots(), opts}
{opts, snapshots, []} ->
{snapshots, opts}
{_, _, [{opt, val}|_]} ->
valstr = if val do "=#{val}" end
Mix.raise "Invalid option: #{opt}#{valstr}"
end
|> normalize_options()
case snapshots do
[snapshot] -> pretty_print(snapshot)
[first|rest] ->
last = List.last(rest)
compare(first, last, Map.get(options, :diff, :ratio))
end
end
defp normalize_options({snapshots, options}) do
options =
Enum.reduce(options, %{}, fn
{:diff, fmt}, acc -> Map.put(acc, :diff, parse_pretty_format(fmt))
end)
{snapshots, options}
end
defp parse_pretty_format("ratio"), do: :ratio
defp parse_pretty_format("percent"), do: :percent
defp parse_pretty_format(other), do: Mix.raise "Undefined diff format: #{other}"
defp pretty_print("-") do
Util.read_all_input() |> Snapshot.parse |> Snapshot.print(:plain)
end
defp pretty_print(path) do
IO.puts "#{path}\n"
File.read!(path) |> Snapshot.parse |> Snapshot.print(:plain)
end
defp compare(path1, path2, format) do
IO.puts "#{path1} vs\n#{path2}\n"
snapshot1 = File.read!(path1) |> Snapshot.parse()
snapshot2 = File.read!(path2) |> Snapshot.parse()
{grouped_diffs, leftover} = Snapshot.compare(snapshot1, snapshot2, format)
max_name_len =
grouped_diffs
|> Enum.flat_map(fn {_, diffs} -> diffs end)
|> Enum.reduce(0, fn {name, _}, len -> max(len, String.length(name)) end)
Enum.each(grouped_diffs, fn {mod, diffs} ->
IO.puts ["## ", mod]
print_diffs(diffs, max_name_len, format)
IO.puts ""
end)
unless leftover == [] do
# FIXME: when more than 2 snapshots are given, this wording may be imprecise
IO.puts "These tests appeared only in one of the snapshots:"
Enum.each(leftover, fn {mod, test} -> IO.puts ["[", mod, "] ", test] end)
end
end
defp print_diffs(diffs, max_name_len, format) do
diffs
|> Enum.sort(fn {_, diff1}, {_, diff2} -> diff1 < diff2 end)
|> Enum.each(fn {name, diff} ->
spacing = 3
:io.format('~*.s ', [-max_name_len-spacing, name])
color = choose_color(diff, format)
diff = case format do
:percent -> Snapshot.format_percent(diff)
_ -> diff
end
colordiff = IO.ANSI.format color ++ ["#{diff}"]
IO.puts colordiff
end)
end
defp choose_color(diff, :ratio) do
cond do
diff < 1.0 -> [:green]
diff > 1.0 -> [:red]
true -> []
end
end
defp choose_color(diff, :percent) do
cond do
diff < 0 -> [:green]
diff > 0 -> [:red]
true -> []
end
end
end | lib/mix/tasks/bench_cmp.ex | 0.786459 | 0.457561 | bench_cmp.ex | starcoder |
defmodule Astro do
@moduledoc """
Functions for basic astronomical observations such
as sunrise, sunset, solstice, equinox, moonrise,
moonset and moon phase.
"""
alias Astro.{Solar, Utils}
@type longitude :: float()
@type latitude :: float()
@type degrees :: float()
@type location :: {longitude, latitude} | Geo.Point.t() | Geo.PointZ.t()
@type date :: Calendar.date() | Calendar.naive_datetime() | Calendar.datetime()
@type options :: keyword()
@doc """
Calculates the sunrise for a given location and date.
Sunrise is the moment when the upper limb of
the sun appears on the horizon in the morning.
## Arguments
* `location` is the latitude, longitude and
optionally elevation for the desired sunrise
time. It can be expressed as:
* `{lng, lat}` - a tuple with longitude and latitude
as floating point numbers. **Note** the order of the
arguments.
* a `Geo.Point.t` struct to represent a location without elevation
* a `Geo.PointZ.t` struct to represent a location and elevation
* `date` is a `Date.t`, `NaiveDateTime.t` or `DateTime.t`
to indicate the date of the year in which
the sunrise time is required.
* `options` is a keyword list of options.
## Options
* `solar_elevation` represents the type of sunrise
required. The default is `:geometric` which equates to
a solar elevation of 90°. In this case the calulation
also accounts for refraction and elevation to return a
result which accords with the eyes perception. Other
solar elevations are:
* `:civil` representing a solar elevation of 96.0°. At this
point the sun is just below the horizon so there is
generally enough natural light to carry out most
outdoor activities.
* `:nautical` representing a solar elevation of 102.0°
This is the point at which the horizon is just barely visible
and the moon and stars can still be used for navigation.
* `:astronomical`representing a solar elevation of 108.0°.
This is the point beyond which astronomical observation
becomes impractical.
* Any floating point number representing the desired
solar elevation.
* `:time_zone` is the time zone in which the sunrise
is requested. The default is `:default` in which
the sunrise time is reported in the time zone of
the requested location. Any other time zone name
supported by the option `:time_zone_database` is
acceptable.
* `:time_zone_database` represents the module that
implements the `Calendar.TimeZoneDatabase` behaviour.
The default is `Tzdata.TimeZoneDatabase`.
## Returns
* `{:ok, datetime}` representing the time of sunrise in the
requested time zone at the requested location or
* `{:error, :time_zone_not_found}` if the requested
time zone is unknown
* `{:error, :no_time}` if for the requested date
and location there is no sunrise. This can occur at
very high latitudes during summer and winter.
## Examples
# Sunrise in Sydney, Australia
Astro.sunrise({151.20666584, -33.8559799094}, ~D[2019-12-04])
{:ok, #DateTime<2019-12-04 05:37:00.000000+11:00 AEDT Australia/Sydney>}
# Sunrise in Alert, Nanavut, Canada
Astro.sunrise({-62.3481, 82.5018}, ~D[2019-12-04])
{:error, :no_time}
"""
@spec sunrise(location, date, options) ::
{:ok, DateTime.t()} | {:error, :time_zone_not_found | :no_time}
def sunrise(location, date, options \\ default_options()) when is_list(options) do
options = Keyword.put(options, :rise_or_set, :rise)
Solar.sun_rise_or_set(location, date, options)
end
@doc """
Calculates the sunset for a given location and date.
Sunset is the moment when the upper limb of
the sun disappears below the horizon in the evening.
## Arguments
* `location` is the latitude, longitude and
optionally elevation for the desired sunrise
time. It can be expressed as:
* `{lng, lat}` - a tuple with longitude and latitude
as floating point numbers. **Note** the order of the
arguments.
* a `Geo.Point.t` struct to represent a location without elevation
* a `Geo.PointZ.t` struct to represent a location and elevation
* `date` is a `Date.t`, `NaiveDateTime.t` or `DateTime.t`
to indicate the date of the year in which
the sunset time is required.
* `options` is a keyword list of options.
## Options
* `solar_elevation` represents the type of sunset
required. The default is `:geometric` which equates to
a solar elevation of 90°. In this case the calulation
also accounts for refraction and elevation to return a
result which accords with the eyes perception. Other
solar elevations are:
* `:civil` representing a solar elevation of 96.0°. At this
point the sun is just below the horizon so there is
generally enough natural light to carry out most
outdoor activities.
* `:nautical` representing a solar elevation of 102.0°
This is the point at which the horizon is just barely visible
and the moon and stars can still be used for navigation.
* `:astronomical`representing a solar elevation of 108.0°.
This is the point beyond which astronomical observation
becomes impractical.
* Any floating point number representing the desired
solar elevation.
* `:time_zone` is the time zone in which the sunset
is requested. The default is `:default` in which
the sunset time is reported in the time zone of
the requested location. Any other time zone name
supported by the option `:time_zone_database` is
acceptable.
* `:time_zone_database` represents the module that
implements the `Calendar.TimeZoneDatabase` behaviour.
The default is `Tzdata.TimeZoneDatabase`.
## Returns
* `{:ok, datetime}` representing the time of sunset in the
requested time zone at the requested location or
* `{:error, :time_zone_not_found}` if the requested
time zone is unknown
* `{:error, :no_time}` if for the requested date
and location there is no sunset. This can occur at
very high latitudes during summer and winter.
## Examples
# Sunset in Sydney, Australia
Astro.sunset({151.20666584, -33.8559799094}, ~D[2019-12-04])
{:ok, #DateTime<2019-12-04 19:53:00.000000+11:00 AEDT Australia/Sydney>}
# Sunset in Alert, Nanavut, Canada
Astro.sunset({-62.3481, 82.5018}, ~D[2019-12-04])
{:error, :no_time}
"""
@spec sunset(location, date, options) ::
{:ok, DateTime.t()} | {:error, :time_zone_not_found | :no_time}
def sunset(location, date, options \\ default_options()) when is_list(options) do
options = Keyword.put(options, :rise_or_set, :set)
Solar.sun_rise_or_set(location, date, options)
end
@doc """
Returns the datetime in UTC for either the
March or September equinox.
## Arguments
* `year` is the gregorian year for which the equinox is
to be calculated
* `event` is either `:march` or `:september` indicating
which of the two annual equinox datetimes is required
## Returns
* `{:ok, datetime}` representing the UTC datetime of
the equinox
## Examples
iex> Astro.equinox 2019, :march
{:ok, ~U[2019-03-20 21:58:06Z]}
iex> Astro.equinox 2019, :september
{:ok, ~U[2019-09-23 07:49:30Z]}
## Notes
This equinox calculation is expected to be accurate
to within 2 minutes for the years 1000 CE to 3000 CE.
An equinox is commonly regarded as the instant of
time when the plane of Earth's equator passes through
the center of the Sun. This occurs twice each year:
around 20 March and 23 September.
In other words, it is the moment at which the
center of the visible Sun is directly above the equator.
"""
@spec equinox(Calendar.year, :march | :september) :: {:ok, DateTime.t()}
def equinox(year, event) when event in [:march, :september] and year in 1000..3000 do
Solar.equinox_and_solstice(year, event)
end
@doc """
Returns the datetime in UTC for either the
June or December solstice.
## Arguments
* `year` is the gregorian year for which the solstice is
to be calculated
* `event` is either `:june` or `:december` indicating
which of the two annual solstice datetimes is required
## Returns
* `{:ok, datetime}` representing the UTC datetime of
the solstice
## Examples
iex> Astro.solstice 2019, :december
{:ok, ~U[2019-12-22 04:18:57Z]}
iex> Astro.solstice 2019, :june
{:ok, ~U[2019-06-21 15:53:45Z]}
## Notes
This solstice calculation is expected to be accurate
to within 2 minutes for the years 1000 CE to 3000 CE.
A solstice is an event occurring when the Sun appears
to reach its most northerly or southerly excursion
relative to the celestial equator on the celestial
sphere. Two solstices occur annually, around June 21
and December 21.
The seasons of the year are determined by
reference to both the solstices and the equinoxes.
The term solstice can also be used in a broader
sense, as the day when this occurs. The day of a
solstice in either hemisphere has either the most
sunlight of the year (summer solstice) or the least
sunlight of the year (winter solstice) for any place
other than the Equator.
Alternative terms, with no ambiguity as to which
hemisphere is the context, are "June solstice" and
"December solstice", referring to the months in
which they take place every year.
"""
@spec solstice(Calendar.year, :june | :december) :: {:ok, DateTime.t()}
def solstice(year, event) when event in [:june, :december] and year in 1000..3000 do
Solar.equinox_and_solstice(year, event)
end
@doc """
Returns solar noon for a
given date and location as
a UTC datetime
## Arguments
* `location` is the latitude, longitude and
optionally elevation for the desired solar noon
time. It can be expressed as:
* `{lng, lat}` - a tuple with longitude and latitude
as floating point numbers. **Note** the order of the
arguments.
* a `Geo.Point.t` struct to represent a location without elevation
* a `Geo.PointZ.t` struct to represent a location and elevation
* `date` is any date in the Gregorian
calendar (for example, `Calendar.ISO`)
## Returns
* `{:ok, datetime}` where `datetime` is the UTC datetime of solar
noon at the given location for the given date
## Example
iex> Astro.solar_noon {151.20666584, -33.8559799094}, ~D[2019-12-06]
{:ok, ~U[2019-12-06 01:45:42Z]}
## Notes
Solar noon is the moment when the Sun passes a
location's meridian and reaches its highest position
in the sky. In most cases, it doesn't happen at 12 o'clock.
"""
@spec solar_noon(Astro.location(), Calendar.date()) :: {:ok, DateTime.t()}
def solar_noon(location, date) do
%Geo.PointZ{coordinates: {longitude, _, _}} =
Utils.normalize_location(location)
julian_day = Astro.Time.julian_day_from_date(date)
julian_centuries = Astro.Time.julian_centuries_from_julian_day(julian_day)
julian_centuries
|> Solar.solar_noon_utc(-longitude)
|> Astro.Time.datetime_from_date_and_minutes(date)
end
@doc """
Returns solar longitude for a
given date. Solar longitude is used
to identify the seasons.
## Arguments
* `date` is any date in the Gregorian
calendar (for example, `Calendar.ISO`)
## Returns
* a `float` number of degrees between 0 and
360 representing the solar longitude
on `date`
## Examples
iex> Astro.sun_apparent_longitude ~D[2019-03-21]
0.08035853207991295
iex> Astro.sun_apparent_longitude ~D[2019-06-22]
90.32130455695378
iex> Astro.sun_apparent_longitude ~D[2019-09-23]
179.68691978440197
iex> Astro.sun_apparent_longitude ~D[2019-12-23]
270.83941087483504
## Notes
Solar longitude (the ecliptic longitude of the sun)
in effect describes the position of the earth in its
orbit, being zero at the moment of the vernal
equinox.
Since it is based on how far the earth has moved
in its orbit since the equinox, it is a measure of
what time of the tropical year (the year of seasons)
we are in, but without the inaccuracies of a calendar
date, which is perturbed by leap years and calendar
imperfections.
"""
@spec sun_apparent_longitude(Calendar.date()) :: degrees()
def sun_apparent_longitude(date) do
date
|> Astro.Time.julian_day_from_date()
|> Astro.Time.julian_centuries_from_julian_day()
|> Solar.sun_apparent_longitude()
end
@doc """
Returns the number of hours of daylight for a given
location on a given date.
## Arguments
* `location` is the latitude, longitude and
optionally elevation for the desired hours of
daylight. It can be expressed as:
* `{lng, lat}` - a tuple with longitude and latitude
as floating point numbers. **Note** the order of the
arguments.
* a `Geo.Point.t` struct to represent a location without elevation
* a `Geo.PointZ.t` struct to represent a location and elevation
* `date` is any date in the Gregorian
calendar (for example, `Calendar.ISO`)
## Returns
* `{:ok, time}` where `time` is a `Time.t()`
## Examples
iex> Astro.hours_of_daylight {151.20666584, -33.8559799094}, ~D[2019-12-07]
{:ok, ~T[14:18:45]}
# No sunset in summer
iex> Astro.hours_of_daylight {-62.3481, 82.5018}, ~D[2019-06-07]
{:ok, ~T[23:59:59]}
# No sunrise in winter
iex> Astro.hours_of_daylight {-62.3481, 82.5018}, ~D[2019-12-07]
{:ok, ~T[00:00:00]}
## Notes
In latitudes above the polar circles (approximately
+/- 66.5631 degrees) there will be no hours of daylight
in winter and 24 hours of daylight in summer.
"""
@spec hours_of_daylight(Astro.location(), Calendar.date()) :: {:ok, Time.t()}
def hours_of_daylight(location, date) do
with {:ok, sunrise} <- sunrise(location, date),
{:ok, sunset} <- sunset(location, date) do
seconds_of_sunlight = DateTime.diff(sunset, sunrise)
{hours, minutes, seconds} = Astro.Time.seconds_to_hms(seconds_of_sunlight)
Time.new(hours, minutes, seconds)
else
{:error, :no_time} ->
if no_daylight_hours?(location, date) do
Time.new(0, 0, 0)
else
Time.new(23, 59, 59)
end
end
end
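# Approximate absolute latitude of the polar circles, in degrees.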
@polar_circle_latitude 66.5631
defp no_daylight_hours?(location, date) do
%Geo.PointZ{coordinates: {_longitude, latitude, _elevation}} =
Utils.normalize_location(location)
cond do
# Northern polar region: no daylight during the winter months.
latitude >= @polar_circle_latitude and (date.month in 10..12 or date.month in 1..3) -> true
# Southern polar region: no daylight during the austral winter months.
latitude <= -@polar_circle_latitude and date.month in 4..9 -> true
true -> false
end
end
@doc false
def default_options do
[
solar_elevation: Solar.solar_elevation(:geometric),
time_zone: :default,
time_zone_database: Tzdata.TimeZoneDatabase
]
end
end | lib/astro.ex | 0.949412 | 0.811415 | astro.ex | starcoder |
defmodule ExAws.Rekognition do
@moduledoc """
Operations on AWS Rekognition
"""
use ExAws.Utils,
format_type: :json,
non_standard_keys: %{}
alias ExAws.Rekognition.S3Object
alias ExAws.Rekognition.NotificationChannelObject
# https://docs.aws.amazon.com/rekognition/latest/dg/API_Operations.html
@type image :: binary() | S3Object.t()
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_CompareFaces.html
NOTE: When using an S3Object, you may need to ensure that
the S3 uses the same region as Rekognition
"""
@type compare_faces_opt :: {:similarity_threshold, 0..100}
@spec compare_faces(image(), image()) :: ExAws.Operation.JSON.t()
@spec compare_faces(image(), image(), list(compare_faces_opt())) :: ExAws.Operation.JSON.t()
def compare_faces(source_image, target_image, opts \\ []) do
request(:compare_faces, %{
"SourceImage" => map_image(source_image),
"TargetImage" => map_image(target_image)
} |> Map.merge(camelize_keys(opts)))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_CreateCollection.html
"""
@spec create_collection(binary()) :: ExAws.Operation.JSON.t()
def create_collection(collection_id) when is_binary(collection_id) do
request(:create_collection, %{
"CollectionId" => collection_id
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_CreateStreamProcessor.html
"""
@type create_stream_processor_opt :: {:collection_id, binary()} | {:face_match_threshold, 0..100}
@spec create_stream_processor(binary(), binary(), binary(), binary()) :: ExAws.Operation.JSON.t()
@spec create_stream_processor(binary(), binary(), binary(), binary(), list(create_stream_processor_opt())) :: ExAws.Operation.JSON.t()
def create_stream_processor(input, output, name, role_arn, opts \\ [])
when is_binary(input) and is_binary(output) and is_binary(name) and is_binary(role_arn) do
request(:create_stream_processor, %{
"Input" => %{
"KinesisVideoStream" => %{
"Arn" => input
}
},
"Name" => name,
"Output" => %{
"KinesisDataStream" => %{
"Arn" => output
}
},
"RoleArn" => role_arn,
"Settings" => %{
"FaceSearch" => camelize_keys(opts)
}
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_DeleteCollection.html
"""
@spec delete_collection(binary()) :: ExAws.Operation.JSON.t()
def delete_collection(collection_id) when is_binary(collection_id) do
request(:delete_collection, %{
"CollectionId" => collection_id
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_DeleteFaces.html
"""
@spec delete_faces(binary(), list(binary())) :: ExAws.Operation.JSON.t()
def delete_faces(collection_id, face_ids) when is_binary(collection_id) and is_list(face_ids) do
request(:delete_faces, %{
"CollectionId" => collection_id,
"FaceIds" => face_ids
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_DeleteStreamProcessor.html
"""
@spec delete_stream_processor(binary()) :: ExAws.Operation.JSON.t()
def delete_stream_processor(name) when is_binary(name) do
request(:delete_stream_processor, %{
"Name" => name
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_DescribeCollection.html
"""
@spec describe_collection(binary()) :: ExAws.Operation.JSON.t()
def describe_collection(collection_id) when is_binary(collection_id) do
request(:describe_collection, %{
"CollectionId" => collection_id
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_DescribeStreamProcessor.html
"""
@spec describe_stream_processor(binary()) :: ExAws.Operation.JSON.t()
def describe_stream_processor(name) when is_binary(name) do
request(:describe_stream_processor, %{
"Name" => name
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_DetectFaces.html
NOTE: When using an S3Object, you may need to ensure that
the S3 uses the same region as Rekognition
"""
@type detect_faces_opt :: {:attributes, list(binary())}
@spec detect_faces(image()) :: ExAws.Operation.JSON.t()
@spec detect_faces(image(), list(detect_faces_opt())) :: ExAws.Operation.JSON.t()
def detect_faces(image, opts \\ []) do
request(:detect_faces, %{
"Image" => map_image(image)
} |> Map.merge(camelize_keys(opts)))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_DetectLabels.html
NOTE: When using an S3Object, you may need to ensure that
the S3 bucket is in the same region as Rekognition
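Example (a minimal sketch; the file path is hypothetical):

    "photo.jpg"
    |> File.read!()
    |> ExAws.Rekognition.detect_labels(max_labels: 10, min_confidence: 75)
    |> ExAws.request()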
"""
@type detect_labels_opt :: {:max_labels, non_neg_integer()} | {:min_confidence, 0..100}
@spec detect_labels(image()) :: ExAws.Operation.JSON.t()
@spec detect_labels(image(), list(detect_labels_opt())) :: ExAws.Operation.JSON.t()
def detect_labels(image, opts \\ []) do
request(:detect_labels, %{
"Image" => map_image(image)
} |> Map.merge(camelize_keys(opts)))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_DetectModerationLabels.html
NOTE: When using an S3Object, you may need to ensure that
the S3 bucket is in the same region as Rekognition
"""
@type detect_moderation_labels_opt :: {:min_confidence, 0..100}
@spec detect_moderation_labels(image()) :: ExAws.Operation.JSON.t()
@spec detect_moderation_labels(image(), list(detect_moderation_labels_opt())) :: ExAws.Operation.JSON.t()
def detect_moderation_labels(image, opts \\ []) do
request(:detect_moderation_labels, %{
"Image" => map_image(image)
} |> Map.merge(camelize_keys(opts)))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_DetectText.html
NOTE: When using an S3Object, you may need to ensure that
the S3 bucket is in the same region as Rekognition
"""
@spec detect_text(image()) :: ExAws.Operation.JSON.t()
def detect_text(image) do
request(:detect_text, %{
"Image" => map_image(image)
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_IndexFaces.html
NOTE: When using an S3Object, you may need to ensure that
the S3 bucket is in the same region as Rekognition
"""
@type index_faces_opt :: {:external_image_id, binary()} | {:detection_attributes, list(binary())} | {:max_faces, pos_integer()} | {:quality_filter, :auto | :none}
@spec index_faces(binary(), image()) :: ExAws.Operation.JSON.t()
@spec index_faces(binary(), image(), list(index_faces_opt())) :: ExAws.Operation.JSON.t()
def index_faces(collection_id, image, opts \\ []) when is_binary(collection_id) do
request(:index_faces, %{
"CollectionId" => collection_id,
"Image" => map_image(image),
} |> Map.merge(opts |> stringify_enum_opts([:quality_filter]) |> camelize_keys()))
end
@type list_opt :: {:max_results, 0..4096} | {:next_token, binary()}
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_ListCollections.html
"""
@type list_collections_opt :: list_opt()
@spec list_collections() :: ExAws.Operation.JSON.t()
@spec list_collections(list(list_collections_opt())) :: ExAws.Operation.JSON.t()
def list_collections(opts \\ []) do
request(:list_collections, camelize_keys(opts))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_ListFaces.html
"""
@type list_faces_opt :: list_opt()
@spec list_faces(binary()) :: ExAws.Operation.JSON.t()
@spec list_faces(binary(), list(list_faces_opt())) :: ExAws.Operation.JSON.t()
def list_faces(collection_id, opts \\ []) when is_binary(collection_id) do
request(:list_faces, %{
"CollectionId" => collection_id
} |> Map.merge(camelize_keys(opts)))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_ListStreamProcessors.html
"""
@type list_stream_processors_opt :: list_opt()
@spec list_stream_processors() :: ExAws.Operation.JSON.t()
@spec list_stream_processors(list(list_stream_processors_opt())) :: ExAws.Operation.JSON.t()
def list_stream_processors(opts \\ []) do
request(:list_stream_processors, camelize_keys(opts))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_GetCelebrityInfo.html
"""
@spec get_celebrity_info(binary()) :: ExAws.Operation.JSON.t()
def get_celebrity_info(id) when is_binary(id) do
request(:get_celebrity_info, %{
"Id" => id
})
end
@type get_opt :: {:max_results, pos_integer()} | {:next_token, binary()}
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_GetCelebrityRecognition.html
"""
@type get_celebrity_recognition_opt :: get_opt() | {:sort_by, :id | :timestamp}
@spec get_celebrity_recognition(binary()) :: ExAws.Operation.JSON.t()
@spec get_celebrity_recognition(binary(), list(get_celebrity_recognition_opt())) :: ExAws.Operation.JSON.t()
def get_celebrity_recognition(job_id, opts \\ []) when is_binary(job_id) do
request(:get_celebrity_recognition, %{
"JobId" => job_id
} |> Map.merge(opts |> stringify_enum_opts([:sort_by]) |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_GetContentModeration.html
"""
@type get_content_moderation_opt :: get_opt() | {:sort_by, :name | :timestamp}
@spec get_content_moderation(binary()) :: ExAws.Operation.JSON.t()
@spec get_content_moderation(binary(), list(get_content_moderation_opt())) :: ExAws.Operation.JSON.t()
def get_content_moderation(job_id, opts \\ []) when is_binary(job_id) do
request(:get_content_moderation, %{
"JobId" => job_id
} |> Map.merge(opts |> stringify_enum_opts([:sort_by]) |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_GetFaceDetection.html
"""
@type get_face_detection_opt :: get_opt()
@spec get_face_detection(binary()) :: ExAws.Operation.JSON.t()
@spec get_face_detection(binary(), list(get_face_detection_opt())) :: ExAws.Operation.JSON.t()
def get_face_detection(job_id, opts \\ []) when is_binary(job_id) do
request(:get_face_detection, %{
"JobId" => job_id
} |> Map.merge(camelize_keys(opts)))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_GetFaceSearch.html
"""
@type get_face_search_opt :: get_opt() | {:sort_by, :index | :timestamp}
@spec get_face_search(binary()) :: ExAws.Operation.JSON.t()
@spec get_face_search(binary(), list(get_face_search_opt())) :: ExAws.Operation.JSON.t()
def get_face_search(job_id, opts \\ []) when is_binary(job_id) do
request(:get_face_search, %{
"JobId" => job_id
} |> Map.merge(opts |> stringify_enum_opts([:sort_by]) |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_GetLabelDetection.html
"""
@type get_label_detection_opt :: get_opt() | {:sort_by, :name | :timestamp}
@spec get_label_detection(binary()) :: ExAws.Operation.JSON.t()
@spec get_label_detection(binary(), list(get_label_detection_opt())) :: ExAws.Operation.JSON.t()
def get_label_detection(job_id, opts \\ []) when is_binary(job_id) do
request(:get_label_detection, %{
"JobId" => job_id
} |> Map.merge(opts |> stringify_enum_opts([:sort_by]) |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_GetPersonTracking.html
"""
@type get_person_tracking_opt :: get_opt() | {:sort_by, :index | :timestamp}
@spec get_person_tracking(binary()) :: ExAws.Operation.JSON.t()
@spec get_person_tracking(binary(), list(get_person_tracking_opt())) :: ExAws.Operation.JSON.t()
def get_person_tracking(job_id, opts \\ []) when is_binary(job_id) do
request(:get_person_tracking, %{
"JobId" => job_id
} |> Map.merge(opts |> stringify_enum_opts([:sort_by]) |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_RecognizeCelebrities.html
"""
@spec recognize_celebrities(image()) :: ExAws.Operation.JSON.t()
def recognize_celebrities(image) do
request(:recognize_celebrities, %{
"Image" => map_image(image)
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_SearchFaces.html
"""
@type search_faces_opt :: {:face_match_threshold, 0..100} | {:max_faces, 1..4096}
@spec search_faces(binary(), binary()) :: ExAws.Operation.JSON.t()
@spec search_faces(binary(), binary(), list(search_faces_opt())) :: ExAws.Operation.JSON.t()
def search_faces(collection_id, face_id, opts \\ []) when is_binary(collection_id) and is_binary(face_id) do
request(:search_faces, %{
"CollectionId" => collection_id,
"FaceId" => face_id
} |> Map.merge(camelize_keys(opts)))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_SearchFacesByImage.html
NOTE: When using an S3Object, you may need to ensure that the S3 bucket is
in the same region as Rekognition.
"""
@type search_faces_by_image_opt :: search_faces_opt()
@spec search_faces_by_image(binary(), image()) :: ExAws.Operation.JSON.t()
@spec search_faces_by_image(binary(), image(), list(search_faces_by_image_opt())) :: ExAws.Operation.JSON.t()
def search_faces_by_image(collection_id, image, opts \\ []) when is_binary(collection_id) do
request(:search_faces_by_image, %{
"CollectionId" => collection_id,
"Image" => map_image(image)
} |> Map.merge(camelize_keys(opts)))
end
@type start_opt :: {:client_request_token, binary()} | {:job_tag, binary()} | {:notification_channel, NotificationChannelObject.t()}
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_StartCelebrityRecognition.html
"""
@type start_celebrity_recognition_opt :: start_opt()
@spec start_celebrity_recognition(S3Object.t()) :: ExAws.Operation.JSON.t()
@spec start_celebrity_recognition(S3Object.t(), list(start_celebrity_recognition_opt())) :: ExAws.Operation.JSON.t()
def start_celebrity_recognition(video, opts \\ []) do
request(:start_celebrity_recognition, %{
"Video" => S3Object.map(video)
} |> Map.merge(opts |> map_notification_channel() |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_StartContentModeration.html
"""
@type start_content_moderation_opt :: start_opt() | {:min_confidence, 0..100}
@spec start_content_moderation(S3Object.t()) :: ExAws.Operation.JSON.t()
@spec start_content_moderation(S3Object.t(), list(start_content_moderation_opt())) :: ExAws.Operation.JSON.t()
def start_content_moderation(video, opts \\ []) do
request(:start_content_moderation, %{
"Video" => S3Object.map(video)
} |> Map.merge(opts |> map_notification_channel() |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_StartFaceDetection.html
"""
@type start_face_detection_opt :: start_opt() | {:face_attributes, :default | :all}
@spec start_face_detection(S3Object.t()) :: ExAws.Operation.JSON.t()
@spec start_face_detection(S3Object.t(), list(start_face_detection_opt())) :: ExAws.Operation.JSON.t()
def start_face_detection(video, opts \\ []) do
request(:start_face_detection, %{
"Video" => S3Object.map(video)
} |> Map.merge(opts |> map_notification_channel()
|> stringify_enum_opts([:face_attributes])
|> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_StartFaceSearch.html
"""
@type start_face_search_opt :: start_opt() | {:face_match_threshold, 0..100}
@spec start_face_search(S3Object.t(), binary()) :: ExAws.Operation.JSON.t()
@spec start_face_search(S3Object.t(), binary(), list(start_face_search_opt())) :: ExAws.Operation.JSON.t()
def start_face_search(video, collection_id, opts \\ []) when is_binary(collection_id) do
request(:start_face_search, %{
"CollectionId" => collection_id,
"Video" => S3Object.map(video)
} |> Map.merge(opts |> map_notification_channel() |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_StartLabelDetection.html
"""
@type start_label_detection_opt :: start_opt() | {:min_confidence, 0..100}
@spec start_label_detection(S3Object.t()) :: ExAws.Operation.JSON.t()
@spec start_label_detection(S3Object.t(), list(start_label_detection_opt())) :: ExAws.Operation.JSON.t()
def start_label_detection(video, opts \\ []) do
request(:start_label_detection, %{
"Video" => S3Object.map(video)
} |> Map.merge(opts |> map_notification_channel() |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_StartPersonTracking.html
"""
@type start_person_tracking_opt :: start_opt()
@spec start_person_tracking(S3Object.t()) :: ExAws.Operation.JSON.t()
@spec start_person_tracking(S3Object.t(), list(start_person_tracking_opt())) :: ExAws.Operation.JSON.t()
def start_person_tracking(video, opts \\ []) do
request(:start_person_tracking, %{
"Video" => S3Object.map(video)
} |> Map.merge(opts |> map_notification_channel() |> camelize_keys()))
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_StartStreamProcessor.html
"""
@spec start_stream_processor(binary()) :: ExAws.Operation.JSON.t()
def start_stream_processor(name) when is_binary(name) do
request(:start_stream_processor, %{
"Name" => name
})
end
@doc """
https://docs.aws.amazon.com/rekognition/latest/dg/API_StopStreamProcessor.html
"""
@spec stop_stream_processor(binary()) :: ExAws.Operation.JSON.t()
def stop_stream_processor(name) when is_binary(name) do
request(:stop_stream_processor, %{
"Name" => name
})
end
# Utility
defp map_image(image) when is_binary(image) do
%{"Bytes" => Base.encode64(image)}
end
defp map_image(%S3Object{} = object) do
S3Object.map(object)
end
defp stringify_enum_opts(opts, keys) do
{enum_opts, opts} = Keyword.split(opts, keys)
opts ++ Enum.map(enum_opts, fn {k, v} -> {k, stringify_enum(v)} end)
end
defp stringify_enum(value) do
value |> Atom.to_string() |> String.upcase()
end
defp map_notification_channel(opts) do
if value = opts[:notification_channel] do
Keyword.replace!(opts, :notification_channel, NotificationChannelObject.map(value))
else
opts
end
end
defp request(action, data) do
operation =
action
|> Atom.to_string()
|> Macro.camelize()
headers = [
{"content-type", "application/x-amz-json-1.1"},
{"x-amz-target", "RekognitionService.#{operation}"}
]
ExAws.Operation.JSON.new(:rekognition, %{
data: data,
headers: headers
})
end
end | lib/ex_aws/rekognition.ex | 0.881545 | 0.423875 | rekognition.ex | starcoder |
defmodule BeamInspect do
@moduledoc """
Inspect what your Elixir module looks like in Erlang / Core Erlang.
"""
@doc """
Returns Erlang code.
The abstract Erlang code is fetched from the .beam file.
It requires `:debug_info` or `:abstract_code` to be available in the compiled module.
## Example
iex > Foo |> BeamInspect.to_erlang() |> IO.puts()
:ok
"""
@spec to_erlang(module()) :: charlist() | no_return()
def to_erlang(module) when is_atom(module) do
module
|> abstract_code()
|> to_erl()
end
@format_opts [:noann]
@doc """
Returns Core Erlang code.
The abstract Erlang code is fetched from the .beam file.
It requires `:debug_info` or `:abstract_code` to be available in the compiled module.
The abstract code is compiled with the `+to_core` flag by the `:compile.noenv_forms/2` function.
## Options
* erlc flags - e.g. erlc +time should be passed as the `:time` atom
* `:noann` - removes compiler annotations
## Examples
iex > Foo |> BeamInspect.to_core_erlang() |> IO.puts()
:ok
iex > Foo |> BeamInspect.to_core_erlang(:noann) |> IO.puts()
:ok
iex > Foo |> BeamInspect.to_core_erlang(:time) |> IO.puts()
:ok
iex > Foo |> BeamInspect.to_core_erlang([:noann, :time]) |> IO.puts()
:ok
"""
@spec to_core_erlang(module(), atom() | [atom()]) :: charlist() | no_return()
def to_core_erlang(module, opts \\ []) when is_atom(module) do
{format_opts, erlc_flags} = opts |> List.wrap() |> split_opts(&(&1 in @format_opts))
module
|> abstract_code()
|> to_core(erlc_flags, :noann in format_opts)
end
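# Prefer the :debug_info chunk (OTP 20+) and fall back to the legacy
# :abstract_code chunk for .beam files produced by older compilers.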
defp abstract_code(module) do
file = :code.which(module)
case :beam_lib.chunks(file, [:debug_info]) do
{:ok, {^module, [{:debug_info, {:debug_info_v1, backend, {_, _, _} = metadata}}]}} ->
{:ok, abstract_code} = backend.debug_info(:erlang_v1, module, metadata, [])
abstract_code
_ ->
case :beam_lib.chunks(file, [:abstract_code]) do
{:ok, {^module, [{:abstract_code, {:raw_abstract_v1, abstract_code}}]}} ->
abstract_code
_ ->
raise "abstract code unavailable"
end
end
end
defp to_erl(abstract_code) do
abstract_code
|> :erl_syntax.form_list()
|> :erl_prettypr.format()
end
defp to_core(abstract_code, erlc_flags, noann) do
{:ok, _, core} = :compile.noenv_forms(abstract_code, [:to_core | erlc_flags])
:cerl_prettypr.format(core, noann: noann)
end
@compile {:inline, split_opts: 2}
if Version.match?(System.version(), "< 1.4.0") do
defp split_opts(opts, fun), do: Enum.partition(opts, fun)
else
defp split_opts(opts, fun), do: Enum.split_with(opts, fun)
end
end | lib/beam_inspect.ex | 0.814643 | 0.417212 | beam_inspect.ex | starcoder |
defmodule StepFlow.Amqp.ErrorConsumer do
@moduledoc """
Consumer of all jobs with error status.
"""
require Logger
alias StepFlow.Amqp.ErrorConsumer
alias StepFlow.Jobs
alias StepFlow.Jobs.Status
alias StepFlow.Metrics.{JobInstrumenter, WorkflowInstrumenter}
alias StepFlow.Workflows
use StepFlow.Amqp.CommonConsumer, %{
queue: "job_error",
prefetch_count: 1,
consumer: &ErrorConsumer.consume/4
}
@doc """
Consumes a message with the error topic, updates the Job, and sends a notification.
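Two payload shapes are handled, matching the function clauses below (the
values shown are illustrative):

    %{"job_id" => 123, "error" => "unable to open source file"}

    %{
      "job_id" => 123,
      "status" => "error",
      "parameters" => [
        %{"id" => "message", "type" => "string", "value" => "unable to open source file"}
      ]
    }

Any other payload is logged and rejected without requeueing.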
"""
def consume(channel, tag, _redelivered, %{"job_id" => job_id, "error" => description} = payload) do
case Jobs.get_job(job_id) do
nil ->
Basic.reject(channel, tag, requeue: false)
job ->
Logger.error("Job error #{inspect(payload)}")
JobInstrumenter.inc(:step_flow_jobs_error, job.name)
WorkflowInstrumenter.inc(:step_flow_workflows_error, job_id)
{:ok, job_status} = Status.set_job_status(job_id, :error, %{message: description})
Workflows.Status.define_workflow_status(job.workflow_id, :job_error, job_status)
Workflows.notification_from_job(job_id, description)
Basic.ack(channel, tag)
end
end
def consume(
channel,
tag,
_redelivered,
%{
"job_id" => job_id,
"parameters" => [%{"id" => "message", "type" => "string", "value" => description}],
"status" => "error"
} = payload
) do
case Jobs.get_job(job_id) do
nil ->
Basic.reject(channel, tag, requeue: false)
job ->
Logger.error("Job error #{inspect(payload)}")
JobInstrumenter.inc(:step_flow_jobs_error, job.name)
WorkflowInstrumenter.inc(:step_flow_workflows_error, job_id)
{:ok, job_status} = Status.set_job_status(job_id, :error, %{message: description})
Workflows.Status.define_workflow_status(job.workflow_id, :job_error, job_status)
Workflows.notification_from_job(job_id, description)
Basic.ack(channel, tag)
end
end
def consume(channel, tag, _redelivered, payload) do
Logger.error("Job error #{inspect(payload)}")
Basic.reject(channel, tag, requeue: false)
end
end | lib/step_flow/amqp/error_consumer.ex | 0.622918 | 0.414129 | error_consumer.ex | starcoder |
defmodule TrainLoc.Encoder.VehiclePositionsEnhanced do
@moduledoc """
Encodes a list of vehicle structs into GTFS-realtime enhanced JSON format.
"""
import TrainLoc.Utilities.Time
alias TrainLoc.Vehicles.Vehicle
@spec encode([Vehicle.t()]) :: String.t()
def encode(list) when is_list(list) do
message = %{
header: feed_header(),
entity: feed_entity(list)
}
Jason.encode!(message)
end
defp feed_header do
%{
gtfs_realtime_version: "1.0",
incrementality: 0,
timestamp: unix_now()
}
end
defp feed_entity(list), do: Enum.map(list, &build_entity/1)
defp build_entity(%Vehicle{} = vehicle) do
%{
id: "#{:erlang.phash2(vehicle)}",
vehicle: %{
trip: entity_trip(vehicle),
vehicle: entity_vehicle(vehicle),
position: %{
latitude: vehicle.latitude,
longitude: vehicle.longitude,
bearing: vehicle.heading,
speed: miles_per_hour_to_meters_per_second(vehicle.speed)
},
timestamp: format_timestamp(vehicle.timestamp)
}
}
end
defp build_entity(_), do: []
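# A trip of "000" marks a vehicle without a trip assignment
# (see entity_vehicle/1 below).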
defp entity_trip(%{trip: "000"} = vehicle) do
%{start_date: start_date(vehicle.timestamp)}
end
defp entity_trip(%{trip: trip} = vehicle) do
entity_trip = entity_trip(Map.delete(vehicle, :trip))
Map.put(entity_trip, :trip_short_name, trip)
end
defp entity_trip(vehicle) do
%{start_date: start_date(vehicle.timestamp)}
end
defp entity_vehicle(%{trip: "000"} = vehicle) do
%{
id: vehicle.vehicle_id,
assignment_status: "unassigned"
}
end
defp entity_vehicle(vehicle) do
%{
id: vehicle.vehicle_id
}
end
def start_date(%DateTime{} = timestamp) do
timestamp
|> get_service_date()
|> Date.to_iso8601(:basic)
end
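# 1 mile per hour is approximately 0.447 meters per second.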
defp miles_per_hour_to_meters_per_second(miles_per_hour) do
miles_per_hour * 0.447
end
defp format_timestamp(%DateTime{} = timestamp) do
DateTime.to_unix(timestamp)
end
end | apps/train_loc/lib/train_loc/encoder/vehicle_positions_enhanced.ex | 0.7874 | 0.497864 | vehicle_positions_enhanced.ex | starcoder |
defmodule Kitsune.Aws.Canonical do
@moduledoc """
This module defines functions that are used to build the canonical request
The canonical request is a string used to generate a signature of the request.
It contains the following data, each one in its own line:
1. The request method
2. The request path
3. The request query string
4. The request headers (including the `Host` header)
5. The headers that should be used to build the request signature
6. The hash of the request payload (empty bodies should use the hash of an empty string)
In this implementation, all headers that are sent to AWS are signed
"""
@doc """
Predicate for `URI.encode/2`
Return true whenever a character should not be URI encoded
For AWS parameters, this is true whenever a character matches the group [A-Za-z0-9_~.-]
"""
@spec encode_param?(char()) :: boolean()
def encode_param?(ch), do:
(ch >= ?A && ch <= ?Z) || (ch >= ?a && ch <= ?z) ||
(ch >= ?0 && ch <= ?9) ||
ch == ?_ || ch == ?- ||
ch == ?~ || ch == ?.
@doc """
Predicate for `URI.encode/2`
Return true whenever a character should not be URI encoded
For AWS URIs, this is true whenever a character should not be param encoded (see `encode_param?/2`)
or when it is the forward slash character (`/`)
"""
@spec encode_uri?(char()) :: boolean()
def encode_uri?(ch), do: encode_param?(ch) || ch == ?/
@doc """
Encodes a string for URIs, using `encode_uri?/1` as encoder
"""
@spec uri_encode(String.t()) :: String.t()
def uri_encode(string), do: URI.encode(string, &encode_uri?/1)
@doc """
Encodes a string for query parameters, using `encode_param?/1` as encoder
"""
@spec param_encode(String.t()) :: String.t()
def param_encode(string), do: URI.encode(string, &encode_param?/1)
@doc """
Returns the HTTP method in its canonical form: trimmed and all characters are uppercase
## Examples
iex> Kitsune.Aws.Canonical.get_canonical_method(" get ")
"GET"
iex> Kitsune.Aws.Canonical.get_canonical_method("POST")
"POST"
"""
@spec get_canonical_method(String.t()) :: String.t()
def get_canonical_method(method), do: String.trim String.upcase method
@doc """
Returns the canonical path for the given URI
The canonical path is:
- A forward slash (`/`) for empty paths (like `http://google.com`, for example)
- URI encoded (see `uri_encode/1`)
## Examples
iex> Kitsune.Aws.Canonical.get_canonical_path("http://www.google.com")
"/"
iex> Kitsune.Aws.Canonical.get_canonical_path("http://www.google.com?foo=bar")
"/"
iex> Kitsune.Aws.Canonical.get_canonical_path("http://www.google.com/foo/bar")
"/foo/bar"
"""
@spec get_canonical_path(String.t()) :: String.t()
def get_canonical_path(uri), do: uri_encode(URI.parse(uri).path || "/")
@doc """
Returns the canonical query string for the given URI
The query string has the following properties:
- All parameters are sorted alphabetically ascending by its key
- Both keys and values are encoded using `param_encode/1`
## Examples
iex> Kitsune.Aws.Canonical.get_canonical_query_string("http://www.google.com")
""
iex> Kitsune.Aws.Canonical.get_canonical_query_string("http://www.google.com?foo!=bar@")
"foo%21=bar%40"
iex> Kitsune.Aws.Canonical.get_canonical_query_string("http://www.google.com?foo!=bar@&baz=123")
"baz=123&foo%21=bar%40"
"""
@spec get_canonical_query_string(String.t()) :: String.t()
def get_canonical_query_string(uri) do
(URI.parse(uri).query || "")
|> URI.decode_query
|> Enum.to_list
|> Enum.map_join("&", fn {k,v} -> param_encode(to_string(k)) <> "=" <> param_encode(to_string(v)) end)
end
@doc ~S"""
Returns the canonical headers string
The returned string has the following characteristics:
- Every header is in its own line
- If no headers are passed, a string containing the new line character is returned
- Header keys are all lowercase
- Values are trimmed
## Examples
iex> Kitsune.Aws.Canonical.get_canonical_headers([])
"\n"
iex> Kitsune.Aws.Canonical.get_canonical_headers([{"Content-type", "application/json"}, {"Accept", " application/json"}])
"accept:application/json\ncontent-type:application/json\n"
"""
@spec get_canonical_headers([{String.t(),String.t()}]) :: String.t()
def get_canonical_headers(headers) do
Stream.map(headers, fn {k, v} -> {String.downcase(k), v} end)
|> Enum.sort_by(fn {k, _v} -> k end)
|> get_canonical_headers_unsorted()
end
defp get_canonical_headers_unsorted(headers) do
Enum.map_join(headers, "\n", fn {k, v} -> k <> ":" <> String.trim(to_string(v)) end) <> "\n"
end
@doc """
Returns the headers that should be used for signing
This is a semicolon separated list of all headers keys, in lowercase, that should be used to sign the request
## Examples
iex> Kitsune.Aws.Canonical.get_signed_headers([])
""
iex> Kitsune.Aws.Canonical.get_signed_headers([{"Content-type", "application/json"}, {"Accept", " application/json"}])
"accept;content-type"
"""
@spec get_signed_headers([{String.t(),String.t()}]) :: String.t()
def get_signed_headers(headers) do
Stream.map(headers, fn {k, v} -> {String.downcase(k), v} end)
|> Enum.sort_by(fn {k, _v} -> k end)
|> get_signed_headers_unsorted()
end
defp get_signed_headers_unsorted(headers) do
Enum.map_join(headers, ";", fn {k, _v} -> k end)
end
@doc """
Hashes the request payload for the canonical request
This returns the SHA2-256 hash of the payload, in a lower case hex string
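## Examples
iex> Kitsune.Aws.Canonical.get_hash("")
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"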
"""
@spec get_hash(String.t()) :: String.t()
def get_hash(payload), do: :crypto.hash(:sha256, payload) |> Base.encode16 |> String.downcase
@doc ~S"""
Builds the canonical request string from the given request parameters
This is a convenience function and the preferred way to build the canonical request string, since it avoids sorting
twice the headers that would happen when using the `get_canonical_headers/1` and `get_signed_headers/1` directly.
## Example
iex> Kitsune.Aws.Canonical.get_canonical_request("GET", "http://examplebucket.s3.amazonaws.com/test.txt", [], "")
"GET\n/test.txt\n\nhost:examplebucket.s3.amazonaws.com\n\nhost\ne3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
"""
@spec get_canonical_request(String.t(),String.t(),[{String.t(),String.t()}],String.t()) :: String.t()
def get_canonical_request(method, uri, headers, payload) do
headers_with_host = (headers ++ [{"host", URI.parse(uri).host}])
|> Stream.map(fn {k, v} -> {String.downcase(k), v} end)
|> Enum.sort_by(fn {k, _v} -> k end)
|> Enum.dedup_by(fn {k, _v} -> k end)
get_canonical_method(method) <> "\n" <>
get_canonical_path(uri) <> "\n" <>
get_canonical_query_string(uri) <> "\n" <>
get_canonical_headers_unsorted(headers_with_host) <> "\n" <>
get_signed_headers_unsorted(headers_with_host) <> "\n" <>
get_hash(payload)
end
end | apps/kitsune_aws_core/lib/kitsune/aws/canonical.ex | 0.93913 | 0.548915 | canonical.ex | starcoder |
defmodule Indicado.Bollinger do
@moduledoc """
This is the Bollinger module used for calculating Bollinger Bands.
"""
@doc """
Calculates BB for the list.
Returns `{:ok, results}` where `results` is a list of maps `[%{lower: x, mean: y, upper: z}]`, or `{:error, reason}`
- `lower` represents the lower Bollinger band
- `mean` represents the mean value
- `upper` represents the upper Bollinger band
## Examples
iex> Indicado.Bollinger.eval([1, 2, 3, 4, 5], 2, 2)
{:ok, [%{lower: 0.5, mean: 1.5, upper: 2.5},
%{lower: 1.5, mean: 2.5, upper: 3.5},
%{lower: 2.5, mean: 3.5, upper: 4.5},
%{lower: 3.5, mean: 4.5, upper: 5.5}]}
iex> Indicado.Bollinger.eval([1, 2, 3, 4, 5], 2, 3)
{:ok, [%{lower: 0.0, mean: 1.5, upper: 3.0},
%{lower: 1.0, mean: 2.5, upper: 4.0},
%{lower: 2.0, mean: 3.5, upper: 5.0},
%{lower: 3.0, mean: 4.5, upper: 6.0}]}
iex> Indicado.Bollinger.eval([1, 2, 3, 4, 5], 0, 3)
{:error, :bad_period}
iex> Indicado.Bollinger.eval([1, 2, 3, 4, 5], 5, 0)
{:error, :bad_deviation}
"""
@spec eval(list, pos_integer, pos_integer) :: {:ok, nonempty_list(map)} | {:error, atom}
def eval(list, period, deviation), do: calc(list, period, deviation)
@doc """
Calculates BB for the list. Raises an exception when the arguments do not satisfy the
conditions needed to calculate Bollinger Bands.
Returns a list of maps `[%{lower: x, mean: y, upper: z}]`
- `lower` represents the lower Bollinger band
- `mean` represents the mean value
- `upper` represents the upper Bollinger band
Raises `NotEnoughDataError` if the given list is not long enough to calculate Bollinger Bands.
Raises `BadPeriodError` if period is an unacceptable number.
Raises `BadDeviationError` if deviation is an unacceptable number.
## Examples
iex> Indicado.Bollinger.eval!([1, 2, 3, 4, 5], 2, 2)
[%{lower: 0.5, mean: 1.5, upper: 2.5},
%{lower: 1.5, mean: 2.5, upper: 3.5},
%{lower: 2.5, mean: 3.5, upper: 4.5},
%{lower: 3.5, mean: 4.5, upper: 5.5}]
iex> Indicado.Bollinger.eval!([1, 2, 3, 4, 5], 2, 3)
[%{lower: 0.0, mean: 1.5, upper: 3.0},
%{lower: 1.0, mean: 2.5, upper: 4.0},
%{lower: 2.0, mean: 3.5, upper: 5.0},
%{lower: 3.0, mean: 4.5, upper: 6.0}]
iex> Indicado.Bollinger.eval!([], 2, 3)
** (NotEnoughDataError) not enough data
iex> Indicado.Bollinger.eval!([1, 2, 3, 4, 5], 0, 3)
** (BadPeriodError) bad period
iex> Indicado.Bollinger.eval!([1, 2, 3, 4, 5], 5, 0)
** (BadDeviationError) bad deviation
"""
@spec eval!(list, pos_integer, pos_integer) :: nonempty_list(map) | no_return
def eval!(list, period, deviation) do
case calc(list, period, deviation) do
{:ok, result} -> result
{:error, :not_enough_data} -> raise NotEnoughDataError
{:error, :bad_period} -> raise BadPeriodError
{:error, :bad_deviation} -> raise BadDeviationError
end
end
defp calc(list, period, deviation, results \\ [])
defp calc(_list, period, _deviation, _result) when period < 1, do: {:error, :bad_period}
defp calc(_list, _period, deviation, _result) when deviation < 1, do: {:error, :bad_deviation}
defp calc([], _period, _deviation, []), do: {:error, :not_enough_data}
defp calc([], _period, _deviation, results), do: {:ok, Enum.reverse(results)}
defp calc([_head | tail] = list, period, deviation, results) when length(list) >= period do
row =
list
|> Enum.take(period)
|> bb_row(deviation)
calc(tail, period, deviation, [row | results])
end
defp calc([_head | tail], period, deviation, results) do
calc(tail, period, deviation, results)
end
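# Builds one result row for a window: upper and lower are the mean
# shifted by `deviation` standard deviations.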
defp bb_row(list, deviation) do
mean = Indicado.Math.mean(list)
stddev = Indicado.Math.stddev(list, mean)
%{lower: mean - stddev * deviation, mean: mean, upper: mean + stddev * deviation}
end
end | lib/indicado/bollinger.ex | 0.947781 | 0.86431 | bollinger.ex | starcoder |
defmodule Membrane.VideoMerger do
@moduledoc """
Membrane element that merges multiple raw videos into one.
The element expects each frame to be received in a separate buffer, so the parser
(`Membrane.Element.RawVideo.Parser`) may be required in a pipeline before
the merger (e.g. when input is read from `Membrane.File.Source`).
The element expects to receive frames in order from each input.
Currently, `VideoMerger` may not be suitable for merging live streams: the element
waits for at least one frame from each of the inputs, and forwards the one
with the lowest presentation timestamp.
"""
use Membrane.Filter
alias __MODULE__.BufferQueue
alias Membrane.Caps.Video.Raw
alias Membrane.Pad
def_input_pad :input,
caps: {Raw, aligned: true},
demand_unit: :buffers,
availability: :on_request
def_output_pad :output,
caps: {Raw, aligned: true}
@impl true
def handle_init(_opts) do
{:ok, BufferQueue.new()}
end
@impl true
def handle_demand(:output, size, :buffers, _ctx, state) do
demands =
state
|> BufferQueue.get_empty_ids()
|> Enum.map(&{:demand, {Pad.ref(:input, &1), size}})
{{:ok, demands}, state}
end
@impl true
def handle_end_of_stream({_pad, :input, id}, _ctx, state) do
state
|> BufferQueue.enqueue_eos(id)
|> get_actions()
end
@impl true
def handle_pad_added({_pad, :input, id}, _ctx, state) do
{:ok, Map.put_new(state, id, [])}
end
@impl true
def handle_process_list({_pad, :input, id}, buffers, _ctx, state) do
if Enum.any?(buffers, &is_nil(&1.pts)) do
raise("Cannot merge stream without pts")
end
state
|> BufferQueue.enqueue_list(id, buffers)
|> get_actions()
end
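# Dequeues whatever can be merged so far; closes the stream once every
# input queue has reached end-of-stream, and redemands when no buffer
# could be forwarded yet.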
defp get_actions(state) do
{atom, buffers, new_state} = BufferQueue.dequeue_buffers(state)
actions =
case {atom, buffers} do
{:empty, []} -> [end_of_stream: :output]
{:empty, buffers} -> [buffer: {:output, buffers}, end_of_stream: :output]
{:ok, []} -> [redemand: :output]
{:ok, buffers} -> [buffer: {:output, buffers}]
end
{{:ok, actions}, new_state}
end
end | lib/video_merger.ex | 0.871871 | 0.517571 | video_merger.ex | starcoder |
defmodule Absinthe.Validation.PreventCircularFragments do
alias Absinthe.{Language, Traversal}
@moduledoc false
def validate(doc, {_, errors}) do
doc.definitions
|> Enum.filter(fn
%Language.Fragment{} -> true
_ -> false
end)
|> check(errors)
end
# The overall approach here is to create a digraph with an `acyclic`
# constraint. Then we just add the fragments as vectors, and fragment
# spreads are used to create edges. If at any point :digraph returns
# an error we have a cycle! Thank you :digraph for doing the hard part
# :)
# NOTE: `:digraph` is MUTABLE, as it's backed by `:ets`
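# For example, spreads forming A -> B -> A produce an edge that :digraph
# rejects with {:error, {:bad_edge, path}}, which check_fragment/2 below
# turns into a user-facing error.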
def check(fragments, errors) do
graph = :digraph.new([:acyclic])
result = fragments
|> Enum.reduce({errors, graph}, &check_fragment/2)
|> case do
{[], _} -> {:ok, []}
{errors, _} -> {:error, errors}
end
# The table will get deleted when the process exits, but we might
# as well clean up for ourselves explicitly.
:digraph.delete(graph)
result
end
def check([], errors, _), do: errors
def check_fragment(fragment, {errors, graph}) do
_ = :digraph.add_vertex(graph, fragment.name)
Traversal.reduce(fragment, :unused, {errors, graph}, fn
%Language.FragmentSpread{} = spread, traversal, {errors, graph} ->
_ = :digraph.add_vertex(graph, spread.name)
case :digraph.add_edge(graph, fragment.name, spread.name) do
{:error, {:bad_edge, path}} ->
# All just error generation logic
deps = [fragment.name | path]
|> Enum.map(&"`#{&1}'")
|> Enum.join(" => ")
msg = """
Fragment Cycle Error
Fragment `#{fragment.name}' forms a cycle via: (#{deps})
"""
error = %{
message: String.strip(msg),
locations: [%{line: spread.loc.start_line, column: 0}]
}
{:ok, {[error | errors], graph}, traversal}
_ ->
{:ok, {errors, graph}, traversal}
end
_, traversal, acc ->
{:ok, acc, traversal}
end)
end
end | lib/absinthe/validation/prevent_circular_fragments.ex | 0.849238 | 0.456773 | prevent_circular_fragments.ex | starcoder |
defmodule Volley.PersistentSubscription do
@moduledoc """
A GenStage/Broadway producer for persistent subscriptions
Persistent subscriptions are a feature of EventStoreDB which offload the
responsibilities of tracking processed events, back-pressure, and subscriber
dispatch to the EventStoreDB server. This allows subscribers to more easily
implement complex subscription strategies, such as allowing multiple
subscribers across services without gossip, avoiding head-of-line blocking,
and enabling concurrent and batch processing schemes and rate-limiting.
Broadway features an acknowledgement interface which integrates well with
the persistent subscription `Spear.ack/3` and `Spear.nack/4` system. Consumers
intending to use this producer should prefer writing handlers with
`Broadway` (over `GenStage`) where possible.
## Configuration
* `:broadway?` - (default: `false`) whether to emit events as
`t:Broadway.Message.t/0` messages or as `t:Spear.Event.t/0` events.
`true` should be set for this option if this producer is being used
in a Broadway topology, `false` for use in a `GenStage` pipeline.
When set as `true`, this producer will set the ack interface for
each message to `Volley.PersistentSubscription.Acknowledger` with the
proper connection details for sending acks and nacks. When `false`, the
`Spear.Event.metadata.subscription` field will be replaced with a
`t:Volley.PersistentSubscription.Subscription.t/0` struct with any necessary
connection details.
* `:subscriptions` - (default: `[]`) a list of subscription configurations.
Broadway does not currently allow more than one producer in a topology,
however one may wish to subscribe to multiple persistent subscriptions,
potentially across EventStoreDBs. Since back-pressure is controlled
on the EventStoreDB side, a handler may specify multiple subscriptions
in a single producer without any special considerations. The schema of
each subscription is as follows
* `:connection` - (required) a `t:Spear.Connection.t/0` process which
can either be specified as a PID or any valid `t:GenServer.name/0`
* `:stream_name` - (required) the EventStoreDB stream
* `:group_name` - (required) the EventStoreDB group name
* `:opts` - (default: `[]`) options to pass to
`Spear.connect_to_persistent_subscription/5`. The main use of this
options field is to configure the `:buffer_size` option (default: `1`).
The `:buffer_size` option controls the number of events allowed to be
sent to this producer before any events are acknowledged.
Remaining options are passed to `GenStage.start_link/3` and the
`{:producer, state, opts}` tuple in `c:GenStage.init/1`.
## Examples
```elixir
defmodule MyHandler do
use Broadway
alias Broadway.Message
def start_link(_opts) do
subscription_opts = [
broadway?: true,
subscriptions: [
[
stream_name: "MyStream",
group_name: inspect(__MODULE__),
connection: MyApp.SpearClient,
opts: [
# 10 events allowed in-flight at a time
buffer_size: 10
]
]
]
]
Broadway.start_link(__MODULE__,
name: __MODULE__,
producer: [
module: {Volley.PersistentSubscription, subscription_opts}
],
processors: [
default: [concurrency: 2]
],
batchers: [
default: [concurrency: 1, batch_size: 5]
]
)
end
@impl Broadway
def handle_message(:default, %Message{} = message, _context) do
message
|> Message.put_batcher(:default)
end
@impl Broadway
def handle_batch(:default, messages, _batch_info, context) do
# do something batchy with messages...
end
end
```
"""
use GenStage
import Volley
alias __MODULE__.Subscription
defstruct [:config, subscriptions: %{}]
# coveralls-ignore-start
@doc false
def start_link(opts) do
{start_link_opts, opts} = pop_genserver_opts(opts)
GenStage.start_link(__MODULE__, opts, start_link_opts)
end
# coveralls-ignore-stop
@impl GenStage
def init(opts) do
{producer_opts, opts} = pop_producer_opts(opts)
subscriptions =
opts
|> Keyword.get(:subscriptions, [])
|> Enum.map(&Subscription.from_config/1)
Enum.each(subscriptions, fn sub -> send(self(), {:connect, sub}) end)
config =
opts
|> Map.new()
|> Map.put(:subscriptions, subscriptions)
{:producer, %__MODULE__{config: config}, producer_opts}
end
@impl GenStage
def handle_info({:connect, subscription}, state) do
state =
case Subscription.connect(subscription) do
{:ok, %Subscription{ref: ref} = subscription} ->
put_in(state.subscriptions[ref], subscription)
:error ->
state
end
{:noreply, [], state}
end
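# The server ended the subscription; drop it from state and attempt to
# reconnect.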
def handle_info({:eos, subscription, _reason}, state) do
{%Subscription{} = sub, state} = pop_in(state.subscriptions[subscription])
Subscription.reconnect(sub)
{:noreply, [], state}
end
def handle_info(%Spear.Event{} = event, state) do
{:noreply, [map_event(event, state)], state}
end
@impl GenStage
def handle_demand(_demand, state) do
{:noreply, [], state}
end
if_broadway do
defp map_event(event, %__MODULE__{config: %{broadway?: true}} = state) do
subscription = state.subscriptions[event.metadata.subscription]
%Broadway.Message{
data: event,
acknowledger:
{Volley.PersistentSubscription.Acknowledger, subscription, %{}}
}
end
end
defp map_event(event, state) do
update_in(event.metadata.subscription, &state.subscriptions[&1])
end
end | lib/volley/persistent_subscription.ex | 0.897767 | 0.857231 | persistent_subscription.ex | starcoder |
defmodule AWS.RDS do
@moduledoc """
Amazon Relational Database Service
Amazon Relational Database Service (Amazon RDS) is a web service that makes
it easier to set up, operate, and scale a relational database in the cloud.
It provides cost-efficient, resizeable capacity for an industry-standard
relational database and manages common database administration tasks,
freeing up developers to focus on what makes their applications and
businesses unique.
Amazon RDS gives you access to the capabilities of a MySQL, MariaDB,
PostgreSQL, Microsoft SQL Server, Oracle, or Amazon Aurora database server.
These capabilities mean that the code, applications, and tools you already
use today with your existing databases work with Amazon RDS without
modification. Amazon RDS automatically backs up your database and maintains
the database software that powers your DB instance. Amazon RDS is flexible:
you can scale your DB instance's compute resources and storage capacity to
meet your application's demand. As with all Amazon Web Services, there are
no up-front investments, and you pay only for the resources you use.
This interface reference for Amazon RDS contains documentation for a
programming or command line interface you can use to manage Amazon RDS.
Amazon RDS is asynchronous, which means that some interfaces might require
techniques such as polling or callback functions to determine when a
command has been applied. In this reference, the parameter descriptions
indicate whether a command is applied immediately, on the next instance
reboot, or during the maintenance window. The reference structure is as
follows, and we list following some related topics from the user guide.
**Amazon RDS API Reference**
<ul> <li> For the alphabetical list of API actions, see [API
Actions](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_Operations.html).
</li> <li> For the alphabetical list of data types, see [Data
Types](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_Types.html).
</li> <li> For a list of common query parameters, see [Common
Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/CommonParameters.html).
</li> <li> For descriptions of the error codes, see [Common
Errors](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/CommonErrors.html).
</li> </ul> **Amazon RDS User Guide**
<ul> <li> For a summary of the Amazon RDS interfaces, see [Available RDS
Interfaces](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Welcome.html#Welcome.Interfaces).
</li> <li> For more information about how to use the Query API, see [Using
the Query
API](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Using_the_Query_API.html).
</li> </ul>
"""
@doc """
Associates an Identity and Access Management (IAM) role with an Amazon
Aurora DB cluster. For more information, see [Authorizing Amazon Aurora
MySQL to Access Other AWS Services on Your
Behalf](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Integrating.Authorizing.html)
in the *Amazon Aurora User Guide*.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def add_role_to_d_b_cluster(client, input, options \\ []) do
request(client, "AddRoleToDBCluster", input, options)
end
@doc """
Associates an AWS Identity and Access Management (IAM) role with a DB
instance.
<note> To add a role to a DB instance, the status of the DB instance must
be `available`.
</note>
"""
def add_role_to_d_b_instance(client, input, options \\ []) do
request(client, "AddRoleToDBInstance", input, options)
end
@doc """
Adds a source identifier to an existing RDS event notification
subscription.
"""
def add_source_identifier_to_subscription(client, input, options \\ []) do
request(client, "AddSourceIdentifierToSubscription", input, options)
end
@doc """
Adds metadata tags to an Amazon RDS resource. These tags can also be used
with cost allocation reporting to track cost associated with Amazon RDS
resources, or used in a Condition statement in an IAM policy for Amazon
RDS.
For an overview on tagging Amazon RDS resources, see [Tagging Amazon RDS
Resources](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.Tagging.html).
"""
def add_tags_to_resource(client, input, options \\ []) do
request(client, "AddTagsToResource", input, options)
end
@doc """
Applies a pending maintenance action to a resource (for example, to a DB
instance).
"""
def apply_pending_maintenance_action(client, input, options \\ []) do
request(client, "ApplyPendingMaintenanceAction", input, options)
end
@doc """
Enables ingress to a DBSecurityGroup using one of two forms of
authorization. First, EC2 or VPC security groups can be added to the
DBSecurityGroup if the application using the database is running on EC2 or
VPC instances. Second, IP ranges are available if the application accessing
your database is running on the Internet. Required parameters for this API
are one of CIDR range, EC2SecurityGroupId for VPC, or
(EC2SecurityGroupOwnerId and either EC2SecurityGroupName or
EC2SecurityGroupId for non-VPC).
<note> You can't authorize ingress from an EC2 security group in one AWS
Region to an Amazon RDS DB instance in another. You can't authorize ingress
from a VPC security group in one VPC to an Amazon RDS DB instance in
another.
</note> For an overview of CIDR ranges, go to the [Wikipedia
Tutorial](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing).
"""
def authorize_d_b_security_group_ingress(client, input, options \\ []) do
request(client, "AuthorizeDBSecurityGroupIngress", input, options)
end
@doc """
Backtracks a DB cluster to a specific time, without creating a new DB
cluster.
For more information on backtracking, see [ Backtracking an Aurora DB
Cluster](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Managing.Backtrack.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora MySQL DB clusters.
</note>
"""
def backtrack_d_b_cluster(client, input, options \\ []) do
request(client, "BacktrackDBCluster", input, options)
end
@doc """
Cancels an export task in progress that is exporting a snapshot to Amazon
S3. Any data that has already been written to the S3 bucket isn't removed.
"""
def cancel_export_task(client, input, options \\ []) do
request(client, "CancelExportTask", input, options)
end
@doc """
Copies the specified DB cluster parameter group.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def copy_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "CopyDBClusterParameterGroup", input, options)
end
@doc """
Copies a snapshot of a DB cluster.
To copy a DB cluster snapshot from a shared manual DB cluster snapshot,
`SourceDBClusterSnapshotIdentifier` must be the Amazon Resource Name (ARN)
of the shared DB cluster snapshot.
You can copy an encrypted DB cluster snapshot from another AWS Region. In
that case, the AWS Region where you call the `CopyDBClusterSnapshot` action
is the destination AWS Region for the encrypted DB cluster snapshot to be
copied to. To copy an encrypted DB cluster snapshot from another AWS
Region, you must provide the following values:
<ul> <li> `KmsKeyId` - The AWS Key Management System (AWS KMS) key
identifier for the key to use to encrypt the copy of the DB cluster
snapshot in the destination AWS Region.
</li> <li> `PreSignedUrl` - A URL that contains a Signature Version 4
signed request for the `CopyDBClusterSnapshot` action to be called in the
source AWS Region where the DB cluster snapshot is copied from. The
pre-signed URL must be a valid request for the `CopyDBClusterSnapshot` API
action that can be executed in the source AWS Region that contains the
encrypted DB cluster snapshot to be copied.
The pre-signed URL request must contain the following parameter values:
<ul> <li> `KmsKeyId` - The KMS key identifier for the key to use to encrypt
the copy of the DB cluster snapshot in the destination AWS Region. This is
the same identifier for both the `CopyDBClusterSnapshot` action that is
called in the destination AWS Region, and the action contained in the
pre-signed URL.
</li> <li> `DestinationRegion` - The name of the AWS Region that the DB
cluster snapshot is to be created in.
</li> <li> `SourceDBClusterSnapshotIdentifier` - The DB cluster snapshot
identifier for the encrypted DB cluster snapshot to be copied. This
identifier must be in the Amazon Resource Name (ARN) format for the source
AWS Region. For example, if you are copying an encrypted DB cluster
snapshot from the us-west-2 AWS Region, then your
`SourceDBClusterSnapshotIdentifier` looks like the following example:
`arn:aws:rds:us-west-2:123456789012:cluster-snapshot:aurora-cluster1-snapshot-20161115`.
</li> </ul> To learn how to generate a Signature Version 4 signed request,
see [ Authenticating Requests: Using Query Parameters (AWS Signature
Version
4)](https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html)
and [ Signature Version 4 Signing
Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
<note> If you are using an AWS SDK tool or the AWS CLI, you can specify
`SourceRegion` (or `--source-region` for the AWS CLI) instead of specifying
`PreSignedUrl` manually. Specifying `SourceRegion` autogenerates a
pre-signed URL that is a valid request for the operation that can be
executed in the source AWS Region.
</note> </li> <li> `TargetDBClusterSnapshotIdentifier` - The identifier for
the new copy of the DB cluster snapshot in the destination AWS Region.
</li> <li> `SourceDBClusterSnapshotIdentifier` - The DB cluster snapshot
identifier for the encrypted DB cluster snapshot to be copied. This
identifier must be in the ARN format for the source AWS Region and is the
same value as the `SourceDBClusterSnapshotIdentifier` in the pre-signed
URL.
</li> </ul> To cancel the copy operation once it is in progress, delete the
target DB cluster snapshot identified by
`TargetDBClusterSnapshotIdentifier` while that DB cluster snapshot is in
"copying" status.
For more information on copying encrypted DB cluster snapshots from one AWS
Region to another, see [ Copying a
Snapshot](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/USER_CopySnapshot.html)
in the *Amazon Aurora User Guide.*
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def copy_d_b_cluster_snapshot(client, input, options \\ []) do
request(client, "CopyDBClusterSnapshot", input, options)
end
@doc """
Copies the specified DB parameter group.
"""
def copy_d_b_parameter_group(client, input, options \\ []) do
request(client, "CopyDBParameterGroup", input, options)
end
@doc """
Copies the specified DB snapshot. The source DB snapshot must be in the
`available` state.
You can copy a snapshot from one AWS Region to another. In that case, the
AWS Region where you call the `CopyDBSnapshot` action is the destination
AWS Region for the DB snapshot copy.
For more information about copying snapshots, see [Copying a DB
Snapshot](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_CopySnapshot.html#USER_CopyDBSnapshot)
in the *Amazon RDS User Guide.*
"""
def copy_d_b_snapshot(client, input, options \\ []) do
request(client, "CopyDBSnapshot", input, options)
end
@doc """
Copies the specified option group.
"""
def copy_option_group(client, input, options \\ []) do
request(client, "CopyOptionGroup", input, options)
end
@doc """
Creates a custom Availability Zone (AZ).
A custom AZ is an on-premises AZ that is integrated with a VMware vSphere
cluster.
For more information about RDS on VMware, see the [ *RDS on VMware User
Guide.*
](https://docs.aws.amazon.com/AmazonRDS/latest/RDSonVMwareUserGuide/rds-on-vmware.html)
"""
def create_custom_availability_zone(client, input, options \\ []) do
request(client, "CreateCustomAvailabilityZone", input, options)
end
@doc """
Creates a new Amazon Aurora DB cluster.
You can use the `ReplicationSourceIdentifier` parameter to create the DB
cluster as a read replica of another DB cluster or Amazon RDS MySQL DB
instance. For cross-region replication where the DB cluster identified by
`ReplicationSourceIdentifier` is encrypted, you must also specify the
`PreSignedUrl` parameter.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def create_d_b_cluster(client, input, options \\ []) do
request(client, "CreateDBCluster", input, options)
end
@doc """
Creates a new custom endpoint and associates it with an Amazon Aurora DB
cluster.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def create_d_b_cluster_endpoint(client, input, options \\ []) do
request(client, "CreateDBClusterEndpoint", input, options)
end
@doc """
Creates a new DB cluster parameter group.
Parameters in a DB cluster parameter group apply to all of the instances in
a DB cluster.
A DB cluster parameter group is initially created with the default
parameters for the database engine used by instances in the DB cluster. To
provide custom values for any of the parameters, you must modify the group
after creating it using `ModifyDBClusterParameterGroup`. Once you've
created a DB cluster parameter group, you need to associate it with your DB
cluster using `ModifyDBCluster`. When you associate a new DB cluster
parameter group with a running DB cluster, you need to reboot the DB
instances in the DB cluster without failover for the new DB cluster
parameter group and associated settings to take effect.
<important> After you create a DB cluster parameter group, you should wait
at least 5 minutes before creating your first DB cluster that uses that DB
cluster parameter group as the default parameter group. This allows Amazon
RDS to fully complete the create action before the DB cluster parameter
group is used as the default for a new DB cluster. This is especially
important for parameters that are critical when creating the default
database for a DB cluster, such as the character set for the default
database defined by the `character_set_database` parameter. You can use the
*Parameter Groups* option of the [Amazon RDS
console](https://console.aws.amazon.com/rds/) or the
`DescribeDBClusterParameters` action to verify that your DB cluster
parameter group has been created or modified.
</important> For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def create_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "CreateDBClusterParameterGroup", input, options)
end
@doc """
Creates a snapshot of a DB cluster. For more information on Amazon Aurora,
see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def create_d_b_cluster_snapshot(client, input, options \\ []) do
request(client, "CreateDBClusterSnapshot", input, options)
end
@doc """
Creates a new DB instance.
"""
def create_d_b_instance(client, input, options \\ []) do
request(client, "CreateDBInstance", input, options)
end
@doc """
Creates a new DB instance that acts as a read replica for an existing
source DB instance. You can create a read replica for a DB instance running
MySQL, MariaDB, Oracle, PostgreSQL, or SQL Server. For more information,
see [Working with Read
Replicas](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_ReadRepl.html)
in the *Amazon RDS User Guide*.
Amazon Aurora doesn't support this action. Call the `CreateDBInstance`
action to create a DB instance for an Aurora DB cluster.
All read replica DB instances are created with backups disabled. All other
DB instance attributes (including DB security groups and DB parameter
groups) are inherited from the source DB instance, except as specified.
<important> Your source DB instance must have backup retention enabled.
</important>
"""
def create_d_b_instance_read_replica(client, input, options \\ []) do
request(client, "CreateDBInstanceReadReplica", input, options)
end
@doc """
Creates a new DB parameter group.
A DB parameter group is initially created with the default parameters for
the database engine used by the DB instance. To provide custom values for
any of the parameters, you must modify the group after creating it using
*ModifyDBParameterGroup*. Once you've created a DB parameter group, you
need to associate it with your DB instance using *ModifyDBInstance*. When
you associate a new DB parameter group with a running DB instance, you need
to reboot the DB instance without failover for the new DB parameter group
and associated settings to take effect.
<important> After you create a DB parameter group, you should wait at least
5 minutes before creating your first DB instance that uses that DB
parameter group as the default parameter group. This allows Amazon RDS to
fully complete the create action before the parameter group is used as the
default for a new DB instance. This is especially important for parameters
that are critical when creating the default database for a DB instance,
such as the character set for the default database defined by the
`character_set_database` parameter. You can use the *Parameter Groups*
option of the [Amazon RDS console](https://console.aws.amazon.com/rds/) or
the *DescribeDBParameters* command to verify that your DB parameter group
has been created or modified.
</important>
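## Example

Illustrative only — the parameter names follow the `CreateDBParameterGroup`
query API and the values are placeholders.

    {:ok, _result, _response} =
      create_d_b_parameter_group(client, %{
        "DBParameterGroupName" => "mydbparamgroup",
        "DBParameterGroupFamily" => "mysql8.0",
        "Description" => "My custom parameter group"
      })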
"""
def create_d_b_parameter_group(client, input, options \\ []) do
request(client, "CreateDBParameterGroup", input, options)
end
@doc """
Creates a new DB proxy.
"""
def create_d_b_proxy(client, input, options \\ []) do
request(client, "CreateDBProxy", input, options)
end
@doc """
Creates a new DB security group. DB security groups control access to a DB
instance.
<note> A DB security group controls access to EC2-Classic DB instances that
are not in a VPC.
</note>
"""
def create_d_b_security_group(client, input, options \\ []) do
request(client, "CreateDBSecurityGroup", input, options)
end
@doc """
Creates a snapshot of a DB instance. The source DB instance must be in the
`available` or `storage-optimization` state.
"""
def create_d_b_snapshot(client, input, options \\ []) do
request(client, "CreateDBSnapshot", input, options)
end
@doc """
Creates a new DB subnet group. DB subnet groups must contain at least one
subnet in at least two AZs in the AWS Region.
"""
def create_d_b_subnet_group(client, input, options \\ []) do
request(client, "CreateDBSubnetGroup", input, options)
end
@doc """
Creates an RDS event notification subscription. This action requires a
topic Amazon Resource Name (ARN) created by either the RDS console, the SNS
console, or the SNS API. To obtain an ARN with SNS, you must create a topic
in Amazon SNS and subscribe to the topic. The ARN is displayed in the SNS
console.
You can specify the type of source (`SourceType`) that you want to be
notified of and provide a list of RDS sources (`SourceIds`) that trigger
the events. You can also provide a list of event categories
(`EventCategories`) for events that you want to be notified of. For
example, you can specify `SourceType` = `db-instance`, `SourceIds` =
`mydbinstance1`, `mydbinstance2` and `EventCategories` = `Availability`,
`Backup`.
If you specify both the `SourceType` and `SourceIds`, such as `SourceType`
= `db-instance` and `SourceIdentifier` = `myDBInstance1`, you are notified
of all the `db-instance` events for the specified source. If you specify a
`SourceType` but do not specify a `SourceIdentifier`, you receive notice of
the events for that source type for all your RDS sources. If you don't
specify either the SourceType or the `SourceIdentifier`, you are notified
of events generated from all RDS sources belonging to your customer
account.
<note> RDS event notification is only available for unencrypted SNS topics.
If you specify an encrypted SNS topic, event notifications aren't sent for
the topic.
</note>
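## Example

A sketch with placeholder identifiers; the SNS topic must already exist.

    {:ok, _result, _response} =
      create_event_subscription(client, %{
        "SubscriptionName" => "my-db-events",
        "SnsTopicArn" => "arn:aws:sns:us-east-1:123456789012:my-topic",
        "SourceType" => "db-instance"
      })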
"""
def create_event_subscription(client, input, options \\ []) do
request(client, "CreateEventSubscription", input, options)
end
@doc """
Creates an Aurora global database spread across multiple AWS Regions. The
global database contains a single primary cluster with read-write
capability, and a read-only secondary cluster that receives data from the
primary cluster through high-speed replication performed by the Aurora
storage subsystem.
You can create a global database that is initially empty, and then add a
primary cluster and a secondary cluster to it. Or you can specify an
existing Aurora cluster during the create operation, and this cluster
becomes the primary cluster of the global database.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def create_global_cluster(client, input, options \\ []) do
request(client, "CreateGlobalCluster", input, options)
end
@doc """
Creates a new option group. You can create up to 20 option groups.
"""
def create_option_group(client, input, options \\ []) do
request(client, "CreateOptionGroup", input, options)
end
@doc """
Deletes a custom Availability Zone (AZ).
A custom AZ is an on-premises AZ that is integrated with a VMware vSphere
cluster.
For more information about RDS on VMware, see the [ *RDS on VMware User
Guide.*
](https://docs.aws.amazon.com/AmazonRDS/latest/RDSonVMwareUserGuide/rds-on-vmware.html)
"""
def delete_custom_availability_zone(client, input, options \\ []) do
request(client, "DeleteCustomAvailabilityZone", input, options)
end
@doc """
The DeleteDBCluster action deletes a previously provisioned DB cluster.
When you delete a DB cluster, all automated backups for that DB cluster are
deleted and can't be recovered. Manual DB cluster snapshots of the
specified DB cluster are not deleted.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def delete_d_b_cluster(client, input, options \\ []) do
request(client, "DeleteDBCluster", input, options)
end
@doc """
Deletes a custom endpoint and removes it from an Amazon Aurora DB cluster.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def delete_d_b_cluster_endpoint(client, input, options \\ []) do
request(client, "DeleteDBClusterEndpoint", input, options)
end
@doc """
Deletes a specified DB cluster parameter group. The DB cluster parameter
group to be deleted can't be associated with any DB clusters.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def delete_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "DeleteDBClusterParameterGroup", input, options)
end
@doc """
Deletes a DB cluster snapshot. If the snapshot is being copied, the copy
operation is terminated.
<note> The DB cluster snapshot must be in the `available` state to be
deleted.
</note> For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def delete_d_b_cluster_snapshot(client, input, options \\ []) do
request(client, "DeleteDBClusterSnapshot", input, options)
end
@doc """
The DeleteDBInstance action deletes a previously provisioned DB instance.
When you delete a DB instance, all automated backups for that instance are
deleted and can't be recovered. Manual DB snapshots of the DB instance to
be deleted by `DeleteDBInstance` are not deleted.
If you request a final DB snapshot the status of the Amazon RDS DB instance
is `deleting` until the DB snapshot is created. The API action
`DescribeDBInstance` is used to monitor the status of this operation. The
action can't be canceled or reverted once submitted.
When a DB instance is in a failure state and has a status of `failed`,
`incompatible-restore`, or `incompatible-network`, you can only delete it
when you skip creation of the final snapshot with the `SkipFinalSnapshot`
parameter.
If the specified DB instance is part of an Amazon Aurora DB cluster, you
can't delete the DB instance if both of the following conditions are true:
<ul> <li> The DB cluster is a read replica of another Amazon Aurora DB
cluster.
</li> <li> The DB instance is the only instance in the DB cluster.
</li> </ul> To delete a DB instance in this case, first call the
`PromoteReadReplicaDBCluster` API action to promote the DB cluster so it's
no longer a read replica. After the promotion completes, then call the
`DeleteDBInstance` API action to delete the final instance in the DB
cluster.
"""
def delete_d_b_instance(client, input, options \\ []) do
request(client, "DeleteDBInstance", input, options)
end
@doc """
Deletes automated backups based on the source instance's `DbiResourceId`
value or the restorable instance's resource ID.
"""
def delete_d_b_instance_automated_backup(client, input, options \\ []) do
request(client, "DeleteDBInstanceAutomatedBackup", input, options)
end
@doc """
Deletes a specified DB parameter group. The DB parameter group to be
deleted can't be associated with any DB instances.
"""
def delete_d_b_parameter_group(client, input, options \\ []) do
request(client, "DeleteDBParameterGroup", input, options)
end
@doc """
Deletes an existing proxy.
"""
def delete_d_b_proxy(client, input, options \\ []) do
request(client, "DeleteDBProxy", input, options)
end
@doc """
Deletes a DB security group.
<note> The specified DB security group must not be associated with any DB
instances.
</note>
"""
def delete_d_b_security_group(client, input, options \\ []) do
request(client, "DeleteDBSecurityGroup", input, options)
end
@doc """
Deletes a DB snapshot. If the snapshot is being copied, the copy operation
is terminated.
<note> The DB snapshot must be in the `available` state to be deleted.
</note>
"""
def delete_d_b_snapshot(client, input, options \\ []) do
request(client, "DeleteDBSnapshot", input, options)
end
@doc """
Deletes a DB subnet group.
<note> The specified database subnet group must not be associated with any
DB instances.
</note>
"""
def delete_d_b_subnet_group(client, input, options \\ []) do
request(client, "DeleteDBSubnetGroup", input, options)
end
@doc """
Deletes an RDS event notification subscription.
"""
def delete_event_subscription(client, input, options \\ []) do
request(client, "DeleteEventSubscription", input, options)
end
@doc """
Deletes a global database cluster. The primary and secondary clusters must
already be detached or destroyed first.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def delete_global_cluster(client, input, options \\ []) do
request(client, "DeleteGlobalCluster", input, options)
end
@doc """
Deletes the installation medium for a DB engine that requires an
on-premises customer provided license, such as Microsoft SQL Server.
"""
def delete_installation_media(client, input, options \\ []) do
request(client, "DeleteInstallationMedia", input, options)
end
@doc """
Deletes an existing option group.
"""
def delete_option_group(client, input, options \\ []) do
request(client, "DeleteOptionGroup", input, options)
end
@doc """
Removes the association between one or more `DBProxyTarget` data structures
and a `DBProxyTargetGroup`.
"""
def deregister_d_b_proxy_targets(client, input, options \\ []) do
request(client, "DeregisterDBProxyTargets", input, options)
end
@doc """
Lists all of the attributes for a customer account. The attributes include
Amazon RDS quotas for the account, such as the number of DB instances
allowed. The description for a quota includes the quota name, current usage
toward that quota, and the quota's maximum value.
This command doesn't take any parameters.
"""
def describe_account_attributes(client, input, options \\ []) do
request(client, "DescribeAccountAttributes", input, options)
end
@doc """
Lists the set of CA certificates provided by Amazon RDS for this AWS
account.
"""
def describe_certificates(client, input, options \\ []) do
request(client, "DescribeCertificates", input, options)
end
@doc """
Returns information about custom Availability Zones (AZs).
A custom AZ is an on-premises AZ that is integrated with a VMware vSphere
cluster.
For more information about RDS on VMware, see the [ *RDS on VMware User
Guide.*
](https://docs.aws.amazon.com/AmazonRDS/latest/RDSonVMwareUserGuide/rds-on-vmware.html)
"""
def describe_custom_availability_zones(client, input, options \\ []) do
request(client, "DescribeCustomAvailabilityZones", input, options)
end
@doc """
Returns information about backtracks for a DB cluster.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora MySQL DB clusters.
</note>
"""
def describe_d_b_cluster_backtracks(client, input, options \\ []) do
request(client, "DescribeDBClusterBacktracks", input, options)
end
@doc """
Returns information about endpoints for an Amazon Aurora DB cluster.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def describe_d_b_cluster_endpoints(client, input, options \\ []) do
request(client, "DescribeDBClusterEndpoints", input, options)
end
@doc """
Returns a list of `DBClusterParameterGroup` descriptions. If a
`DBClusterParameterGroupName` parameter is specified, the list will contain
only the description of the specified DB cluster parameter group.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def describe_d_b_cluster_parameter_groups(client, input, options \\ []) do
request(client, "DescribeDBClusterParameterGroups", input, options)
end
@doc """
Returns the detailed parameter list for a particular DB cluster parameter
group.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def describe_d_b_cluster_parameters(client, input, options \\ []) do
request(client, "DescribeDBClusterParameters", input, options)
end
@doc """
Returns a list of DB cluster snapshot attribute names and values for a
manual DB cluster snapshot.
When sharing snapshots with other AWS accounts,
`DescribeDBClusterSnapshotAttributes` returns the `restore` attribute and a
list of IDs for the AWS accounts that are authorized to copy or restore the
manual DB cluster snapshot. If `all` is included in the list of values for
the `restore` attribute, then the manual DB cluster snapshot is public and
can be copied or restored by all AWS accounts.
To add or remove access for an AWS account to copy or restore a manual DB
cluster snapshot, or to make the manual DB cluster snapshot public or
private, use the `ModifyDBClusterSnapshotAttribute` API action.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def describe_d_b_cluster_snapshot_attributes(client, input, options \\ []) do
request(client, "DescribeDBClusterSnapshotAttributes", input, options)
end
@doc """
Returns information about DB cluster snapshots. This API action supports
pagination.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def describe_d_b_cluster_snapshots(client, input, options \\ []) do
request(client, "DescribeDBClusterSnapshots", input, options)
end
@doc """
Returns information about provisioned Aurora DB clusters. This API supports
pagination.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This operation can also return information for Amazon Neptune DB
instances and Amazon DocumentDB instances.
</note>
"""
def describe_d_b_clusters(client, input, options \\ []) do
request(client, "DescribeDBClusters", input, options)
end
@doc """
Returns a list of the available DB engines.
"""
def describe_d_b_engine_versions(client, input, options \\ []) do
request(client, "DescribeDBEngineVersions", input, options)
end
@doc """
Displays backups for both current and deleted instances. For example, use
this operation to find details about automated backups for previously
deleted instances. Current instances with retention periods greater than
zero (0) are returned for both the `DescribeDBInstanceAutomatedBackups` and
`DescribeDBInstances` operations.
All parameters are optional.
"""
def describe_d_b_instance_automated_backups(client, input, options \\ []) do
request(client, "DescribeDBInstanceAutomatedBackups", input, options)
end
@doc """
Returns information about provisioned RDS instances. This API supports
pagination.
<note> This operation can also return information for Amazon Neptune DB
instances and Amazon DocumentDB instances.
</note>
"""
def describe_d_b_instances(client, input, options \\ []) do
request(client, "DescribeDBInstances", input, options)
end
@doc """
Returns a list of DB log files for the DB instance.
"""
def describe_d_b_log_files(client, input, options \\ []) do
request(client, "DescribeDBLogFiles", input, options)
end
@doc """
Returns a list of `DBParameterGroup` descriptions. If a
`DBParameterGroupName` is specified, the list will contain only the
description of the specified DB parameter group.
"""
def describe_d_b_parameter_groups(client, input, options \\ []) do
request(client, "DescribeDBParameterGroups", input, options)
end
@doc """
Returns the detailed parameter list for a particular DB parameter group.
"""
def describe_d_b_parameters(client, input, options \\ []) do
request(client, "DescribeDBParameters", input, options)
end
@doc """
Returns information about DB proxies.
"""
def describe_d_b_proxies(client, input, options \\ []) do
request(client, "DescribeDBProxies", input, options)
end
@doc """
Returns information about DB proxy target groups, represented by
`DBProxyTargetGroup` data structures.
"""
def describe_d_b_proxy_target_groups(client, input, options \\ []) do
request(client, "DescribeDBProxyTargetGroups", input, options)
end
@doc """
Returns information about `DBProxyTarget` objects. This API supports
pagination.
"""
def describe_d_b_proxy_targets(client, input, options \\ []) do
request(client, "DescribeDBProxyTargets", input, options)
end
@doc """
Returns a list of `DBSecurityGroup` descriptions. If a
`DBSecurityGroupName` is specified, the list will contain only the
descriptions of the specified DB security group.
"""
def describe_d_b_security_groups(client, input, options \\ []) do
request(client, "DescribeDBSecurityGroups", input, options)
end
@doc """
Returns a list of DB snapshot attribute names and values for a manual DB
snapshot.
When sharing snapshots with other AWS accounts,
`DescribeDBSnapshotAttributes` returns the `restore` attribute and a list
of IDs for the AWS accounts that are authorized to copy or restore the
manual DB snapshot. If `all` is included in the list of values for the
`restore` attribute, then the manual DB snapshot is public and can be
copied or restored by all AWS accounts.
To add or remove access for an AWS account to copy or restore a manual DB
snapshot, or to make the manual DB snapshot public or private, use the
`ModifyDBSnapshotAttribute` API action.
"""
def describe_d_b_snapshot_attributes(client, input, options \\ []) do
request(client, "DescribeDBSnapshotAttributes", input, options)
end
@doc """
Returns information about DB snapshots. This API action supports
pagination.
"""
def describe_d_b_snapshots(client, input, options \\ []) do
request(client, "DescribeDBSnapshots", input, options)
end
@doc """
Returns a list of DBSubnetGroup descriptions. If a DBSubnetGroupName is
specified, the list will contain only the descriptions of the specified
DBSubnetGroup.
For an overview of CIDR ranges, go to the [Wikipedia
Tutorial](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing).
"""
def describe_d_b_subnet_groups(client, input, options \\ []) do
request(client, "DescribeDBSubnetGroups", input, options)
end
@doc """
Returns the default engine and system parameter information for the cluster
database engine.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
"""
def describe_engine_default_cluster_parameters(client, input, options \\ []) do
request(client, "DescribeEngineDefaultClusterParameters", input, options)
end
@doc """
Returns the default engine and system parameter information for the
specified database engine.
"""
def describe_engine_default_parameters(client, input, options \\ []) do
request(client, "DescribeEngineDefaultParameters", input, options)
end
@doc """
Displays a list of categories for all event source types, or, if specified,
for a specified source type. You can see a list of the event categories and
source types in [
Events](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Events.html)
in the *Amazon RDS User Guide.*
"""
def describe_event_categories(client, input, options \\ []) do
request(client, "DescribeEventCategories", input, options)
end
@doc """
Lists all the subscription descriptions for a customer account. The
description for a subscription includes `SubscriptionName`, `SNSTopicARN`,
`CustomerID`, `SourceType`, `SourceID`, `CreationTime`, and `Status`.
If you specify a `SubscriptionName`, lists the description for that
subscription.
"""
def describe_event_subscriptions(client, input, options \\ []) do
request(client, "DescribeEventSubscriptions", input, options)
end
@doc """
Returns events related to DB instances, DB clusters, DB parameter groups,
DB security groups, DB snapshots, and DB cluster snapshots for the past 14
days. Events specific to a particular DB instance, DB cluster, DB parameter
group, DB security group, DB snapshot, or DB cluster snapshot can be
obtained by providing the name as a parameter.
<note> By default, the past hour of events are returned.
</note>
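## Example

Illustrative call requesting the last 24 hours (1440 minutes) of events
for a single instance; parameter names follow the `DescribeEvents` query
API.

    {:ok, result, _response} =
      describe_events(client, %{
        "SourceType" => "db-instance",
        "SourceIdentifier" => "mydbinstance",
        "Duration" => 1440
      })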
"""
def describe_events(client, input, options \\ []) do
request(client, "DescribeEvents", input, options)
end
@doc """
Returns information about a snapshot export to Amazon S3. This API
operation supports pagination.
"""
def describe_export_tasks(client, input, options \\ []) do
request(client, "DescribeExportTasks", input, options)
end
@doc """
Returns information about Aurora global database clusters. This API
supports pagination.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def describe_global_clusters(client, input, options \\ []) do
request(client, "DescribeGlobalClusters", input, options)
end
@doc """
Describes the available installation media for a DB engine that requires an
on-premises customer provided license, such as Microsoft SQL Server.
"""
def describe_installation_media(client, input, options \\ []) do
request(client, "DescribeInstallationMedia", input, options)
end
@doc """
Describes all available options.
"""
def describe_option_group_options(client, input, options \\ []) do
request(client, "DescribeOptionGroupOptions", input, options)
end
@doc """
Describes the available option groups.
"""
def describe_option_groups(client, input, options \\ []) do
request(client, "DescribeOptionGroups", input, options)
end
@doc """
Returns a list of orderable DB instance options for the specified engine.
"""
def describe_orderable_d_b_instance_options(client, input, options \\ []) do
request(client, "DescribeOrderableDBInstanceOptions", input, options)
end
@doc """
Returns a list of resources (for example, DB instances) that have at least
one pending maintenance action.
"""
def describe_pending_maintenance_actions(client, input, options \\ []) do
request(client, "DescribePendingMaintenanceActions", input, options)
end
@doc """
Returns information about reserved DB instances for this account, or about
a specified reserved DB instance.
"""
def describe_reserved_d_b_instances(client, input, options \\ []) do
request(client, "DescribeReservedDBInstances", input, options)
end
@doc """
Lists available reserved DB instance offerings.
"""
def describe_reserved_d_b_instances_offerings(client, input, options \\ []) do
request(client, "DescribeReservedDBInstancesOfferings", input, options)
end
@doc """
Returns a list of the source AWS Regions where the current AWS Region can
create a read replica or copy a DB snapshot from. This API action supports
pagination.
"""
def describe_source_regions(client, input, options \\ []) do
request(client, "DescribeSourceRegions", input, options)
end
@doc """
You can call `DescribeValidDBInstanceModifications` to learn what
modifications you can make to your DB instance. You can use this
information when you call `ModifyDBInstance`.
"""
def describe_valid_d_b_instance_modifications(client, input, options \\ []) do
request(client, "DescribeValidDBInstanceModifications", input, options)
end
@doc """
Downloads all or a portion of the specified log file, up to 1 MB in size.
"""
def download_d_b_log_file_portion(client, input, options \\ []) do
request(client, "DownloadDBLogFilePortion", input, options)
end
@doc """
Forces a failover for a DB cluster.
A failover for a DB cluster promotes one of the Aurora Replicas (read-only
instances) in the DB cluster to be the primary instance (the cluster
writer).
Amazon Aurora will automatically fail over to an Aurora Replica, if one
exists, when the primary instance fails. You can force a failover when you
want to simulate a failure of a primary instance for testing. Because each
instance in a DB cluster has its own endpoint address, you will need to
clean up and re-establish any existing connections that use those endpoint
addresses when the failover is complete.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
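## Example

A hedged sketch — both identifiers are placeholders.

    {:ok, _result, _response} =
      failover_d_b_cluster(client, %{
        "DBClusterIdentifier" => "my-aurora-cluster",
        "TargetDBInstanceIdentifier" => "my-aurora-replica-1"
      })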
"""
def failover_d_b_cluster(client, input, options \\ []) do
request(client, "FailoverDBCluster", input, options)
end
@doc """
Imports the installation media for a DB engine that requires an on-premises
customer provided license, such as SQL Server.
"""
def import_installation_media(client, input, options \\ []) do
request(client, "ImportInstallationMedia", input, options)
end
@doc """
Lists all tags on an Amazon RDS resource.
For an overview on tagging an Amazon RDS resource, see [Tagging Amazon RDS
Resources](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.Tagging.html)
in the *Amazon RDS User Guide*.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Override the system-default Secure Sockets Layer/Transport Layer Security
(SSL/TLS) certificate for Amazon RDS for new DB instances temporarily, or
remove the override.
By using this operation, you can specify an RDS-approved SSL/TLS
certificate for new DB instances that is different from the default
certificate provided by RDS. You can also use this operation to remove the
override, so that new DB instances use the default certificate provided by
RDS.
You might need to override the default certificate in the following
situations:
<ul> <li> You already migrated your applications to support the latest
certificate authority (CA) certificate, but the new CA certificate is not
yet the RDS default CA certificate for the specified AWS Region.
</li> <li> RDS has already moved to a new default CA certificate for the
specified AWS Region, but you are still in the process of supporting the
new CA certificate. In this case, you temporarily need additional time to
finish your application changes.
</li> </ul> For more information about rotating your SSL/TLS certificate
for RDS DB engines, see [ Rotating Your SSL/TLS
Certificate](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL-certificate-rotation.html)
in the *Amazon RDS User Guide*.
For more information about rotating your SSL/TLS certificate for Aurora DB
engines, see [ Rotating Your SSL/TLS
Certificate](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/UsingWithRDS.SSL-certificate-rotation.html)
in the *Amazon Aurora User Guide*.
"""
def modify_certificates(client, input, options \\ []) do
request(client, "ModifyCertificates", input, options)
end
@doc """
Set the capacity of an Aurora Serverless DB cluster to a specific value.
Aurora Serverless scales seamlessly based on the workload on the DB
cluster. In some cases, the capacity might not scale fast enough to meet a
sudden change in workload, such as a large number of new transactions. Call
`ModifyCurrentDBClusterCapacity` to set the capacity explicitly.
After this call sets the DB cluster capacity, Aurora Serverless can
automatically scale the DB cluster based on the cooldown period for scaling
up and the cooldown period for scaling down.
For more information about Aurora Serverless, see [Using Amazon Aurora
Serverless](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-serverless.html)
in the *Amazon Aurora User Guide*.
<important> If you call `ModifyCurrentDBClusterCapacity` with the default
`TimeoutAction`, connections that prevent Aurora Serverless from finding a
scaling point might be dropped. For more information about scaling points,
see [ Autoscaling for Aurora
Serverless](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-serverless.how-it-works.html#aurora-serverless.how-it-works.auto-scaling)
in the *Amazon Aurora User Guide*.
</important> <note> This action only applies to Aurora DB clusters.
</note>
"""
def modify_current_d_b_cluster_capacity(client, input, options \\ []) do
request(client, "ModifyCurrentDBClusterCapacity", input, options)
end
@doc """
Modify a setting for an Amazon Aurora DB cluster. You can change one or
more database configuration parameters by specifying these parameters and
the new values in the request. For more information on Amazon Aurora, see [
What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def modify_d_b_cluster(client, input, options \\ []) do
request(client, "ModifyDBCluster", input, options)
end
@doc """
Modifies the properties of an endpoint in an Amazon Aurora DB cluster.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def modify_d_b_cluster_endpoint(client, input, options \\ []) do
request(client, "ModifyDBClusterEndpoint", input, options)
end
@doc """
Modifies the parameters of a DB cluster parameter group. To modify more
than one parameter, submit a list of the following: `ParameterName`,
`ParameterValue`, and `ApplyMethod`. A maximum of 20 parameters can be
modified in a single request.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> Changes to dynamic parameters are applied immediately. Changes to
static parameters require a reboot without failover to the DB cluster
associated with the parameter group before the change can take effect.
</note> <important> After you create a DB cluster parameter group, you
should wait at least 5 minutes before creating your first DB cluster that
uses that DB cluster parameter group as the default parameter group. This
allows Amazon RDS to fully complete the create action before the parameter
group is used as the default for a new DB cluster. This is especially
important for parameters that are critical when creating the default
database for a DB cluster, such as the character set for the default
database defined by the `character_set_database` parameter. You can use the
*Parameter Groups* option of the [Amazon RDS
console](https://console.aws.amazon.com/rds/) or the
`DescribeDBClusterParameters` action to verify that your DB cluster
parameter group has been created or modified.
If the modified DB cluster parameter group is used by an Aurora Serverless
cluster, Aurora applies the update immediately. The cluster restart might
interrupt your workload. In that case, your application must reopen any
connections and retry any transactions that were active when the parameter
changes took effect.
</important> <note> This action only applies to Aurora DB clusters.
</note>
"""
def modify_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "ModifyDBClusterParameterGroup", input, options)
end
@doc """
Adds an attribute and values to, or removes an attribute and values from, a
manual DB cluster snapshot.
To share a manual DB cluster snapshot with other AWS accounts, specify
`restore` as the `AttributeName` and use the `ValuesToAdd` parameter to add
a list of IDs of the AWS accounts that are authorized to restore the manual
DB cluster snapshot. Use the value `all` to make the manual DB cluster
snapshot public, which means that it can be copied or restored by all AWS
accounts.
<note> Don't add the `all` value for any manual DB cluster snapshots that
contain private information that you don't want available to all AWS
accounts.
</note> If a manual DB cluster snapshot is encrypted, it can be shared, but
only by specifying a list of authorized AWS account IDs for the
`ValuesToAdd` parameter. You can't use `all` as a value for that parameter
in this case.
To view which AWS accounts have access to copy or restore a manual DB
cluster snapshot, or whether a manual DB cluster snapshot is public or
private, use the `DescribeDBClusterSnapshotAttributes` API action. The
accounts are returned as values for the `restore` attribute.
<note> This action only applies to Aurora DB clusters.
</note>
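## Example

Shares a manual DB cluster snapshot with one AWS account. The identifiers
are placeholders, and passing `ValuesToAdd` as a list assumes the client's
query encoder flattens list parameters.

    {:ok, _result, _response} =
      modify_d_b_cluster_snapshot_attribute(client, %{
        "DBClusterSnapshotIdentifier" => "my-cluster-snapshot",
        "AttributeName" => "restore",
        "ValuesToAdd" => ["123456789012"]
      })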
"""
def modify_d_b_cluster_snapshot_attribute(client, input, options \\ []) do
request(client, "ModifyDBClusterSnapshotAttribute", input, options)
end
@doc """
Modifies settings for a DB instance. You can change one or more database
configuration parameters by specifying these parameters and the new values
in the request. To learn what modifications you can make to your DB
instance, call `DescribeValidDBInstanceModifications` before you call
`ModifyDBInstance`.
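## Example

Illustrative: scales the instance class and applies the change immediately
instead of waiting for the next maintenance window.

    {:ok, _result, _response} =
      modify_d_b_instance(client, %{
        "DBInstanceIdentifier" => "mydb",
        "DBInstanceClass" => "db.r5.large",
        "ApplyImmediately" => true
      })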
"""
def modify_d_b_instance(client, input, options \\ []) do
request(client, "ModifyDBInstance", input, options)
end
@doc """
Modifies the parameters of a DB parameter group. To modify more than one
parameter, submit a list of the following: `ParameterName`,
`ParameterValue`, and `ApplyMethod`. A maximum of 20 parameters can be
modified in a single request.
<note> Changes to dynamic parameters are applied immediately. Changes to
static parameters require a reboot without failover to the DB instance
associated with the parameter group before the change can take effect.
</note> <important> After you modify a DB parameter group, you should wait
at least 5 minutes before creating your first DB instance that uses that DB
parameter group as the default parameter group. This allows Amazon RDS to
fully complete the modify action before the parameter group is used as the
default for a new DB instance. This is especially important for parameters
that are critical when creating the default database for a DB instance,
such as the character set for the default database defined by the
`character_set_database` parameter. You can use the *Parameter Groups*
option of the [Amazon RDS console](https://console.aws.amazon.com/rds/) or
the *DescribeDBParameters* command to verify that your DB parameter group
has been created or modified.
</important>
"""
def modify_d_b_parameter_group(client, input, options \\ []) do
request(client, "ModifyDBParameterGroup", input, options)
end
@doc """
Changes the settings for an existing DB proxy.
"""
def modify_d_b_proxy(client, input, options \\ []) do
request(client, "ModifyDBProxy", input, options)
end
@doc """
Modifies the properties of a `DBProxyTargetGroup`.
"""
def modify_d_b_proxy_target_group(client, input, options \\ []) do
request(client, "ModifyDBProxyTargetGroup", input, options)
end
@doc """
Updates a manual DB snapshot with a new engine version. The snapshot can be
encrypted or unencrypted, but not shared or public.
Amazon RDS supports upgrading DB snapshots for MySQL, Oracle, and
PostgreSQL.
"""
def modify_d_b_snapshot(client, input, options \\ []) do
request(client, "ModifyDBSnapshot", input, options)
end
@doc """
Adds an attribute and values to, or removes an attribute and values from, a
manual DB snapshot.
To share a manual DB snapshot with other AWS accounts, specify `restore` as
the `AttributeName` and use the `ValuesToAdd` parameter to add a list of
IDs of the AWS accounts that are authorized to restore the manual DB
snapshot. Use the value `all` to make the manual DB snapshot public, which
means it can be copied or restored by all AWS accounts.
<note> Don't add the `all` value for any manual DB snapshots that contain
private information that you don't want available to all AWS accounts.
</note> If the manual DB snapshot is encrypted, it can be shared, but only
by specifying a list of authorized AWS account IDs for the `ValuesToAdd`
parameter. You can't use `all` as a value for that parameter in this case.
To view which AWS accounts have access to copy or restore a manual DB
snapshot, or whether a manual DB snapshot is public or private, use the
`DescribeDBSnapshotAttributes` API action. The accounts are returned as
values for the `restore` attribute.
"""
def modify_d_b_snapshot_attribute(client, input, options \\ []) do
request(client, "ModifyDBSnapshotAttribute", input, options)
end
@doc """
Modifies an existing DB subnet group. DB subnet groups must contain at
least one subnet in at least two AZs in the AWS Region.
"""
def modify_d_b_subnet_group(client, input, options \\ []) do
request(client, "ModifyDBSubnetGroup", input, options)
end
@doc """
Modifies an existing RDS event notification subscription. You can't modify
the source identifiers using this call. To change source identifiers for a
subscription, use the `AddSourceIdentifierToSubscription` and
`RemoveSourceIdentifierFromSubscription` calls.
You can see a list of the event categories for a given source type
(`SourceType`) in
[Events](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Events.html)
in the *Amazon RDS User Guide* or by using the `DescribeEventCategories`
operation.
"""
def modify_event_subscription(client, input, options \\ []) do
request(client, "ModifyEventSubscription", input, options)
end
@doc """
Modify a setting for an Amazon Aurora global cluster. You can change one or
more database configuration parameters by specifying these parameters and
the new values in the request. For more information on Amazon Aurora, see [
What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def modify_global_cluster(client, input, options \\ []) do
request(client, "ModifyGlobalCluster", input, options)
end
@doc """
Modifies an existing option group.
"""
def modify_option_group(client, input, options \\ []) do
request(client, "ModifyOptionGroup", input, options)
end
@doc """
Promotes a read replica DB instance to a standalone DB instance.
<note> <ul> <li> Backup duration is a function of the amount of changes to
the database since the previous backup. If you plan to promote a read
replica to a standalone instance, we recommend that you enable backups and
complete at least one backup prior to promotion. In addition, a read
replica cannot be promoted to a standalone instance when it is in the
`backing-up` status. If you have enabled backups on your read replica,
configure the automated backup window so that daily backups do not
interfere with read replica promotion.
</li> <li> This command doesn't apply to Aurora MySQL and Aurora
PostgreSQL.
</li> </ul> </note>
"""
def promote_read_replica(client, input, options \\ []) do
request(client, "PromoteReadReplica", input, options)
end
@doc """
Promotes a read replica DB cluster to a standalone DB cluster.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def promote_read_replica_d_b_cluster(client, input, options \\ []) do
request(client, "PromoteReadReplicaDBCluster", input, options)
end
@doc """
Purchases a reserved DB instance offering.
"""
def purchase_reserved_d_b_instances_offering(client, input, options \\ []) do
request(client, "PurchaseReservedDBInstancesOffering", input, options)
end
@doc """
You might need to reboot your DB instance, usually for maintenance reasons.
For example, if you make certain modifications, or if you change the DB
parameter group associated with the DB instance, you must reboot the
instance for the changes to take effect.
Rebooting a DB instance restarts the database engine service. Rebooting a
DB instance results in a momentary outage, during which the DB instance
status is set to rebooting.
For more information about rebooting, see [Rebooting a DB
Instance](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_RebootInstance.html)
in the *Amazon RDS User Guide.*
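## Example

A minimal sketch; the identifier is a placeholder.

    {:ok, _result, _response} =
      reboot_d_b_instance(client, %{"DBInstanceIdentifier" => "mydb"})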
"""
def reboot_d_b_instance(client, input, options \\ []) do
request(client, "RebootDBInstance", input, options)
end
@doc """
Associates one or more `DBProxyTarget` data structures with a
`DBProxyTargetGroup`.
"""
def register_d_b_proxy_targets(client, input, options \\ []) do
request(client, "RegisterDBProxyTargets", input, options)
end
@doc """
Detaches an Aurora secondary cluster from an Aurora global database
cluster. The cluster becomes a standalone cluster with read-write
capability instead of being read-only and receiving data from a primary
cluster in a different region.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def remove_from_global_cluster(client, input, options \\ []) do
request(client, "RemoveFromGlobalCluster", input, options)
end
@doc """
Disassociates an AWS Identity and Access Management (IAM) role from an
Amazon Aurora DB cluster. For more information, see [Authorizing Amazon
Aurora MySQL to Access Other AWS Services on Your Behalf
](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Integrating.Authorizing.html)
in the *Amazon Aurora User Guide*.
<note> This action only applies to Aurora DB clusters.
</note>
"""
def remove_role_from_d_b_cluster(client, input, options \\ []) do
request(client, "RemoveRoleFromDBCluster", input, options)
end
@doc """
Disassociates an AWS Identity and Access Management (IAM) role from a DB
instance.
"""
def remove_role_from_d_b_instance(client, input, options \\ []) do
request(client, "RemoveRoleFromDBInstance", input, options)
end
@doc """
Removes a source identifier from an existing RDS event notification
subscription.
"""
def remove_source_identifier_from_subscription(client, input, options \\ []) do
request(client, "RemoveSourceIdentifierFromSubscription", input, options)
end
@doc """
Removes metadata tags from an Amazon RDS resource.
For an overview on tagging an Amazon RDS resource, see [Tagging Amazon RDS
Resources](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.Tagging.html)
in the *Amazon RDS User Guide.*
"""
def remove_tags_from_resource(client, input, options \\ []) do
request(client, "RemoveTagsFromResource", input, options)
end
@doc """
Modifies the parameters of a DB cluster parameter group to the default
value. To reset specific parameters submit a list of the following:
`ParameterName` and `ApplyMethod`. To reset the entire DB cluster parameter
group, specify the `DBClusterParameterGroupName` and `ResetAllParameters`
parameters.
When resetting the entire group, dynamic parameters are updated immediately
and static parameters are set to `pending-reboot` to take effect on the
next DB instance restart or `RebootDBInstance` request. You must call
`RebootDBInstance` for every DB instance in your DB cluster that you want
the updated static parameter to apply to.
For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
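## Example

Illustrative: resets every parameter in the group to its default value.

    {:ok, _result, _response} =
      reset_d_b_cluster_parameter_group(client, %{
        "DBClusterParameterGroupName" => "my-cluster-params",
        "ResetAllParameters" => true
      })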
"""
def reset_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "ResetDBClusterParameterGroup", input, options)
end
@doc """
Modifies the parameters of a DB parameter group to the engine/system
default value. To reset specific parameters, provide a list of the
following: `ParameterName` and `ApplyMethod`. To reset the entire DB
parameter group, specify the `DBParameterGroup` name and
`ResetAllParameters` parameters. When resetting the entire group, dynamic
parameters are updated immediately and static parameters are set to
`pending-reboot` to take effect on the next DB instance restart or
`RebootDBInstance` request.
"""
def reset_d_b_parameter_group(client, input, options \\ []) do
request(client, "ResetDBParameterGroup", input, options)
end
@doc """
Creates an Amazon Aurora DB cluster from MySQL data stored in an Amazon S3
bucket. Amazon RDS must be authorized to access the Amazon S3 bucket and
the data must be created using the Percona XtraBackup utility as described
in [ Migrating Data from MySQL by Using an Amazon S3
Bucket](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Migrating.ExtMySQL.html#AuroraMySQL.Migrating.ExtMySQL.S3)
in the *Amazon Aurora User Guide*.
<note> This action only restores the DB cluster, not the DB instances for
that DB cluster. You must invoke the `CreateDBInstance` action to create DB
instances for the restored DB cluster, specifying the identifier of the
restored DB cluster in `DBClusterIdentifier`. You can create DB instances
only after the `RestoreDBClusterFromS3` action has completed and the DB
cluster is available.
</note> For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters. The source DB engine
must be MySQL.
</note>
"""
def restore_d_b_cluster_from_s3(client, input, options \\ []) do
request(client, "RestoreDBClusterFromS3", input, options)
end
@doc """
Creates a new DB cluster from a DB snapshot or DB cluster snapshot. This
action only applies to Aurora DB clusters.
The target DB cluster is created from the source snapshot with a default
configuration. If you don't specify a security group, the new DB cluster is
associated with the default security group.
<note> This action only restores the DB cluster, not the DB instances for
that DB cluster. You must invoke the `CreateDBInstance` action to create DB
instances for the restored DB cluster, specifying the identifier of the
restored DB cluster in `DBClusterIdentifier`. You can create DB instances
only after the `RestoreDBClusterFromSnapshot` action has completed and the
DB cluster is available.
</note> For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def restore_d_b_cluster_from_snapshot(client, input, options \\ []) do
request(client, "RestoreDBClusterFromSnapshot", input, options)
end
@doc """
Restores a DB cluster to an arbitrary point in time. Users can restore to
any point in time before `LatestRestorableTime` for up to
`BackupRetentionPeriod` days. The target DB cluster is created from the
source DB cluster with the same configuration as the original DB cluster,
except that the new DB cluster is created with the default DB security
group.
<note> This action only restores the DB cluster, not the DB instances for
that DB cluster. You must invoke the `CreateDBInstance` action to create DB
instances for the restored DB cluster, specifying the identifier of the
restored DB cluster in `DBClusterIdentifier`. You can create DB instances
only after the `RestoreDBClusterToPointInTime` action has completed and the
DB cluster is available.
</note> For more information on Amazon Aurora, see [ What Is Amazon
Aurora?](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def restore_d_b_cluster_to_point_in_time(client, input, options \\ []) do
request(client, "RestoreDBClusterToPointInTime", input, options)
end
@doc """
Creates a new DB instance from a DB snapshot. The target database is
created from the source database restore point with most of the original
configuration, but with the default security group and the default DB
parameter group. By default, the new DB instance is created as a single-AZ
deployment
except when the instance is a SQL Server instance that has an option group
that is associated with mirroring; in this case, the instance becomes a
mirrored AZ deployment and not a single-AZ deployment.
If your intent is to replace your original DB instance with the new,
restored DB instance, then rename your original DB instance before you call
the RestoreDBInstanceFromDBSnapshot action. RDS doesn't allow two DB
instances with the same name. Once you have renamed your original DB
instance with a different identifier, then you can pass the original name
of the DB instance as the DBInstanceIdentifier in the call to the
RestoreDBInstanceFromDBSnapshot action. The result is that you will replace
the original DB instance with the DB instance created from the snapshot.
If you are restoring from a shared manual DB snapshot, the
`DBSnapshotIdentifier` must be the ARN of the shared DB snapshot.
<note> This command doesn't apply to Aurora MySQL and Aurora PostgreSQL.
For Aurora, use `RestoreDBClusterFromSnapshot`.
</note>
"""
def restore_d_b_instance_from_d_b_snapshot(client, input, options \\ []) do
request(client, "RestoreDBInstanceFromDBSnapshot", input, options)
end
@doc """
Amazon Relational Database Service (Amazon RDS) supports importing MySQL
databases by using backup files. You can create a backup of your
on-premises database, store it on Amazon Simple Storage Service (Amazon
S3), and then restore the backup file onto a new Amazon RDS DB instance
running MySQL. For more information, see [Importing Data into an Amazon RDS
MySQL DB
Instance](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/MySQL.Procedural.Importing.html)
in the *Amazon RDS User Guide.*
"""
def restore_d_b_instance_from_s3(client, input, options \\ []) do
request(client, "RestoreDBInstanceFromS3", input, options)
end
@doc """
Restores a DB instance to an arbitrary point in time. You can restore to
any point in time before the time identified by the LatestRestorableTime
property. You can restore to a point up to the number of days specified by
the BackupRetentionPeriod property.
The target database is created with most of the original configuration, but
in a system-selected Availability Zone, with the default security group,
the default subnet group, and the default DB parameter group. By default,
the new DB instance is created as a single-AZ deployment except when the
instance is a SQL Server instance that has an option group that is
associated with mirroring; in this case, the instance becomes a mirrored
deployment and not a single-AZ deployment.
<note> This command doesn't apply to Aurora MySQL and Aurora PostgreSQL.
For Aurora, use `RestoreDBClusterToPointInTime`.
</note>
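## Example

Restores to the most recent restorable time; parameter names follow the
query API and identifiers are placeholders.

    {:ok, _result, _response} =
      restore_d_b_instance_to_point_in_time(client, %{
        "SourceDBInstanceIdentifier" => "mydb",
        "TargetDBInstanceIdentifier" => "mydb-restored",
        "UseLatestRestorableTime" => true
      })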
"""
def restore_d_b_instance_to_point_in_time(client, input, options \\ []) do
request(client, "RestoreDBInstanceToPointInTime", input, options)
end
@doc """
Revokes ingress from a DBSecurityGroup for previously authorized IP ranges
or EC2 or VPC Security Groups. Required parameters for this API are one of
CIDRIP, EC2SecurityGroupId for VPC, or (EC2SecurityGroupOwnerId and either
EC2SecurityGroupName or EC2SecurityGroupId).
"""
def revoke_d_b_security_group_ingress(client, input, options \\ []) do
request(client, "RevokeDBSecurityGroupIngress", input, options)
end
@doc """
Starts a database activity stream to monitor activity on the database. For
more information, see [Database Activity
Streams](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/DBActivityStreams.html)
in the *Amazon Aurora User Guide*.
"""
def start_activity_stream(client, input, options \\ []) do
request(client, "StartActivityStream", input, options)
end
@doc """
Starts an Amazon Aurora DB cluster that was stopped using the AWS console,
the stop-db-cluster AWS CLI command, or the StopDBCluster action.
For more information, see [ Stopping and Starting an Aurora
Cluster](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-cluster-stop-start.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def start_d_b_cluster(client, input, options \\ []) do
request(client, "StartDBCluster", input, options)
end
@doc """
Starts an Amazon RDS DB instance that was stopped using the AWS console,
the stop-db-instance AWS CLI command, or the StopDBInstance action.
For more information, see [ Starting an Amazon RDS DB instance That Was
Previously
Stopped](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_StartInstance.html)
in the *Amazon RDS User Guide.*
<note> This command doesn't apply to Aurora MySQL and Aurora PostgreSQL.
For Aurora DB clusters, use `StartDBCluster` instead.
</note>
"""
def start_d_b_instance(client, input, options \\ []) do
request(client, "StartDBInstance", input, options)
end
@doc """
Starts an export of a snapshot to Amazon S3. The provided IAM role must
have access to the S3 bucket.
"""
def start_export_task(client, input, options \\ []) do
request(client, "StartExportTask", input, options)
end
@doc """
Stops a database activity stream that was started using the AWS console,
the `start-activity-stream` AWS CLI command, or the `StartActivityStream`
action.
For more information, see [Database Activity
Streams](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/DBActivityStreams.html)
in the *Amazon Aurora User Guide*.
"""
def stop_activity_stream(client, input, options \\ []) do
request(client, "StopActivityStream", input, options)
end
@doc """
Stops an Amazon Aurora DB cluster. When you stop a DB cluster, Aurora
retains the DB cluster's metadata, including its endpoints and DB parameter
groups. Aurora also retains the transaction logs so you can do a
point-in-time restore if necessary.
For more information, see [ Stopping and Starting an Aurora
Cluster](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-cluster-stop-start.html)
in the *Amazon Aurora User Guide.*
<note> This action only applies to Aurora DB clusters.
</note>
"""
def stop_d_b_cluster(client, input, options \\ []) do
request(client, "StopDBCluster", input, options)
end
@doc """
Stops an Amazon RDS DB instance. When you stop a DB instance, Amazon RDS
retains the DB instance's metadata, including its endpoint, DB parameter
group, and option group membership. Amazon RDS also retains the transaction
logs so you can do a point-in-time restore if necessary.
For more information, see [ Stopping an Amazon RDS DB Instance
Temporarily](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_StopInstance.html)
in the *Amazon RDS User Guide.*
<note> This command doesn't apply to Aurora MySQL and Aurora PostgreSQL.
For Aurora clusters, use `StopDBCluster` instead.
</note>
"""
def stop_d_b_instance(client, input, options \\ []) do
request(client, "StopDBInstance", input, options)
end
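# Shared request plumbing: every public function above delegates here.
# The action name and API version are merged into the input map, the
# payload is encoded with the AWS query protocol, the request is signed
# with Signature V4, and the result is POSTed to the RDS endpoint.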
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "rds"}
host = build_host("rds", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-www-form-urlencoded"}
]
input = Map.merge(input, %{"Action" => action, "Version" => "2014-10-31"})
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{proto: proto, port: port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :query)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :xml)
end
end | lib/aws/generated/rds.ex | 0.854217 | 0.602266 | rds.ex | starcoder |
defmodule Matrix.Agents do
@moduledoc """
  Holds state about possible agent types and currently running agents in the cluster.
  This module is meant to be used when agent centers are registered to / unregistered
  from the cluster.
  When a new agent center is registered to the cluster, its available agent types are
  sent to every other agent center.
  When an agent center is unregistered from the cluster, its agent types are deleted
  from every other agent center.
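  ## Example
      Agents.add_types("Mars", [%AgentType{name: "Ping", module: "Agents"}])
      Agents.types_for("Mars")
      # => [%AgentType{name: "Ping", module: "Agents"}]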
"""
use GenServer
alias Matrix.{Env, Cluster, AgentCenter, AgentType}
defmodule State do
@moduledoc """
Represents state of agents in cluster.
* agent_types - All supported agent types in cluster.
Map where key is agent center alias and value is list of agent types
available on that agent center.
* running_agents - Running agents in cluster.
Map where key is agent center alias and value is list of Agent structs
running on that agent center.
"""
defstruct agent_types: %{}, running_agents: %{}
@type t :: %__MODULE__{agent_types: Map.t, running_agents: Map.t}
end
def start_link(args \\ []) do
GenServer.start_link(__MODULE__, args, name: __MODULE__)
end
# Client API
@doc """
Adds list of agent types to given agent center.
Args:
* `agent_center` - Alias of agent center
* `types` - List of supported agent types
## Example
Agents.add_types("Mars", [%AgentType{name: "Ping", module: "Test"}])
"""
  @spec add_types(agent_center :: String.t, types :: [Matrix.AgentType.t]) :: :ok
def add_types(agent_center, types) do
GenServer.cast(__MODULE__, {:add_types, agent_center, types})
end
@doc """
Deletes agent types of given agent center.
Args:
* `agent_center` - Alias of agent center
## Example
Agents.delete_types_for("Mars")
"""
@spec delete_types_for(agent_center :: String.t) :: :ok
def delete_types_for(agent_center) do
GenServer.cast(__MODULE__, {:delete_types, agent_center})
end
@doc """
Returns all supported agent types in cluster for each agent center.
## Example
Agents.types
# => `%{"Mars" => [%AgentType{name: "Ping", module: "Agents"}]}`
"""
@spec types :: %{required(String.t) => list(Matrix.AgentType.t)}
def types do
GenServer.call(__MODULE__, {:types})
end
@doc """
Returns list of supported agent types for given agent center.
## Example
Agents.add_types("Mars", [%AgentType{name: "Ping", module: "Agents"}])
Agents.types_for("Mars")
# => [`%AgentType{name: "Ping", module: "Agents"}]
"""
@spec types_for(agent_center :: String.t) :: list(Matrix.AgentType.t)
def types_for(agent_center) do
GenServer.call(__MODULE__, {:types, agent_center})
end
@spec find_agent_center_with_type(type :: AgentType.t) :: AgentCenter.t | nil
def find_agent_center_with_type(type) do
GenServer.call(__MODULE__, {:find_agent_center_with_type, type})
end
@doc """
Returns all running agents on cluster.
## Example
Agents.running
      # => `[%Agent{id: %AID{}}]`
"""
  @spec running :: [Matrix.Agent.t]
def running do
GenServer.call(__MODULE__, {:running})
end
@doc """
Returns all running agents on given agent center.
## Example
Agents.running_on("Mars")
      # => `[%Agent{id: %AID{}}]`
"""
  @spec running_on(agent_center :: String.t) :: [Matrix.Agent.t]
def running_on(agent_center) do
GenServer.call(__MODULE__, {:running, agent_center})
end
@doc """
Returns map where key is agent center alias,
value is list of running agents on that center.
## Example
Agents.running_per_agent_center
# => `%{"Mars" => [%Agent{id: %AID{}}]}`
"""
@spec running_per_agent_center :: Map.t
def running_per_agent_center do
GenServer.call(__MODULE__, {:running_per_agent_center})
end
@doc """
Adds new agent for given agent center.
## Example
Agents.add_running("Mars", %Agent{})
"""
  @spec add_running(agent_center :: String.t, running_agents :: [Matrix.Agent.t]) :: :ok
def add_running(agent_center, running_agents) do
GenServer.cast(__MODULE__, {:add_running, agent_center, running_agents})
end
@doc """
Deletes running agent from agent center.
## Example
ping = Agent.new(...)
Agents.delete_running(ping)
"""
  @spec delete_running(agent :: Matrix.Agent.t) :: any
def delete_running(agent) do
GenServer.cast(__MODULE__, {:delete_running, agent})
end
@spec delete_running_for(agent_center :: String.t) :: :ok
def delete_running_for(agent_center) do
GenServer.cast(__MODULE__, {:delete_running_for, agent_center})
end
@doc """
Resets data about agent types and running agents.
"""
@spec reset :: :ok
def reset do
GenServer.cast(__MODULE__, {:reset})
end
@spec exists?(name :: String.t) :: boolean
def exists?(name) do
running() |> Enum.any?(fn agent -> agent.id.name == name end)
end
  @spec find_by_name(agent_name :: String.t) :: Matrix.Agent.t | nil
def find_by_name(agent_name) do
running() |> Enum.find(fn agent -> agent.id.name =~ agent_name end)
end
# Server callbacks
def handle_call({:types}, _from, state) do
{:reply, state.agent_types, state}
end
def handle_call({:types, aliaz}, _from, state) do
{:reply, state.agent_types[aliaz] || [], state}
end
def handle_call({:running}, _from, state) do
running_agents =
state.running_agents
|> Enum.reduce([], fn {_, agents}, acc ->
acc ++ agents
end)
{:reply, running_agents, state}
end
def handle_call({:running, agent_center}, _from, state) do
{:reply, state.running_agents[agent_center] || [], state}
end
def handle_call({:running_per_agent_center}, _from, state) do
{:reply, state.running_agents, state}
end
def handle_call({:find_agent_center_with_type, type}, _from, state) do
pair =
state.agent_types
|> Enum.find(fn {aliaz, agent_types} ->
(Env.this_aliaz != aliaz) && (type in agent_types)
end)
case pair do
{aliaz, _} ->
{:reply, %AgentCenter{aliaz: aliaz, address: Cluster.address_for(aliaz)}, state}
nil ->
{:reply, nil, state}
_ -> raise "Invalid state struct"
end
end
def handle_cast({:add_types, aliaz, types}, state) do
types = state.agent_types |> Map.put_new(aliaz, types)
{:noreply, %State{agent_types: types, running_agents: state.running_agents}}
end
def handle_cast({:add_running, aliaz, running_agents}, state) do
agents = (state.running_agents[aliaz] || []) ++ running_agents
running_agents_map = Map.put(state.running_agents, aliaz, agents)
{:noreply, %State{agent_types: state.agent_types, running_agents: running_agents_map}}
end
def handle_cast({:delete_types, aliaz}, state) do
types = state.agent_types |> Map.delete(aliaz)
{:noreply, %State{agent_types: types, running_agents: state.running_agents}}
end
def handle_cast({:delete_running, agent}, state) do
new_running_agents = state.running_agents[agent.id.host.aliaz] |> List.delete(agent)
{:noreply, put_in(state.running_agents[agent.id.host.aliaz], new_running_agents)}
end
def handle_cast({:delete_running_for, agent_center}, state) do
{:noreply, put_in(state.running_agents[agent_center], [])}
end
def handle_cast({:reset}, _state) do
{:noreply, %State{}}
end
def init(agent_types) do
{:ok, %State{agent_types: %{Matrix.Env.this_aliaz => agent_types}}}
end
end | lib/matrix/agents.ex | 0.888233 | 0.711152 | agents.ex | starcoder |
defmodule QRCode do
@default_ecc :M
@doc """
## about `version`
Todo.
## about `ecc`
- 'L': recovers 7% of data
- 'M': recovers 15% of data (default)
- 'Q': recovers 25% of data
- 'H': recovers 30% of data
## about `dimension`
Todo.
## about `data`
Todo.
"""
defstruct version: nil, ecc: nil, dimension: nil, data: nil
@doc """
  Encodes `text` as a QR code according to ISO/IEC 18004, returning a `%QRCode{}` struct.
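  ## Example
      iex> %QRCode{dimension: dimension} = QRCode.encode("Hello, World!")
      iex> is_integer(dimension)
      true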
"""
def encode(text, ecc \\ @default_ecc) when is_binary(text) do
    {:qrcode, version, ecc, dimension, data} = :qrcode.encode(text, ecc)
%QRCode{
version: version,
ecc: ecc,
dimension: dimension,
data: data
}
end
@doc """
  Returns the QR code as a string consisting of `#` (dark) and `.` (light) characters.
## Examples
iex> QRCode.as_ascii("Hello, World!", ecc: :M) |> IO.puts
:ok
"""
def as_ascii(text, opts \\ []) when is_binary(text) do
ecc = Keyword.get(opts, :ecc, @default_ecc)
%QRCode{dimension: dimension, data: data} = encode(text, ecc)
nl = "\n"
data
|> to_ascii()
|> Enum.chunk_every(dimension)
|> Enum.join(nl)
|> (fn s -> s <> nl end).()
end
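  # Walks the bitstring one module (bit) at a time: 0 bits map to the
  # background character, 1 bits to the foreground character. Characters are
  # accumulated in reverse and flipped once at the end for a linear-time build.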
defp to_ascii(list), do: to_ascii(list, [])
defp to_ascii(<< 0 :: size(1), tail :: bitstring >>, acc) do
bg = "."
to_ascii(tail, [bg | acc])
end
defp to_ascii(<< 1 :: size(1), tail :: bitstring >>, acc) do
fg = "#"
to_ascii(tail, [fg | acc])
end
defp to_ascii(<<>>, acc) do
Enum.reverse(acc)
end
@doc """
  Returns the QR code as an ANSI-escaped string suitable for printing to a terminal.
## Examples
iex> QRCode.as_ansi("Hello, World!", ecc: :M) |> IO.puts
:ok
"""
def as_ansi(text, opts \\ []) when is_binary(text) do
ecc = Keyword.get(opts, :ecc, @default_ecc)
%QRCode{dimension: dimension, data: data} = encode(text, ecc)
nl = IO.ANSI.reset() <> "\n"
data
|> to_ansi()
|> Enum.chunk_every(dimension)
|> Enum.join(nl)
|> (fn s -> s <> nl end).()
end
defp to_ansi(list), do: to_ansi(list, [])
defp to_ansi(<< 0 :: size(1), tail :: bitstring >>, acc) do
bg = IO.ANSI.white_background() <> " "
to_ansi(tail, [bg | acc])
end
defp to_ansi(<< 1 :: size(1), tail :: bitstring >>, acc) do
fg = IO.ANSI.black_background() <> " "
to_ansi(tail, [fg | acc])
end
defp to_ansi(<<>>, acc) do
Enum.reverse(acc)
end
@doc """
  Returns the QR code as a string in SVG format.
## Examples
iex> QRCode.as_svg("Hello, World!", ecc: :M) |> IO.puts
:ok
"""
def as_svg(text, opts \\ []) when is_binary(text) do
ecc = Keyword.get(opts, :ecc, @default_ecc)
type = Keyword.get(opts, :type, :file)
block_size = Keyword.get(opts, :size, 8)
padding_size = Keyword.get(opts, :padding_size, 16)
fg_color = Keyword.get(opts, :fg_color, "#000000")
bg_color = Keyword.get(opts, :bg_color, "#ffffff")
%QRCode{dimension: dimension, data: data} = encode(text, ecc)
size = block_size * dimension + 2 * padding_size
bg = generate_svg_block(0, 0, size, bg_color)
blocks = data
|> to_ascii()
|> Enum.chunk_every(dimension)
|> Enum.with_index
|> Enum.map(fn({row, i}) ->
row
|> Enum.with_index
|> Enum.map(fn ({block, j}) -> { block, i, j } end)
end)
|> Enum.concat()
|> Enum.filter(fn ({block, _i, _j}) -> block === "#" end)
|> Enum.map(fn {_block, i, j} ->
      # j indexes the column (horizontal x); i indexes the row (vertical y),
      # matching the row-major chunking used in as_ascii/2.
      x = j * block_size + padding_size
      y = i * block_size + padding_size
generate_svg_block(x, y, block_size, fg_color)
end)
|> Enum.join("")
generate_svg(size, bg, blocks, type: type)
end
defp generate_svg_block(x, y, block_size, color) do
"""
<rect x="#{x}" y="#{y}" width="#{block_size}" height="#{block_size}" style="fill: #{color}; shape-rendering: crispEdges;"/>
"""
end
defp generate_svg(size, bg, blocks, opts) do
type = Keyword.get(opts, :type)
header = case type do
:file ->
"<?xml version=\"1.0\" standalone=\"yes\"?>\n"
:embeded ->
""
end
"""
#{header}<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="#{size}" height="#{size}">
#{bg}
#{blocks}
</svg>
"""
end
end | lib/ex_qrcode.ex | 0.718989 | 0.566378 | ex_qrcode.ex | starcoder |
defmodule Rubber.Search do
@moduledoc """
The search APIs are used to query indices.
[Elastic documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/search.html)
"""
import Rubber.HTTP, only: [prepare_url: 2]
alias Rubber.{HTTP, JSON}
@doc """
Makes a request to the `_search` or the `_msearch` endpoint depending on the type of
`data`.
When passing a map for data, it'll make a simple search, but you can pass a list of
header and body params to make a [multi search](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html).
## Examples
iex> Rubber.Search.search("http://localhost:9200", "twitter", ["tweet"], %{query: %{term: %{user: "kimchy"}}})
{:ok, %HTTPoison.Response{...}}
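  For a multi search, pass a flat list of alternating header and body maps;
  each element is serialized as one newline-delimited JSON line for the
  `_msearch` endpoint:
      iex> Rubber.Search.search("http://localhost:9200", "twitter", [], [%{}, %{query: %{term: %{user: "kimchy"}}}])
      {:ok, %HTTPoison.Response{...}}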
"""
@spec search(
elastic_url :: String.t(),
index :: String.t(),
types :: list,
data :: map | list
) :: HTTP.resp()
def search(elastic_url, index, types, data) when is_list(data),
do: search(elastic_url, index, types, data, [])
def search(elastic_url, index, types, data),
do: search(elastic_url, index, types, data, [])
@doc """
Same as `search/4` but allows to specify query params and options for
[`HTTPoison.request/5`](https://hexdocs.pm/httpoison/HTTPoison.html#request/5).
"""
@spec search(
elastic_url :: String.t(),
index :: String.t(),
types :: list,
data :: map | list,
query_params :: Keyword.t(),
options :: Keyword.t()
) :: HTTP.resp()
def search(elastic_url, index, types, data, query_params, options \\ [])
def search(elastic_url, index, types, data, query_params, options)
when is_list(data) do
data =
Enum.reduce(data, [], fn d, acc -> ["\n", JSON.encode!(d) | acc] end)
|> Enum.reverse()
|> IO.iodata_to_binary()
prepare_url(elastic_url, make_path(index, types, query_params, "_msearch"))
|> HTTP.post(data, [], options)
end
def search(elastic_url, index, types, data, query_params, options) do
prepare_url(elastic_url, make_path(index, types, query_params))
|> HTTP.post(JSON.encode!(data), [], options)
end
@doc """
Uses the [Scroll API](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html)
to allow scrolling through a list of results.
## Examples
iex> Rubber.Search.scroll("http://localhost:9200", %{query: %{term: %{user: "kimchy"}}})
{:ok, %HTTPoison.Response{...}}
"""
@spec scroll(elastic_url :: String.t(), data :: map, options :: Keyword.t()) ::
HTTP.resp()
def scroll(elastic_url, data, options \\ []) do
prepare_url(elastic_url, "_search/scroll")
|> HTTP.post(JSON.encode!(data), [], options)
end
@doc """
Returns the number of results for a query using the
[Count API](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-count.html).
## Examples
iex> Rubber.Search.count("http://localhost:9200", "twitter", ["tweet"], %{query: %{term: %{user: "kimchy"}}})
{:ok, %HTTPoison.Response{...}}
"""
@spec count(elastic_url :: String.t(), index :: String.t(), types :: list, data :: map) ::
HTTP.resp()
def count(elastic_url, index, types, data),
do: count(elastic_url, index, types, data, [])
@doc """
Same as `count/4` but allows to specify query params and options for
[`HTTPoison.request/5`](https://hexdocs.pm/httpoison/HTTPoison.html#request/5).
"""
@spec count(
elastic_url :: String.t(),
index :: String.t(),
types :: list,
data :: map,
query_params :: Keyword.t(),
options :: Keyword.t()
) :: HTTP.resp()
def count(elastic_url, index, types, data, query_params, options \\ []) do
    prepare_url(elastic_url, make_path(index, types, query_params, "_count"))
|> HTTP.post(JSON.encode!(data), [], options)
end
@doc false
def make_path(index, types, query_params, api_type \\ "_search") do
path_root = "/#{index}"
path =
case types do
[] -> path_root
_ -> path_root <> "/" <> Enum.join(types, ",")
end
full_path = "#{path}/#{api_type}"
case query_params do
[] -> full_path
_ -> HTTP.append_query_string(full_path, query_params)
end
end
end | lib/rubber/search.ex | 0.895101 | 0.543166 | search.ex | starcoder |
defmodule Delivery.Content.Readers.Pressbooks do
alias Delivery.Content.Readers.Reader
alias Delivery.Content.Document
alias Delivery.Content.Block
alias Delivery.Content.Text
alias Delivery.Content.Inline
alias Delivery.Content.Mark
alias Delivery.Content.Module
alias Delivery.Content.Reference
alias Delivery.Content.Organization
@behaviour Reader
@spec segment(binary) :: {:ok, %{pages: [any()], toc: any()}} | {:error, String.t()}
def segment(input) do
parsed = Floki.parse(input)
{:ok,
%{
pages: parsed |> Floki.find("div[class=\"chapter standard\"]"),
toc: parsed |> Floki.find("div[id=\"toc\"]") |> hd
}}
end
  def get_attr_by_key(items, key, default) do
    case Enum.find(items, {nil, default}, fn {k, _} -> k == key end) do
      {_, value} -> value
    end
  end
def get_div_by_class(items, class) do
case Enum.find(items, nil, fn {_, a, _} -> get_attr_by_key(a, "class", "") == class end) do
{_, _, c} -> c
nil -> []
end
end
def organization(root) do
modules =
Floki.find(root, "li")
|> Enum.reduce([], fn item, acc ->
parsed =
case item do
{"li", [{"class", "part"}], [{"a", [{"href", "#" <> id}], [title]}]} ->
%Module{id: id, title: title}
{"li", [{"class", "chapter standard"}], [{"a", [{"href", "#" <> id}], _}]} ->
%Reference{id: id}
_ ->
:ignore
end
case parsed do
:ignore ->
acc
%Module{} = m ->
[m] ++ acc
%Reference{} = r ->
case acc do
[hd | rest] -> [%{hd | nodes: hd.nodes ++ [r]}] ++ rest
end
end
end)
|> Enum.reverse()
%Organization{nodes: modules}
end
def sections(root) do
sections =
Floki.find(root, "li[class=\"section\"]")
|> Enum.map(fn item ->
case item do
{"li", _, [{"a", [{"href", "#" <> id}], _}]} ->
%Reference{id: id}
_ ->
:ignore
end
end)
%Organization{nodes: sections}
end
def page({"div", attributes, children} = item) do
id = get_attr_by_key(attributes, "id", "unknown")
title =
case get_attr_by_key(attributes, "title", "unknown") do
"unknown" -> Floki.find(item, ".chapter-title") |> Floki.text()
title -> title
end
content_nodes =
get_div_by_class(children, "ugc chapter-ugc")
|> Enum.map(fn n -> handle(n) end)
licensing_nodes =
get_div_by_class(children, "licensing")
|> Enum.map(fn n -> handle(n) end)
%Document{
data: %{id: id, title: title},
nodes: content_nodes ++ licensing_nodes
}
|> clean()
end
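  # Normalization pass: flattens nested node lists and promotes bare strings,
  # loose text nodes, and inline nodes at the top level into paragraph blocks,
  # so the document tree contains only blocks directly under the document.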
def clean(%Document{} = doc) do
nodes =
List.flatten(doc.nodes)
|> Enum.map(fn n ->
case n do
n when is_binary(n) -> %Block{nodes: [%Text{text: n}], type: "paragraph"}
%{object: "text"} -> %Block{nodes: [n], type: "paragraph"}
%{object: "inline"} -> %Block{nodes: [n], type: "paragraph"}
nil -> %Block{nodes: [%Text{text: ""}], type: "paragraph"}
n -> n
end
end)
%Document{
data: doc.data,
nodes: Enum.map(nodes, fn c -> clean(c) end)
}
end
def clean(%Block{} = block) do
no_markup = fn b ->
%{b | marks: []}
end
collapse = fn b ->
Enum.map(b.nodes, fn n -> clean(n) end)
end
check = fn b ->
cond do
is_binary(b) ->
%Text{text: b}
b.object == "block" and b.type == "paragraph" and block.type == "paragraph" ->
collapse.(b)
b.object == "block" and b.type == "paragraph" and block.type == "blockquote" ->
collapse.(b)
block.type == "codeblock" and b.object == "text" and length(b.marks) > 0 ->
no_markup.(b)
true ->
clean(b)
end
end
nodes = List.flatten(block.nodes)
nodes = Enum.reduce(nodes, [], fn c, acc ->
case check.(c) do
item when is_list(item) -> acc ++ item
scalar -> acc ++ [scalar]
end
end)
%Block{
type: block.type,
data: block.data,
nodes: nodes
}
end
def clean(other) do
other
end
def extract_id(attributes) do
case attributes do
[{"id", id}] -> %{id: id}
_ -> %{}
end
end
def extract(attributes) do
Enum.reduce(attributes, %{}, fn {k, v}, m -> Map.put(m, k, v) end)
end
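  # handle/1 converts a Floki node ({tag, attributes, children}) into the
  # corresponding content struct, recursing into children. Clauses are ordered
  # from specific markup shapes down to generic fallbacks.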
def handle({"div", _attributes, children}) do
case children do
[{"ul", _, c}] ->
%Block{type: "unordered-list", nodes: Enum.map(c, fn c -> handle(c) end)}
[{"ol", _, c}] ->
%Block{type: "ordered-list", nodes: Enum.map(c, fn c -> handle(c) end)}
[{"h1", _, _} | _] ->
Enum.map(children, fn c -> handle(c) end)
[{"h2", _, _} | _] ->
Enum.map(children, fn c -> handle(c) end)
[{"h3", _, _} | _] ->
Enum.map(children, fn c -> handle(c) end)
[{"h4", _, _} | _] ->
Enum.map(children, fn c -> handle(c) end)
[{"h5", _, _} | _] ->
Enum.map(children, fn c -> handle(c) end)
[{"h6", _, _} | _] ->
Enum.map(children, fn c -> handle(c) end)
[{"div", _, [{"div", _, [{"div", _, c}]}]}] ->
Enum.map(c, fn c -> handle(c) end)
[{"div", _, [{"div", _, c} | more]} | rest] ->
Enum.map(c ++ more ++ rest, fn c -> handle(c) end)
[{"div", [{"class", "textbox tryit"}], c}] ->
%Block{type: "paragraph", nodes: Enum.map(c, fn c -> handle(c) end)}
[{"div", _, c} | rest] ->
Enum.map(c ++ rest, fn c -> handle(c) end)
[{"p", _, _} | _] ->
Enum.map(children, fn c -> handle(c) end)
c ->
%Block{type: "paragraph", nodes: Enum.map(c, fn c -> handle(c) end)}
end
end
def handle({"p", attributes, children}) do
%Block{
type: "paragraph",
data: extract_id(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"small", _, [child]}) do
handle(child)
end
def handle({"cite", _, [child]}) do
handle(child)
end
def handle({"pre", _, children}) do
%Block{
type: "codeblock",
data: %{"syntax" => "text"},
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"hr", _, _}) do
%Block{
type: "paragraph",
nodes: []
}
end
def handle({"ul", attributes, children}) do
%Block{
type: "unordered-list",
data: extract_id(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"dl", attributes, children}) do
%Block{
type: "dl",
data: extract_id(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"dd", attributes, children}) do
%Block{
type: "dd",
data: extract_id(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"dt", attributes, children}) do
%Block{
type: "dt",
data: extract_id(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"ol", attributes, children}) do
%Block{
type: "ordered-list",
data: extract_id(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"li", attributes, children}) do
%Block{
type: "list-item",
data: extract_id(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"h1", _, children}) do
%Block{
type: "heading-one",
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"h2", _, children}) do
%Block{
type: "heading-two",
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"h3", _, children}) do
%Block{
type: "heading-three",
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"h4", _, children}) do
%Block{
type: "heading-four",
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"h5", _, children}) do
%Block{
type: "heading-five",
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"table", attributes, children}) do
%Block{
type: "table",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"thead", attributes, children}) do
%Block{
type: "thead",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"tbody", attributes, children}) do
%Block{
type: "tbody",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"tfoot", attributes, children}) do
%Block{
type: "tfoot",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"caption", attributes, children}) do
%Block{
type: "caption",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"tr", attributes, children}) do
%Block{
type: "tr",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"td", attributes, children}) do
%Block{
type: "td",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"th", attributes, children}) do
%Block{
type: "th",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"img", attributes, children}) do
%Block{
type: "image",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"blockquote", attributes, [{"p", _, c}]}) do
%Block{
type: "blockquote",
data: extract(attributes),
nodes: Enum.map(c, fn c -> handle(c) end)
}
end
def handle({"blockquote", attributes, [{"p", _, _} | _tail] = children}) do
nodes = Enum.flat_map(children, fn {_, _, c} -> c end)
%Block{
type: "blockquote",
data: extract(attributes),
nodes: Enum.map(nodes, fn c -> handle(c) end)
}
end
def handle({"blockquote", attributes, children}) do
%Block{
type: "blockquote",
data: extract(attributes),
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"a", attributes, children}) do
data = extract(attributes)
data = case Map.get(data, "href", "") do
"#" <> _ -> Map.put(data, "href", "https://oli.cmu.edu")
_ -> data
end
%Inline{
type: "link",
data: data,
nodes: Enum.map(children, fn c -> handle(c) end)
}
end
def handle({"script", _, _}) do
%Text{
text: "script removed"
}
end
def handle({"textarea", _, _}) do
%Text{
text: "textarea removed"
}
end
def handle({"em", _, [text]}) when is_binary(text) do
%Text{
text: latex(text),
marks: [%Mark{type: "italic"}]
}
end
def handle({"em", _, [{"a", _, _} = inline]}) do
handle(inline)
end
def handle({"em", _, [item]}) when is_tuple(item) do
inner = handle(item)
case inner do
"" -> %Text{text: " "}
m -> if Map.has_key?(m, :text) do
%Text{
text: latex(m.text),
marks: [%Mark{type: "italic"}] ++ m.marks
}
else
%Text{
text: " ",
marks: [%Mark{type: "italic"}]
}
end
end
end
def handle({"em", _, text}) do
%Text{
text: span(text),
marks: [%Mark{type: "bold"}]
}
end
def handle({"strong", _, [{"br", [], []}]}) do
%Text{
text: ""
}
end
def handle({"strong", _, text}) do
%Text{
text: span(text),
marks: [%Mark{type: "bold"}]
}
end
def handle({"del", _, text}) do
%Text{
text: span(text),
marks: [%Mark{type: "strikethrough"}]
}
end
def handle({"b", _, text}) do
%Text{
text: span(text),
marks: [%Mark{type: "bold"}]
}
end
def handle({"i", _, text}) do
%Text{
text: span(text),
marks: [%Mark{type: "italic"}]
}
end
def handle({"sub", _, text}) do
%Text{
text: span(text),
marks: [%Mark{type: "sub"}]
}
end
def handle({"sup", _, text}) do
%Text{
text: span(text),
marks: [%Mark{type: "sup"}]
}
end
def handle({"span", _, children}) do
span(children)
end
def handle({"br", [], []}) do
""
end
def handle(text) when is_binary(text) do
%Text{
text: latex(text)
}
end
def handle(unsupported) do
IO.puts("Unsupported")
IO.inspect(unsupported)
""
end
def span({_, _, children}) when is_list(children) do
Enum.map(children, fn c -> span(c) end)
|> Enum.join(" ")
end
def span({_, _, text}) when is_binary(text) do
latex(text)
end
def span(list) when is_list(list) do
Enum.map(list, fn c -> span(c) end)
|> Enum.join(" ")
end
def span(text) when is_binary(text) do
latex(text)
end
def latex(text) do
String.replace(text, "[latex]", "\\(", global: true) |> String.replace("[/latex]", "\\)", global: true)
end
  def determine_type(_input) do
end
end | lib/delivery/content/readers/pressbooks.ex | 0.566738 | 0.4231 | pressbooks.ex | starcoder |
defmodule Payjp.Cards do
@moduledoc """
Functions for working with cards at Payjp. Through this API you can:
* create a card,
* update a card,
* get a card,
* delete a card,
* delete all cards,
* list cards,
* list all cards,
All requests require `owner_type` and `owner_id` parameters to be specified.
`owner_type` must be one of the following:
* `customer`
`owner_id` must be the ID of the owning object.
Payjp API reference: https://pay.jp/docs/api/#顧客のカードを作成
"""
def endpoint_for_entity(entity_type, entity_id) do
case entity_type do
:customer -> "customers/#{entity_id}/cards"
end
end
@doc """
Create a card.
Creates a card for given owner type, owner ID using params.
`params` must contain a "card" object. Inside the "card" object, the following parameters are required:
* number,
  * cvc,
* exp_month,
* exp_year.
Returns a `{:ok, card}` tuple.
## Examples
params = [
card: [
number: "4242424242424242",
cvc: 123,
exp_month: 12,
exp_year: 2020,
],
metadata: [
test_field: "test val"
]
]
{:ok, card} = Payjp.Cards.create(:customer, customer_id, params)
"""
def create(owner_type, owner_id, params) do
create owner_type, owner_id, params, Payjp.config_or_env_key
end
@doc """
Create a card. Accepts Payjp API key.
Creates a card for given owner using params.
`params` must contain a "card" object. Inside the "card" object, the following parameters are required:
* number,
  * cvc,
* exp_month,
* exp_year.
Returns a `{:ok, card}` tuple.
## Examples
{:ok, card} = Payjp.Cards.create(:customer, customer_id, params, key)
"""
def create(owner_type, owner_id, params, key) do
Payjp.make_request_with_key(:post, endpoint_for_entity(owner_type, owner_id), key, params)
|> Payjp.Util.handle_payjp_response
end
@doc """
Update a card.
Updates a card for given owner using card ID and params.
* `owner_type` must be one of the following:
* `customer`,
* `owner_id` must be the ID of the owning object.
Returns a `{:ok, card}` tuple.
## Examples
{:ok, card} = Payjp.Cards.update(:customer, customer_id, card_id, params)
"""
def update(owner_type, owner_id, id, params) do
update(owner_type, owner_id, id, params, Payjp.config_or_env_key)
end
@doc """
Update a card. Accepts Payjp API key.
Updates a card for given owner using card ID and params.
Returns a `{:ok, card}` tuple.
## Examples
{:ok, card} = Payjp.Cards.update(:customer, customer_id, card_id, params, key)
"""
def update(owner_type, owner_id, id, params, key) do
Payjp.make_request_with_key(:post, "#{endpoint_for_entity(owner_type, owner_id)}/#{id}", key, params)
|> Payjp.Util.handle_payjp_response
end
@doc """
Get a card.
Gets a card for given owner using card ID.
Returns a `{:ok, card}` tuple.
## Examples
{:ok, card} = Payjp.Cards.get(:customer, customer_id, card_id)
"""
def get(owner_type, owner_id, id) do
get owner_type, owner_id, id, Payjp.config_or_env_key
end
@doc """
Get a card. Accepts Payjp API key.
Gets a card for given owner using card ID.
Returns a `{:ok, card}` tuple.
## Examples
{:ok, card} = Payjp.Cards.get(:customer, customer_id, card_id, key)
"""
def get(owner_type, owner_id, id, key) do
Payjp.make_request_with_key(:get, "#{endpoint_for_entity(owner_type, owner_id)}/#{id}", key)
|> Payjp.Util.handle_payjp_response
end
@doc """
Get a list of cards.
Gets a list of cards for given owner.
Accepts the following parameters:
* `limit` - a limit of items to be returned (optional; defaults to 10).
* `offset` - an offset (optional),
* `since` - a timestamp for returning subsequent data specified here (optional),
* `until` - a timestamp for returning the previous data specified here (optional),
Returns a `{:ok, cards}` tuple, where `cards` is a list of cards.
## Examples
{:ok, cards} = Payjp.Cards.list(:customer, customer_id, offset: 5) # Get a list of up to 10 cards, skipping first 5 cards
{:ok, cards} = Payjp.Cards.list(:customer, customer_id, offset: 5, limit: 20) # Get a list of up to 20 cards, skipping first 5 cards
"""
def list(owner_type, owner_id, opts \\ []) do
list owner_type, owner_id, Payjp.config_or_env_key, opts
end
@doc """
Get a list of cards. Accepts Payjp API key.
Gets a list of cards for a given owner.
Accepts the following parameters:
* `limit` - a limit of items to be returned (optional; defaults to 10).
* `offset` - an offset (optional),
* `since` - a timestamp for returning subsequent data specified here (optional),
* `until` - a timestamp for returning the previous data specified here (optional),
Returns a `{:ok, cards}` tuple, where `cards` is a list of cards.
## Examples
{:ok, cards} = Payjp.Cards.list(:customer, customer_id, offset: 5) # Get a list of up to 10 cards, skipping first 5 cards
{:ok, cards} = Payjp.Cards.list(:customer, customer_id, offset: 5, limit: 20) # Get a list of up to 20 cards, skipping first 5 cards
"""
def list(owner_type, owner_id, key, opts) do
Payjp.Util.list endpoint_for_entity(owner_type, owner_id), key, opts
end
@doc """
Delete a card.
Deletes a card for given owner using card ID.
Returns a `{:ok, card}` tuple.
## Examples
      {:ok, deleted_card} = Payjp.Cards.delete(:customer, customer_id, "card_id")
"""
def delete(owner_type, owner_id, id) do
delete owner_type, owner_id, id, Payjp.config_or_env_key
end
@doc """
Delete a card. Accepts Payjp API key.
Deletes a card for given owner using card ID.
Returns a `{:ok, card}` tuple.
## Examples
      {:ok, deleted_card} = Payjp.Cards.delete(:customer, customer_id, "card_id", key)
"""
  def delete(owner_type, owner_id, id, key) do
Payjp.make_request_with_key(:delete, "#{endpoint_for_entity(owner_type, owner_id)}/#{id}", key)
|> Payjp.Util.handle_payjp_response
end
@doc """
Delete all cards.
Deletes all cards from given owner.
Returns `:ok` atom.
## Examples
:ok = Payjp.Cards.delete_all(:customer, customer_id)
"""
def delete_all(owner_type, owner_id) do
case all(owner_type, owner_id) do
{:ok, cards} ->
Enum.each cards, fn c -> delete(owner_type, owner_id, c["id"]) end
{:error, err} -> raise err
end
end
@doc """
Delete all cards. Accepts Payjp API key.
Deletes all cards from given owner.
Returns `:ok` atom.
## Examples
:ok = Payjp.Cards.delete_all(:customer, customer_id, key)
"""
  def delete_all(owner_type, owner_id, key) do
    # List with the provided key as well, so key-scoped calls stay consistent.
    case all(owner_type, owner_id, key, [], limit: 100) do
      {:ok, cards} ->
        Enum.each cards, fn c -> delete(owner_type, owner_id, c["id"], key) end
      {:error, err} -> raise err
    end
end
@max_fetch_size 100
@doc """
List all cards.
Lists all cards for a given owner.
Accepts the following parameters:
* `accum` - a list to start accumulating cards to (optional; defaults to `[]`).,
* `since` - an offset (optional; defaults to `""`).
Returns `{:ok, cards}` tuple.
## Examples
{:ok, cards} = Payjp.Cards.all(:customer, customer_id, accum, since)
"""
def all(owner_type, owner_id, accum \\ [], opts \\ [limit: @max_fetch_size]) do
all owner_type, owner_id, Payjp.config_or_env_key, accum, opts
end
@doc """
List all cards. Accepts Payjp API key.
Lists all cards for a given owner.
Accepts the following parameters:
* `accum` - a list to start accumulating cards to (optional; defaults to `[]`).,
* `since` - an offset (optional; defaults to `""`).
Returns `{:ok, cards}` tuple.
## Examples
{:ok, cards} = Payjp.Cards.all(:customer, customer_id, accum, since, key)
"""
def all(owner_type, owner_id, key, accum, opts) do
case Payjp.Util.list_raw("#{endpoint_for_entity(owner_type, owner_id)}", key, opts) do
{:ok, resp} ->
case resp[:has_more] do
true ->
          last_sub = List.last(resp[:data])
all(owner_type, owner_id, key, resp[:data] ++ accum, until: last_sub["created"], limit: @max_fetch_size)
false ->
result = resp[:data] ++ accum
{:ok, result}
end
{:error, err} -> raise err
end
end
end | lib/payjp/cards.ex | 0.904661 | 0.513668 | cards.ex | starcoder |
defmodule Chunky.Sequence.OEIS.Primes do
@moduledoc """
  OEIS sequences dealing with primes, pseudoprimes, and primality. For related sequences, see `Chunky.Sequence.OEIS.Factors`.
## Available Sequences
### Pseudoprimes
Fermat pseudoprimes to specific bases:
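  A composite number `n` is a Fermat pseudoprime to base `b` when `b^(n - 1) ≡ 1 (mod n)`,
  i.e., it passes the Fermat primality test for that base despite being composite.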
- `create_sequence_a001567/1` - A001567 - Fermat pseudoprimes to base 2, also called Sarrus numbers or Poulet numbers.
- `create_sequence_a005935/1` - A005935 - Pseudoprimes to base 3.
- `create_sequence_a020136/1` - A020136 - Fermat pseudoprimes to base 4.
- `create_sequence_a005936/1` - A005936 - Pseudoprimes to base 5.
- `create_sequence_a005937/1` - A005937 - Pseudoprimes to base 6.
- `create_sequence_a005938/1` - A005938 - Pseudoprimes to base 7.
- `create_sequence_a020137/1` - A020137 - Pseudoprimes to base 8.
- `create_sequence_a020138/1` - A020138 - Pseudoprimes to base 9.
- `create_sequence_a005939/1` - A005939 - Pseudoprimes to base 10.
- `create_sequence_a020139/1` - A020139 - Pseudoprimes to base 11.
- `create_sequence_a020140/1` - A020140 - Pseudoprimes to base 12.
- `create_sequence_a020141/1` - A020141 - Pseudoprimes to base 13.
- `create_sequence_a020142/1` - A020142 - Pseudoprimes to base 14.
- `create_sequence_a020143/1` - A020143 - Pseudoprimes to base 15.
- `create_sequence_a020144/1` - A020144 - Pseudoprimes to base 16.
- `create_sequence_a020145/1` - A020145 - Pseudoprimes to base 17.
- `create_sequence_a020146/1` - A020146 - Pseudoprimes to base 18.
- `create_sequence_a020147/1` - A020147 - Pseudoprimes to base 19.
- `create_sequence_a020148/1` - A020148 - Pseudoprimes to base 20.
- `create_sequence_a020149/1` - A020149 - Pseudoprimes to base 21.
- `create_sequence_a020150/1` - A020150 - Pseudoprimes to base 22.
- `create_sequence_a020151/1` - A020151 - Pseudoprimes to base 23.
- `create_sequence_a020152/1` - A020152 - Pseudoprimes to base 24.
- `create_sequence_a020153/1` - A020153 - Pseudoprimes to base 25.
- `create_sequence_a020154/1` - A020154 - Pseudoprimes to base 26.
- `create_sequence_a020155/1` - A020155 - Pseudoprimes to base 27.
- `create_sequence_a020156/1` - A020156 - Pseudoprimes to base 28.
- `create_sequence_a020157/1` - A020157 - Pseudoprimes to base 29.
- `create_sequence_a020158/1` - A020158 - Pseudoprimes to base 30.
- `create_sequence_a020159/1` - A020159 - Pseudoprimes to base 31.
- `create_sequence_a020160/1` - A020160 - Pseudoprimes to base 32.
- `create_sequence_a020161/1` - A020161 - Pseudoprimes to base 33.
- `create_sequence_a020162/1` - A020162 - Pseudoprimes to base 34.
- `create_sequence_a020163/1` - A020163 - Pseudoprimes to base 35.
- `create_sequence_a020164/1` - A020164 - Pseudoprimes to base 36.
- `create_sequence_a020165/1` - A020165 - Pseudoprimes to base 37.
- `create_sequence_a020166/1` - A020166 - Pseudoprimes to base 38.
- `create_sequence_a020167/1` - A020167 - Pseudoprimes to base 39.
- `create_sequence_a020168/1` - A020168 - Pseudoprimes to base 40.
- `create_sequence_a020169/1` - A020169 - Pseudoprimes to base 41.
- `create_sequence_a020170/1` - A020170 - Pseudoprimes to base 42.
- `create_sequence_a020171/1` - A020171 - Pseudoprimes to base 43.
- `create_sequence_a020172/1` - A020172 - Pseudoprimes to base 44.
- `create_sequence_a020173/1` - A020173 - Pseudoprimes to base 45.
- `create_sequence_a020174/1` - A020174 - Pseudoprimes to base 46.
- `create_sequence_a020175/1` - A020175 - Pseudoprimes to base 47.
- `create_sequence_a020176/1` - A020176 - Pseudoprimes to base 48.
- `create_sequence_a020177/1` - A020177 - Pseudoprimes to base 49.
- `create_sequence_a020178/1` - A020178 - Pseudoprimes to base 50.
- `create_sequence_a020179/1` - A020179 - Pseudoprimes to base 51.
- `create_sequence_a020180/1` - A020180 - Pseudoprimes to base 52.
- `create_sequence_a020181/1` - A020181 - Pseudoprimes to base 53.
- `create_sequence_a020182/1` - A020182 - Pseudoprimes to base 54.
- `create_sequence_a020183/1` - A020183 - Pseudoprimes to base 55.
- `create_sequence_a020184/1` - A020184 - Pseudoprimes to base 56.
- `create_sequence_a020185/1` - A020185 - Pseudoprimes to base 57.
- `create_sequence_a020186/1` - A020186 - Pseudoprimes to base 58.
- `create_sequence_a020187/1` - A020187 - Pseudoprimes to base 59.
- `create_sequence_a020188/1` - A020188 - Pseudoprimes to base 60.
- `create_sequence_a020189/1` - A020189 - Pseudoprimes to base 61.
- `create_sequence_a020190/1` - A020190 - Pseudoprimes to base 62.
- `create_sequence_a020191/1` - A020191 - Pseudoprimes to base 63.
- `create_sequence_a020192/1` - A020192 - Pseudoprimes to base 64.
- `create_sequence_a020193/1` - A020193 - Pseudoprimes to base 65.
- `create_sequence_a020194/1` - A020194 - Pseudoprimes to base 66.
- `create_sequence_a020195/1` - A020195 - Pseudoprimes to base 67.
- `create_sequence_a020196/1` - A020196 - Pseudoprimes to base 68.
- `create_sequence_a020197/1` - A020197 - Pseudoprimes to base 69.
- `create_sequence_a020198/1` - A020198 - Pseudoprimes to base 70.
- `create_sequence_a020199/1` - A020199 - Pseudoprimes to base 71.
- `create_sequence_a020200/1` - A020200 - Pseudoprimes to base 72.
- `create_sequence_a020201/1` - A020201 - Pseudoprimes to base 73.
- `create_sequence_a020202/1` - A020202 - Pseudoprimes to base 74.
- `create_sequence_a020203/1` - A020203 - Pseudoprimes to base 75.
- `create_sequence_a020204/1` - A020204 - Pseudoprimes to base 76.
- `create_sequence_a020205/1` - A020205 - Pseudoprimes to base 77.
- `create_sequence_a020206/1` - A020206 - Pseudoprimes to base 78.
- `create_sequence_a020207/1` - A020207 - Pseudoprimes to base 79.
- `create_sequence_a020208/1` - A020208 - Pseudoprimes to base 80.
- `create_sequence_a020209/1` - A020209 - Pseudoprimes to base 81.
- `create_sequence_a020210/1` - A020210 - Pseudoprimes to base 82.
- `create_sequence_a020211/1` - A020211 - Pseudoprimes to base 83.
- `create_sequence_a020212/1` - A020212 - Pseudoprimes to base 84.
- `create_sequence_a020213/1` - A020213 - Pseudoprimes to base 85.
- `create_sequence_a020214/1` - A020214 - Pseudoprimes to base 86.
- `create_sequence_a020215/1` - A020215 - Pseudoprimes to base 87.
- `create_sequence_a020216/1` - A020216 - Pseudoprimes to base 88.
- `create_sequence_a020217/1` - A020217 - Pseudoprimes to base 89.
- `create_sequence_a020218/1` - A020218 - Pseudoprimes to base 90.
- `create_sequence_a020219/1` - A020219 - Pseudoprimes to base 91.
- `create_sequence_a020220/1` - A020220 - Pseudoprimes to base 92.
- `create_sequence_a020221/1` - A020221 - Pseudoprimes to base 93.
- `create_sequence_a020222/1` - A020222 - Pseudoprimes to base 94.
- `create_sequence_a020223/1` - A020223 - Pseudoprimes to base 95.
- `create_sequence_a020224/1` - A020224 - Pseudoprimes to base 96.
- `create_sequence_a020225/1` - A020225 - Pseudoprimes to base 97.
- `create_sequence_a020226/1` - A020226 - Pseudoprimes to base 98.
- `create_sequence_a020227/1` - A020227 - Pseudoprimes to base 99.
- `create_sequence_a020228/1` - A020228 - Pseudoprimes to base 100.
### Characterizations of Primes
- `create_sequence_a162511/1` - A162511 - Multiplicative function with a(p^e)=(-1)^(e-1)
"""
import Chunky.Sequence, only: [sequence_for_function: 1]
alias Chunky.Math
# require Integer
@doc """
OEIS Sequence `A001567` - Fermat pseudoprimes to base 2, also called Sarrus numbers or Poulet numbers.
From [OEIS A001567](https://oeis.org/A001567):
> Fermat pseudoprimes to base 2, also called Sarrus numbers or Poulet numbers.
> (Formerly M5441 N2365)
**Sequence IDs**: `:a001567`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a001567) |> Sequence.take!(40)
[341,561,645,1105,1387,1729,1905,2047,2465,2701,2821,3277,4033,4369,4371,4681,5461,6601,7957,8321,8481,8911,10261,10585,11305,12801,13741,13747,13981,14491,15709,15841,16705,18705,18721,19951,23001,23377,25761,29341]
"""
@doc offset: 1,
sequence: "Fermat pseudoprimes to base 2, also called Sarrus numbers or Poulet numbers.",
references: [{:oeis, :a001567, "https://oeis.org/A001567"}]
def create_sequence_a001567(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a001567/2)
end
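  # Each seq_* callback below advances from the previously emitted value:
  # Math.next_number/2 scans upward from `last` until the predicate (here,
  # pseudoprimality to the given base) holds again.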
@doc false
@doc offset: 1
def seq_a001567(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 2) end, last)
end
@doc """
OEIS Sequence `A005935` - Pseudoprimes to base 3.
From [OEIS A005935](https://oeis.org/A005935):
> Pseudoprimes to base 3.
> (Formerly M5362)
**Sequence IDs**: `:a005935`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a005935) |> Sequence.take!(40)
[91,121,286,671,703,949,1105,1541,1729,1891,2465,2665,2701,2821,3281,3367,3751,4961,5551,6601,7381,8401,8911,10585,11011,12403,14383,15203,15457,15841,16471,16531,18721,19345,23521,24046,24661,24727,28009,29161]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 3.",
references: [{:oeis, :a005935, "https://oeis.org/A005935"}]
def create_sequence_a005935(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a005935/2)
end
@doc false
@doc offset: 1
def seq_a005935(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 3) end, last)
end
@doc """
OEIS Sequence `A005936` - Pseudoprimes to base 5.
From [OEIS A005936](https://oeis.org/A005936):
> Pseudoprimes to base 5.
> (Formerly M3712)
**Sequence IDs**: `:a005936`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a005936) |> Sequence.take!(39)
[4,124,217,561,781,1541,1729,1891,2821,4123,5461,5611,5662,5731,6601,7449,7813,8029,8911,9881,11041,11476,12801,13021,13333,13981,14981,15751,15841,16297,17767,21361,22791,23653,24211,25327,25351,29341,29539]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 5.",
references: [{:oeis, :a005936, "https://oeis.org/A005936"}]
def create_sequence_a005936(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a005936/2)
end
@doc false
@doc offset: 1
def seq_a005936(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 5) end, last)
end
@doc """
OEIS Sequence `A005937` - Pseudoprimes to base 6.
From [OEIS A005937](https://oeis.org/A005937):
> Pseudoprimes to base 6.
> (Formerly M5246)
**Sequence IDs**: `:a005937`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a005937) |> Sequence.take!(39)
[35,185,217,301,481,1105,1111,1261,1333,1729,2465,2701,2821,3421,3565,3589,3913,4123,4495,5713,6533,6601,8029,8365,8911,9331,9881,10585,10621,11041,11137,12209,14315,14701,15841,16589,17329,18361,18721]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 6.",
references: [{:oeis, :a005937, "https://oeis.org/A005937"}]
def create_sequence_a005937(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a005937/2)
end
@doc false
@doc offset: 1
def seq_a005937(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 6) end, last)
end
@doc """
OEIS Sequence `A005938` - Pseudoprimes to base 7.
From [OEIS A005938](https://oeis.org/A005938):
> Pseudoprimes to base 7.
> (Formerly M4168)
**Sequence IDs**: `:a005938`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a005938) |> Sequence.take!(40)
[6,25,325,561,703,817,1105,1825,2101,2353,2465,3277,4525,4825,6697,8321,10225,10585,10621,11041,11521,12025,13665,14089,16725,16806,18721,19345,20197,20417,20425,22945,25829,26419,29234,29341,29857,29891,30025,30811]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 7.",
references: [{:oeis, :a005938, "https://oeis.org/A005938"}]
def create_sequence_a005938(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a005938/2)
end
@doc false
@doc offset: 1
def seq_a005938(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 7) end, last)
end
@doc """
OEIS Sequence `A005939` - Pseudoprimes to base 10.
From [OEIS A005939](https://oeis.org/A005939):
> Pseudoprimes to base 10.
> (Formerly M4612)
**Sequence IDs**: `:a005939`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a005939) |> Sequence.take!(43)
[9,33,91,99,259,451,481,561,657,703,909,1233,1729,2409,2821,2981,3333,3367,4141,4187,4521,5461,6533,6541,6601,7107,7471,7777,8149,8401,8911,10001,11111,11169,11649,12403,12801,13833,13981,14701,14817,14911,15211]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 10.",
references: [{:oeis, :a005939, "https://oeis.org/A005939"}]
def create_sequence_a005939(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a005939/2)
end
@doc false
@doc offset: 1
def seq_a005939(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 10) end, last)
end
@doc """
OEIS Sequence `A020136` - Fermat pseudoprimes to base 4.
From [OEIS A020136](https://oeis.org/A020136):
> Fermat pseudoprimes to base 4.
> (Formerly )
**Sequence IDs**: `:a020136`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020136) |> Sequence.take!(44)
[15,85,91,341,435,451,561,645,703,1105,1247,1271,1387,1581,1695,1729,1891,1905,2047,2071,2465,2701,2821,3133,3277,3367,3683,4033,4369,4371,4681,4795,4859,5461,5551,6601,6643,7957,8321,8481,8695,8911,9061,9131]
"""
@doc offset: 1,
sequence: "Fermat pseudoprimes to base 4.",
references: [{:oeis, :a020136, "https://oeis.org/A020136"}]
def create_sequence_a020136(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020136/2)
end
@doc false
@doc offset: 1
def seq_a020136(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 4) end, last)
end
@doc """
OEIS Sequence `A020137` - Pseudoprimes to base 8.
From [OEIS A020137](https://oeis.org/A020137):
> Pseudoprimes to base 8.
> (Formerly )
**Sequence IDs**: `:a020137`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020137) |> Sequence.take!(47)
[9,21,45,63,65,105,117,133,153,231,273,341,481,511,561,585,645,651,861,949,1001,1105,1281,1365,1387,1417,1541,1649,1661,1729,1785,1905,2047,2169,2465,2501,2701,2821,3145,3171,3201,3277,3605,3641,4005,4033,4097]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 8.",
references: [{:oeis, :a020137, "https://oeis.org/A020137"}]
def create_sequence_a020137(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020137/2)
end
@doc false
@doc offset: 1
def seq_a020137(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 8) end, last)
end
@doc """
OEIS Sequence `A020138` - Pseudoprimes to base 9.
From [OEIS A020138](https://oeis.org/A020138):
> Pseudoprimes to base 9.
> (Formerly )
**Sequence IDs**: `:a020138`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020138) |> Sequence.take!(47)
[4,8,28,52,91,121,205,286,364,511,532,616,671,697,703,946,949,1036,1105,1288,1387,1541,1729,1891,2465,2501,2665,2701,2806,2821,2926,3052,3281,3367,3751,4376,4636,4961,5356,5551,6364,6601,6643,7081,7381,7913,8401]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 9.",
references: [{:oeis, :a020138, "https://oeis.org/A020138"}]
def create_sequence_a020138(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020138/2)
end
@doc false
@doc offset: 1
def seq_a020138(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 9) end, last)
end
@doc """
OEIS Sequence `A020139` - Pseudoprimes to base 11.
From [OEIS A020139](https://oeis.org/A020139):
> Pseudoprimes to base 11.
> (Formerly )
**Sequence IDs**: `:a020139`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020139) |> Sequence.take!(43)
[10,15,70,133,190,259,305,481,645,703,793,1105,1330,1729,2047,2257,2465,2821,4577,4921,5041,5185,6601,7869,8113,8170,8695,8911,9730,10585,12403,13333,14521,14981,15841,16705,17711,18705,23377,24130,24727,26335,26467]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 11.",
references: [{:oeis, :a020139, "https://oeis.org/A020139"}]
def create_sequence_a020139(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020139/2)
end
@doc false
@doc offset: 1
def seq_a020139(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 11) end, last)
end
@doc """
OEIS Sequence `A020140` - Pseudoprimes to base 12.
From [OEIS A020140](https://oeis.org/A020140):
> Pseudoprimes to base 12.
> (Formerly )
**Sequence IDs**: `:a020140`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020140) |> Sequence.take!(42)
[65,91,133,143,145,247,377,385,703,1045,1099,1105,1649,1729,1885,1891,2041,2233,2465,2701,2821,2983,3367,3553,5005,5365,5551,5785,6061,6305,6601,8911,9073,10585,11077,12403,12673,12905,13051,13333,13345,13585]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 12.",
references: [{:oeis, :a020140, "https://oeis.org/A020140"}]
def create_sequence_a020140(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020140/2)
end
@doc false
@doc offset: 1
def seq_a020140(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 12) end, last)
end
@doc """
OEIS Sequence `A020141` - Pseudoprimes to base 13.
From [OEIS A020141](https://oeis.org/A020141):
> Pseudoprimes to base 13.
> (Formerly )
**Sequence IDs**: `:a020141`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020141) |> Sequence.take!(43)
[4,6,12,21,85,105,231,244,276,357,427,561,1099,1785,1891,2465,2806,3605,5028,5149,5185,5565,6601,7107,8841,8911,9577,9637,10308,10585,11305,12403,12621,13019,13345,13461,13685,14491,14981,15051,15505,15841,17803]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 13.",
references: [{:oeis, :a020141, "https://oeis.org/A020141"}]
def create_sequence_a020141(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020141/2)
end
@doc false
@doc offset: 1
def seq_a020141(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 13) end, last)
end
@doc """
OEIS Sequence `A020142` - Pseudoprimes to base 14.
From [OEIS A020142](https://oeis.org/A020142):
> Pseudoprimes to base 14.
> (Formerly )
**Sequence IDs**: `:a020142`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020142) |> Sequence.take!(43)
[15,39,65,195,481,561,781,793,841,985,1105,1111,1541,1891,2257,2465,2561,2665,2743,3277,5185,5713,6501,6533,6541,7107,7171,7449,7543,7585,8321,9073,10585,12403,12505,12545,12805,12871,13429,14111,14689,15067,15457]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 14.",
references: [{:oeis, :a020142, "https://oeis.org/A020142"}]
def create_sequence_a020142(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020142/2)
end
@doc false
@doc offset: 1
def seq_a020142(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 14) end, last)
end
@doc """
OEIS Sequence `A020143` - Pseudoprimes to base 15.
From [OEIS A020143](https://oeis.org/A020143):
> Pseudoprimes to base 15.
> (Formerly )
**Sequence IDs**: `:a020143`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020143) |> Sequence.take!(39)
[14,341,742,946,1477,1541,1687,1729,1891,1921,2821,3133,3277,4187,6541,6601,7471,8701,8911,9073,10279,10649,12871,14041,14701,15409,15841,16841,19201,20017,24521,25313,25546,28063,29341,30889,31021,38963,41041]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 15.",
references: [{:oeis, :a020143, "https://oeis.org/A020143"}]
def create_sequence_a020143(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020143/2)
end
@doc false
@doc offset: 1
def seq_a020143(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 15) end, last)
end
@doc """
OEIS Sequence `A020144` - Pseudoprimes to base 16.
From [OEIS A020144](https://oeis.org/A020144):
> Pseudoprimes to base 16.
> (Formerly )
**Sequence IDs**: `:a020144`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020144) |> Sequence.take!(45)
[15,51,85,91,255,341,435,451,561,595,645,703,1105,1247,1261,1271,1285,1387,1581,1687,1695,1729,1891,1905,2047,2071,2091,2431,2465,2701,2821,3133,3277,3367,3655,3683,4033,4369,4371,4681,4795,4859,5083,5151,5461]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 16.",
references: [{:oeis, :a020144, "https://oeis.org/A020144"}]
def create_sequence_a020144(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020144/2)
end
@doc false
@doc offset: 1
def seq_a020144(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 16) end, last)
end
@doc """
OEIS Sequence `A020145` - Pseudoprimes to base 17.
From [OEIS A020145](https://oeis.org/A020145):
> Pseudoprimes to base 17.
> (Formerly )
**Sequence IDs**: `:a020145`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020145) |> Sequence.take!(43)
[4,8,9,16,45,91,145,261,781,1111,1228,1305,1729,1885,2149,2821,3991,4005,4033,4187,4912,5365,5662,5833,6601,6697,7171,8481,8911,10585,11476,12403,12673,13333,13833,15805,15841,16705,19345,19729,20591,21781,22791]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 17.",
references: [{:oeis, :a020145, "https://oeis.org/A020145"}]
def create_sequence_a020145(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020145/2)
end
@doc false
@doc offset: 1
def seq_a020145(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 17) end, last)
end
@doc """
OEIS Sequence `A020146` - Pseudoprimes to base 18.
From [OEIS A020146](https://oeis.org/A020146):
> Pseudoprimes to base 18.
> (Formerly )
**Sequence IDs**: `:a020146`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020146) |> Sequence.take!(44)
[25,49,65,85,133,221,323,325,343,425,451,637,931,1105,1225,1369,1387,1649,1729,1921,2149,2465,2701,2821,2825,2977,3325,4165,4577,4753,5525,5725,5833,5941,6305,6517,6601,7345,8911,9061,10349,10585,10961,11221]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 18.",
references: [{:oeis, :a020146, "https://oeis.org/A020146"}]
def create_sequence_a020146(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020146/2)
end
@doc false
@doc offset: 1
def seq_a020146(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 18) end, last)
end
@doc """
OEIS Sequence `A020147` - Pseudoprimes to base 19.
From [OEIS A020147](https://oeis.org/A020147):
> Pseudoprimes to base 19.
> (Formerly )
**Sequence IDs**: `:a020147`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020147) |> Sequence.take!(45)
[6,9,15,18,45,49,153,169,343,561,637,889,905,906,1035,1105,1629,1661,1849,1891,2353,2465,2701,2821,2955,3201,4033,4681,5461,5466,5713,6223,6541,6601,6697,7957,8145,8281,8401,8869,9211,9997,10021,10515,10585]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 19.",
references: [{:oeis, :a020147, "https://oeis.org/A020147"}]
def create_sequence_a020147(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020147/2)
end
@doc false
@doc offset: 1
def seq_a020147(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 19) end, last)
end
@doc """
OEIS Sequence `A020148` - Pseudoprimes to base 20.
From [OEIS A020148](https://oeis.org/A020148):
> Pseudoprimes to base 20.
> (Formerly )
**Sequence IDs**: `:a020148`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020148) |> Sequence.take!(42)
[21,57,133,231,399,561,671,861,889,1281,1653,1729,1891,2059,2413,2501,2761,2821,2947,3059,3201,4047,5271,5461,5473,5713,5833,6601,6817,7999,8421,8911,11229,11557,11837,12801,13051,13981,14091,15251,15311,15841]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 20.",
references: [{:oeis, :a020148, "https://oeis.org/A020148"}]
def create_sequence_a020148(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020148/2)
end
@doc false
@doc offset: 1
def seq_a020148(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 20) end, last)
end
@doc """
OEIS Sequence `A020149` - Pseudoprimes to base 21.
From [OEIS A020149](https://oeis.org/A020149):
> Pseudoprimes to base 21.
**Sequence IDs**: `:a020149`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020149) |> Sequence.take!(42)
[4,10,20,55,65,85,221,703,793,1045,1105,1852,2035,2465,3781,4630,5185,5473,5995,6541,7363,8695,8965,9061,10585,10945,11647,13019,13051,13981,14491,17767,18103,18721,19345,19669,19909,21667,22681,23155,24013,25465]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 21.",
references: [{:oeis, :a020149, "https://oeis.org/A020149"}]
def create_sequence_a020149(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020149/2)
end
@doc false
@doc offset: 1
def seq_a020149(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 21) end, last)
end
@doc """
OEIS Sequence `A020150` - Pseudoprimes to base 22.
From [OEIS A020150](https://oeis.org/A020150):
> Pseudoprimes to base 22.
**Sequence IDs**: `:a020150`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020150) |> Sequence.take!(45)
[21,69,91,105,161,169,345,483,485,645,805,1105,1183,1247,1261,1541,1649,1729,1891,2037,2041,2047,2413,2465,2737,2821,3241,3605,3801,5551,5565,5963,6019,6601,6693,7081,7107,7267,7665,8119,8365,8421,8911,9453,10185]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 22.",
references: [{:oeis, :a020150, "https://oeis.org/A020150"}]
def create_sequence_a020150(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020150/2)
end
@doc false
@doc offset: 1
def seq_a020150(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 22) end, last)
end
@doc """
OEIS Sequence `A020151` - Pseudoprimes to base 23.
From [OEIS A020151](https://oeis.org/A020151):
> Pseudoprimes to base 23.
**Sequence IDs**: `:a020151`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020151) |> Sequence.take!(46)
[22,33,91,154,165,169,265,341,385,451,481,553,561,638,946,1027,1045,1065,1105,1183,1271,1729,1738,1749,2059,2321,2465,2501,2701,2821,2926,3097,3445,4033,4081,4345,4371,4681,5005,5149,6253,6369,6533,6541,7189,7267]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 23.",
references: [{:oeis, :a020151, "https://oeis.org/A020151"}]
def create_sequence_a020151(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020151/2)
end
@doc false
@doc offset: 1
def seq_a020151(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 23) end, last)
end
@doc """
OEIS Sequence `A020152` - Pseudoprimes to base 24.
From [OEIS A020152](https://oeis.org/A020152):
> Pseudoprimes to base 24.
**Sequence IDs**: `:a020152`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020152) |> Sequence.take!(43)
[25,115,175,325,553,575,805,949,1105,1541,1729,1771,1825,1975,2413,2425,2465,2701,2737,2821,2885,3781,4207,4537,6601,6931,6943,7081,7189,7471,7501,7813,8725,8911,9085,9361,9809,10465,10585,11557,12025,13825,14425]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 24.",
references: [{:oeis, :a020152, "https://oeis.org/A020152"}]
def create_sequence_a020152(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020152/2)
end
@doc false
@doc offset: 1
def seq_a020152(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 24) end, last)
end
@doc """
OEIS Sequence `A020153` - Pseudoprimes to base 25.
From [OEIS A020153](https://oeis.org/A020153):
> Pseudoprimes to base 25.
**Sequence IDs**: `:a020153`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020153) |> Sequence.take!(50)
[4,6,8,12,24,28,39,66,91,124,217,232,276,403,426,451,532,561,616,703,781,804,868,946,1128,1288,1541,1729,1891,2047,2701,2806,2821,2911,2926,3052,3126,3367,3592,3976,4069,4123,4207,4564,4636,4686,5321,5461,5551,5611]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 25.",
references: [{:oeis, :a020153, "https://oeis.org/A020153"}]
def create_sequence_a020153(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020153/2)
end
@doc false
@doc offset: 1
def seq_a020153(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 25) end, last)
end
@doc """
OEIS Sequence `A020154` - Pseudoprimes to base 26.
From [OEIS A020154](https://oeis.org/A020154):
> Pseudoprimes to base 26.
**Sequence IDs**: `:a020154`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020154) |> Sequence.take!(47)
[9,15,25,27,45,75,133,135,153,175,217,225,259,425,475,561,589,675,703,775,925,1035,1065,1147,2465,3145,3325,3385,3565,3825,4123,4525,4741,4921,5041,5425,6093,6475,6525,6601,6697,8029,8695,8911,9073,10585,11005]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 26.",
references: [{:oeis, :a020154, "https://oeis.org/A020154"}]
def create_sequence_a020154(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020154/2)
end
@doc false
@doc offset: 1
def seq_a020154(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 26) end, last)
end
@doc """
OEIS Sequence `A020155` - Pseudoprimes to base 27.
From [OEIS A020155](https://oeis.org/A020155):
> Pseudoprimes to base 27.
**Sequence IDs**: `:a020155`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020155) |> Sequence.take!(45)
[26,65,91,121,133,247,259,286,341,365,481,671,703,949,1001,1105,1541,1649,1729,1891,2071,2465,2665,2701,2821,2981,2993,3146,3281,3367,3605,3751,4033,4745,4921,4961,5299,5461,5551,5611,5621,6305,6533,6601,7381]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 27.",
references: [{:oeis, :a020155, "https://oeis.org/A020155"}]
def create_sequence_a020155(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020155/2)
end
@doc false
@doc offset: 1
def seq_a020155(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 27) end, last)
end
@doc """
OEIS Sequence `A020156` - Pseudoprimes to base 28.
From [OEIS A020156](https://oeis.org/A020156):
> Pseudoprimes to base 28.
**Sequence IDs**: `:a020156`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020156) |> Sequence.take!(43)
[9,27,45,87,145,261,361,529,561,703,783,785,1105,1305,1413,1431,1885,2041,2413,2465,2871,3201,3277,4553,4699,5149,5181,5365,7065,8149,8321,8401,9841,10027,10585,12673,13333,13345,13357,13833,14383,14769,14981]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 28.",
references: [{:oeis, :a020156, "https://oeis.org/A020156"}]
def create_sequence_a020156(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020156/2)
end
@doc false
@doc offset: 1
def seq_a020156(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 28) end, last)
end
@doc """
OEIS Sequence `A020157` - Pseudoprimes to base 29.
From [OEIS A020157](https://oeis.org/A020157):
> Pseudoprimes to base 29.
**Sequence IDs**: `:a020157`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020157) |> Sequence.take!(48)
[4,14,15,21,28,35,52,91,105,231,268,341,364,469,481,561,651,793,871,1105,1729,1876,1897,2105,2257,2821,3484,3523,4069,4371,4411,5149,5185,5356,5473,5565,5611,6097,6601,7161,7294,8321,8401,8421,8841,8911,11041,11581]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 29.",
references: [{:oeis, :a020157, "https://oeis.org/A020157"}]
def create_sequence_a020157(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020157/2)
end
@doc false
@doc offset: 1
def seq_a020157(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 29) end, last)
end
@doc """
OEIS Sequence `A020158` - Pseudoprimes to base 30.
From [OEIS A020158](https://oeis.org/A020158):
> Pseudoprimes to base 30.
**Sequence IDs**: `:a020158`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020158) |> Sequence.take!(46)
[49,91,133,217,247,341,403,469,493,589,637,703,871,899,901,931,1273,1519,1537,1729,2059,2077,2821,3097,3277,3283,3367,3577,4081,4097,4123,5729,6031,6061,6097,6409,6601,6817,7657,8023,8029,8401,8911,9881,11041,11713]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 30.",
references: [{:oeis, :a020158, "https://oeis.org/A020158"}]
def create_sequence_a020158(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020158/2)
end
@doc false
@doc offset: 1
def seq_a020158(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 30) end, last)
end
@doc """
OEIS Sequence `A020159` - Pseudoprimes to base 31.
From [OEIS A020159](https://oeis.org/A020159):
> Pseudoprimes to base 31.
**Sequence IDs**: `:a020159`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020159) |> Sequence.take!(45)
[6,10,15,30,49,65,66,133,185,451,481,561,637,931,946,1105,1221,1729,1813,2317,2405,2465,2553,3310,4753,4921,6241,6289,6601,7107,7421,7449,8177,8911,9073,9131,10470,10585,10963,11041,12403,14191,16219,17767,18721]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 31.",
references: [{:oeis, :a020159, "https://oeis.org/A020159"}]
def create_sequence_a020159(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020159/2)
end
@doc false
@doc offset: 1
def seq_a020159(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 31) end, last)
end
@doc """
OEIS Sequence `A020160` - Pseudoprimes to base 32.
From [OEIS A020160](https://oeis.org/A020160):
> Pseudoprimes to base 32.
**Sequence IDs**: `:a020160`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020160) |> Sequence.take!(46)
[25,33,93,165,205,217,325,341,385,425,465,561,645,697,793,825,1023,1025,1045,1057,1065,1105,1353,1387,1525,1705,1729,1905,2047,2317,2325,2465,2665,2701,2761,2821,3053,3157,3277,3565,3813,4033,4123,4141,4369,4371]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 32.",
references: [{:oeis, :a020160, "https://oeis.org/A020160"}]
def create_sequence_a020160(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020160/2)
end
@doc false
@doc offset: 1
def seq_a020160(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 32) end, last)
end
@doc """
OEIS Sequence `A020161` - Pseudoprimes to base 33.
From [OEIS A020161](https://oeis.org/A020161):
> Pseudoprimes to base 33.
**Sequence IDs**: `:a020161`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020161) |> Sequence.take!(43)
[4,8,16,32,85,496,545,703,1057,1105,1417,1649,1729,1853,2465,2501,2821,3368,4033,4492,4681,5461,5713,5833,6533,6601,7861,8911,9061,9073,9265,10585,11305,11359,12209,12403,13741,15841,16589,16745,17968,18103,19909]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 33.",
references: [{:oeis, :a020161, "https://oeis.org/A020161"}]
def create_sequence_a020161(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020161/2)
end
@doc false
@doc offset: 1
def seq_a020161(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 33) end, last)
end
@doc """
OEIS Sequence `A020162` - Pseudoprimes to base 34.
From [OEIS A020162](https://oeis.org/A020162):
> Pseudoprimes to base 34.
**Sequence IDs**: `:a020162`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020162) |> Sequence.take!(47)
[15,21,33,35,55,65,77,105,165,231,273,385,429,435,445,671,703,1001,1045,1065,1155,1157,1281,1365,1729,1869,1891,2035,2059,2071,2145,2779,2821,2937,3201,3605,4033,4795,5005,5161,5565,5785,5995,6305,6533,6601,6853]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 34.",
references: [{:oeis, :a020162, "https://oeis.org/A020162"}]
def create_sequence_a020162(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020162/2)
end
@doc false
@doc offset: 1
def seq_a020162(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 34) end, last)
end
@doc """
OEIS Sequence `A020163` - Pseudoprimes to base 35.
From [OEIS A020163](https://oeis.org/A020163):
> Pseudoprimes to base 35.
**Sequence IDs**: `:a020163`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020163) |> Sequence.take!(41)
[9,34,51,153,341,442,561,782,1247,1261,1581,1921,2278,2431,2701,2871,3298,3601,4371,5083,5161,5517,7543,7633,7969,8398,10421,11041,12403,13051,13833,14689,15051,16441,16589,17391,19006,19041,19951,20026,22681]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 35.",
references: [{:oeis, :a020163, "https://oeis.org/A020163"}]
def create_sequence_a020163(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020163/2)
end
@doc false
@doc offset: 1
def seq_a020163(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 35) end, last)
end
@doc """
OEIS Sequence `A020164` - Pseudoprimes to base 36.
From [OEIS A020164](https://oeis.org/A020164):
> Pseudoprimes to base 36.
**Sequence IDs**: `:a020164`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020164) |> Sequence.take!(45)
[35,91,185,217,259,301,403,481,559,679,703,1105,1111,1147,1261,1295,1333,1387,1591,1729,1891,2465,2651,2701,2821,3007,3145,3367,3421,3565,3589,3913,4123,4141,4171,4495,5551,5611,5713,6001,6485,6533,6601,6643,7471]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 36.",
references: [{:oeis, :a020164, "https://oeis.org/A020164"}]
def create_sequence_a020164(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020164/2)
end
@doc false
@doc offset: 1
def seq_a020164(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 36) end, last)
end
@doc """
OEIS Sequence `A020165` - Pseudoprimes to base 37.
From [OEIS A020165](https://oeis.org/A020165):
> Pseudoprimes to base 37.
**Sequence IDs**: `:a020165`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020165) |> Sequence.take!(50)
[4,6,9,12,18,28,36,45,57,66,133,171,217,246,268,285,301,396,451,469,561,589,685,801,817,1065,1105,1233,1273,1333,1387,1476,1653,1729,1876,1881,2044,2077,2413,2465,2501,2556,2706,2821,2881,3556,3565,3781,3913,4047]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 37.",
references: [{:oeis, :a020165, "https://oeis.org/A020165"}]
def create_sequence_a020165(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020165/2)
end
@doc false
@doc offset: 1
def seq_a020165(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 37) end, last)
end
@doc """
OEIS Sequence `A020166` - Pseudoprimes to base 38.
From [OEIS A020166](https://oeis.org/A020166):
> Pseudoprimes to base 38.
**Sequence IDs**: `:a020166`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020166) |> Sequence.take!(46)
[39,65,85,91,111,185,221,259,289,469,481,561,629,697,871,1105,1221,1443,1445,2405,2465,2479,2553,2665,2701,2821,3145,3367,3585,3757,4033,4187,4681,5291,5461,6031,6097,6601,6931,7449,7585,7613,7957,8177,9073,9919]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 38.",
references: [{:oeis, :a020166, "https://oeis.org/A020166"}]
def create_sequence_a020166(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020166/2)
end
@doc false
@doc offset: 1
def seq_a020166(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 38) end, last)
end
@doc """
OEIS Sequence `A020167` - Pseudoprimes to base 39.
From [OEIS A020167](https://oeis.org/A020167):
> Pseudoprimes to base 39.
**Sequence IDs**: `:a020167`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020167) |> Sequence.take!(40)
[38,95,133,341,1561,1834,1891,2047,2101,2465,3053,3439,3805,4141,4237,4411,5662,5921,6533,6601,6697,8149,8321,8911,10381,10585,12403,12431,13889,13981,15841,16297,16441,16589,17081,20567,22681,23521,26885,28153]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 39.",
references: [{:oeis, :a020167, "https://oeis.org/A020167"}]
def create_sequence_a020167(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020167/2)
end
@doc false
@doc offset: 1
def seq_a020167(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 39) end, last)
end
@doc """
OEIS Sequence `A020168` - Pseudoprimes to base 40.
From [OEIS A020168](https://oeis.org/A020168):
> Pseudoprimes to base 40.
**Sequence IDs**: `:a020168`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020168) |> Sequence.take!(42)
[39,91,121,123,289,451,533,561,703,793,1541,1561,1599,1729,1921,2821,2899,3097,3367,3751,3829,4961,5461,5729,6031,6601,7111,7201,7381,8911,9073,9881,10897,11011,11041,11121,11521,12403,12801,13073,13333,13981]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 40.",
references: [{:oeis, :a020168, "https://oeis.org/A020168"}]
def create_sequence_a020168(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020168/2)
end
@doc false
@doc offset: 1
def seq_a020168(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 40) end, last)
end
@doc """
OEIS Sequence `A020169` - Pseudoprimes to base 41.
From [OEIS A020169](https://oeis.org/A020169):
> Pseudoprimes to base 41.
**Sequence IDs**: `:a020169`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020169) |> Sequence.take!(48)
[4,8,10,15,20,21,35,40,105,145,231,344,561,609,645,671,703,841,1065,1105,1281,1387,1417,1729,1885,1891,2121,2465,2701,2821,3045,3053,3829,4033,4205,4521,4870,5365,5565,6161,6892,7957,8295,8321,8695,8905,8911,9253]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 41.",
references: [{:oeis, :a020169, "https://oeis.org/A020169"}]
def create_sequence_a020169(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020169/2)
end
@doc false
@doc offset: 1
def seq_a020169(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 41) end, last)
end
@doc """
OEIS Sequence `A020170` - Pseudoprimes to base 42.
From [OEIS A020170](https://oeis.org/A020170):
> Pseudoprimes to base 42.
**Sequence IDs**: `:a020170`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020170) |> Sequence.take!(40)
[205,451,529,559,697,1105,1247,1441,1541,1763,1765,1807,1891,1921,1991,2465,2665,5371,5611,5977,6001,7345,7421,8041,8749,9773,10585,10621,11041,12167,12403,13333,13981,14473,14491,14981,15457,17611,18721,22399]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 42.",
references: [{:oeis, :a020170, "https://oeis.org/A020170"}]
def create_sequence_a020170(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020170/2)
end
@doc false
@doc offset: 1
def seq_a020170(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 42) end, last)
end
@doc """
OEIS Sequence `A020171` - Pseudoprimes to base 43.
From [OEIS A020171](https://oeis.org/A020171):
> Pseudoprimes to base 43.
**Sequence IDs**: `:a020171`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020171) |> Sequence.take!(49)
[6,14,21,25,33,42,77,91,105,165,185,231,325,385,425,481,525,561,777,825,861,925,973,1045,1105,1221,1541,1729,1785,1807,1825,1925,2071,2425,2465,2553,2821,2849,3145,3281,3439,3781,3885,4033,4417,4825,5005,5565,6105]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 43.",
references: [{:oeis, :a020171, "https://oeis.org/A020171"}]
def create_sequence_a020171(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020171/2)
end
@doc false
@doc offset: 1
def seq_a020171(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 43) end, last)
end
@doc """
OEIS Sequence `A020172` - Pseudoprimes to base 44.
From [OEIS A020172](https://oeis.org/A020172):
> Pseudoprimes to base 44.
**Sequence IDs**: `:a020172`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020172) |> Sequence.take!(46)
[9,15,45,65,117,129,215,301,369,387,585,645,703,745,1017,1035,1105,1341,1677,1729,1921,1935,1937,1981,2047,2193,2465,2665,2821,3585,3913,4005,4097,4417,4633,5289,6273,6533,6601,6705,7281,7345,8385,8695,8911,9331]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 44.",
references: [{:oeis, :a020172, "https://oeis.org/A020172"}]
def create_sequence_a020172(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020172/2)
end
@doc false
@doc offset: 1
def seq_a020172(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 44) end, last)
end
@doc """
OEIS Sequence `A020173` - Pseudoprimes to base 45.
From [OEIS A020173](https://oeis.org/A020173):
> Pseudoprimes to base 45.
**Sequence IDs**: `:a020173`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020173) |> Sequence.take!(45)
[4,22,44,76,133,253,418,436,451,481,638,763,1247,1417,1541,1562,1729,1771,1891,1981,2047,2059,2071,2356,2398,2737,2821,3053,3289,3553,4033,4807,4921,5377,5963,6322,6533,6601,6817,7337,8284,8321,8911,9361,10879]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 45.",
references: [{:oeis, :a020173, "https://oeis.org/A020173"}]
def create_sequence_a020173(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020173/2)
end
@doc false
@doc offset: 1
def seq_a020173(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 45) end, last)
end
@doc """
OEIS Sequence `A020174` - Pseudoprimes to base 46.
From [OEIS A020174](https://oeis.org/A020174):
> Pseudoprimes to base 46.
**Sequence IDs**: `:a020174`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020174) |> Sequence.take!(47)
[9,15,45,133,141,145,235,261,341,365,423,561,657,703,705,721,763,781,949,1105,1305,1417,1551,1645,1729,1885,1891,1957,1991,2071,2115,2117,2201,2465,2701,2821,3201,3285,4033,4089,4187,4371,4465,4681,5365,5611,5781]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 46.",
references: [{:oeis, :a020174, "https://oeis.org/A020174"}]
def create_sequence_a020174(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020174/2)
end
@doc false
@doc offset: 1
def seq_a020174(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 46) end, last)
end
@doc """
OEIS Sequence `A020175` - Pseudoprimes to base 47.
From [OEIS A020175](https://oeis.org/A020175):
> Pseudoprimes to base 47.
**Sequence IDs**: `:a020175`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020175) |> Sequence.take!(47)
[46,65,69,85,221,259,341,345,427,481,506,561,645,703,721,793,805,874,897,946,1105,1173,1426,1581,1649,1702,1729,1771,1891,2257,2465,2737,2806,2821,3145,3201,3811,4301,4485,5185,5461,5865,6283,6305,6601,6943,8911]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 47.",
references: [{:oeis, :a020175, "https://oeis.org/A020175"}]
def create_sequence_a020175(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020175/2)
end
@doc false
@doc offset: 1
def seq_a020175(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 47) end, last)
end
@doc """
OEIS Sequence `A020176` - Pseudoprimes to base 48.
From [OEIS A020176](https://oeis.org/A020176):
> Pseudoprimes to base 48.
**Sequence IDs**: `:a020176`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020176) |> Sequence.take!(45)
[49,91,245,259,329,427,481,637,703,793,833,1105,1267,1645,1729,1813,1891,1921,2257,2303,2305,2353,2465,2701,2821,2989,3367,3439,4465,4753,5185,5537,5551,5611,5951,6533,6601,6697,6721,7345,8869,8911,9457,9881,10021]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 48.",
references: [{:oeis, :a020176, "https://oeis.org/A020176"}]
def create_sequence_a020176(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020176/2)
end
@doc false
@doc offset: 1
def seq_a020176(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 48) end, last)
end
@doc """
OEIS Sequence `A020177` - Pseudoprimes to base 49.
From [OEIS A020177](https://oeis.org/A020177):
> Pseudoprimes to base 49.
**Sequence IDs**: `:a020177`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020177) |> Sequence.take!(50)
[4,6,8,12,15,16,24,25,48,66,75,76,172,176,232,247,276,304,325,425,435,475,496,559,561,688,703,817,904,946,949,1075,1105,1128,1146,1695,1825,1891,2101,2353,2356,2465,2486,2509,2701,3056,3091,3268,3277,3439]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 49.",
references: [{:oeis, :a020177, "https://oeis.org/A020177"}]
def create_sequence_a020177(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020177/2)
end
@doc false
@doc offset: 1
def seq_a020177(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 49) end, last)
end
@doc """
OEIS Sequence `A020178` - Pseudoprimes to base 50.
From [OEIS A020178](https://oeis.org/A020178):
> Pseudoprimes to base 50.
**Sequence IDs**: `:a020178`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020178) |> Sequence.take!(46)
[21,49,51,119,133,147,231,301,357,561,637,697,793,817,833,861,931,1037,1281,1649,1729,2009,2041,2047,2107,2499,2501,2701,2821,2989,3201,3281,3913,3977,4753,5461,5719,6601,7693,7701,8041,8113,8911,9061,9073,9331]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 50.",
references: [{:oeis, :a020178, "https://oeis.org/A020178"}]
def create_sequence_a020178(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020178/2)
end
@doc false
@doc offset: 1
def seq_a020178(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 50) end, last)
end
@doc """
OEIS Sequence `A020179` - Pseudoprimes to base 51.
From [OEIS A020179](https://oeis.org/A020179):
> Pseudoprimes to base 51.
**Sequence IDs**: `:a020179`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020179) |> Sequence.take!(43)
[10,25,50,65,70,91,175,325,451,481,925,1247,1681,1729,1825,2059,2275,2653,2821,3053,3790,4745,4927,5461,6175,6305,6505,6601,7201,8365,8911,9031,9475,9730,9850,10585,11041,12025,12209,12403,13366,13427,13747]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 51.",
references: [{:oeis, :a020179, "https://oeis.org/A020179"}]
def create_sequence_a020179(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020179/2)
end
@doc false
@doc offset: 1
def seq_a020179(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 51) end, last)
end
@doc """
OEIS Sequence `A020180` - Pseudoprimes to base 52.
From [OEIS A020180](https://oeis.org/A020180):
> Pseudoprimes to base 52.
**Sequence IDs**: `:a020180`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020180) |> Sequence.take!(42)
[51,85,159,265,561,671,901,1513,1541,1891,2413,2465,2653,2703,2705,3111,3201,3421,4081,4187,4505,5151,6307,6433,6533,6601,6943,7201,8365,8911,9197,9773,9911,10349,10585,11305,12403,13019,13333,14491,15051,15841]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 52.",
references: [{:oeis, :a020180, "https://oeis.org/A020180"}]
def create_sequence_a020180(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020180/2)
end
@doc false
@doc offset: 1
def seq_a020180(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 52) end, last)
end
@doc """
OEIS Sequence `A020181` - Pseudoprimes to base 53.
From [OEIS A020181](https://oeis.org/A020181):
> Pseudoprimes to base 53.
**Sequence IDs**: `:a020181`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020181) |> Sequence.take!(48)
[4,9,26,27,28,39,45,52,65,91,117,153,286,351,364,561,585,703,946,1036,1105,1405,1441,1541,1636,1729,2209,2465,2529,2821,2863,2871,3097,3277,3367,3406,3481,3653,3861,4005,4564,4777,5317,5833,6031,6364,6433,6601]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 53.",
references: [{:oeis, :a020181, "https://oeis.org/A020181"}]
def create_sequence_a020181(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020181/2)
end
@doc false
@doc offset: 1
def seq_a020181(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 53) end, last)
end
@doc """
OEIS Sequence `A020182` - Pseudoprimes to base 54.
From [OEIS A020182](https://oeis.org/A020182):
> Pseudoprimes to base 54.
**Sequence IDs**: `:a020182`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020182) |> Sequence.take!(42)
[55,265,341,361,385,583,781,1045,1105,1247,1729,1855,2201,2465,2701,2821,2863,2915,3445,4033,4069,4081,5005,5317,5461,6095,6601,7471,7957,8321,8911,9073,10585,11713,13357,14585,14701,14905,15409,15841,17755,18721]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 54.",
references: [{:oeis, :a020182, "https://oeis.org/A020182"}]
def create_sequence_a020182(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020182/2)
end
@doc false
@doc offset: 1
def seq_a020182(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 54) end, last)
end
@doc """
OEIS Sequence `A020183` - Pseudoprimes to base 55.
From [OEIS A020183](https://oeis.org/A020183):
> Pseudoprimes to base 55.
**Sequence IDs**: `:a020183`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020183) |> Sequence.take!(45)
[6,9,18,21,27,54,63,91,153,189,357,369,553,697,801,1027,1266,1387,1513,1729,1869,2701,2821,3213,3649,4033,4431,6273,6533,6541,6601,6643,7189,7957,8911,9773,9937,10649,10761,13333,13617,13833,14981,15457,15841]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 55.",
references: [{:oeis, :a020183, "https://oeis.org/A020183"}]
def create_sequence_a020183(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020183/2)
end
@doc false
@doc offset: 1
def seq_a020183(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 55) end, last)
end
@doc """
OEIS Sequence `A020184` - Pseudoprimes to base 56.
From [OEIS A020184](https://oeis.org/A020184):
> Pseudoprimes to base 56.
**Sequence IDs**: `:a020184`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020184) |> Sequence.take!(45)
[15,33,55,57,95,165,209,247,285,403,561,589,627,715,1027,1045,1105,1339,1501,1653,1705,1891,1957,1991,2449,2465,3135,3193,3277,3553,3565,4345,5611,5665,6441,7657,8137,8321,10585,11041,11077,12403,13585,13695,15685]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 56.",
references: [{:oeis, :a020184, "https://oeis.org/A020184"}]
def create_sequence_a020184(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020184/2)
end
@doc false
@doc offset: 1
def seq_a020184(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 56) end, last)
end
@doc """
OEIS Sequence `A020185` - Pseudoprimes to base 57.
From [OEIS A020185](https://oeis.org/A020185):
> Pseudoprimes to base 57.
**Sequence IDs**: `:a020185`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020185) |> Sequence.take!(47)
[4,8,14,25,28,56,65,125,145,203,217,325,377,451,721,725,781,1001,1105,1625,1885,1891,2047,2296,2465,2701,2821,2911,3193,3277,3565,3625,3976,4141,4187,5365,5425,6461,6533,6601,7501,7613,8029,8401,9373,9425,10325]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 57.",
references: [{:oeis, :a020185, "https://oeis.org/A020185"}]
def create_sequence_a020185(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020185/2)
end
@doc false
@doc offset: 1
def seq_a020185(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 57) end, last)
end
@doc """
OEIS Sequence `A020186` - Pseudoprimes to base 58.
From [OEIS A020186](https://oeis.org/A020186):
> Pseudoprimes to base 58.
**Sequence IDs**: `:a020186`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020186) |> Sequence.take!(43)
[57,133,177,285,341,561,671,703,885,1105,1121,1141,1441,1541,1729,1891,2065,2821,3009,3097,3165,3363,3365,3781,4061,4071,4371,5605,6031,6601,7363,7471,7991,8119,8321,8749,8911,9073,11441,11859,12027,12331,12403]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 58.",
references: [{:oeis, :a020186, "https://oeis.org/A020186"}]
def create_sequence_a020186(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020186/2)
end
@doc false
@doc offset: 1
def seq_a020186(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 58) end, last)
end
@doc """
OEIS Sequence `A020187` - Pseudoprimes to base 59.
From [OEIS A020187](https://oeis.org/A020187):
> Pseudoprimes to base 59.
**Sequence IDs**: `:a020187`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020187) |> Sequence.take!(43)
[15,58,87,145,435,451,561,645,946,1015,1105,1141,1247,1541,1661,1729,1885,1991,2413,2465,2755,2821,3097,4215,4681,4795,5365,5611,5729,6191,6409,6533,6601,7421,8149,8321,8705,8911,9637,10081,10217,10585,11041]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 59.",
references: [{:oeis, :a020187, "https://oeis.org/A020187"}]
def create_sequence_a020187(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020187/2)
end
@doc false
@doc offset: 1
def seq_a020187(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 59) end, last)
end
@doc """
OEIS Sequence `A020188` - Pseudoprimes to base 60.
From [OEIS A020188](https://oeis.org/A020188):
> Pseudoprimes to base 60.
**Sequence IDs**: `:a020188`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020188) |> Sequence.take!(40)
[341,427,481,671,793,841,1729,1891,2257,2821,3133,3277,3599,3601,3661,4577,4777,6001,6161,6533,6601,6943,8911,8917,9937,10249,11521,12331,13333,13481,14701,14981,15841,16897,18889,20591,20801,21361,22321,23479]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 60.",
references: [{:oeis, :a020188, "https://oeis.org/A020188"}]
def create_sequence_a020188(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020188/2)
end
@doc false
@doc offset: 1
def seq_a020188(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 60) end, last)
end
@doc """
OEIS Sequence `A020189` - Pseudoprimes to base 61.
From [OEIS A020189](https://oeis.org/A020189):
> Pseudoprimes to base 61.
**Sequence IDs**: `:a020189`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020189) |> Sequence.take!(49)
[4,6,10,12,15,20,30,52,60,91,93,130,155,190,217,341,388,403,465,561,679,786,970,1105,1261,1441,1729,2388,2465,2701,2821,3007,3406,3565,3661,4061,4123,4371,4577,4580,5044,5356,5461,6541,6601,6697,6799,7107,7372]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 61.",
references: [{:oeis, :a020189, "https://oeis.org/A020189"}]
def create_sequence_a020189(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020189/2)
end
@doc false
@doc offset: 1
def seq_a020189(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 61) end, last)
end
@doc """
OEIS Sequence `A020190` - Pseudoprimes to base 62.
From [OEIS A020190](https://oeis.org/A020190):
> Pseudoprimes to base 62.
**Sequence IDs**: `:a020190`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020190) |> Sequence.take!(47)
[9,21,45,63,91,105,183,231,305,361,427,549,561,671,679,703,793,861,1105,1261,1281,1541,1729,2121,2465,2501,2745,2871,3367,3439,3843,3845,4141,4187,4577,5185,5307,5551,5565,5901,5917,6161,6405,6533,6601,6697,6849]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 62.",
references: [{:oeis, :a020190, "https://oeis.org/A020190"}]
def create_sequence_a020190(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020190/2)
end
@doc false
@doc offset: 1
def seq_a020190(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 62) end, last)
end
@doc """
OEIS Sequence `A020191` - Pseudoprimes to base 63.
From [OEIS A020191](https://oeis.org/A020191):
> Pseudoprimes to base 63.
**Sequence IDs**: `:a020191`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020191) |> Sequence.take!(41)
[62,341,481,529,703,841,1105,1147,1417,1985,2071,2465,2509,3379,3565,3683,4033,4577,5161,5461,5662,6119,6533,6943,7141,7711,9073,9265,10585,13333,13747,14089,14689,14981,15458,18721,19345,19685,19951,21037,21361]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 63.",
references: [{:oeis, :a020191, "https://oeis.org/A020191"}]
def create_sequence_a020191(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020191/2)
end
@doc false
@doc offset: 1
def seq_a020191(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 63) end, last)
end
@doc """
OEIS Sequence `A020192` - Pseudoprimes to base 64.
From [OEIS A020192](https://oeis.org/A020192):
> Pseudoprimes to base 64.
**Sequence IDs**: `:a020192`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020192) |> Sequence.take!(52)
[9,15,21,35,39,45,63,65,85,91,105,117,133,153,195,221,231,247,259,273,315,341,357,435,451,455,481,511,561,585,645,651,671,703,763,765,819,861,873,949,1001,1035,1105,1205,1247,1271,1281,1365,1387,1417,1541,1581]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 64.",
references: [{:oeis, :a020192, "https://oeis.org/A020192"}]
def create_sequence_a020192(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020192/2)
end
@doc false
@doc offset: 1
def seq_a020192(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 64) end, last)
end
@doc """
OEIS Sequence `A020193` - Pseudoprimes to base 65.
From [OEIS A020193](https://oeis.org/A020193):
> Pseudoprimes to base 65.
**Sequence IDs**: `:a020193`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020193) |> Sequence.take!(45)
[4,8,16,28,32,33,64,112,133,232,289,448,511,561,703,1111,1247,1387,1856,1891,1921,2452,2701,3439,3553,3729,4291,4564,5068,6533,6601,6697,8321,8911,9537,9709,9808,9809,10681,11077,11584,11647,12754,13213,14981]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 65.",
references: [{:oeis, :a020193, "https://oeis.org/A020193"}]
def create_sequence_a020193(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020193/2)
end
@doc false
@doc offset: 1
def seq_a020193(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 65) end, last)
end
@doc """
OEIS Sequence `A020194` - Pseudoprimes to base 66.
From [OEIS A020194](https://oeis.org/A020194):
> Pseudoprimes to base 66.
**Sequence IDs**: `:a020194`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020194) |> Sequence.take!(42)
[65,91,335,469,481,871,1105,1271,1541,1729,1891,2071,2201,2465,2821,2911,3145,4033,4291,4355,4681,5461,5551,6097,6601,6953,7969,8911,9211,9919,10585,11305,11647,13019,13741,15211,15841,17353,19345,19757,20591,21785]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 66.",
references: [{:oeis, :a020194, "https://oeis.org/A020194"}]
def create_sequence_a020194(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020194/2)
end
@doc false
@doc offset: 1
def seq_a020194(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 66) end, last)
end
@doc """
OEIS Sequence `A020195` - Pseudoprimes to base 67.
From [OEIS A020195](https://oeis.org/A020195):
> Pseudoprimes to base 67.
**Sequence IDs**: `:a020195`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020195) |> Sequence.take!(47)
[6,22,33,49,51,66,85,154,165,187,217,385,561,637,682,703,946,1045,1078,1105,1309,1519,1705,1729,2047,2209,2245,2465,2701,2805,2821,3165,3201,3565,4123,4165,4566,4631,4774,5005,5214,5611,5797,6119,6369,6601,7633]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 67.",
references: [{:oeis, :a020195, "https://oeis.org/A020195"}]
def create_sequence_a020195(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020195/2)
end
@doc false
@doc offset: 1
def seq_a020195(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 67) end, last)
end
@doc """
OEIS Sequence `A020196` - Pseudoprimes to base 68.
From [OEIS A020196](https://oeis.org/A020196):
> Pseudoprimes to base 68.
**Sequence IDs**: `:a020196`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020196) |> Sequence.take!(48)
[25,49,69,91,125,133,185,201,217,247,325,345,361,403,469,481,589,637,805,871,925,931,1005,1025,1221,1225,1273,1417,1519,1541,1725,1729,1771,1813,2077,2185,2211,2413,2527,2553,2665,2725,2821,3283,3325,3565,4033,4123]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 68.",
references: [{:oeis, :a020196, "https://oeis.org/A020196"}]
def create_sequence_a020196(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020196/2)
end
@doc false
@doc offset: 1
def seq_a020196(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 68) end, last)
end
@doc """
OEIS Sequence `A020197` - Pseudoprimes to base 69.
From [OEIS A020197](https://oeis.org/A020197):
> Pseudoprimes to base 69.
**Sequence IDs**: `:a020197`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020197) |> Sequence.take!(44)
[4,34,35,68,85,91,119,133,247,361,595,1105,1387,1615,1729,1921,2431,2465,2527,2701,2821,3605,3655,4187,4693,4849,5713,6161,6643,6943,7345,7735,8911,10349,10585,11191,11305,11905,13019,13357,14246,14315,15181,15841]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 69.",
references: [{:oeis, :a020197, "https://oeis.org/A020197"}]
def create_sequence_a020197(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020197/2)
end
@doc false
@doc offset: 1
def seq_a020197(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 69) end, last)
end
@doc """
OEIS Sequence `A020198` - Pseudoprimes to base 70.
From [OEIS A020198](https://oeis.org/A020198):
> Pseudoprimes to base 70.
**Sequence IDs**: `:a020198`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020198) |> Sequence.take!(43)
[69,169,213,341,377,561,671,703,781,897,949,1441,1541,1633,1649,1891,2001,2201,2701,2769,2873,3053,3201,4061,4331,4371,4899,4901,6001,6177,6409,6681,7449,7991,9301,9361,11661,12121,12209,12337,12441,12673,12881]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 70.",
references: [{:oeis, :a020198, "https://oeis.org/A020198"}]
def create_sequence_a020198(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020198/2)
end
@doc false
@doc offset: 1
def seq_a020198(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 70) end, last)
end
@doc """
OEIS Sequence `A020199` - Pseudoprimes to base 71.
From [OEIS A020199](https://oeis.org/A020199):
> Pseudoprimes to base 71.
**Sequence IDs**: `:a020199`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020199) |> Sequence.take!(46)
[9,10,14,15,21,35,45,63,70,105,231,315,370,435,561,703,1035,1105,1387,1729,1921,2071,2209,2321,2465,2701,2821,2871,3290,4005,4033,4431,5565,6541,6601,7345,7957,8295,8365,8695,8911,9637,9730,9809,10349,10585]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 71.",
references: [{:oeis, :a020199, "https://oeis.org/A020199"}]
def create_sequence_a020199(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020199/2)
end
@doc false
@doc offset: 1
def seq_a020199(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 71) end, last)
end
@doc """
OEIS Sequence `A020200` - Pseudoprimes to base 72.
From [OEIS A020200](https://oeis.org/A020200):
> Pseudoprimes to base 72.
**Sequence IDs**: `:a020200`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020200) |> Sequence.take!(42)
[85,305,365,451,511,781,793,949,1037,1105,1241,1387,1541,1729,2465,2485,2501,2701,2821,2911,4381,4411,4453,5183,5185,5257,6205,6601,6697,8449,8911,9061,10585,11305,13213,13981,14111,15841,16441,17803,18721,19345]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 72.",
references: [{:oeis, :a020200, "https://oeis.org/A020200"}]
def create_sequence_a020200(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020200/2)
end
@doc false
@doc offset: 1
def seq_a020200(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 72) end, last)
end
@doc """
OEIS Sequence `A020201` - Pseudoprimes to base 73.
From [OEIS A020201](https://oeis.org/A020201):
> Pseudoprimes to base 73.
**Sequence IDs**: `:a020201`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020201) |> Sequence.take!(50)
[4,6,8,9,12,18,24,36,45,65,72,111,117,185,205,259,276,333,369,481,533,561,585,703,1105,1221,1441,1517,1665,1729,1845,1891,1921,2047,2405,2465,2466,2553,2556,2665,2806,2821,3439,4005,4329,4636,4797,5257,5461,5662]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 73.",
references: [{:oeis, :a020201, "https://oeis.org/A020201"}]
def create_sequence_a020201(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020201/2)
end
@doc false
@doc offset: 1
def seq_a020201(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 73) end, last)
end
@doc """
OEIS Sequence `A020202` - Pseudoprimes to base 74.
From [OEIS A020202](https://oeis.org/A020202):
> Pseudoprimes to base 74.
**Sequence IDs**: `:a020202`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020202) |> Sequence.take!(45)
[15,25,75,91,175,219,325,365,427,435,511,561,793,949,1095,1105,1387,1525,1729,1825,1891,2275,2465,2821,4453,4577,4795,5185,5475,5551,6175,6533,6541,6601,6643,7421,7613,8911,10585,10675,11041,12607,12775,12871,13019]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 74.",
references: [{:oeis, :a020202, "https://oeis.org/A020202"}]
def create_sequence_a020202(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020202/2)
end
@doc false
@doc offset: 1
def seq_a020202(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 74) end, last)
end
@doc """
OEIS Sequence `A020203` - Pseudoprimes to base 75.
From [OEIS A020203](https://oeis.org/A020203):
> Pseudoprimes to base 75.
**Sequence IDs**: `:a020203`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020203) |> Sequence.take!(44)
[74,91,133,247,259,289,427,481,703,793,1073,1159,1261,1387,1541,1649,1729,1849,1891,2071,2257,2413,2701,2813,2821,3367,3589,4033,4681,4699,4921,5551,5917,6061,6533,6601,6643,7957,8113,8321,8614,8911,9139,9211]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 75.",
references: [{:oeis, :a020203, "https://oeis.org/A020203"}]
def create_sequence_a020203(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020203/2)
end
@doc false
@doc offset: 1
def seq_a020203(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 75) end, last)
end
@doc """
OEIS Sequence `A020204` - Pseudoprimes to base 76.
From [OEIS A020204](https://oeis.org/A020204):
> Pseudoprimes to base 76.
**Sequence IDs**: `:a020204`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020204) |> Sequence.take!(49)
[15,21,25,33,35,55,75,77,105,165,175,231,265,275,325,385,425,525,545,561,781,825,949,1105,1113,1155,1325,1369,1417,1491,1541,1749,1785,1825,1891,1925,2289,2465,2701,2725,2821,3445,3597,3605,4033,4081,4097,4505,4681]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 76.",
references: [{:oeis, :a020204, "https://oeis.org/A020204"}]
def create_sequence_a020204(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020204/2)
end
@doc false
@doc offset: 1
def seq_a020204(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 76) end, last)
end
@doc """
OEIS Sequence `A020205` - Pseudoprimes to base 77.
From [OEIS A020205](https://oeis.org/A020205):
> Pseudoprimes to base 77.
**Sequence IDs**: `:a020205`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020205) |> Sequence.take!(44)
[4,38,39,57,65,76,247,285,703,741,969,1105,1387,1513,1653,1891,2465,2701,2806,2965,3705,4033,4371,4636,5073,5461,5713,5785,6305,6441,6533,6541,7633,7709,7957,10081,10585,10777,11229,12871,13051,16017,16745,17081]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 77.",
references: [{:oeis, :a020205, "https://oeis.org/A020205"}]
def create_sequence_a020205(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020205/2)
end
@doc false
@doc offset: 1
def seq_a020205(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 77) end, last)
end
@doc """
OEIS Sequence `A020206` - Pseudoprimes to base 78.
From [OEIS A020206](https://oeis.org/A020206):
> Pseudoprimes to base 78.
**Sequence IDs**: `:a020206`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020206) |> Sequence.take!(42)
[77,341,385,451,553,703,869,1045,1247,1271,1441,1849,1921,2047,2465,2765,3097,4061,4187,4345,4577,5371,6031,6083,6085,6545,6601,8321,8911,8965,10585,13333,13981,15211,15251,15841,17711,17767,20689,22801,23281,23617]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 78.",
references: [{:oeis, :a020206, "https://oeis.org/A020206"}]
def create_sequence_a020206(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020206/2)
end
@doc false
@doc offset: 1
def seq_a020206(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 78) end, last)
end
@doc """
OEIS Sequence `A020207` - Pseudoprimes to base 79.
From [OEIS A020207](https://oeis.org/A020207):
> Pseudoprimes to base 79.
**Sequence IDs**: `:a020207`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020207) |> Sequence.take!(45)
[6,15,26,39,49,65,78,91,195,301,559,561,637,1105,1649,1729,2107,2465,2626,2665,2701,2821,3201,3913,4215,4753,5055,6305,6533,6601,7051,7107,7361,7543,8149,8321,8911,9331,9773,9881,10585,10621,12001,14491,14689]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 79.",
references: [{:oeis, :a020207, "https://oeis.org/A020207"}]
def create_sequence_a020207(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020207/2)
end
@doc false
@doc offset: 1
def seq_a020207(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 79) end, last)
end
@doc """
OEIS Sequence `A020208` - Pseudoprimes to base 80.
From [OEIS A020208](https://oeis.org/A020208):
> Pseudoprimes to base 80.
**Sequence IDs**: `:a020208`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020208) |> Sequence.take!(46)
[9,27,49,81,169,237,301,333,481,553,561,637,711,891,1221,1377,1557,1729,1813,1891,2107,2133,2553,2821,2871,2997,3321,3397,3439,3537,3577,3871,3913,5461,6253,6399,6401,6533,6601,6697,7107,7189,7613,7821,8261,8281]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 80.",
references: [{:oeis, :a020208, "https://oeis.org/A020208"}]
def create_sequence_a020208(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020208/2)
end
@doc false
@doc offset: 1
def seq_a020208(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 80) end, last)
end
@doc """
OEIS Sequence `A020209` - Pseudoprimes to base 81.
From [OEIS A020209](https://oeis.org/A020209):
> Pseudoprimes to base 81.
**Sequence IDs**: `:a020209`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020209) |> Sequence.take!(53)
[4,8,10,16,20,28,40,52,70,80,85,91,112,121,130,176,190,205,208,232,280,286,292,364,370,451,496,511,520,532,616,671,697,703,730,742,910,946,949,965,976,1036,1105,1168,1261,1288,1387,1435,1456,1541,1606,1729,1891]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 81.",
references: [{:oeis, :a020209, "https://oeis.org/A020209"}]
def create_sequence_a020209(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020209/2)
end
@doc false
@doc offset: 1
def seq_a020209(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 81) end, last)
end
@doc """
OEIS Sequence `A020210` - Pseudoprimes to base 82.
From [OEIS A020210](https://oeis.org/A020210):
> Pseudoprimes to base 82.
**Sequence IDs**: `:a020210`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020210) |> Sequence.take!(47)
[9,25,27,45,81,91,225,249,325,405,481,511,561,645,747,793,891,925,949,1105,1245,1247,1345,1377,1387,1431,1525,1541,1729,1825,2025,2071,2101,2241,2257,2421,2465,2701,2821,2871,2905,3825,4033,4239,4453,5185,5611]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 82.",
references: [{:oeis, :a020210, "https://oeis.org/A020210"}]
def create_sequence_a020210(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020210/2)
end
@doc false
@doc offset: 1
def seq_a020210(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 82) end, last)
end
@doc """
OEIS Sequence `A020211` - Pseudoprimes to base 83.
From [OEIS A020211](https://oeis.org/A020211):
> Pseudoprimes to base 83.
**Sequence IDs**: `:a020211`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020211) |> Sequence.take!(46)
[21,65,82,105,123,133,205,231,265,273,287,451,533,561,689,697,703,861,1001,1105,1113,1241,1365,1558,1729,1785,1891,2173,2465,2569,2665,2821,2993,3034,3277,3445,4081,4305,4411,4505,4641,4745,5565,5713,6541,6601]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 83.",
references: [{:oeis, :a020211, "https://oeis.org/A020211"}]
def create_sequence_a020211(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020211/2)
end
@doc false
@doc offset: 1
def seq_a020211(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 83) end, last)
end
@doc """
OEIS Sequence `A020212` - Pseudoprimes to base 84.
From [OEIS A020212](https://oeis.org/A020212):
> Pseudoprimes to base 84.
**Sequence IDs**: `:a020212`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020212) |> Sequence.take!(41)
[85,415,481,703,1105,1111,1411,1615,2465,2501,2509,2981,3145,3655,3667,4141,5713,6161,6533,6973,7055,7141,7201,7885,8401,8695,9061,10585,11441,13019,13579,13981,14023,14383,14491,15181,15251,15355,15521,16405,16745]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 84.",
references: [{:oeis, :a020212, "https://oeis.org/A020212"}]
def create_sequence_a020212(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020212/2)
end
@doc false
@doc offset: 1
def seq_a020212(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 84) end, last)
end
@doc """
OEIS Sequence `A020213` - Pseudoprimes to base 85.
From [OEIS A020213](https://oeis.org/A020213):
> Pseudoprimes to base 85.
**Sequence IDs**: `:a020213`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020213) |> Sequence.take!(48)
[4,6,12,14,21,28,42,84,129,231,259,276,301,341,481,532,651,703,781,903,1281,1351,1491,1591,1729,2047,2201,2509,2701,2821,3097,3201,3277,3311,3913,3972,4371,4564,4577,4681,4859,5289,5662,6031,6601,7141,7161,7543]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 85.",
references: [{:oeis, :a020213, "https://oeis.org/A020213"}]
def create_sequence_a020213(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020213/2)
end
@doc false
@doc offset: 1
def seq_a020213(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 85) end, last)
end
@doc """
OEIS Sequence `A020214` - Pseudoprimes to base 86.
From [OEIS A020214](https://oeis.org/A020214):
> Pseudoprimes to base 86.
**Sequence IDs**: `:a020214`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020214) |> Sequence.take!(45)
[15,51,65,85,87,145,221,255,377,435,451,493,561,595,703,1015,1105,1479,1729,1885,1891,2091,2465,2755,2821,2845,3091,3451,4033,5365,5685,6409,6601,7395,7397,7483,7701,8695,8911,9061,9673,10585,10991,11041,11305]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 86.",
references: [{:oeis, :a020214, "https://oeis.org/A020214"}]
def create_sequence_a020214(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020214/2)
end
@doc false
@doc offset: 1
def seq_a020214(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 86) end, last)
end
@doc """
OEIS Sequence `A020215` - Pseudoprimes to base 87.
From [OEIS A020215](https://oeis.org/A020215):
> Pseudoprimes to base 87.
**Sequence IDs**: `:a020215`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020215) |> Sequence.take!(45)
[86,91,133,217,247,301,385,403,473,559,589,817,1045,1105,1111,1333,1661,1705,1729,2047,2365,2626,2651,2821,3553,3565,3785,3913,4123,4141,4681,5005,5461,5719,6533,6601,7471,7483,7657,8029,8041,8401,8686,8911,9331]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 87.",
references: [{:oeis, :a020215, "https://oeis.org/A020215"}]
def create_sequence_a020215(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020215/2)
end
@doc false
@doc offset: 1
def seq_a020215(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 87) end, last)
end
@doc """
OEIS Sequence `A020216` - Pseudoprimes to base 88.
From [OEIS A020216](https://oeis.org/A020216):
> Pseudoprimes to base 88.
**Sequence IDs**: `:a020216`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020216) |> Sequence.take!(44)
[87,91,133,145,217,247,267,403,445,481,589,645,1105,1247,1729,1885,1891,2047,2465,2581,2611,2821,3165,3565,4123,4331,4849,4921,5365,5551,5785,6119,6601,6697,7087,7657,7743,7745,8029,8911,9073,10585,11481,11563]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 88.",
references: [{:oeis, :a020216, "https://oeis.org/A020216"}]
def create_sequence_a020216(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020216/2)
end
@doc false
@doc offset: 1
def seq_a020216(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 88) end, last)
end
@doc """
OEIS Sequence `A020217` - Pseudoprimes to base 89.
From [OEIS A020217](https://oeis.org/A020217):
> Pseudoprimes to base 89.
**Sequence IDs**: `:a020217`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020217) |> Sequence.take!(49)
[4,8,9,15,22,33,44,45,55,85,88,99,153,165,169,341,385,495,561,765,1035,1045,1105,1165,1309,1387,1441,1541,1581,1649,1729,2097,2465,2529,2611,2701,2805,2821,2977,3015,3201,3961,4015,4061,4187,4371,4777,4849,5005]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 89.",
references: [{:oeis, :a020217, "https://oeis.org/A020217"}]
def create_sequence_a020217(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020217/2)
end
@doc false
@doc offset: 1
def seq_a020217(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 89) end, last)
end
@doc """
OEIS Sequence `A020218` - Pseudoprimes to base 90.
From [OEIS A020218](https://oeis.org/A020218):
> Pseudoprimes to base 90.
**Sequence IDs**: `:a020218`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020218) |> Sequence.take!(39)
[91,623,703,793,1001,1157,1729,2047,2413,2821,3091,3367,4033,4699,6281,6601,8099,8321,8401,8911,11521,11557,12403,13021,13333,13981,14701,14981,15841,17701,19307,19951,20017,20263,24641,24661,25571,29341,31021]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 90.",
references: [{:oeis, :a020218, "https://oeis.org/A020218"}]
def create_sequence_a020218(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020218/2)
end
@doc false
@doc offset: 1
def seq_a020218(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 90) end, last)
end
@doc """
OEIS Sequence `A020219` - Pseudoprimes to base 91.
From [OEIS A020219](https://oeis.org/A020219):
> Pseudoprimes to base 91.
**Sequence IDs**: `:a020219`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020219) |> Sequence.take!(48)
[6,9,10,15,18,30,45,66,69,90,115,205,207,341,345,369,435,505,561,909,1035,1065,1387,1446,1541,1845,2047,2059,2465,2651,2701,2829,2871,3015,4005,4141,4187,4371,4545,5963,6273,6533,6969,7471,8646,9361,9881,10585]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 91.",
references: [{:oeis, :a020219, "https://oeis.org/A020219"}]
def create_sequence_a020219(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020219/2)
end
@doc false
@doc offset: 1
def seq_a020219(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 91) end, last)
end
@doc """
OEIS Sequence `A020220` - Pseudoprimes to base 92.
From [OEIS A020220](https://oeis.org/A020220):
> Pseudoprimes to base 92.
**Sequence IDs**: `:a020220`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020220) |> Sequence.take!(47)
[21,39,65,91,93,105,217,231,273,301,341,403,451,465,559,561,651,861,1001,1085,1105,1209,1271,1333,1365,1393,1661,1729,2465,2587,2701,2821,3171,3731,3781,3913,4033,4123,4371,4641,4681,5565,6045,6169,6191,6697,7161]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 92.",
references: [{:oeis, :a020220, "https://oeis.org/A020220"}]
def create_sequence_a020220(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020220/2)
end
@doc false
@doc offset: 1
def seq_a020220(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 92) end, last)
end
@doc """
OEIS Sequence `A020221` - Pseudoprimes to base 93.
From [OEIS A020221](https://oeis.org/A020221):
> Pseudoprimes to base 93.
**Sequence IDs**: `:a020221`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020221) |> Sequence.take!(46)
[4,25,28,46,92,301,322,325,425,506,532,793,805,865,874,1081,1105,1393,1525,1645,1729,1771,2047,2071,2425,2465,2737,2926,3781,3913,4033,4187,4325,4465,4945,4996,5071,5149,5185,5405,5593,5713,5833,6601,6697,6721]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 93.",
references: [{:oeis, :a020221, "https://oeis.org/A020221"}]
def create_sequence_a020221(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020221/2)
end
@doc false
@doc offset: 1
def seq_a020221(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 93) end, last)
end
@doc """
OEIS Sequence `A020222` - Pseudoprimes to base 94.
From [OEIS A020222](https://oeis.org/A020222):
> Pseudoprimes to base 94.
**Sequence IDs**: `:a020222`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020222) |> Sequence.take!(47)
[15,57,91,93,95,121,133,155,217,247,285,341,403,435,465,561,589,645,969,1065,1105,1247,1541,1603,1653,1729,1767,2059,2451,2465,2821,2945,2977,3053,3565,3751,4047,4123,4351,4495,4961,5461,5685,6601,6665,7099,7107]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 94.",
references: [{:oeis, :a020222, "https://oeis.org/A020222"}]
def create_sequence_a020222(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020222/2)
end
@doc false
@doc offset: 1
def seq_a020222(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 94) end, last)
end
@doc """
OEIS Sequence `A020223` - Pseudoprimes to base 95.
From [OEIS A020223](https://oeis.org/A020223):
> Pseudoprimes to base 95.
**Sequence IDs**: `:a020223`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020223) |> Sequence.take!(42)
[91,94,141,341,561,658,671,742,1111,1551,1603,1891,1921,2806,2821,2977,3131,3367,3421,3601,4089,4371,4741,5461,5551,6161,6533,6601,6721,7107,8249,8601,9121,9641,10011,11041,13361,14241,15841,16939,17907,18019]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 95.",
references: [{:oeis, :a020223, "https://oeis.org/A020223"}]
def create_sequence_a020223(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020223/2)
end
@doc false
@doc offset: 1
def seq_a020223(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 95) end, last)
end
@doc """
OEIS Sequence `A020224` - Pseudoprimes to base 96.
From [OEIS A020224](https://oeis.org/A020224):
> Pseudoprimes to base 96.
**Sequence IDs**: `:a020224`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020224) |> Sequence.take!(42)
[65,95,133,469,485,679,781,973,1105,1261,1273,1649,1729,1843,2465,2641,2665,2701,2821,3545,3977,4187,4577,5617,6097,6305,6499,6601,8905,8911,9121,9215,9217,9313,10585,11155,11881,12649,12901,13289,13333,13483]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 96.",
references: [{:oeis, :a020224, "https://oeis.org/A020224"}]
def create_sequence_a020224(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020224/2)
end
@doc false
@doc offset: 1
def seq_a020224(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 96) end, last)
end
@doc """
OEIS Sequence `A020225` - Pseudoprimes to base 97.
From [OEIS A020225](https://oeis.org/A020225):
> Pseudoprimes to base 97.
**Sequence IDs**: `:a020225`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020225) |> Sequence.take!(50)
[4,6,8,12,16,21,24,32,48,49,66,96,105,147,176,186,231,245,341,344,469,481,496,561,637,645,651,833,946,949,973,1056,1065,1068,1105,1128,1729,1813,1891,2046,2047,2465,2701,2821,2976,3053,3277,3283,3577,4187]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 97.",
references: [{:oeis, :a020225, "https://oeis.org/A020225"}]
def create_sequence_a020225(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020225/2)
end
@doc false
@doc offset: 1
def seq_a020225(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 97) end, last)
end
@doc """
OEIS Sequence `A020226` - Pseudoprimes to base 98.
From [OEIS A020226](https://oeis.org/A020226):
> Pseudoprimes to base 98.
**Sequence IDs**: `:a020226`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020226) |> Sequence.take!(46)
[9,33,45,85,99,153,165,291,451,485,561,565,765,873,1017,1045,1067,1105,1261,1649,1705,1921,2465,2501,2651,2701,2805,3007,3201,3277,3565,3585,3609,3729,4005,4069,4365,5085,5797,6817,7345,7471,7701,8245,8321,8965]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 98.",
references: [{:oeis, :a020226, "https://oeis.org/A020226"}]
def create_sequence_a020226(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020226/2)
end
@doc false
@doc offset: 1
def seq_a020226(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 98) end, last)
end
@doc """
OEIS Sequence `A020227` - Pseudoprimes to base 99.
From [OEIS A020227](https://oeis.org/A020227):
> Pseudoprimes to base 99.
**Sequence IDs**: `:a020227`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020227) |> Sequence.take!(47)
[14,25,35,49,65,98,145,169,175,217,245,325,361,377,637,703,725,742,775,833,845,1105,1225,1421,1519,1729,1834,1885,2191,2198,2413,2465,2821,3185,3277,3565,4069,4123,4225,4699,4753,4795,4901,5365,5425,5611,6601]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 99.",
references: [{:oeis, :a020227, "https://oeis.org/A020227"}]
def create_sequence_a020227(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020227/2)
end
@doc false
@doc offset: 1
def seq_a020227(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 99) end, last)
end
@doc """
OEIS Sequence `A020228` - Pseudoprimes to base 100.
From [OEIS A020228](https://oeis.org/A020228):
> Pseudoprimes to base 100.
**Sequence IDs**: `:a020228`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a020228) |> Sequence.take!(46)
[9,33,91,99,153,259,303,451,481,561,657,703,909,949,1111,1233,1241,1729,1891,2047,2329,2409,2701,2821,2871,2981,3333,3367,4141,4187,4521,5461,5551,6001,6533,6541,6601,6931,7107,7373,7471,7633,7777,8149,8401,8911]
"""
@doc offset: 1,
sequence: "Pseudoprimes to base 100.",
references: [{:oeis, :a020228, "https://oeis.org/A020228"}]
def create_sequence_a020228(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a020228/2)
end
@doc false
@doc offset: 1
def seq_a020228(_idx, last) do
Math.next_number(fn v -> Math.is_pseudo_prime?(v, 100) end, last)
end
@doc """
OEIS Sequence `A162511` - Multiplicative function with a(p^e)=(-1)^(e-1)
From [OEIS A162511](https://oeis.org/A162511):
> Multiplicative function with a(p^e)=(-1)^(e-1)
**Sequence IDs**: `:a162511`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Primes, :a162511) |> Sequence.take!(92)
[1,1,1,-1,1,1,1,1,-1,1,1,-1,1,1,1,-1,1,-1,1,-1,1,1,1,1,-1,1,1,-1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,-1,-1,1,1,-1,-1,-1,1,-1,1,1,1,1,1,1,1,-1,1,1,-1,-1,1,1,1,-1,1,1,1,-1,1,1,-1,-1,1,1,1,-1,-1,1,1,-1,1,1,1,1,1,-1,1,-1]
"""
@doc offset: 1,
sequence: "Multiplicative function with a(p^e)=(-1)^(e-1)",
references: [{:oeis, :a162511, "https://oeis.org/A162511"}]
def create_sequence_a162511(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Primes.seq_a162511/1)
end
@doc false
@doc offset: 1
def seq_a162511(idx) do
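# a(n) = (-1)^(Ω(n) - ω(n)): each prime power p^e in the factorization of n
# contributes (-1)^(e - 1), so the total excess of exponents over the number
# of distinct primes determines the sign.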
-1 |> Math.pow(Math.bigomega(idx) - Math.omega(idx))
end
end | lib/sequence/oeis/primes.ex | 0.824285 | 0.599954 | primes.ex | starcoder |
defmodule Grizzly.SmartStart.MetaExtension.AdvancedJoining do
@moduledoc """
This extension is used to advertise the Security keys to grant during S2
bootstrapping to a SmartStart node in the provisioning list
For more information about S2 security see the `Grizzly.Security` module
"""
@behaviour Grizzly.SmartStart.MetaExtension
import Bitwise
alias Grizzly.Security
@type t :: %__MODULE__{
keys: nonempty_list(Security.key())
}
@enforce_keys [:keys]
defstruct keys: nil
@doc """
Create a new `AdvancedJoining.t()`
This will validate the keys passed in are valid S2 keys. If a key is not a
valid S2 key this function will return `{:error, :invalid_keys}`.
The `keys` parameter cannot be an empty list. If an empty list is passed in
then this function will return `{:error, :empty_keys}`
"""
@spec new(nonempty_list(Security.key())) :: {:ok, t()} | {:error, :invalid_keys | :empty_keys}
def new([]), do: {:error, :empty_keys}
def new(keys) do
if Enum.all?(keys, &key_valid?/1) do
{:ok, %__MODULE__{keys: keys}}
else
{:error, :invalid_keys}
end
end
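# A rough usage sketch (key atoms assumed to be valid `Security.key()`
# values; the key order in the decoded struct may differ from the input):
#
#     {:ok, joining} = AdvancedJoining.new([:s2_unauthenticated, :s0])
#     {:ok, binary} = AdvancedJoining.to_binary(joining)
#     {:ok, decoded} = AdvancedJoining.from_binary(binary)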
@doc """
Create an `AdvancedJoining.t()` from a binary string
If the binary string does not have the critical bit set then this function
will return `{:error, :critical_bit_not_set}`
"""
@impl Grizzly.SmartStart.MetaExtension
@spec from_binary(binary()) :: {:ok, t()} | {:error, :invalid_binary | :critical_bit_not_set}
def from_binary(<<0x35::size(7), 0x01::size(1), 0x01, keys>>) do
{:ok, %__MODULE__{keys: unmask_keys(keys)}}
end
def from_binary(<<0x35::size(7), 0x00::size(1), _rest::binary>>) do
{:error, :critical_bit_not_set}
end
def from_binary(_), do: {:error, :invalid_binary}
@doc """
Create a binary string from an `AdvancedJoining.t()`
"""
@impl Grizzly.SmartStart.MetaExtension
@spec to_binary(t()) :: {:ok, binary()}
def to_binary(%__MODULE__{keys: keys}) do
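# Key bitmask layout used here: bit 0 = S2 Unauthenticated, bit 1 = S2
# Authenticated, bit 2 = S2 Access Control, bit 6 = S0 (mirrored by the
# `byte_has_key?/2` clauses below).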
keys_byte =
Enum.reduce(keys, 0, fn
:s2_unauthenticated, byte -> byte ||| 0x01
:s2_authenticated, byte -> byte ||| 0x02
:s2_access_control, byte -> byte ||| 0x04
:s0, byte -> byte ||| 0x40
end)
{:ok, <<0x35::size(7), 0x01::size(1), 0x01, keys_byte>>}
end
defp unmask_keys(byte) do
Enum.reduce(Security.keys(), [], fn key, keys ->
if byte_has_key?(<<byte>>, key) do
[key | keys]
else
keys
end
end)
end
defp byte_has_key?(<<_::size(7), 1::size(1)>>, :s2_unauthenticated), do: true
defp byte_has_key?(<<_::size(6), 1::size(1), _::size(1)>>, :s2_authenticated), do: true
defp byte_has_key?(<<_::size(5), 1::size(1), _::size(2)>>, :s2_access_control), do: true
defp byte_has_key?(<<_::size(1), 1::size(1), _::size(6)>>, :s0), do: true
defp byte_has_key?(_byte, _key), do: false
defp key_valid?(key) do
key in Security.keys()
end
end | lib/grizzly/smart_start/meta_extension/advanced_joining.ex | 0.886525 | 0.4436 | advanced_joining.ex | starcoder |
defmodule Bolt.Cogs.LastJoins do
@moduledoc false
@behaviour Nosedrum.Command
alias Bolt.{Constants, Helpers, Paginator, Parsers}
alias Nosedrum.MessageCache.Agent, as: MessageCache
alias Nosedrum.Predicates
alias Nostrum.Api
alias Nostrum.Cache.GuildCache
alias Nostrum.Struct.Embed
alias Nostrum.Struct.Guild.Member
alias Nostrum.Struct.User
# The default number of members shown in the response.
@default_shown 15
# Members shown per page.
@shown_per_page @default_shown
# The maximum number of members shown in the response.
@maximum_shown @shown_per_page * 9
@impl true
def usage, do: ["lastjoins [options...]"]
@impl true
def description,
do: """
Display the most recently joined members.
Requires the `MANAGE_MESSAGES` permission.
The result of this command can be customized with the following options:
`--no-roles`: Display only new members without any roles
`--roles`: Display only new members with any roles
`--messages` / `--no-messages`: Display only new members with / without messages in the recent message cache
`--total`: The total amount of members to display, defaults to #{@default_shown}, maximum is #{@maximum_shown}
Returned members will be sorted by their join date, most recent first.
**Examples**:
```rs
// display the #{@default_shown} most recently joined members
.lastjoins
// display the 10 most recently joined members that do not have a role assigned
.lastjoins --total 10 --no-roles
```
"""
@impl true
def predicates, do: [&Predicates.guild_only/1, Predicates.has_permission(:manage_messages)]
@impl true
def parse_args(args) do
OptionParser.parse(
args,
strict: [
# --roles | --no-roles
# display only new members with or without roles
roles: :boolean,
# --messages | --no-messages
# display only new members with or without recently cached messages
messages: :boolean,
# --total <int>
# the total amount of users to display
total: :integer
]
)
end
@impl true
def command(msg, {options, _args, []}) do
# we can avoid copying around things we don't care about by just selecting the members here
case GuildCache.select(msg.guild_id, &Map.values(&1.members)) do
{:ok, members} ->
{limit, options} = Keyword.pop_first(options, :total, @default_shown)
pages =
members
|> Stream.reject(&(&1.joined_at == nil))
|> Stream.reject(&(&1.user != nil and &1.user.bot))
|> Enum.sort_by(&joindate_to_unix/1, &>=/2)
|> filter_by_options(msg.guild_id, options)
|> apply_limit(limit)
|> Stream.map(&format_member/1)
|> Stream.chunk_every(@shown_per_page)
|> Enum.map(&%Embed{fields: &1})
base_page = %Embed{
title: "Recently joined members",
color: Constants.color_blue()
}
Paginator.paginate_over(msg, base_page, pages)
{:error, _reason} ->
{:ok, _msg} = Api.create_message(msg.channel_id, "guild uncached, sorry")
end
end
def command(msg, {_parsed, _args, invalid}) when invalid != [] do
invalid_args = Parsers.describe_invalid_args(invalid)
{:ok, _msg} =
Api.create_message(
msg.channel_id,
"🚫 unrecognized argument(s) or invalid value: #{invalid_args}"
)
end
defp filter_by_options(members, guild_id, [{:roles, true} | options]) do
members
|> Stream.filter(&Enum.any?(&1.roles))
|> filter_by_options(guild_id, options)
end
defp filter_by_options(members, guild_id, [{:roles, false} | options]) do
members
|> Stream.filter(&Enum.empty?(&1.roles))
|> filter_by_options(guild_id, options)
end
# These two clauses are admittedly inefficient, but we want to hand out
# full result sets later. That said, since these are streams, we only
# ever evaluate as many results as needed.
defp filter_by_options(members, guild_id, [{:messages, true} | options]) do
messages = MessageCache.recent_in_guild(guild_id, :infinity, Bolt.MessageCache)
recent_authors = MapSet.new(messages, & &1.author.id)
members
|> Stream.filter(&MapSet.member?(recent_authors, &1.user.id))
|> filter_by_options(guild_id, options)
end
defp filter_by_options(members, guild_id, [{:messages, false} | options]) do
messages = MessageCache.recent_in_guild(guild_id, :infinity, Bolt.MessageCache)
recent_authors = MapSet.new(messages, & &1.author.id)
members
|> Stream.filter(&(not MapSet.member?(recent_authors, &1.user.id)))
|> filter_by_options(guild_id, options)
end
defp filter_by_options(members, _guild_id, []) do
members
end
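# Clamp the requested total: non-positive values fall back to the default,
# values above the hard cap are truncated to @maximum_shown.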
defp apply_limit(members, n) when n < 1, do: Enum.take(members, @default_shown)
defp apply_limit(members, n) when n > @maximum_shown, do: Enum.take(members, @maximum_shown)
defp apply_limit(members, n), do: Enum.take(members, n)
@spec format_member(Member.t()) :: Embed.Field.t()
defp format_member(member) do
joined_at_human = "<t:#{joindate_to_unix(member)}:R>"
total_roles = length(member.roles)
%Embed.Field{
name: User.full_name(member.user),
value: """
ID: `#{member.user.id}`
Joined: #{joined_at_human}
has #{total_roles} #{Helpers.pluralize(total_roles, "role", "roles")}
""",
inline: true
}
end
@spec joindate_to_unix(Member.t()) :: pos_integer()
defp joindate_to_unix(member) do
member.joined_at
|> DateTime.from_iso8601()
|> elem(1)
|> DateTime.to_unix()
end
end | lib/bolt/cogs/lastjoins.ex | 0.867303 | 0.572544 | lastjoins.ex | starcoder |
defmodule Combine.Parsers.Base do
@moduledoc """
This module defines common abstract parsers, e.g. `ignore`, `many`, `times`, etc.
To use them, just add `import Combine.Parsers.Base` to your module, or
reference them directly.
"""
alias Combine.ParserState
use Combine.Helpers
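# A quick illustrative sketch (parsers such as `integer/0` and `string/1`
# come from `Combine.Parsers.Text` and are assumed to be imported too):
#
#     import Combine.Parsers.Base
#     import Combine.Parsers.Text
#     Combine.parse("1, 2, 3", sep_by1(integer(), string(", ")))
#     #=> [[1, 2, 3]]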
@type predicate :: (term -> boolean)
@type transform :: (term -> term)
@type transform2 :: ((term, term) -> term)
@doc """
This parser will fail with no error.
"""
@spec zero(previous_parser) :: parser
defparser zero(%ParserState{status: :ok} = state), do: %{state | :status => :error, :error => nil}
@doc """
This parser will fail with the given error message.
"""
@spec fail(previous_parser, String.t) :: parser
defparser fail(%ParserState{status: :ok} = state, message), do: %{state | :status => :error, :error => message}
@doc """
This parser will fail fatally with the given error message.
"""
@spec fatal(previous_parser, String.t) :: parser
defparser fatal(%ParserState{status: :ok} = state, message), do: %{state | :status => :error, :error => {:fatal, message}}
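# For illustration (a sketch, assuming the usual `Combine.parse/2` entry
# point): `Combine.parse("abc", fail("nope"))` returns `{:error, "nope"}`,
# while `fatal/1` wraps the reason as `{:fatal, "nope"}` so downstream
# combinators can distinguish unrecoverable failures.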
@doc """
This parser succeeds if the end of the input has been reached,
otherwise it fails.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse(" ", spaces() |> eof())
[" "]
"""
@spec eof(previous_parser) :: parser
defparser eof(%ParserState{status: :ok, input: <<>>} = state), do: state
defp eof_impl(%ParserState{status: :ok, line: line, column: col} = state) do
%{state | :status => :error, :error => "Expected end of input at line #{line}, column #{col}"}
end
@doc """
Applies a transformation function to the result of the given parser. If the
result returned is of the form `{:error, reason}`, the parser will fail with
that reason.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("1234", map(integer(), &(&1 * 2)))
[2468]
"""
@spec map(previous_parser, parser, transform) :: parser
defparser map(%ParserState{status: :ok} = state, parser, transform) do
case parser.(state) do
%ParserState{status: :ok, results: [h|rest]} = s ->
case transform.(h) do
{:error, reason} -> %{s | :status => :error, :error => reason}
result -> %{s | :results => [result|rest]}
end
s -> s
end
end
@doc """
Applies parser if possible. Returns the parse result if successful
or nil if not.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("Hi", option(integer()) |> word())
[nil, "Hi"]
"""
@spec option(previous_parser, parser) :: parser
defparser option(%ParserState{status: :ok, results: results} = state, parser) when is_function(parser, 1) do
case parser.(state) do
%ParserState{status: :ok} = s -> s
%ParserState{status: :error} -> %{state | :results => [nil|results]}
end
end
@doc """
Tries to apply `parser1` and if it fails, tries `parser2`, if both fail,
then this parser fails. Returns whichever result was successful otherwise.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("1234", either(float(), integer()))
[1234]
"""
@spec either(previous_parser, parser, parser) :: parser
defparser either(%ParserState{status: :ok} = state, parser1, parser2) do
case parser1.(state) do
%ParserState{status: :ok} = s1 -> s1
%ParserState{error: error1} ->
case parser2.(state) do
%ParserState{status: :ok} = s2 -> s2
%ParserState{error: error2} ->
%{state | :status => :error, :error => "#{error1}, or: #{error2}"}
end
end
end
@doc """
This parser is a generalized form of either which allows multiple parsers to be attempted.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("test", choice([float(), integer(), word()]))
["test"]
"""
@spec choice(previous_parser, [parser]) :: parser
defparser choice(%ParserState{status: :ok} = state, parsers) do
try_choice(parsers, state, nil)
end
defp try_choice([parser|rest], state, nil), do: try_choice(rest, state, parser.(state))
defp try_choice([_|_], _, %ParserState{status: :ok} = success), do: success
defp try_choice([parser|rest], state, %ParserState{}), do: try_choice(rest, state, parser.(state))
defp try_choice([], _, %ParserState{status: :ok} = success), do: success
defp try_choice([], %ParserState{line: line, column: col} = state, _) do
%{state | :status => :error, :error => "Expected at least one parser to succeed at line #{line}, column #{col}."}
end
@doc """
Applies each parser in `parsers`, then sends the results to the provided function
to be transformed. The result of the transformation is the final result of this parser.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("123", pipe([digit(), digit(), digit()], fn digits -> {n, _} = Integer.parse(Enum.join(digits)); n end))
[123]
"""
@spec pipe(previous_parser, [parser], transform) :: parser
defparser pipe(%ParserState{status: :ok} = state, parsers, transform) when is_list(parsers) and is_function(transform, 1) do
orig_results = state.results
case do_pipe(parsers, %{state | :results => []}) do
{:ok, acc, %ParserState{status: :ok} = new_state} ->
transformed = transform.(Enum.reverse(acc))
%{new_state | :results => [transformed | orig_results]}
{:error, _acc, state} ->
state
end
end
defp do_pipe(parsers, state), do: do_pipe(parsers, state, [])
defp do_pipe([], state, acc), do: {:ok, acc, state}
defp do_pipe([parser|parsers], %ParserState{status: :ok} = current, acc) do
case parser.(%{current | :results => []}) do
%ParserState{status: :ok, results: [:__ignore]} = next -> do_pipe(parsers, %{next | :results => []}, acc)
%ParserState{status: :ok, results: []} = next -> do_pipe(parsers, next, acc)
%ParserState{status: :ok, results: rs} = next -> do_pipe(parsers, %{next | :results => []}, rs ++ acc)
%ParserState{} = next -> {:error, acc, next}
end
end
defp do_pipe(_parsers, %ParserState{} = state, acc), do: {:error, acc, state}
@doc """
Applies a sequence of parsers and returns their results as a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("123", sequence([digit(), digit(), digit()]))
[[1, 2, 3]]
...> Combine.parse("123-234", sequence([integer(), char()]) |> map(sequence([integer()]), fn [x] -> x * 2 end))
[[123, "-"], 468]
"""
@spec sequence(previous_parser, [parser]) :: parser
defparser sequence(%ParserState{status: :ok} = state, parsers) when is_list(parsers) do
pipe(parsers, &(&1)).(state)
end
@doc """
Applies `parser1` and `parser2` in sequence, then sends their results
to the given function to be transformed. The transformed value is then
returned as the result of this parser.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> to_int = fn ("-", y) -> y * -1; (_, y) -> y end
...> Combine.parse("1234-234", both(integer(), both(char(), integer(), to_int), &(&1 + &2)))
[1000]
"""
@spec both(previous_parser, parser, parser, transform2) :: parser
defparser both(%ParserState{status: :ok} = state, parser1, parser2, transform) do
pipe([parser1, parser2], fn results -> apply(transform, results) end).(state)
end
@doc """
Applies both `parser1` and `parser2`, returning the result of `parser1` only.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("234-", pair_left(integer(), char()))
[234]
"""
@spec pair_left(previous_parser, parser, parser) :: parser
defparser pair_left(%ParserState{status: :ok} = state, parser1, parser2) do
pipe([preserve_ignored(parser1), preserve_ignored(parser2)],
fn
[:__preserved_ignore, _] -> :__ignore
[result1, _] -> result1
end).(state)
end
@doc """
Applies both `parser1` and `parser2`, returning the result of `parser2` only.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("-234", pair_right(char(), integer()))
[234]
"""
@spec pair_right(previous_parser, parser, parser) :: parser
defparser pair_right(%ParserState{status: :ok} = state, parser1, parser2) do
pipe([preserve_ignored(parser1), preserve_ignored(parser2)],
fn
[_, :__preserved_ignore] -> :__ignore
[_, result2] -> result2
end).(state)
end
@doc """
Applies both `parser1` and `parser2`, returning both results as a tuple.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("-234", pair_both(char(), integer()))
[{"-", 234}]
"""
@spec pair_both(previous_parser, parser, parser) :: parser
defparser pair_both(%ParserState{status: :ok} = state, parser1, parser2) do
pipe([preserve_ignored(parser1), preserve_ignored(parser2)],
fn
[:__preserved_ignore, :__preserved_ignore] -> {:__ignore, :__ignore}
[:__preserved_ignore, result2] -> {:__ignore, result2}
[result1, :__preserved_ignore] -> {result1, :__ignore}
[result1, result2] -> {result1, result2}
end).(state)
end
@doc """
Applies `parser1`, `parser2`, and `parser3` in sequence, returning the result
of `parser2`.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("(234)", between(char("("), integer(), char(")")))
[234]
"""
@spec between(previous_parser, parser, parser, parser) :: parser
defparser between(%ParserState{status: :ok} = state, parser1, parser2, parser3) do
pipe([preserve_ignored(parser1), preserve_ignored(parser2), preserve_ignored(parser3)],
fn
[_, :__preserved_ignore, _] -> :__ignore
[_, result, _] -> result
end).(state)
end
@doc """
Applies `parser` to the input `n` many times. Returns the result as a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("123", times(digit(), 3))
[[1,2,3]]
"""
@spec times(previous_parser, parser, pos_integer) :: parser
defparser times(%ParserState{status: :ok} = state, parser, n) when is_function(parser, 1) and is_integer(n) do
case do_times(n, parser, state) do
{:ok, acc, %ParserState{status: :ok, results: rs} = new_state} ->
res = Enum.reverse(acc)
%{new_state | :results => [res | rs]}
{:error, _acc, state} ->
state
end
end
defp do_times(count, parser, state), do: do_times(count, parser, state, [])
defp do_times(0, _parser, state, acc), do: {:ok, acc, state}
defp do_times(count, parser, %ParserState{status: :ok} = current, acc) do
case parser.(current) do
%ParserState{status: :ok, results: [:__ignore|rs]} = next -> do_times(count - 1, parser, %{next | :results => rs}, acc)
%ParserState{status: :ok, results: []} = next -> do_times(count - 1, parser, next, acc)
%ParserState{status: :ok, results: [last|rs]} = next -> do_times(count - 1, parser, %{next | :results => rs}, [last|acc])
%ParserState{} = next -> {:error, acc, next}
end
end
@doc """
Applies `parser` one or more times. Returns results as a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("abc", many1(char()))
[["a", "b", "c"]]
...> Combine.parse("abc", many1(ignore(char())))
[[]]
...> Combine.parse("12abc", digit() |> digit() |> many1(ignore(char())))
[1, 2, []]
"""
@spec many1(previous_parser, parser) :: parser
defparser many1(%ParserState{status: :ok, results: initial_results} = state, parser) when is_function(parser, 1) do
case many1_loop(0, [], state, parser.(state), parser) do
{results, %ParserState{status: :ok} = s} ->
results = Enum.reverse(results)
%{s | :results => [results|initial_results]}
%ParserState{} = s -> s
end
end
defp many1_loop(0, _, _, %ParserState{status: :error} = err, _parser),
do: err
defp many1_loop(iteration, acc, _last, %ParserState{status: :ok, results: []} = s, parser),
do: many1_loop(iteration + 1, acc, s, parser.(s), parser)
defp many1_loop(iteration, acc, _last, %ParserState{status: :ok, results: [:__ignore|rs]} = s, parser),
do: many1_loop(iteration + 1, acc, s, parser.(%{s | :results => rs}), parser)
defp many1_loop(iteration, acc, _last, %ParserState{status: :ok, results: [h|rs]} = s, parser),
do: many1_loop(iteration + 1, [h|acc], s, parser.(%{s | :results => rs}), parser)
defp many1_loop(_, acc, s, %ParserState{status: :error}, _parser),
do: {acc, s}
@doc """
Applies `parser` zero or more times. Returns results as a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("abc", many(char()))
[["a", "b", "c"]]
...> Combine.parse("", many(char()))
[[]]
"""
@spec many(previous_parser, parser) :: parser
defparser many(%ParserState{status: :ok, results: results} = state, parser) when is_function(parser, 1) do
case many1(parser).(state) do
%ParserState{status: :ok} = s -> s
%ParserState{status: :error} -> %{state | :results => [[] | results]}
end
end
@doc """
Applies `parser1` one or more times, separated by `parser2`. Returns
results of `parser1` in a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("1, 2, 3", sep_by1(digit(), string(", ")))
[[1, 2, 3]]
"""
@spec sep_by1(previous_parser, parser, parser) :: parser
defparser sep_by1(%ParserState{status: :ok} = state, parser1, parser2) do
pipe([parser1, many(pair_right(parser2, parser1))], fn [h, t] -> [h|t] end).(state)
end
@doc """
Applies `parser1` zero or more times, separated by `parser2`. Returns
results of `parser1` in a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("1, 2, 3", sep_by(digit(), string(", ")))
[[1, 2, 3]]
...> Combine.parse("", sep_by(digit(), string(", ")))
[[]]
"""
@spec sep_by(previous_parser, parser, parser) :: parser
defparser sep_by(%ParserState{status: :ok, results: results} = state, parser1, parser2)
when is_function(parser1, 1) and is_function(parser2, 1) do
case sep_by1_impl(state, parser1, parser2) do
%ParserState{status: :ok} = s -> s
%ParserState{status: :error} -> %{state | :results => [[] | results]}
end
end
@doc """
Applies `parser` if possible, ignores the result.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse(" abc", skip(spaces()) |> word)
["abc"]
...> Combine.parse("", skip(spaces()))
[]
"""
@spec skip(previous_parser, parser) :: parser
defparser skip(%ParserState{status: :ok} = state, parser) when is_function(parser, 1) do
case ignore_impl(state, option(parser)) do
%ParserState{status: :ok, results: [:__ignore|rs]} = s ->
%{s | :results => rs}
%ParserState{} = s ->
s
end
end
@doc """
Applies `parser` zero or more times, ignores the result.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse(" abc", skip_many(space()) |> word)
["abc"]
...> Combine.parse("", skip_many(space()))
[]
"""
@spec skip_many(previous_parser, parser) :: parser
defparser skip_many(%ParserState{status: :ok} = state, parser) when is_function(parser, 1) do
ignore_impl(state, many(parser))
end
@doc """
Applies `parser` one or more times, ignores the result.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse(" abc", skip_many1(space()) |> word)
["abc"]
...> Combine.parse("", skip_many1(space()))
{:error, "Expected space, but hit end of input."}
"""
@spec skip_many1(previous_parser, parser) :: parser
defparser skip_many1(%ParserState{status: :ok} = state, parser) when is_function(parser, 1) do
ignore_impl(state, many1(parser))
end
@doc """
This parser will apply the given parser to the input, and if successful,
will ignore the parse result. If the parser fails, this one fails as well.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = ignore(char("h"))
...> Combine.parse("h", parser)
[]
...> parser = char("h") |> char("i") |> ignore(space()) |> char("!")
...> Combine.parse("hi !", parser)
["h", "i", "!"]
"""
@spec ignore(previous_parser, parser) :: parser
defparser ignore(%ParserState{status: :ok} = state, parser) when is_function(parser, 1) do
case parser.(state) do
%ParserState{status: :ok, results: [_|t]} = s -> %{s | :results => [:__ignore|t]}
%ParserState{} = s -> s
end
end
@doc false
defparser preserve_ignored(%ParserState{status: :ok, results: rs} = state, parser) when is_function(parser, 1) do
case parser.(%{state | :results => []}) do
%ParserState{status: :ok, results: []} = s -> %{s | :results => [:__preserved_ignore|rs]}
%ParserState{status: :ok, results: [:__ignore]} = s -> %{s | :results => [:__preserved_ignore|rs]}
%ParserState{status: :ok, results: [result]} = s -> %{s | :results => [result|rs]}
%ParserState{status: :error} = s -> %{s | :results => rs}
end
end
@doc """
This parser applies the given parser, and if successful, passes the result to
the predicate for validation. If either the parser or the predicate assertion fail,
this parser fails.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = satisfy(char(), fn x -> x == "H" end)
...> Combine.parse("Hi", parser)
["H"]
...> parser = char("H") |> satisfy(char(), fn x -> x == "i" end)
...> Combine.parse("Hi", parser)
["H", "i"]
"""
@spec satisfy(previous_parser, parser, predicate) :: parser
defparser satisfy(%ParserState{status: :ok, line: line, column: col} = state, parser, predicate)
when is_function(parser, 1) and is_function(predicate, 1) do
case parser.(state) do
%ParserState{status: :ok, results: [h|_]} = s ->
cond do
predicate.(h) -> s
true ->
%{s | :status => :error,
:error => "Could not satisfy predicate for #{inspect(h)} at line #{line}, column #{col}",
:line => line,
:column => col
}
end
%ParserState{} = s -> s
end
end
@doc """
Applies a parser and then verifies that the result is contained in the provided list of matches.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = one_of(char(), ?a..?z |> Enum.map(&(<<&1::utf8>>)))
...> Combine.parse("abc", parser)
["a"]
...> parser = upper() |> one_of(char(), ["i", "I"])
...> Combine.parse("Hi", parser)
["H", "i"]
"""
@spec one_of(previous_parser, parser, Range.t | list()) :: parser
defparser one_of(%ParserState{status: :ok, line: line, column: col} = state, parser, items)
when is_function(parser, 1) do
case parser.(state) do
%ParserState{status: :ok, results: [h|_]} = s ->
cond do
h in items ->
s
true ->
stringified = Enum.join(items, ", ")
%{s | :status => :error, :error => "Expected one of [#{stringified}], but found `#{h}`, at line #{line}, column #{col}"}
end
%ParserState{} = s -> s
end
end
@doc """
Applies a parser and then verifies that the result is not contained in the provided list of matches.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = none_of(char(), ?a..?z |> Enum.map(&(<<&1::utf8>>)))
...> Combine.parse("ABC", parser)
["A"]
...> parser = upper() |> none_of(char(), ["i", "I"])
...> Combine.parse("Hello", parser)
["H", "e"]
"""
@spec none_of(previous_parser, parser, Range.t | list()) :: parser
defparser none_of(%ParserState{status: :ok, line: line, column: col} = state, parser, items)
when is_function(parser, 1) do
case parser.(state) do
%ParserState{status: :ok, results: [h|_]} = s ->
cond do
h in items ->
stringified = Enum.join(items, ", ")
%{s | :status => :error, :error => "Expected none of [#{stringified}], but found `#{h}`, at line #{line}, column #{col}"}
true ->
s
end
%ParserState{} = s -> s
end
end
# Normalize a `%Range{}` to a list before re-dispatching; recursing with
# the same range argument would loop forever.
defp none_of_impl(%ParserState{status: :ok} = state, parser, %Range{} = items),
do: none_of_impl(state, parser, Enum.to_list(items))
@doc """
Applies `parser`. If it fails, it's error is modified to contain the given label for easier troubleshooting.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("abc", label(integer(), "year"))
{:error, "Expected `year` at line 1, column 1."}
"""
@spec label(previous_parser, parser, String.t) :: parser
defparser label(%ParserState{status: :ok} = state, parser, name) when is_function(parser, 1) do
case parser.(state) do
%ParserState{status: :ok, labels: labels} = s -> %{s | labels: [name | labels]}
%ParserState{line: line, column: col} = s ->
%{s | :error => "Expected `#{name}` at line #{line}, column #{col + 1}."}
end
end
@doc """
Applies a `parser` and then verifies that the remaining input allows `other_parser` to succeed.
This allows lookahead without mutating the parser state
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = letter() |> followed_by(letter())
...> Combine.parse("AB", parser)
["A"]
"""
@spec followed_by(previous_parser, parser, parser) :: parser
defparser followed_by(%ParserState{status: :ok} = state, parser, other_parser)
when is_function(parser, 1) and is_function(other_parser, 1) do
case parser.(state) do
%ParserState{status: :ok} = new_state ->
case other_parser.(new_state) do
%ParserState{status: :ok} ->
new_state
%ParserState{error: other_parser_err} ->
%{new_state |
:status => :error,
:error => other_parser_err
}
end
%ParserState{} = s ->
s
end
end
@doc """
Applies a `parser` if and only if `predicate_parser` fails.
This helps conditional parsing.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = if_not(letter(), char())
...> Combine.parse("^", parser)
["^"]
"""
@spec if_not(previous_parser, parser, parser) :: parser
defparser if_not(%ParserState{status: :ok, line: line, column: col} = state, predicate_parser, parser)
when is_function(predicate_parser, 1) and is_function(parser, 1) do
case predicate_parser.(state) do
%ParserState{status: :ok} ->
%{state |
:status => :error,
:error => "Expected `if_not(predicate_parser, ...)` to fail at line #{line}, column #{col + 1}."
}
%ParserState{} ->
parser.(state)
end
end
end | lib/combine/parsers/base.ex | 0.799364 | 0.514766 | base.ex | starcoder |
defmodule Monetized.Money do
import Monetized.Money.Utils
alias Monetized.Currency
@moduledoc """
Defines the money struct and functions to handle it.
Also defines `Money` Ecto.Type.
Although we're able to override any configuration when
calling functions that create/handle money, it is possible
to change any of the default values seen below, through config.
Below are the configuration options.
## Examples
config :monetized, config: [
delimiter: ",",
separator: ".",
currency: "USD",
format: "%c %n%s%d"
]
"""
@typedoc """
A money struct containing the a Decimal tha holds the amount
and the currency.
"""
@type t :: %__MODULE__{
value: Decimal.t(),
currency: String.t()
}
defstruct value: Decimal.new("0.00"), currency: nil
if Code.ensure_loaded?(Ecto) do
@behaviour Ecto.Type
end
@doc """
The Ecto primitive type.
"""
def type, do: :string
@doc """
Casts the given value to money.
It supports:
* A string (if currency not relevant).
* A float (if currency not relevant).
* An `Decimal` struct (if currency not relevant).
* An integer (if currency not relevant).
* A map with `:value` and `:currency` keys.
* A map with "value" and "currency" keys.
* An `Monetized.Money` struct.
"""
def cast(%Monetized.Money{} = money) do
{:ok, money}
end
def cast(%{"value" => v, "currency" => c}) do
{:ok, Monetized.Money.make(v, currency: c)}
end
def cast(%{value: v, currency: c}) do
{:ok, Monetized.Money.make(v, currency: c)}
end
def cast(value) when is_bitstring(value) do
{:ok, Monetized.Money.from_string(value)}
end
def cast(value) when is_float(value) do
{:ok, Monetized.Money.from_float(value)}
end
def cast(value) when is_integer(value) do
{:ok, Monetized.Money.from_integer(value)}
end
def cast(%Decimal{} = value) do
{:ok, Monetized.Money.from_decimal(value)}
end
def cast(_), do: :error
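# Illustrative casts (example values, not exhaustive):
#
#     cast("100.50 EUR")                            #=> {:ok, #Money<100.50EUR>}
#     cast(%{"value" => "10", "currency" => "USD"}) #=> {:ok, #Money<10.00USD>}
#     cast(:nope)                                   #=> :error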
@doc """
Converts an `Monetized.Money` into a string for
saving to the db. ie: "100.50 EUR"
"""
def dump(%Monetized.Money{} = money) do
{:ok, Monetized.Money.to_string(money, currency_code: true)}
end
def dump(_), do: :error
@doc """
Converts a string as saved to the db into a
`Monetized.Money` struct.
"""
def load(m) when is_bitstring(m) do
{:ok, Monetized.Money.make(m)}
end
def load(_), do: :error
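# `dump/1` and `load/1` round-trip through the "<value> <CODE>" string
# representation, e.g. "100.50 EUR".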
@doc """
Returns a string representation of the given money.
## Examples
iex> money = Monetized.Money.make("£ 20150.25")
...> Monetized.Money.to_string(money, [currency_symbol: true])
"£ 20,150.25"
# Ignores currency as there isn't one
iex> money = Monetized.Money.make(999999999)
...> Monetized.Money.to_string(money, [delimiter: " ", separator: " ", currency_symbol: true])
"999 999 999 00"
iex> money = Monetized.Money.make(100_000_000, [currency: "USD"])
...> Monetized.Money.to_string(money, [format: "%n%s%d %cs", currency_symbol: true])
"100,000,000.00 $"
iex> money = Monetized.Money.make(-99.50, [currency: "USD"])
...> Monetized.Money.to_string(money, [currency_symbol: true])
"$ -99.50"
iex> money = Monetized.Money.make("100.50 EUR")
...> Monetized.Money.to_string(money, [currency_code: true])
"100.50 EUR"
iex> money = Monetized.Money.make(Decimal.new("10"))
...> Monetized.Money.to_string(money)
"10.00"
iex> money = Monetized.Money.make(Decimal.new("10"), currency: "USD")
...> Monetized.Money.to_string(money, currency_symbol: true)
"$ 10.00"
iex> money = Monetized.Money.make(Decimal.new("0.005"), currency: "USD")
...> Monetized.Money.to_string(money, currency_symbol: true)
"$ 0.01"
"""
@spec to_string(t, list) :: String.t()
def to_string(%Monetized.Money{value: value} = money, options \\ []) do
delimiter = option_or_config(config(), options, :delimiter)
separator = option_or_config(config(), options, :separator)
decimal_places = option_or_config(config(), options, :decimal_places)
{base, decimal} =
value
|> Decimal.round(decimal_places)
|> Decimal.to_string(:normal)
|> decimal_parts(decimal_places)
number =
String.to_integer(base)
|> delimit_integer(delimiter)
|> String.Chars.to_string()
cs =
if options[:currency_symbol] && money.currency,
do: Currency.get(money.currency).symbol,
else: ""
cc =
if options[:currency_code] && money.currency,
do: Currency.get(money.currency).key,
else: ""
option_or_config(config(), options, :format)
|> String.replace(~r/%cs/, cs)
|> String.replace(~r/%n/, number)
|> String.replace(~r/%s/, separator)
|> String.replace(~r/%d/, decimal)
|> String.replace(~r/%cc/, cc)
|> String.trim()
end
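# Splits a rounded decimal string into integer and fractional parts,
# padding the fraction out to the requested number of decimal places.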
defp decimal_parts(str, decimal_places) do
case String.split(str, ".") do
[int] -> {int, IO.iodata_to_binary(:lists.duplicate(decimal_places, "0"))}
[int, decimal] -> {int, String.pad_trailing(decimal, decimal_places, "0")}
end
end
@doc """
Creates a money struct from any of the supported
types for amount.
If a string is given with either the currency key/code
(ie "USD") or the symbol present, that currency will be
assumed.
Passing `currency` in the options will make it use that
despite of configured, or assumed from string.
## Examples
iex> Monetized.Money.make("20150.25 EUR")
#Money<20150.25EUR>
iex> Monetized.Money.make(20150.25, [currency: "EUR"])
#Money<20150.25EUR>
iex> Decimal.new("100.50") |> Monetized.Money.make
#Money<100.50>
iex> Monetized.Money.make("£ 100")
#Money<100.00GBP>
# currency in options takes precedence
iex> Monetized.Money.make("€ 50", [currency: "USD"])
#Money<50.00USD>
"""
@spec make(integer | float | String.t() | Decimal, list) :: t
def make(amount, options \\ []) do
do_make(amount, options)
end
defp do_make(%Decimal{} = value, options) do
from_decimal(value, options)
end
defp do_make(amount, options) when is_bitstring(amount) do
from_string(amount, options)
end
defp do_make(amount, options) when is_integer(amount) do
from_integer(amount, options)
end
defp do_make(amount, options) when is_float(amount) do
from_float(amount, options)
end
@doc """
Creates a money struct from a string value.
Passing currency in the options will make it use that
despite of configured default.
## Examples
iex> Monetized.Money.from_string("GBP 10.52")
#Money<10.52GBP>
iex> Monetized.Money.from_string("€ 100")
#Money<100.00EUR>
iex> Monetized.Money.from_string("100.00", [currency: "EUR"])
#Money<100.00EUR>
iex> Monetized.Money.from_string("$50")
#Money<50.00USD>
iex> Monetized.Money.from_string("1,000,000 EUR")
#Money<1000000.00EUR>
iex> Monetized.Money.from_string("200", currency: "THB")
#Money<200.00THB>
"""
@spec from_string(String.t(), list) :: t
def from_string(amount, options \\ []) when is_bitstring(amount) do
options =
if currency = Currency.parse(amount),
do: Keyword.merge([currency: currency.key], options),
else: options
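# Pull the (optionally comma-delimited) numeric part out of the string,
# strip the delimiters, and normalize to two decimal places when the
# input carries none.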
amount =
Regex.run(~r/-?[0-9]{1,300}(,[0-9]{3})*(\.[0-9]+)?/, amount)
|> List.first()
|> String.replace(~r/\,/, "")
amount = if Regex.run(~r/\./, amount) == nil, do: Enum.join([amount, ".00"]), else: amount
amount
|> Decimal.new()
|> from_decimal(options)
end
@doc """
Creates a money struct from a integer value.
Passing currency in the options will make it use that
despite of configured default.
## Examples
iex> Monetized.Money.from_integer(152, [currency: "GBP"])
#Money<152.00GBP>
iex> Monetized.Money.from_integer(100_000, [currency: "GBP"])
#Money<100000.00GBP>
iex> Monetized.Money.from_integer(-100, [currency: "GBP"])
#Money<-100.00GBP>
"""
@spec from_integer(integer, list) :: t
def from_integer(amount, options \\ []) when is_integer(amount) do
currency_key = option_or_config(config(), options, :currency)
do_from_integer(amount, currency_key)
end
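# Build the %Decimal{} directly: an integer amount N becomes coefficient
# N * 100 with exponent -2 (i.e. N.00), with the sign carried separately.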
defp do_from_integer(amount, currency_key) when amount >= 0 do
%Decimal{
coef: amount * 100,
sign: 1,
exp: -2
}
|> create(currency_key)
end
defp do_from_integer(amount, currency_key) do
%Decimal{
coef: -(amount * 100),
sign: -1,
exp: -2
}
|> create(currency_key)
end
@doc """
Creates a money struct from a float value.
Passing currency in the options will make it use that
despite of configured default.
## Examples
iex> Monetized.Money.from_float(100.00, [currency: "EUR"])
#Money<100.00EUR>
iex> Monetized.Money.from_float(150.52)
#Money<150.52>
iex> Monetized.Money.from_float(20.50)
#Money<20.50>
"""
@spec from_float(float, list) :: t
def from_float(amount, options \\ []) when is_float(amount) do
currency_key = option_or_config(config(), options, :currency)
amount
|> :erlang.float_to_binary(decimals: 2)
|> Decimal.new()
|> create(currency_key)
end
@doc """
Creates a money struct from a Decimal.
It uses the default currency ("USD") if one isn't
configured.
Passing currency in the options will make it use that
despite of configured default.
## Examples
iex> Decimal.new("100.00") |> Monetized.Money.from_decimal([currency: "EUR"])
#Money<100.00EUR>
iex> Decimal.new("150.52") |> Monetized.Money.from_decimal
#Money<150.52>
iex> Decimal.new("300.25") |> Monetized.Money.from_decimal
#Money<300.25>
"""
@spec from_decimal(Decimal, list) :: t
def from_decimal(value = %Decimal{}, options \\ []) do
currency_key = option_or_config(config(), options, :currency)
do_from_decimal(value, currency_key)
end
defp do_from_decimal(value = %Decimal{exp: -2}, currency_key) do
value
|> create(currency_key)
end
defp do_from_decimal(value, currency_key) do
str = Decimal.to_string(value)
Regex.replace(~r/\.(\d)$/, str, ".\\g{1}0")
|> Decimal.new()
|> create(currency_key)
end
@doc """
Creates a money struct with 0 value.
Useful for setting a default value of "0.00".
## Examples
iex> Monetized.Money.zero
#Money<0.00>
iex> Monetized.Money.zero([currency: "GBP"])
#Money<0.00GBP>
"""
def zero(options \\ []) do
from_integer(0, options)
end
defp create(value, currency_key) do
%Monetized.Money{value: value, currency: currency_key}
end
defp config do
defaults = [
delimiter: ",",
separator: ".",
format: "%cs %n%s%d %cc",
decimal_places: 2
]
Keyword.merge(defaults, Application.get_env(:monetized, :config, []))
end
defimpl Inspect, for: Monetized.Money do
def inspect(dec, _opts) do
if dec.currency do
"#Money<" <> Decimal.to_string(dec.value) <> dec.currency <> ">"
else
"#Money<" <> Decimal.to_string(dec.value) <> ">"
end
end
end
end | lib/money.ex | 0.885724 | 0.642573 | money.ex | starcoder |
defmodule EVM.Functions do
@moduledoc """
Set of functions defined in the Yellow Paper that do not logically
fit in other modules.
"""
alias EVM.{ExecEnv, Gas, MachineCode, MachineState, Operation, Stack}
alias EVM.Operation.Metadata
@max_stack 1024
def max_stack_depth, do: @max_stack
@doc """
Returns whether or not the current program is halting due to a `return` or terminal statement.
# Examples
iex> EVM.Functions.is_normal_halting?(%EVM.MachineState{program_counter: 0}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:add)>>})
nil
iex> EVM.Functions.is_normal_halting?(%EVM.MachineState{program_counter: 0}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:mul)>>})
nil
iex> EVM.Functions.is_normal_halting?(%EVM.MachineState{program_counter: 0}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:stop)>>})
<<>>
iex> EVM.Functions.is_normal_halting?(%EVM.MachineState{program_counter: 0}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:selfdestruct)>>})
<<>>
iex> EVM.Functions.is_normal_halting?(%EVM.MachineState{stack: [0, 1], memory: <<0xabcd::16>>}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:return)>>})
<<0xab>>
iex> EVM.Functions.is_normal_halting?(%EVM.MachineState{stack: [0, 2], memory: <<0xabcd::16>>}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:return)>>})
<<0xab, 0xcd>>
iex> EVM.Functions.is_normal_halting?(%EVM.MachineState{stack: [1, 1], memory: <<0xabcd::16>>}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:return)>>})
<<0xcd>>
"""
@spec is_normal_halting?(MachineState.t(), ExecEnv.t()) :: nil | binary() | {:revert, binary()}
def is_normal_halting?(machine_state, exec_env) do
case MachineCode.current_operation(machine_state, exec_env).sym do
:return -> h_return(machine_state)
:revert -> {:revert, h_return(machine_state)}
x when x == :stop or x == :selfdestruct -> <<>>
_ -> nil
end
end
# Defined in Appendix H of the Yellow Paper
@spec h_return(MachineState.t()) :: binary()
defp h_return(machine_state) do
{[offset, length], _} = EVM.Stack.pop_n(machine_state.stack, 2)
{result, _} = EVM.Memory.read(machine_state, offset, length)
result
end
@doc """
Returns whether or not the current program is in an exceptional halting state.
This may be due to running out of gas, having an invalid instruction, having
a stack underflow, having an invalid jump destination or having a stack overflow.
This is defined as `Z` in Eq.(137) of the Yellow Paper.
## Examples
# TODO: Once we add gas cost, make this more reasonable
# TODO: How do we pass in state?
iex> EVM.Functions.is_exception_halt?(%EVM.MachineState{program_counter: 0, gas: 0xffff}, %EVM.ExecEnv{machine_code: <<0xfee>>})
{:halt, :undefined_instruction}
iex> EVM.Functions.is_exception_halt?(%EVM.MachineState{program_counter: 0, gas: 0xffff, stack: []}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:add)>>})
{:halt, :stack_underflow}
iex> EVM.Functions.is_exception_halt?(%EVM.MachineState{program_counter: 0, gas: 0xffff, stack: [5]}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:jump)>>})
{:halt, :invalid_jump_destination}
iex> machine_code = <<EVM.Operation.encode(:jump), EVM.Operation.encode(:jumpdest)>>
iex> exec_env = EVM.ExecEnv.set_valid_jump_destinations(%EVM.ExecEnv{machine_code: machine_code})
iex> {:continue, _exec_env, cost} = EVM.Functions.is_exception_halt?(%EVM.MachineState{program_counter: 0, gas: 0xffff, stack: [1]}, exec_env)
iex> cost
{:original, 8}
iex> EVM.Functions.is_exception_halt?(%EVM.MachineState{program_counter: 0, gas: 0xffff, stack: [1, 5]}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:jumpi)>>})
{:halt, :invalid_jump_destination}
iex> machine_code = <<EVM.Operation.encode(:jumpi), EVM.Operation.encode(:jumpdest)>>
iex> exec_env = EVM.ExecEnv.set_valid_jump_destinations(%EVM.ExecEnv{machine_code: machine_code})
iex> {:continue, _exec_env, cost} = EVM.Functions.is_exception_halt?(%EVM.MachineState{program_counter: 0, gas: 0xffff, stack: [1, 5]}, exec_env)
iex> cost
{:original, 10}
iex> {:continue, _exec_env, cost} = EVM.Functions.is_exception_halt?(%EVM.MachineState{program_counter: 0, gas: 0xffff, stack: (for _ <- 1..1024, do: 0x0)}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:stop)>>})
iex> cost
{:original, 0}
iex> EVM.Functions.is_exception_halt?(%EVM.MachineState{program_counter: 0, gas: 0xffff, stack: (for _ <- 1..1024, do: 0x0)}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:push1)>>})
{:halt, :stack_overflow}
iex> EVM.Functions.is_exception_halt?(%EVM.MachineState{program_counter: 0, gas: 0xffff, stack: []}, %EVM.ExecEnv{machine_code: <<EVM.Operation.encode(:invalid)>>})
{:halt, :invalid_instruction}
"""
@spec is_exception_halt?(MachineState.t(), ExecEnv.t()) ::
{:continue, ExecEnv.t(), Gas.cost_with_status()} | {:halt, atom()}
# credo:disable-for-next-line
def is_exception_halt?(machine_state, exec_env) do
operation = Operation.get_operation_at(exec_env.machine_code, machine_state.program_counter)
operation_metadata = operation_metadata(operation, exec_env)
# dw: number of stack items removed by the operation (δ in the Yellow Paper)
input_count = Map.get(operation_metadata || %{}, :input_count)
# aw: number of stack items added by the operation (α in the Yellow Paper)
output_count = Map.get(operation_metadata || %{}, :output_count)
inputs =
if operation_metadata do
Operation.inputs(operation_metadata, machine_state)
end
halt_status =
cond do
is_invalid_instruction?(operation_metadata) ->
{:halt, :invalid_instruction}
is_nil(input_count) ->
{:halt, :undefined_instruction}
length(machine_state.stack) < input_count ->
{:halt, :stack_underflow}
Stack.length(machine_state.stack) - input_count + output_count > @max_stack ->
{:halt, :stack_overflow}
is_invalid_jump_destination?(operation_metadata, inputs, exec_env) ->
{:halt, :invalid_jump_destination}
exec_env.static && static_state_modification?(operation_metadata.sym, inputs) ->
{:halt, :static_state_modification}
out_of_memory_bounds?(operation_metadata.sym, machine_state, inputs) ->
{:halt, :out_of_memory_bounds}
true ->
:continue
end
case halt_status do
:continue ->
not_enough_gas?(machine_state, exec_env)
other ->
other
end
end
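@doc """
Returns the metadata for the given operation, or `nil` when the operation is
not enabled by the hardfork configuration in `exec_env.config`.
"""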
# credo:disable-for-next-line
def operation_metadata(operation, exec_env) do
operation_metadata = Operation.metadata(operation)
if operation_metadata do
config = exec_env.config
case operation_metadata.sym do
:delegatecall ->
if config.has_delegate_call, do: operation_metadata
:revert ->
if config.has_revert, do: operation_metadata
:staticcall ->
if config.has_static_call, do: operation_metadata
:returndatasize ->
if config.support_variable_length_return_value,
do: operation_metadata
:returndatacopy ->
if config.support_variable_length_return_value,
do: operation_metadata
:shl ->
if config.has_shift_operations, do: operation_metadata
:shr ->
if config.has_shift_operations, do: operation_metadata
:sar ->
if config.has_shift_operations, do: operation_metadata
:extcodehash ->
if config.has_extcodehash, do: operation_metadata
:create2 ->
if config.has_create2, do: operation_metadata
_ ->
operation_metadata
end
end
end
@spec not_enough_gas?(MachineState.t(), ExecEnv.t()) ::
{:halt, :out_of_gas} | {:continue, ExecEnv.t(), Gas.cost_with_status()}
defp not_enough_gas?(machine_state, exec_env) do
{updated_exec_env, cost_with_status} = Gas.cost_with_status(machine_state, exec_env)
cost =
case cost_with_status do
{:original, cost} -> cost
{:changed, value, _} -> value
end
if cost > machine_state.gas do
{:halt, :out_of_gas}
else
{:continue, updated_exec_env, cost_with_status}
end
end
@spec out_of_memory_bounds?(atom(), MachineState.t(), [EVM.val()]) :: boolean()
defp out_of_memory_bounds?(:returndatacopy, machine_state, [
_memory_start,
return_data_start,
size
]) do
return_data_start + size > byte_size(machine_state.last_return_data)
end
defp out_of_memory_bounds?(_, _, _), do: false
@spec is_invalid_instruction?(Metadata.t()) :: boolean()
defp is_invalid_instruction?(%Metadata{sym: :invalid}), do: true
defp is_invalid_instruction?(_), do: false
@spec is_invalid_jump_destination?(Metadata.t(), [EVM.val()], ExecEnv.t()) :: boolean()
defp is_invalid_jump_destination?(%Metadata{sym: :jump}, [position], exec_env) do
not Enum.member?(exec_env.valid_jump_destinations, position)
end
defp is_invalid_jump_destination?(%Metadata{sym: :jumpi}, [position, condition], exec_env) do
condition != 0 && not Enum.member?(exec_env.valid_jump_destinations, position)
end
defp is_invalid_jump_destination?(_operation, _inputs, _machine_code), do: false
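# In a static call context (EIP-214) any state-modifying operation must halt
# execution; a CALL only counts as a modification when it transfers value.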
defp static_state_modification?(:call, [_, _, value, _, _, _, _]), do: value > 0
defp static_state_modification?(:log0, _), do: true
defp static_state_modification?(:log1, _), do: true
defp static_state_modification?(:log2, _), do: true
defp static_state_modification?(:log3, _), do: true
defp static_state_modification?(:log4, _), do: true
defp static_state_modification?(:selfdestruct, _), do: true
defp static_state_modification?(:create, _), do: true
defp static_state_modification?(:create2, _), do: true
defp static_state_modification?(:sstore, _), do: true
defp static_state_modification?(_, _), do: false
end
defmodule DockerCompose do
@moduledoc """
Docker Compose CLI
Uses the `docker-compose` executable; it must be installed and working.
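
## Example

A minimal, hypothetical session (the project and service names are assumptions):

    {:ok, output} = DockerCompose.up(project_name: "demo", service: "db")
    {:ok, _} = DockerCompose.stop(project_name: "demo")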
"""
@type exit_code :: non_neg_integer
@type output :: Collectable.t()
@doc """
docker-compose up
The command is executed in detached mode; the result is returned after the whole command
finishes, which might take a while if the images need to be pulled.
## Options
- `always_yes: true` - answer "yes" to all interactive questions
- `compose_path: path` - path to the compose if not in the standard location
- `project_name: name` - compose project name
- `force_recreate: true` - if true, all specified services are forcefully recreated
- `remove_orphans: true` - if true, orphaned containers are removed
- `service: name` - name of the service that should be started, can be specified multiple times
to start multiple services. If it's not specified at all then all services are started.
"""
@spec up(Keyword.t()) :: {:ok, output} | {:error, exit_code, output}
def up(opts) do
args =
[
compose_opts(opts),
"up",
up_opts(opts),
["-d", "--no-color" | service_opts(opts)]
]
|> List.flatten()
args
|> execute(opts)
|> result()
end
@doc """
docker-compose down
## Options
- `always_yes: true` - answer "yes" to all interactive questions
- `compose_path: path` - path to the compose if not in the standard location
- `project_name: name` - compose project name
- `remove_orphans: true` - if true orphaned containers are removed
## Result
The function returns either `{:ok, output}` if the request is successful or
`{:error, exit_code, output}`. The exit code is the exit code of the docker-compose
process that failed. The output is the collected console output of the command; for
`down` it typically lists the stopped and removed containers, the removed networks and,
when `remove_orphans: true` is specified, the removed orphaned containers.
"""
@spec down(Keyword.t()) :: {:ok, output} | {:error, exit_code, output}
def down(opts) do
args =
[
compose_opts(opts),
"down",
down_opts(opts)
]
|> List.flatten()
args
|> execute(opts)
|> result()
end
@doc """
docker-compose restart
## Options
- `always_yes: true` - answer "yes" to all interactive questions
- `compose_path: path` - path to the compose if not in the standard location
- `project_name: name` - compose project name
- `service: name` - name of the service to be restarted, can be specified multiple times to
restart multiple services at once. If not specified at all then all services are restarted.
"""
@spec restart(Keyword.t()) :: {:ok, output} | {:error, exit_code, output}
def restart(opts) do
args =
[
compose_opts(opts),
"restart",
service_opts(opts)
]
|> List.flatten()
args
|> execute(opts)
|> result()
end
@doc """
docker-compose stop
## Options
- `always_yes: true` - answer "yes" to all interactive questions
- `compose_path: path` - path to the compose if not in the standard location
- `project_name: name` - compose project name
- `service: name` - name of the service to be stopped, can be specified multiple times to stop
multiple services at once. If not specified at all then all services are stopped.
"""
@spec stop(Keyword.t()) :: {:ok, output} | {:error, exit_code, output}
def stop(opts) do
args =
[
compose_opts(opts),
"stop",
service_opts(opts)
]
|> List.flatten()
args
|> execute(opts)
|> result()
end
@doc """
docker-compose start
Note this can only be used to start previously created and stopped services. If you want to create
and start the services use `up/1`.
## Options
- `always_yes: true` - answer "yes" to all interactive questions
- `compose_path: path` - path to the compose if not in the standard location
- `project_name: name` - compose project name
- `service: name` - name of the service to be started, can be specified multiple times to start
multiple services at once. If not specified at all then all services are started.
"""
@spec start(Keyword.t()) :: {:ok, output} | {:error, exit_code, output}
def start(opts) do
args =
[
compose_opts(opts),
"start",
service_opts(opts)
]
|> List.flatten()
args
|> execute(opts)
|> result()
end
defp execute(args, opts) do
System.cmd(get_executable(), wrapper_opts(opts) ++ ["--ansi", "never" | args], [
{:stderr_to_stdout, true} | cmd_opts(opts)
])
end
defp get_executable do
case :os.type() do
{:win32, _} -> "docker-compose.exe"
_ -> Path.join(:code.priv_dir(:docker_compose), "docker-compose")
end
end
defp wrapper_opts([{:always_yes, true} | rest]) do
["--always-yes" | wrapper_opts(rest)]
end
defp wrapper_opts([_ | rest]), do: wrapper_opts(rest)
defp wrapper_opts([]), do: []
defp compose_opts([{:compose_path, path} | rest]) do
["-f", Path.basename(path) | compose_opts(rest)]
end
defp compose_opts([{:project_name, name} | rest]) do
["-p", name | compose_opts(rest)]
end
defp compose_opts([_ | rest]), do: compose_opts(rest)
defp compose_opts([]), do: []
defp up_opts(opts) do
opts
|> Keyword.take([:force_recreate, :remove_orphans])
|> command_opts()
end
defp down_opts(opts) do
opts
|> Keyword.take([:remove_orphans])
|> command_opts()
end
defp command_opts([{:force_recreate, true} | rest]),
do: ["--force-recreate" | command_opts(rest)]
defp command_opts([{:remove_orphans, true} | rest]),
do: ["--remove-orphans" | command_opts(rest)]
defp command_opts([_ | rest]), do: command_opts(rest)
defp command_opts([]), do: []
defp service_opts([{:service, name} | rest]), do: [name | service_opts(rest)]
defp service_opts([_ | rest]), do: service_opts(rest)
defp service_opts([]), do: []
defp cmd_opts([{:compose_path, path} | rest]) do
[{:cd, Path.dirname(path)} | cmd_opts(rest)]
end
defp cmd_opts([{:into, _collectable} = into | rest]) do
[into | cmd_opts(rest)]
end
defp cmd_opts([_ | rest]), do: cmd_opts(rest)
defp cmd_opts([]), do: []
defp result({output, exit_code}) do
if exit_code == 0 do
{:ok, output}
else
{:error, exit_code, output}
end
end
end
defmodule Acquire.Query.ST do
@moduledoc """
Collection of functions that generate `Acquire.Query.Where.Function`
objects representing PrestoDB [ST_*](https://prestosql.io/docs/current/functions/geospatial.html#ST_Intersects) functions.
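
## Example

A sketch of building an intersection predicate (the coordinates are arbitrary):

    alias Acquire.Query.ST

    {:ok, point} = ST.point(1.0, 2.0)
    {:ok, _predicate} = ST.intersects(point, point)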
"""
alias Acquire.Query.Where.Function
alias Acquire.Queryable
import Acquire.Query.Where.Functions, only: [parameter: 1, array: 1]
@spec geometry_from_text(String.t() | Queryable.t()) :: {:ok, Function.t()} | {:error, term}
def geometry_from_text(text) when is_binary(text) do
Function.new(function: "ST_GeometryFromText", args: [parameter(text)])
end
def geometry_from_text(text) do
Function.new(function: "ST_GeometryFromText", args: [text])
end
@spec envelope(Queryable.t()) :: {:ok, Function.t()} | {:error, term}
def envelope(a) do
Function.new(function: "ST_Envelope", args: [a])
end
@spec line_string(Acquire.Query.Where.Array.t() | list) :: {:ok, Function.t()} | {:error, term}
def line_string(%Acquire.Query.Where.Array{} = array) do
Function.new(function: "ST_LineString", args: [array])
end
def line_string(list) when is_list(list) do
Function.new(function: "ST_LineString", args: [array(list)])
end
@spec intersects(Queryable.t(), Queryable.t()) :: {:ok, Function.t()} | {:error, term}
def intersects(a, b) do
Function.new(function: "ST_Intersects", args: [a, b])
end
@spec intersects!(Queryable.t(), Queryable.t()) :: Function.t()
def intersects!(a, b) do
case intersects(a, b) do
{:ok, value} -> value
{:error, reason} -> raise reason
end
end
@spec point(x :: float | Queryable.t(), y :: float | Queryable.t()) ::
{:ok, Function.t()} | {:error, term}
def point(x, y) when is_float(x) and is_float(y) do
point(parameter(x), parameter(y))
end
def point(x, y) do
Function.new(function: "ST_Point", args: [x, y])
end
@spec point!(x :: float | Queryable.t(), y :: float | Queryable.t()) :: Function.t()
def point!(x, y) do
case point(x, y) do
{:ok, value} -> value
{:error, reason} -> raise reason
end
end
end
defmodule TortoiseWebsocket.Client do
@moduledoc ~S"""
A Websocket client with an API similar to `:gen_tcp`.
Example of usage where a client is started and connected, then it is used to send and receive
data:
iex> {:ok, socket} = TortoiseWebsocket.Client.connect('example.com', 80, [])
iex> :ok = TortoiseWebsocket.Client.send(socket, "data")
iex> {:ok, data} = TortoiseWebsocket.Client.recv(socket, 100)
When the client has the option `:active` set to `true` or `:once`, it will send to the process
that started the client, or to the controlling process defined with the `controlling_process/2` function,
messages with the format `{:websocket, socket, data}`, where `socket` is the client socket struct
and `data` is a binary with the data received from the websocket server. If the `:active`
option was set to `:once`, the client will set it to `false` after sending data once. When the
option is set to `false`, the `recv/3` function must be used to retrieve data or the option needs
to be set back to `true` or `:once` using the `set_active/2` function.
When the connection is lost unexpectedly a message is sent with the format
`{:websocket_closed, socket}`.
To close the client connection to the websocket server, use the `close/1` function; the
connection to the websocket will be closed and the client process will be stopped.
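
A sketch of receiving data in active mode:

    {:ok, socket} = TortoiseWebsocket.Client.connect('example.com', 80, active: true)

    receive do
      {:websocket, ^socket, data} -> data
      {:websocket_closed, ^socket} -> :closed
    end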
"""
use GenStateMachine, restart: :transient
alias __MODULE__, as: Data
alias __MODULE__.Socket
alias :gun, as: Gun
require Logger
defstruct host: nil,
port: nil,
timeout: nil,
owner: nil,
owner_monitor_ref: nil,
transport: nil,
path: nil,
headers: nil,
ws_opts: nil,
active: nil,
caller: nil,
socket: nil,
buffer: "",
recv_queue: :queue.new()
@type reason() :: any()
@type opts() :: Keyword.t()
def start_link(args) do
GenStateMachine.start_link(__MODULE__, args)
end
@doc """
Starts and connects a client to the websocket server.
The `opts` parameter is a keyword list that accepts the following optional entries:
* `:transport` - An atom with the transport protocol, either `:tcp` or `:tls`, defaults to `:tcp`
* `:path` - A string with the websocket server path, defaults to `"/"`
* `:headers` - A list of tuples with the HTTP headers to send to the server, defaults to `[]`
* `:active` - A boolean or atom to indicate if the client should send back any received data, it
can be `true`, `false` and `:once`, defaults to `false`
* `:compress` - A boolean to indicate if the data should be compressed, defaults to `true`
"""
@spec connect(charlist(), :inet.port_number(), opts(), timeout()) ::
{:ok, Socket.t()} | {:error, reason()}
def connect(host, port, opts \\ [], timeout \\ 5_000)
when (is_list(host) or is_atom(host) or is_tuple(host)) and (is_integer(port) and port > 0) and
is_list(opts) and is_integer(timeout) and timeout > 0 do
case TortoiseWebsocket.Client.Supervisor.start_child({self(), host, port, opts, timeout}) do
{:ok, pid} -> GenStateMachine.call(pid, :connect)
error -> error
end
end
@doc """
Sends data to the websocket server.
Data must be a binary or a list of binaries.
"""
@spec send(Socket.t(), iodata()) :: :ok | {:error, reason()}
def send(%Socket{pid: pid}, data) when is_binary(data) or is_list(data) do
GenStateMachine.call(pid, {:send, data})
catch
:exit, _ -> {:error, :closed}
end
@doc """
When the client has the option `:active` set to `false`, the `recv/3` function can be used to
retrieve any data sent by the server.
If the provided length is `0`, it returns immediately with all data present in the client, even if
there is none. If the timeout expires it returns `{:error, :timeout}`.
"""
@spec recv(Socket.t(), non_neg_integer(), timeout()) :: {:ok, any()} | {:error, reason()}
def recv(%Socket{pid: pid}, length, timeout \\ 5_000)
when is_integer(length) and length >= 0 and is_integer(timeout) and timeout > 0 do
GenStateMachine.call(pid, {:recv, length, timeout})
catch
:exit, _ -> {:error, :closed}
end
@doc """
Defines the process to which the data received by the client is sent, when the client option
`:active` is set to `true` or `:once`.
"""
@spec controlling_process(Socket.t(), pid()) :: :ok | {:error, reason()}
def controlling_process(%Socket{pid: pid}, new_owner_pid) when is_pid(new_owner_pid) do
GenStateMachine.call(pid, {:owner, new_owner_pid})
catch
:exit, _ -> {:error, :closed}
end
@doc """
Closes the client connection to the websocket server and stops the client. Any data retained by
the client will be lost.
"""
@spec close(Socket.t()) :: :ok | {:error, reason()}
def close(%Socket{pid: pid}) do
GenStateMachine.call(pid, :close)
catch
:exit, _ -> {:error, :closed}
end
@doc """
It defines the client `:active` option.
The possible values for the `:active` options are `true`, `false` or `:once`. When the `:active`
option is set to `:once`, the client will send back the first received frame of data and set the
`:active` option to `false`.
"""
@spec set_active(Socket.t(), boolean | :once) :: :ok | {:error, reason()}
def set_active(%Socket{pid: pid}, active) when is_boolean(active) or active == :once do
GenStateMachine.call(pid, {:set_active, active})
catch
:exit, _ -> {:error, :closed}
end
def init({owner, host, port, opts, timeout}) do
Process.flag(:trap_exit, true)
ref = Process.monitor(owner)
{compress, opts} = Keyword.pop(opts, :compress, true)
ws_opts = %{compress: compress, protocols: [{"mqtt", :gun_ws_h}]}
args =
opts
|> Keyword.put(:host, host)
|> Keyword.put(:port, port)
|> Keyword.put(:timeout, timeout)
|> Keyword.put(:owner, owner)
|> Keyword.put(:owner_monitor_ref, ref)
|> Keyword.put_new(:transport, :tcp)
|> Keyword.put_new(:path, "/")
|> Keyword.put_new(:headers, [])
|> Keyword.put_new(:active, false)
|> Keyword.put(:ws_opts, ws_opts)
{:ok, :disconnected, struct(Data, args)}
end
def handle_event(
{:call, from},
:connect,
:disconnected,
%Data{
host: host,
port: port,
timeout: timeout,
transport: transport,
path: path,
headers: headers,
ws_opts: ws_opts
} = data
) do
Logger.debug("[tortoise_websocket] Opening connection")
with start_time <- DateTime.utc_now(),
{:ok, socket} <-
Gun.open(host, port, %{
transport: transport,
connect_timeout: timeout,
protocols: [:http],
retry: 0
}),
timeout <- remaining_timeout(timeout, start_time),
{:ok, _} <- Gun.await_up(socket, timeout) do
Logger.debug("[tortoise_websocket] Upgrading connection")
Gun.ws_upgrade(socket, path, headers, ws_opts)
{:keep_state, %Data{data | caller: from},
{:state_timeout, remaining_timeout(timeout, start_time), {:upgrade_timeout, socket, from}}}
else
{:error, {:shutdown, :econnrefused}} ->
Logger.debug(
"[tortoise_websocket] It was not possible to connect to host #{inspect(host)} on port #{
port
}"
)
error = {:error, :econnrefused}
{:stop_and_reply, {:shutdown, error}, {:reply, from, error}}
{:error, {:shutdown, :closed}} ->
Logger.debug(
"[tortoise_websocket] It was not possible to connect to host #{inspect(host)} on port #{
port
}"
)
error = {:error, :closed}
{:stop_and_reply, {:shutdown, error}, {:reply, from, error}}
{:error, {:shutdown, :nxdomain}} ->
Logger.debug("[tortoise_websocket] Host #{host} not found")
error = {:error, :nxdomain}
{:stop_and_reply, {:shutdown, error}, {:reply, from, error}}
{:error, {:shutdown, :timeout}} ->
Logger.debug("[tortoise_websocket] Timeout while trying to connect")
error = {:error, :timeout}
{:stop_and_reply, {:shutdown, error}, {:reply, from, error}}
error ->
Logger.debug(
"[tortoise_websocket] There was an error while trying to connect: #{inspect(error)}"
)
{:stop_and_reply, {:shutdown, error}, {:reply, from, error}}
end
end
def handle_event({:call, from}, {:send, data}, {:connected, socket}, _) do
Gun.ws_send(socket, {:binary, data})
{:keep_state_and_data, {:reply, from, :ok}}
end
def handle_event(
{:call, from},
{:recv, length, timeout},
{:connected, _},
%Data{buffer: buffer, recv_queue: recv_queue} = data
) do
cond do
length == 0 ->
{:keep_state, %Data{data | buffer: ""}, {:reply, from, {:ok, buffer}}}
byte_size(buffer) >= length ->
<<package::binary-size(length), rest::binary>> = buffer
{:keep_state, %Data{data | buffer: rest}, {:reply, from, {:ok, package}}}
true ->
{:keep_state, %Data{data | recv_queue: :queue.in({from, length}, recv_queue)},
{:timeout, timeout, {:recv_timeout, {from, length}}}}
end
end
def handle_event(
{:call, from},
{:owner, new_owner},
{:connected, _},
%Data{owner_monitor_ref: owner_monitor_ref} = data
) do
Process.demonitor(owner_monitor_ref)
ref = Process.monitor(new_owner)
{:keep_state, %Data{data | owner: new_owner, owner_monitor_ref: ref}, {:reply, from, :ok}}
end
def handle_event({:call, from}, :close, state, _) do
case state do
{:connected, socket} -> Gun.close(socket)
_ -> :ok
end
{:stop_and_reply, :normal, {:reply, from, :ok}}
end
def handle_event({:call, from}, {:set_active, active}, {:connected, _}, data) do
{:keep_state, %Data{data | active: active}, {:reply, from, :ok}}
end
def handle_event({:call, from}, _, :disconnected, _) do
{:keep_state_and_data, {:reply, from, {:error, :not_connected}}}
end
def handle_event({:call, from}, _, _, _) do
{:keep_state_and_data, {:reply, from, {:error, :unexpected_command}}}
end
def handle_event(:cast, _, _, _) do
:keep_state_and_data
end
def handle_event(
:info,
{:gun_upgrade, socket, _, ["websocket"], _},
:disconnected,
%Data{caller: caller} = data
) do
Logger.debug("[tortoise_websocket] Connection upgraded")
client_socket = %Socket{pid: self()}
{:next_state, {:connected, socket}, %Data{data | caller: nil, socket: client_socket},
{:reply, caller, {:ok, client_socket}}}
end
def handle_event(
:info,
{:gun_response, socket, _, _, status, _},
:disconnected,
%Data{caller: caller}
) do
Logger.debug(
"[tortoise_websocket] It was not possible to upgrade connection, response status: #{
inspect(status)
}"
)
Gun.close(socket)
error = {:error, {:ws_upgrade_failed, "Response status: #{status}"}}
{:stop_and_reply, {:shutdown, error}, {:reply, caller, error}}
end
def handle_event(
:info,
{:gun_error, socket, _, reason},
:disconnected,
%Data{caller: caller}
) do
Logger.debug(
"[tortoise_websocket] Error while trying to upgrade connection, reason: #{inspect(reason)}"
)
Gun.close(socket)
error = {:error, {:ws_upgrade_error, "Reason: #{inspect(reason)}"}}
{:stop_and_reply, {:shutdown, error}, {:reply, caller, error}}
end
def handle_event(
:info,
{:gun_down, socket, _, reason, _, _},
{:connected, socket},
%Data{owner: owner, socket: client_socket}
) do
Logger.debug("[tortoise_websocket] Connection went down, reason: #{inspect(reason)}")
Kernel.send(owner, {:websocket_closed, client_socket})
{:stop, {:shutdown, {:error, :down}}}
end
def handle_event(
:info,
{:gun_ws, socket, _, :close},
{:connected, socket},
%Data{owner: owner, socket: client_socket}
) do
Logger.debug("[tortoise_websocket] Server closed connection")
Kernel.send(owner, {:websocket_closed, client_socket})
{:stop, {:shutdown, {:error, :closed}}}
end
def handle_event(
:info,
{:gun_ws, _, _, {:binary, frame}},
{:connected, _},
%Data{
owner: owner,
active: active,
socket: socket,
buffer: buffer,
recv_queue: recv_queue
} = data
) do
buffer = <<buffer::binary, frame::binary>>
{buffer, recv_queue} = satisfy_queued_recv(buffer, recv_queue)
case active do
true ->
Kernel.send(owner, {:websocket, socket, buffer})
{:keep_state, %Data{data | buffer: "", recv_queue: recv_queue}}
:once ->
Kernel.send(owner, {:websocket, socket, buffer})
{:keep_state, %Data{data | active: false, buffer: "", recv_queue: recv_queue}}
false ->
{:keep_state, %Data{data | buffer: buffer, recv_queue: recv_queue}}
end
end
def handle_event(:info, {:DOWN, owner_monitor_ref, :process, owner, _}, state, %Data{
owner: owner,
owner_monitor_ref: owner_monitor_ref
}) do
case state do
{:connected, socket} -> Gun.close(socket)
_ -> :ok
end
:stop
end
def handle_event(:info, msg, _, _) do
Logger.debug("[tortoise_websocket] Received unexpected message: #{inspect(msg)}")
:keep_state_and_data
end
def handle_event(:state_timeout, {:upgrade_timeout, socket, caller}, :disconnected, _) do
Logger.debug("[tortoise_websocket] Timeout while trying to upgrade connection")
Gun.close(socket)
error = {:error, :timeout}
{:stop_and_reply, {:shutdown, error}, {:reply, caller, error}}
end
def handle_event(
:timeout,
{:recv_timeout, {caller, length} = queued_recv},
{:connected, _},
%Data{recv_queue: recv_queue} = data
) do
Logger.debug("[tortoise_websocket] Timeout while trying to receive data with #{length} bytes")
recv_queue = :queue.filter(&(&1 != queued_recv), recv_queue)
{:keep_state, %Data{data | recv_queue: recv_queue}, {:reply, caller, {:error, :timeout}}}
end
defp remaining_timeout(current_timeout, start_time),
do: max(current_timeout - time_since(start_time), 0)
defp time_since(datetime), do: DateTime.utc_now() |> DateTime.diff(datetime, :millisecond)
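# Tries to satisfy pending `recv/3` calls with the data accumulated in the
# buffer, replying to every caller whose requested length is now available.
# Returns the remaining buffer and the remaining queue.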
defp satisfy_queued_recv(buffer, recv_queue) do
recv_list = :queue.to_list(recv_queue)
{buffer, recv_remaining} = satisfy_recv_items(buffer, recv_list)
{buffer, :queue.from_list(recv_remaining)}
end
defp satisfy_recv_items(buffer, []), do: {buffer, []}
defp satisfy_recv_items(buffer, [{caller, 0} | remaining_items]) do
GenStateMachine.reply(caller, {:ok, buffer})
{"", remaining_items}
end
defp satisfy_recv_items(buffer, [{_, length} | _] = items) when byte_size(buffer) < length,
do: {buffer, items}
defp satisfy_recv_items(buffer, [{caller, length} | remaining_items]) do
<<data::binary-size(length), remaining_buffer::binary>> = buffer
GenStateMachine.reply(caller, {:ok, data})
satisfy_recv_items(remaining_buffer, remaining_items)
end
end
defmodule Distributed.Replicator.GenServer do
@moduledoc """
The functions in the `Distributed.Replicator.GenServer` module help to replicate an event by processing it on all nodes in the network.
In `Distributed.Replicator.GenServer`, functions execute processes in parallel.
**Note**: Since this module is only a wrapper for the `GenServer` module, there is no need for detailed documentation here.
Please check the documentation of the `GenServer` module; you can think of these functions as running on every single node
without the need to specify nodes, and replying with a list of the results from each node.
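
## Example

A hypothetical broadcast call to a server registered on every node (`MyServer`
is an assumption):

    Distributed.Replicator.GenServer.call(MyServer, :ping)
    # => [{:"node1@host", :pong}, {:"node2@host", :pong}]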
"""
use GenServer
@doc false
def start_link() do
GenServer.start_link(__MODULE__, [], name: __MODULE__.process_id())
end
@doc false
def init(_opts \\ []) do
{:ok, %{}}
end
@doc false
def process_id() do
Distributed.Replicator.GenServer
end
@doc """
Sends `msg` to the given `dest` on all nodes and returns a list of `{node_name, msg}` tuples. See `Kernel.send/2`
"""
@spec info(dest :: pid | port | atom, msg :: any, opts :: [any]) :: any
def info(dest, msg, opts \\ []) do
Distributed.Parallel.map(Distributed.Node.list(opts), fn node_name ->
{node_name, send({dest, node_name}, msg)}
end)
end
@doc """
Makes synchronous calls to the servers on all nodes and waits for their replies. See `GenServer.call/3`
"""
@spec call(server :: atom, term, opts :: [any]) :: [term]
def call(server, term, opts \\ []) do
timeout = Keyword.get(opts, :timeout, :infinity)
Distributed.Parallel.map(Distributed.Node.list(opts), fn node_name ->
{node_name, GenServer.call({server, node_name}, term, timeout)}
end)
end
@doc """
Sends asynchronous requests to the servers on all nodes. See `GenServer.cast/2`
"""
@spec cast(server :: atom, term :: term, opts :: [any]) :: [term]
def cast(server, term, opts \\ []) do
Distributed.Parallel.map(Distributed.Node.list(opts), fn node_name ->
{node_name, GenServer.cast({server, node_name}, term)}
end)
end
end
defmodule Representer do
@moduledoc """
Implementation of the Representer pattern for the API
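
## Example

A sketch of rendering a single item as JSON:

    item = %Representer.Item{
      data: %{"name" => "box"},
      links: [%Representer.Link{rel: "self", href: "/boxes/1"}]
    }

    Representer.transform(item, "json")
    # => %{"name" => "box", "links" => [%{"rel" => "self", "href" => "/boxes/1"}]}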
"""
@extensions [
"json",
]
defguard known_extension?(extension) when extension in @extensions
defmodule Collection do
@moduledoc """
Struct for a collection of `Representer.Item`s
Contains the list of `:items`, `:pagination`, and a list of `:links`
"""
defstruct [:href, :name, :items, :pagination, links: []]
end
defmodule Item do
@moduledoc """
Struct for an item that can be rendered in various formats
Consists of `:data`, a map of the item's properties, and a list
of `:links` that may be associated with the item.
"""
defstruct [:rel, :href, :data, :type, embedded: %{}, links: []]
end
defmodule Link do
@moduledoc """
Struct for a hypermedia link
"""
defstruct [:rel, :href, :title, :template]
end
defmodule Pagination do
@moduledoc """
Pagination struct and link generators
"""
defstruct [:base_url, :current_page, :total_pages, :total_count]
@doc """
Set up a new pagination record
"""
def new(base_url, pagination) do
%Representer.Pagination{
base_url: base_url,
current_page: pagination.current,
total_pages: pagination.total,
total_count: pagination.total_count
}
end
@doc """
Maybe add pagination links to the link list
If pagination is nil, skip this
"""
def maybe_paginate(links, nil), do: links
def maybe_paginate(links, pagination) do
cond do
pagination.total_pages == 0 ->
links
pagination.total_pages == 1 ->
links
pagination.current_page == 1 ->
[next_link(pagination) | links]
pagination.current_page == pagination.total_pages ->
[prev_link(pagination) | links]
true ->
[next_link(pagination) | [prev_link(pagination) | links]]
end
end
defp next_link(pagination) do
%Representer.Link{
rel: "next",
href: page_path(pagination.base_url, pagination.current_page + 1)
}
end
defp prev_link(pagination) do
%Representer.Link{
rel: "prev",
href: page_path(pagination.base_url, pagination.current_page - 1)
}
end
defp page_path(path, page) do
uri = URI.parse(path)
query =
uri.query
|> decode_query()
|> Map.put(:page, page)
|> URI.encode_query()
%{uri | query: query}
|> URI.to_string()
end
defp decode_query(nil), do: %{}
defp decode_query(query) do
URI.decode_query(query)
end
end
@doc """
Transform the internal representation based on the extension
"""
def transform(struct, extension) do
case extension do
"json" ->
Representer.JSON.transform(struct)
end
end
@doc """
Possibly add a link to an item or collection
"""
def maybe_link(item, true, link) do
%{item | links: [link | item.links]}
end
def maybe_link(item, false, _link), do: item
defmodule Adapter do
@moduledoc """
Behaviour for representations to implement
"""
@type json :: map()
@callback transform(collection :: %Representer.Collection{}) :: json()
@callback transform(item :: %Representer.Item{}) :: json()
end
defmodule JSON do
@moduledoc """
Adapter for plain JSON
Renders the representation almost directly
"""
@behaviour Representer.Adapter
@impl true
def transform(collection = %Representer.Collection{}) do
%{}
|> maybe_put("items", render_collection(collection))
|> maybe_put("links", render_links(collection))
end
def transform(item = %Representer.Item{}) do
item.data
|> maybe_put("links", transform_links(item.links))
|> render_embedded(item.embedded)
end
defp maybe_put(map, _key, nil), do: map
defp maybe_put(map, key, value) do
Map.put(map, key, value)
end
defp render_collection(collection) do
case collection.items do
nil ->
nil
[] ->
nil
items ->
Enum.map(items, &transform/1)
end
end
defp render_embedded(json, embedded) do
Map.merge(embedded, json)
end
defp render_links(collection) do
collection.links
|> Representer.Pagination.maybe_paginate(collection.pagination)
|> transform_links()
end
defp transform_links(links) do
Enum.map(links, fn link ->
%{"rel" => link.rel, "href" => link.href}
end)
end
end
end
defmodule Changelog.Post do
use Changelog.Schema, default_sort: :published_at
alias Changelog.{Files, NewsItem, Person, PostTopic, Regexp}
schema "posts" do
field :title, :string
field :subtitle, :string
field :slug, :string
field :guid, :string
field :canonical_url, :string
field :image, Files.Image.Type
field :tldr, :string
field :body, :string
field :published, :boolean, default: false
field :published_at, :utc_datetime
belongs_to :author, Person
belongs_to :editor, Person
has_many :post_topics, PostTopic, on_delete: :delete_all
has_many :topics, through: [:post_topics, :topic]
timestamps()
end
def file_changeset(post, attrs \\ %{}),
do: cast_attachments(post, attrs, [:image], allow_urls: true)
def insert_changeset(post, attrs \\ %{}) do
post
|> cast(
attrs,
~w(title subtitle slug canonical_url author_id editor_id published published_at body tldr)a
)
|> validate_required([:title, :slug, :author_id])
|> validate_format(:canonical_url, Regexp.http(), message: Regexp.http_message())
|> validate_format(:slug, Regexp.slug(), message: Regexp.slug_message())
|> unique_constraint(:slug)
|> foreign_key_constraint(:author_id)
|> foreign_key_constraint(:editor_id)
|> validate_published_has_published_at()
|> cast_assoc(:post_topics)
end
def update_changeset(post, attrs \\ %{}) do
post
|> insert_changeset(attrs)
|> file_changeset(attrs)
end
def authored_by(query \\ __MODULE__, person),
do: from(q in query, where: q.author_id == ^person.id)
def contributed_by(query \\ __MODULE__, person),
do: from(q in query, where: q.author_id == ^person.id or q.editor_id == ^person.id)
def published(query \\ __MODULE__),
do: from(q in query, where: q.published, where: q.published_at <= ^Timex.now())
def scheduled(query \\ __MODULE__),
do: from(q in query, where: q.published, where: q.published_at > ^Timex.now())
def search(query \\ __MODULE__, term),
do: from(q in query, where: fragment("search_vector @@ plainto_tsquery('english', ?)", ^term))
def unpublished(query \\ __MODULE__), do: from(q in query, where: not q.published)
def is_public(post, as_of \\ Timex.now()) do
post.published && Timex.before?(post.published_at, as_of)
end
def is_published(post), do: post.published
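# A post can be published when all required content fields are present and it
# has not been published yet.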
def is_publishable(post) do
validated =
post
|> insert_changeset(%{})
|> validate_required([:slug, :title, :published_at, :tldr, :body])
validated.valid? && !is_published(post)
end
def preload_all(post) do
post
|> preload_author()
|> preload_editor()
|> preload_topics()
end
def preload_author(query = %Ecto.Query{}), do: Ecto.Query.preload(query, :author)
def preload_author(post), do: Repo.preload(post, :author)
def preload_editor(query = %Ecto.Query{}), do: Ecto.Query.preload(query, :editor)
def preload_editor(post), do: Repo.preload(post, :editor)
def preload_topics(query = %Ecto.Query{}) do
query
|> Ecto.Query.preload(post_topics: ^PostTopic.by_position())
|> Ecto.Query.preload(:topics)
end
def preload_topics(post) do
post
|> Repo.preload(post_topics: {PostTopic.by_position(), :topic})
|> Repo.preload(:topics)
end
def get_news_item(post) do
post
|> NewsItem.with_post()
|> Repo.one()
end
def load_news_item(post) do
item = post |> get_news_item() |> NewsItem.load_object(post)
Map.put(post, :news_item, item)
end
def object_id(post), do: "posts:#{post.slug}"
defp validate_published_has_published_at(changeset) do
published = get_field(changeset, :published)
published_at = get_field(changeset, :published_at)
if published && is_nil(published_at) do
add_error(changeset, :published_at, "can't be blank when published")
else
changeset
end
end
end
defmodule Maxwell.Multipart do
@moduledoc """
Processes multipart bodies for adapters.
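
## Example

A minimal sketch of building a form and encoding it:

    {:multipart, parts} =
      Maxwell.Multipart.new()
      |> Maxwell.Multipart.add_field("name", "value")

    {body, size} = Maxwell.Multipart.encode_form(parts)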
"""
@type param_t :: {String.t(), String.t()}
@type params_t :: [param_t]
@type header_t :: {String.t(), String.t()} | {String.t(), String.t(), params_t}
@type headers_t :: Keyword.t()
@type disposition_t :: {String.t(), params_t}
@type boundary_t :: String.t()
@type name_t :: String.t()
@type file_content_t :: binary
@type part_t ::
{:file, Path.t()}
| {:file, Path.t(), headers_t}
| {:file, Path.t(), disposition_t, headers_t}
| {:file_content, file_content_t, String.t()}
| {:file_content, file_content_t, String.t(), headers_t}
| {:file_content, file_content_t, String.t(), disposition_t, headers_t}
| {:mp_mixed, String.t(), boundary_t}
| {:mp_mixed_eof, boundary_t}
| {name_t, binary}
| {name_t, binary, headers_t}
| {name_t, binary, disposition_t, headers_t}
@type t :: {:multipart, [part_t]}
@eof_size 2
@doc """
create a multipart struct
"""
@spec new() :: t
def new(), do: {:multipart, []}
@spec add_file(t, Path.t()) :: t
def add_file(multipart, path) when is_binary(path) do
append_part(multipart, {:file, path})
end
@spec add_file(t, Path.t(), headers_t) :: t
def add_file(multipart, path, extra_headers)
when is_binary(path) and is_list(extra_headers) do
append_part(multipart, {:file, path, extra_headers})
end
@spec add_file(t, Path.t(), disposition_t, headers_t) :: t
def add_file(multipart, path, disposition, extra_headers)
when is_binary(path) and is_tuple(disposition) and is_list(extra_headers) do
append_part(multipart, {:file, path, disposition, extra_headers})
end
@spec add_file_with_name(t, Path.t(), String.t()) :: t
@spec add_file_with_name(t, Path.t(), String.t(), headers_t) :: t
def add_file_with_name(multipart, path, name, extra_headers \\ []) do
filename = Path.basename(path)
disposition = {"form-data", [{"name", name}, {"filename", filename}]}
append_part(multipart, {:file, path, disposition, extra_headers})
end
@spec add_file_content(t, file_content_t, String.t()) :: t
def add_file_content(multipart, file_content, filename) do
append_part(multipart, {:file_content, file_content, filename})
end
@spec add_file_content(t, file_content_t, String.t(), headers_t) :: t
def add_file_content(multipart, file_content, filename, extra_headers) do
append_part(multipart, {:file_content, file_content, filename, extra_headers})
end
@spec add_file_content(t, file_content_t, String.t(), disposition_t, headers_t) :: t
def add_file_content(multipart, file_content, filename, disposition, extra_headers) do
append_part(multipart, {:file_content, file_content, filename, disposition, extra_headers})
end
@spec add_file_content_with_name(t, file_content_t, String.t(), String.t()) :: t
@spec add_file_content_with_name(t, file_content_t, String.t(), String.t(), headers_t) :: t
def add_file_content_with_name(multipart, file_content, filename, name, extra_headers \\ []) do
disposition = {"form-data", [{"name", name}, {"filename", filename}]}
append_part(multipart, {:file_content, file_content, filename, disposition, extra_headers})
end
@spec add_field(t, String.t(), binary) :: t
def add_field(multipart, name, value) when is_binary(name) and is_binary(value) do
append_part(multipart, {name, value})
end
@spec add_field(t, String.t(), binary, headers_t) :: t
def add_field(multipart, name, value, extra_headers)
when is_binary(name) and is_binary(value) and is_list(extra_headers) do
append_part(multipart, {name, value, extra_headers})
end
@spec add_field(t, String.t(), binary, disposition_t, headers_t) :: t
def add_field(multipart, name, value, disposition, extra_headers)
when is_binary(name) and is_binary(value) and is_tuple(disposition) and
is_list(extra_headers) do
append_part(multipart, {name, value, disposition, extra_headers})
end
defp append_part({:multipart, parts}, part) do
{:multipart, parts ++ [part]}
end
@doc """
Encodes a multipart form.
* `parts` - a list of parts; each part takes one of the following forms:
1. `{:file, path}`
2. `{:file, path, extra_headers}`
3. `{:file, path, disposition, extra_headers}`
4. `{:file_content, file_content, filename}`
5. `{:file_content, file_content, filename, extra_headers}`
6. `{:file_content, file_content, filename, disposition, extra_headers}`
7. `{:mp_mixed, name, mixed_boundary}`
8. `{:mp_mixed_eof, mixed_boundary}`
9. `{name, bin_data}`
10. `{name, bin_data, extra_headers}`
11. `{name, bin_data, disposition, extra_headers}`
Returns `{body_binary, size}`
"""
@spec encode_form(parts :: [part_t]) :: {body :: binary, size :: non_neg_integer}
def encode_form(parts), do: encode_form(new_boundary(), parts)
@doc """
Encodes a multipart form using the given boundary.
* `boundary` - multipart boundary.
* `parts` - a list of parts; each part takes one of the following forms:
1. `{:file, path}`
2. `{:file, path, extra_headers}`
3. `{:file, path, disposition, extra_headers}`
4. `{:file_content, file_content, filename}`
5. `{:file_content, file_content, filename, extra_headers}`
6. `{:file_content, file_content, filename, disposition, extra_headers}`
7. `{:mp_mixed, name, mixed_boundary}`
8. `{:mp_mixed_eof, mixed_boundary}`
9. `{name, bin_data}`
10. `{name, bin_data, extra_headers}`
11. `{name, bin_data, disposition, extra_headers}`
"""
@spec encode_form(boundary :: boundary_t, parts :: [part_t]) :: {body :: binary, size :: non_neg_integer}
def encode_form(boundary, parts) when is_list(parts) do
encode_form(parts, boundary, "", 0)
end
@doc """
Returns a random boundary (binary).
## Examples

    boundary = new_boundary()
    # "---------------------------mtynipxrmpegseog"
"""
@spec new_boundary() :: boundary_t
def new_boundary, do: "---------------------------" <> unique(16)
@doc """
Get the size of a mp stream. Useful to calculate the content-length of a full multipart stream and send it as an identity
* `boundary` - multipart boundary
* `parts` - see `Maxwell.Multipart.encode_form`.
Returns stream size(integer)
"""
@spec len_mp_stream(boundary :: boundary_t, parts :: [part_t]) :: integer
def len_mp_stream(boundary, parts) do
size =
Enum.reduce(parts, 0, fn
{:file, path}, acc_size ->
{mp_header, len} = mp_file_header(%{path: path}, boundary)
acc_size + byte_size(mp_header) + len + @eof_size
{:file, path, extra_headers}, acc_size ->
{mp_header, len} = mp_file_header(%{path: path, extra_headers: extra_headers}, boundary)
acc_size + byte_size(mp_header) + len + @eof_size
{:file, path, disposition, extra_headers}, acc_size ->
file = %{path: path, extra_headers: extra_headers, disposition: disposition}
{mp_header, len} = mp_file_header(file, boundary)
acc_size + byte_size(mp_header) + len + @eof_size
{:file_content, file_content, filename}, acc_size ->
{mp_header, len} =
mp_file_header(%{path: filename, filesize: byte_size(file_content)}, boundary)
acc_size + byte_size(mp_header) + len + @eof_size
{:file_content, file_content, filename, extra_headers}, acc_size ->
{mp_header, len} =
mp_file_header(
%{path: filename, filesize: byte_size(file_content), extra_headers: extra_headers},
boundary
)
acc_size + byte_size(mp_header) + len + @eof_size
{:file_content, file_content, filename, disposition, extra_headers}, acc_size ->
file = %{
path: filename,
filesize: byte_size(file_content),
extra_headers: extra_headers,
disposition: disposition
}
{mp_header, len} = mp_file_header(file, boundary)
acc_size + byte_size(mp_header) + len + @eof_size
{:mp_mixed, name, mixed_boundary}, acc_size ->
{mp_header, _} = mp_mixed_header(name, mixed_boundary)
acc_size + byte_size(mp_header) + @eof_size + byte_size(mp_eof(mixed_boundary))
{:mp_mixed_eof, mixed_boundary}, acc_size ->
acc_size + byte_size(mp_eof(mixed_boundary)) + @eof_size
{name, bin}, acc_size when is_binary(bin) ->
{mp_header, len} = mp_data_header(name, %{binary: bin}, boundary)
acc_size + byte_size(mp_header) + len + @eof_size
{name, bin, extra_headers}, acc_size when is_binary(bin) ->
{mp_header, len} =
mp_data_header(name, %{binary: bin, extra_headers: extra_headers}, boundary)
acc_size + byte_size(mp_header) + len + @eof_size
{name, bin, disposition, extra_headers}, acc_size when is_binary(bin) ->
data = %{binary: bin, disposition: disposition, extra_headers: extra_headers}
{mp_header, len} = mp_data_header(name, data, boundary)
acc_size + byte_size(mp_header) + len + @eof_size
end)
size + byte_size(mp_eof(boundary))
end
defp encode_form([], boundary, acc, acc_size) do
mp_eof = mp_eof(boundary)
{acc <> mp_eof, acc_size + byte_size(mp_eof)}
end
defp encode_form([{:file, path} | parts], boundary, acc, acc_size) do
{mp_header, len} = mp_file_header(%{path: path}, boundary)
acc_size = acc_size + byte_size(mp_header) + len + @eof_size
file_content = File.read!(path)
acc = acc <> mp_header <> file_content <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form([{:file, path, extra_headers} | parts], boundary, acc, acc_size) do
file = %{path: path, extra_headers: extra_headers}
{mp_header, len} = mp_file_header(file, boundary)
acc_size = acc_size + byte_size(mp_header) + len + @eof_size
file_content = File.read!(path)
acc = acc <> mp_header <> file_content <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form([{:file, path, disposition, extra_headers} | parts], boundary, acc, acc_size) do
file = %{path: path, extra_headers: extra_headers, disposition: disposition}
{mp_header, len} = mp_file_header(file, boundary)
acc_size = acc_size + byte_size(mp_header) + len + @eof_size
file_content = File.read!(path)
acc = acc <> mp_header <> file_content <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form([{:file_content, file_content, filename} | parts], boundary, acc, acc_size) do
{mp_header, len} =
mp_file_header(%{path: filename, filesize: byte_size(file_content)}, boundary)
acc_size = acc_size + byte_size(mp_header) + len + @eof_size
acc = acc <> mp_header <> file_content <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form(
[{:file_content, file_content, filename, extra_headers} | parts],
boundary,
acc,
acc_size
) do
file = %{path: filename, filesize: byte_size(file_content), extra_headers: extra_headers}
{mp_header, len} = mp_file_header(file, boundary)
acc_size = acc_size + byte_size(mp_header) + len + @eof_size
acc = acc <> mp_header <> file_content <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form(
[{:file_content, file_content, filename, disposition, extra_headers} | parts],
boundary,
acc,
acc_size
) do
file = %{
path: filename,
filesize: byte_size(file_content),
extra_headers: extra_headers,
disposition: disposition
}
{mp_header, len} = mp_file_header(file, boundary)
acc_size = acc_size + byte_size(mp_header) + len + @eof_size
acc = acc <> mp_header <> file_content <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form([{:mp_mixed, name, mixed_boundary} | parts], boundary, acc, acc_size) do
{mp_header, _} = mp_mixed_header(name, mixed_boundary)
acc_size = acc_size + byte_size(mp_header) + @eof_size
acc = acc <> mp_header <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form([{:mp_mixed_eof, mixed_boundary} | parts], boundary, acc, acc_size) do
eof = mp_eof(mixed_boundary)
acc_size = acc_size + byte_size(eof) + @eof_size
acc = acc <> eof <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form([{name, bin} | parts], boundary, acc, acc_size) do
{mp_header, len} = mp_data_header(name, %{binary: bin}, boundary)
acc_size = acc_size + byte_size(mp_header) + len + @eof_size
acc = acc <> mp_header <> bin <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form([{name, bin, extra_headers} | parts], boundary, acc, acc_size) do
{mp_header, len} =
mp_data_header(name, %{binary: bin, extra_headers: extra_headers}, boundary)
acc_size = acc_size + byte_size(mp_header) + len + @eof_size
acc = acc <> mp_header <> bin <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp encode_form([{name, bin, disposition, extra_headers} | parts], boundary, acc, acc_size) do
data = %{binary: bin, extra_headers: extra_headers, disposition: disposition}
{mp_header, len} = mp_data_header(name, data, boundary)
acc_size = acc_size + byte_size(mp_header) + len + @eof_size
acc = acc <> mp_header <> bin <> "\r\n"
encode_form(parts, boundary, acc, acc_size)
end
defp mp_file_header(file, boundary) do
path = file[:path]
file_name = path |> :filename.basename() |> to_string
{disposition, params} =
file[:disposition] ||
{"form-data", [{"name", "\"file\""}, {"filename", "\"" <> file_name <> "\""}]}
content_type =
path
|> Path.extname()
|> String.trim_leading(".")
|> MIME.type()
len = file[:filesize] || :filelib.file_size(path)
extra_headers = file[:extra_headers] || []
extra_headers = extra_headers |> Enum.map(fn {k, v} -> {String.downcase(k), v} end)
headers =
[
{"content-length", len},
{"content-disposition", disposition, params},
{"content-type", content_type}
]
|> replace_header_from_extra(extra_headers)
|> mp_header(boundary)
{headers, len}
end
defp mp_mixed_header(name, boundary) do
headers = [
{"Content-Disposition", "form-data", [{"name", "\"" <> name <> "\""}]},
{"Content-Type", "multipart/mixed", [{"boundary", boundary}]}
]
{mp_header(headers, boundary), 0}
end
defp mp_eof(boundary), do: "--" <> boundary <> "--\r\n"
defp mp_data_header(name, data, boundary) do
{disposition, params} = data[:disposition] || {"form-data", [{"name", "\"" <> name <> "\""}]}
extra_headers = data[:extra_headers] || []
extra_headers = extra_headers |> Enum.map(fn {k, v} -> {String.downcase(k), v} end)
content_type =
name
|> Path.extname()
|> String.trim_leading(".")
|> MIME.type()
len = byte_size(data[:binary])
headers =
[
{"content-length", len},
{"content-type", content_type},
{"content-disposition", disposition, params}
]
|> replace_header_from_extra(extra_headers)
|> mp_header(boundary)
{headers, len}
end
defp mp_header(headers, boundary), do: "--" <> boundary <> "\r\n" <> headers_to_binary(headers)
defp unique(size, acc \\ [])
defp unique(0, acc), do: acc |> :erlang.list_to_binary()
defp unique(size, acc) do
random = Enum.random(?a..?z)
unique(size - 1, [random | acc])
end
defp headers_to_binary(headers) when is_list(headers) do
headers =
headers
|> Enum.reduce([], fn header, acc -> [make_header(header) | acc] end)
|> Enum.reverse()
|> join("\r\n")
:erlang.iolist_to_binary([headers, "\r\n\r\n"])
end
defp make_header({name, value}) do
value = value_to_binary(value)
name <> ": " <> value
end
defp make_header({name, value, params}) do
value =
value
|> value_to_binary
|> header_value(params)
name <> ": " <> value
end
defp header_value(value, params) do
params =
Enum.map(params, fn {k, v} ->
"#{value_to_binary(k)}=#{value_to_binary(v)}"
end)
join([value | params], "; ")
end
defp replace_header_from_extra(headers, extra_headers) do
extra_headers
|> Enum.reduce(headers, fn {ex_header, ex_value}, acc ->
case List.keymember?(acc, ex_header, 0) do
true -> List.keyreplace(acc, ex_header, 0, {ex_header, ex_value})
false -> [{ex_header, ex_value} | acc]
end
end)
end
defp value_to_binary(v) when is_list(v) do
:binary.list_to_bin(v)
end
defp value_to_binary(v) when is_atom(v) do
:erlang.atom_to_binary(v, :latin1)
end
defp value_to_binary(v) when is_integer(v) do
Integer.to_string(v)
end
defp value_to_binary(v) when is_binary(v) do
v
end
defp join([], _Separator), do: ""
# defp join([s], _separator), do: s
defp join(l, separator) do
l
|> Enum.reverse()
|> join(separator, [])
|> :erlang.iolist_to_binary()
end
defp join([], _separator, acc), do: acc
defp join([s | rest], separator, []), do: join(rest, separator, [s])
defp join([s | rest], separator, acc), do: join(rest, separator, [s, separator | acc])
end
defmodule OsCmd do
@moduledoc """
Managed execution of external commands.
This module provides similar functionality to `System.cmd/3`, with the difference that the
execution of commands is managed, which provides the following benefits:
1. The external OS process is logically linked to the parent BEAM process (the process which
started it). If the parent process terminates, the OS process will be taken down.
2. The external OS process will also be taken down if the entire BEAM instance goes down.
3. Support for timeout-based and manual termination of the OS process.
4. Polite-first termination of the OS process (SIGTERM followed by SIGKILL) in the spirit of
OTP termination (shutdown strategies).
In this regard, `OsCmd` is similar to [erlexec](http://saleyn.github.io/erlexec/) and
[porcelain](https://github.com/alco/porcelain), though it doesn't have all the features of those
projects.
For usage details, see `start_link/1`.
"""
defmodule Error do
@moduledoc "Error struct returned by various OsCmd operations."
@type t :: %__MODULE__{message: String.t(), exit_status: term, output: String.t()}
defexception [:message, :exit_status, :output]
end
use GenServer, shutdown: :infinity
alias OsCmd.Faker
@type start_opt ::
{:name, GenServer.name()}
| {:handler, handler}
| {:timeout, pos_integer() | :infinity}
| {:cd, String.t()}
| {:env, [{String.t() | atom, String.t() | nil}]}
| {:pty, boolean}
| {:propagate_exit?, boolean}
| {:terminate_cmd, String.t()}
@type handler :: (event -> any) | {acc, (event, acc -> acc)}
@type acc :: any
@type event ::
:starting
| {:output, output}
| {:stopped, exit_status}
@type mock ::
String.t()
| (command :: String.t(), [start_opt] -> {:ok, output} | {:error, Error.t()})
@type output :: String.t()
@type exit_status :: non_neg_integer() | (exit_reason :: :timeout | any)
@doc """
Starts the command owner process.
The owner process will start the command, handle its events, and stop when the command finishes.
The started process will synchronously stop the command while being terminated. The owner
process never stops before the command finishes (unless the owner process is forcefully
terminated with the reason `:kill`), which makes it safe to run under a supervisor or `Parent`.
The command is a "free-form string" in the shape of: `"command arg1 arg2 ..."`. The command has
to be an executable that exists in standard search paths.
Args can be separated by one or more whitespaces, tabs, or newline characters. If any arg has
to contain whitespace characters, you can enclose it in double or single quotes. Inside the
quoted argument, you can use `\\"` or `\\'` to inject the quote character, and `\\\\` to inject
the backslash character.
Examples:
OsCmd.start_link("echo 1")
OsCmd.start_link(~s/
some_cmd
arg1
"arg \\" \\\\ 2"
'arg \\' \\\\ 3'
/)
Due to support for free-form execution, it is possible to execute complex scripts, by starting
the shell process
OsCmd.start_link(~s/bash -c "..."/)
However, this is usually not advised, because `OsCmd` can't keep all of its guarantees. Any
child process started inside the shell is not guaranteed to be terminated before the owner
process stops, and some of them might even linger on forever.
## Options
You can pass additional options by invoking `OsCmd.start_link({cmd, opts})`. The following
options are supported:
- `:cd` - Folder in which the command will be started
- `:env` - OS environment variables which will be set in the command's own environment. Note
that the command OS process inherits the environment from the BEAM process. If you want to
unset some of the inherited variables, you can include `{var_to_unset, nil}` in this list.
- `:pty` - If set to `true`, the command will be started with a pseudo-terminal interface.
If the OS doesn't support pseudo-terminal, this flag is ignored. Defaults to `false`.
- `:timeout` - The duration after which the command will be automatically terminated.
Defaults to `:infinity`. If the command times out, the process will exit with the reason
`:timeout`, irrespective of the `propagate_exit?` setting.
- `:propagate_exit?` - When set to `true` and the exit status of the command is not zero, the
process will exit with `{:failed, exit_status}`. Otherwise, the process will always exit
with the reason `:normal` (unless the command times out).
- `terminate_cmd` - Custom command to use in place of SIGTERM when stopping the OS process.
See the "Command termination" section for details.
- `handler` - Custom event handler. See the "Event handling" section for details.
- `name` - Registered name of the process. If not provided, the process won't be registered.
## Event handling
During the lifetime of the command, the following events are emitted:
- `:starting` - the command is being started
- `{:output, output}` - stdout or stderr output
- `{:stopped, exit_status}` - the command stopped with the given exit status
You can install multiple custom handlers to deal with these events. By default, no handler is
created, which means that the command is executed silently. Handlers are functions which are
executed inside the command owner process. Handlers can be stateless or stateful.
A stateless handler is a function in the shape of `fun(event) -> ... end`. This function holds
no state, so its result is ignored. A stateful handler can be specified as
`{initial_acc, fun(event, acc) -> next_acc end}`. You can provide multiple handlers, and they
don't have to be of the same type.
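For example, a hypothetical stateful handler that collects all output fragments into an
accumulator might look like this:

    OsCmd.start_link({
      "echo hello",
      handler: {[], fn
        {:output, output}, acc -> [acc, output]
        _event, acc -> acc
      end}
    })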
Since handlers are executed in the owner process, an unhandled exception in the handler will
lead to process termination. The OS process will be properly taken down before the owner process
stops, but no additional event (including the `:stopped` event) will be fired.
It is advised to minimize the logic inside handlers. Handlers are best suited for very simple
tasks such as logging or notifying other processes.
### Output
The output events will contain output fragments, as they are received. It is therefore possible
that one fragment contains only a part of an output line, while another spans multiple lines.
It is the responsibility of the client to assemble these fragments according to its needs.
`OsCmd` operates on the assumption that output is in utf8 encoding, so it may not work correctly
for other encodings, such as plain binary.
## Command termination
When the command is externally terminated (e.g. due to a timeout), a polite termination is
first attempted, by sending a SIGTERM signal to the OS process (if the OS supports such signal),
or alternatively by invoking a custom terminate command provided via `:terminate_cmd`. If the OS
process doesn't stop in 5 seconds (currently not configurable), a SIGKILL signal will be sent.
## Internals
The owner process starts the command as an Erlang port. The command is not started directly.
Instead, a bridge program (implemented in Go) is used to start and manage the OS process. Each
command uses its own bridge process. This approach ensures proper cleanup guarantees even if the
BEAM OS process is taken down.
As a result, compared to `System.cmd/3`, `OsCmd` will consume more resources (2x more OS process
instances) and require more hops to pass the output back to Elixir. Most often this won't matter,
but be aware of these trade-offs if you're starting a large number of external processes.
## Mocking in tests
Command execution can be mocked, which may be useful if you want to avoid starting long-running
commands in tests.
Mocking can be done with `expect/1` and `stub/1`, and explicit allowances can be issued with
`allow/1`.
"""
@spec start_link(String.t()) :: GenServer.on_start()
def start_link(command) when is_binary(command), do: start_link({command, []})
@spec start_link({String.t(), [start_opt]}) :: GenServer.on_start()
def start_link({command, opts}) do
opts = normalize_opts(opts)
GenServer.start_link(__MODULE__, {command, opts}, Keyword.take(opts, [:name]))
end
@doc "Stops the command and the owner process."
@spec stop(GenServer.server(), :infinity | pos_integer()) :: :ok
def stop(server, timeout \\ :infinity) do
pid = whereis!(server)
mref = Process.monitor(pid)
GenServer.cast(pid, :stop)
receive do
{:DOWN, ^mref, :process, ^pid, _reason} -> :ok
after
timeout -> exit(:timeout)
end
end
@doc """
Returns a lazy stream of events.
This function is internally used by `run/2` and `await/1`. If you want to use it yourself, you
need to pass the handler `&send(some_pid, {self(), &1})` when starting the command. This
function can only be invoked in the process which receives the event messages.
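A sketch of manual usage (assuming the `echo` command is available):

    caller = self()
    {:ok, pid} = OsCmd.start_link({"echo hello", handler: &send(caller, {self(), &1})})
    OsCmd.events(pid) |> Enum.to_list()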
"""
@spec events(GenServer.server()) :: Enumerable.t()
def events(server) do
pid = GenServer.whereis(server)
Stream.resource(
fn -> Process.monitor(pid) end,
fn
nil ->
{:halt, nil}
mref ->
receive do
{^pid, {:stopped, _} = stopped} ->
Process.demonitor(mref, [:flush])
{[stopped], nil}
{^pid, message} ->
{[message], mref}
{:DOWN, ^mref, :process, ^pid, reason} ->
{[{:stopped, reason}], nil}
end
end,
fn
nil -> :ok
mref -> Process.demonitor(mref, [:flush])
end
)
end
@doc """
Synchronously runs the command.
This function will start the owner process, wait for it to finish, and return the result which
will include the complete output of the command.
If the command exits with a zero exit status, an `:ok` tuple is returned. Otherwise, the function
returns an error tuple.
See `start_link/1` for detailed explanation.
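Example (assuming the `echo` command is available):

    {:ok, output} = OsCmd.run("echo hello")
    # output contains the complete command output, e.g. "hello" followed by a newline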
"""
@spec run(String.t(), [start_opt]) :: {:ok, output} | {:error, Error.t()}
def run(cmd, opts \\ []) do
caller = self()
start_arg = {cmd, [handler: &send(caller, {self(), &1})] ++ opts}
with {:ok, pid} <- start_link(start_arg) do
try do
await(pid)
after
stop(pid)
end
end
end
@doc """
Awaits for the started command to finish.
This function is internally used by `run/2`. If you want to use it yourself, you need to pass the
handler `&send(some_pid, {self(), &1})` when starting the command. This function can only be
invoked in the process which receives the event messages.
"""
@spec await(GenServer.server()) :: {:ok, output :: String.t()} | {:error, Error.t()}
def await(server) do
server
|> whereis!()
|> events()
|> Enum.reduce(
%{output: [], exit_status: nil},
fn
:starting, acc -> acc
{:output, output}, acc -> update_in(acc.output, &[&1, output])
{:stopped, exit_status}, acc -> %{acc | exit_status: exit_status}
end
)
|> case do
%{exit_status: 0} = result ->
{:ok, to_string(result.output)}
result ->
{
:error,
%Error{
message: "command failed",
output: to_string(result.output),
exit_status: result.exit_status
}
}
end
end
@doc """
Returns a specification for running the command as a `Job` action.
The corresponding action will return `{:ok, output} | {:error, %OsCmdError{}}`
See `Job.start_action/2` for details.
"""
@spec action(String.t(), [start_opt | Job.action_opt()]) :: Job.action()
def action(cmd, opts \\ []) do
fn responder ->
      {action_opts, opts} = Keyword.split(opts, ~w/telemetry_id telemetry_meta/a)
action_opts = Config.Reader.merge(action_opts, telemetry_meta: %{cmd: cmd})
handler_state = %{responder: responder, cmd: cmd, opts: opts, output: []}
{{__MODULE__, {cmd, [handler: {handler_state, &handle_event/2}] ++ opts}}, action_opts}
end
end
@doc """
Issues an explicit mock allowance to another process.
Note that mocks are automatically inherited by descendants, so you only need to use this for non
descendant processes. See `Mox.allow/3` for details.
"""
@spec allow(GenServer.server()) :: :ok
def allow(server), do: Faker.allow(whereis!(server))
@doc """
Sets up a mock expectation.
The argument can be either a string (the exact command text), or a function. If the string is
passed, the mocked command will succeed with the empty output. If the function is passed, it will
be invoked when the command is started. The function can then return ok or error tuple.
The expectation will be inherited by all descendant processes (unless overridden somewhere down
the process tree).
See `Mox.expect/4` for details on expectations.
"""
@spec expect(mock) :: :ok
def expect(fun), do: Faker.expect(fun)
@doc """
Sets up a mock stub.
This function works similarly to `expect/1`, except it sets up a stub. See `Mox.stub/3` for
details on stubs.
"""
@spec stub(mock) :: :ok
def stub(fun), do: Faker.stub(fun)
@impl GenServer
def init({cmd, opts}) do
Process.flag(:trap_exit, true)
state = %{
port: nil,
handlers: Keyword.fetch!(opts, :handlers),
propagate_exit?: Keyword.get(opts, :propagate_exit?, false),
buffer: "",
exit_reason: nil
}
state = invoke_handler(state, :starting)
    with {:ok, timeout} when timeout != :infinity <- Keyword.fetch(opts, :timeout),
      do: Process.send_after(self(), :timeout, timeout)
starter =
case Faker.fetch() do
{:ok, pid} ->
Mox.allow(Faker.Port, pid, self())
Faker.Port
:error ->
OsCmd.Port
end
case starter.start(cmd, opts) do
{:ok, port} -> {:ok, %{state | port: port}}
{:error, reason} -> {:stop, reason}
end
end
@impl GenServer
def handle_info({port, {:exit_status, exit_status}}, %{port: port} = state),
# Delegating to `handle_continue` because we must invoke a custom handler which can crash, so
# we need to make sure that the correct state is committed.
do: {:noreply, %{state | port: nil}, {:continue, {:stop, exit_status}}}
def handle_info({port, {:data, message}}, %{port: port} = state) do
state = invoke_handler(state, message)
{:noreply, state}
end
def handle_info(:timeout, state) do
send_stop_command(state)
{:noreply, %{state | exit_reason: :timeout}}
end
@impl GenServer
def handle_continue({:stop, exit_status}, state) do
state = invoke_handler(state, {:stopped, exit_status})
exit_reason =
cond do
not is_nil(state.exit_reason) -> state.exit_reason
not state.propagate_exit? or exit_status == 0 -> :normal
true -> {:failed, exit_status}
end
{:stop, exit_reason, %{state | port: nil}}
end
@impl GenServer
def handle_cast(:stop, state) do
send_stop_command(state)
{:noreply, %{state | exit_reason: :normal}}
end
@impl GenServer
def terminate(_reason, %{port: port} = state) do
unless is_nil(port) do
send_stop_command(state)
      # If we end up here, we still haven't received the exit status message, so we'll wait for
      # it indefinitely. We assume that the Go bridge works flawlessly and that it will stop the
# program eventually, so there's no timeout clause. If there's a bug, this process will hang,
# but at least we won't leak OS processes.
receive do
{^port, {:exit_status, _exit_status}} -> :ok
end
end
end
defp normalize_opts(opts) do
{handlers, opts} = Keyword.pop_values(opts, :handler)
env =
opts
|> Keyword.get(:env, [])
|> Enum.map(fn
{name, nil} -> {env_name_to_charlist(name), false}
{name, value} -> {env_name_to_charlist(name), to_charlist(value)}
end)
Keyword.merge(opts, handlers: handlers, env: env)
end
defp env_name_to_charlist(atom) when is_atom(atom),
do: atom |> to_string() |> String.upcase() |> to_charlist()
defp env_name_to_charlist(name), do: to_charlist(name)
defp invoke_handler(state, message) do
message = with message when is_binary(message) <- message, do: :erlang.binary_to_term(message)
{message, state} = normalize_message(message, state)
handlers =
Enum.map(
state.handlers,
fn
{acc, fun} ->
{fun.(message, acc), fun}
fun ->
fun.(message)
fun
end
)
%{state | handlers: handlers}
end
defp normalize_message({:output, output}, state) do
{output, rest} = get_utf8_chars(state.buffer <> output)
{{:output, to_string(output)}, %{state | buffer: rest}}
end
defp normalize_message(message, state), do: {message, state}
defp get_utf8_chars(<<char::utf8, rest::binary>>) do
{remaining_bytes, rest} = get_utf8_chars(rest)
{[char | remaining_bytes], rest}
end
defp get_utf8_chars(other), do: {[], other}
defp send_stop_command(state) do
if not is_nil(state.port) do
try do
Port.command(state.port, "stop")
catch
_, _ -> :ok
end
end
end
defp handle_event({:output, output}, state),
do: update_in(state.output, &[&1, output])
defp handle_event({:stopped, exit_status}, state) do
output = to_string(state.output)
response =
if exit_status == 0 do
{:ok, output}
else
message = "#{state.cmd} exited with status #{exit_status}"
{:error, %Error{exit_status: exit_status, message: message, output: output}}
end
state.responder.(response)
nil
end
defp handle_event(_event, state), do: state
defp whereis!(server) do
case GenServer.whereis(server) do
pid when is_pid(pid) -> pid
nil -> raise "process #{inspect(server)} not found"
end
end
defmodule Program do
@moduledoc false
@type id :: any
@callback start(cmd :: String.t() | [String.t()], opts :: Keyword.t()) ::
{:ok, id} | {:error, reason :: any}
end
end
defmodule AWS.GameLift do
@moduledoc """
Amazon GameLift Service
Amazon GameLift is a managed service for developers who need a scalable,
dedicated server solution for their multiplayer games. Amazon GameLift
provides tools to acquire computing resources and deploy game servers,
scale game server capacity to meet player demand, and track in-depth
metrics on player usage and server performance.
The Amazon GameLift service API includes important functionality to:
<ul> <li> Find game sessions and match players to games – Retrieve
information on available game sessions; create new game sessions; send
player requests to join a game session.
</li> <li> Configure and manage game server resources – Manage builds,
fleets, queues, and aliases; set autoscaling policies; retrieve logs and
metrics.
</li> </ul> This reference guide describes the low-level service API for
Amazon GameLift. We recommend using either the Amazon Web Services software
development kit ([AWS SDK](http://aws.amazon.com/tools/#sdk)), available in
multiple languages, or the [AWS command-line
interface](http://aws.amazon.com/cli/) (CLI) tool. Both of these align with
the low-level service API. In addition, you can use the [AWS Management
Console](https://console.aws.amazon.com/gamelift/home) for Amazon GameLift
for many administrative actions.
**MORE RESOURCES**
<ul> <li> [Amazon GameLift Developer
Guide](http://docs.aws.amazon.com/gamelift/latest/developerguide/) – Learn
more about Amazon GameLift features and how to use them.
</li> <li> [Lumberyard and Amazon GameLift
Tutorials](https://gamedev.amazon.com/forums/tutorials) – Get started fast
with walkthroughs and sample projects.
</li> <li> [GameDev Blog](http://aws.amazon.com/blogs/gamedev/) – Stay up
to date with new features and techniques.
</li> <li> [GameDev
Forums](https://gamedev.amazon.com/forums/spaces/123/gamelift-discussion.html)
– Connect with the GameDev community.
</li> <li> [Amazon GameLift Document
History](http://docs.aws.amazon.com/gamelift/latest/developerguide/doc-history.html)
– See changes to the Amazon GameLift service, SDKs, and documentation, as
well as links to release notes.
</li> </ul> **API SUMMARY**
This list offers a functional overview of the Amazon GameLift service API.
**Finding Games and Joining Players**
You can enable players to connect to game servers on Amazon GameLift from a
game client or through a game service (such as a matchmaking service). You
can use these operations to discover actively running game sessions or start
new games. You can also match players to games, either singly or as a group.
<ul> <li> **Discover existing game sessions**
<ul> <li> `SearchGameSessions` – Get all available game sessions or search
for game sessions that match a set of criteria.
</li> </ul> </li> <li> **Start a new game session**
<ul> <li> Game session placement – Use a queue to process new game session
requests and create game sessions on fleets designated for the queue.
<ul> <li> `StartGameSessionPlacement` – Request a new game session
placement and add one or more players to it.
</li> <li> `DescribeGameSessionPlacement` – Get details on a placement
request, including status.
</li> <li> `StopGameSessionPlacement` – Cancel a placement request.
</li> </ul> </li> <li> `CreateGameSession` – Start a new game session on a
specific fleet.
</li> </ul> </li> <li> **Manage game session objects**
<ul> <li> `DescribeGameSessionDetails` – Retrieve metadata and protection
policies associated with one or more game sessions, including length of
time active and current player count.
</li> <li> `UpdateGameSession` – Change game session settings, such as
maximum player count and join policy.
</li> <li> `GetGameSessionLogUrl` – Get the location of saved logs for a
game session.
</li> </ul> </li> <li> **Manage player sessions objects**
<ul> <li> `CreatePlayerSession` – Send a request for a player to join a
game session.
</li> <li> `CreatePlayerSessions` – Send a request for multiple players to
join a game session.
</li> <li> `DescribePlayerSessions` – Get details on player activity,
including status, playing time, and player data.
</li> </ul> </li> </ul> **Setting Up and Managing Game Servers**
When setting up Amazon GameLift, first create a game build and upload the
files to Amazon GameLift. Then use these operations to set up a fleet of
resources to run your game servers. Manage games to scale capacity, adjust
configuration settings, access raw utilization data, and more.
<ul> <li> **Manage game builds**
<ul> <li> `CreateBuild` – Create a new build by uploading files stored in
an Amazon S3 bucket. (To create a build stored at a local file location,
use the AWS CLI command `upload-build`.)
</li> <li> `ListBuilds` – Get a list of all builds uploaded to an Amazon
GameLift region.
</li> <li> `DescribeBuild` – Retrieve information associated with a build.
</li> <li> `UpdateBuild` – Change build metadata, including build name and
version.
</li> <li> `DeleteBuild` – Remove a build from Amazon GameLift.
</li> </ul> </li> <li> **Manage fleets**
<ul> <li> `CreateFleet` – Configure and activate a new fleet to run a
build's game servers.
</li> <li> `DeleteFleet` – Terminate a fleet that is no longer running game
servers or hosting players.
</li> <li> View / update fleet configurations.
<ul> <li> `ListFleets` – Get a list of all fleet IDs in an Amazon GameLift
region (all statuses).
</li> <li> `DescribeFleetAttributes` / `UpdateFleetAttributes` – View or
change a fleet's metadata and settings for game session protection and
resource creation limits.
</li> <li> `DescribeFleetPortSettings` / `UpdateFleetPortSettings` – View
or change the inbound permissions (IP address and port setting ranges)
allowed for a fleet.
</li> <li> `DescribeRuntimeConfiguration` / `UpdateRuntimeConfiguration` –
View or change what server processes (and how many) to run on each instance
in a fleet.
</li> <li> `DescribeInstances` – Get information on each instance in a
fleet, including instance ID, IP address, and status.
</li> </ul> </li> </ul> </li> <li> **Control fleet capacity**
<ul> <li> `DescribeEC2InstanceLimits` – Retrieve maximum number of
instances allowed for the current AWS account and the current usage level.
</li> <li> `DescribeFleetCapacity` / `UpdateFleetCapacity` – Retrieve the
capacity settings and the current number of instances in a fleet; adjust
fleet capacity settings to scale up or down.
</li> <li> Autoscale – Manage autoscaling rules and apply them to a fleet.
<ul> <li> `PutScalingPolicy` – Create a new autoscaling policy, or update
an existing one.
</li> <li> `DescribeScalingPolicies` – Retrieve an existing autoscaling
policy.
</li> <li> `DeleteScalingPolicy` – Delete an autoscaling policy and stop it
from affecting a fleet's capacity.
</li> </ul> </li> </ul> </li> <li> **Access fleet activity statistics**
<ul> <li> `DescribeFleetUtilization` – Get current data on the number of
server processes, game sessions, and players currently active on a fleet.
</li> <li> `DescribeFleetEvents` – Get a fleet's logged events for a
specified time span.
</li> <li> `DescribeGameSessions` – Retrieve metadata associated with one
or more game sessions, including length of time active and current player
count.
</li> </ul> </li> <li> **Remotely access an instance**
<ul> <li> `GetInstanceAccess` – Request access credentials needed to
remotely connect to a specified instance on a fleet.
</li> </ul> </li> <li> **Manage fleet aliases**
<ul> <li> `CreateAlias` – Define a new alias and optionally assign it to a
fleet.
</li> <li> `ListAliases` – Get all fleet aliases defined in an Amazon
GameLift region.
</li> <li> `DescribeAlias` – Retrieve information on an existing alias.
</li> <li> `UpdateAlias` – Change settings for an alias, such as redirecting
it from one fleet to another.
</li> <li> `DeleteAlias` – Remove an alias from the region.
</li> <li> `ResolveAlias` – Get the fleet ID that a specified alias points
to.
</li> </ul> </li> <li> **Manage game session queues**
<ul> <li> `CreateGameSessionQueue` – Create a queue for processing requests
for new game sessions.
</li> <li> `DescribeGameSessionQueues` – Get data on all game session
queues defined in an Amazon GameLift region.
</li> <li> `UpdateGameSessionQueue` – Change the configuration of a game
session queue.
</li> <li> `DeleteGameSessionQueue` – Remove a game session queue from the
region.
</li> </ul> </li> </ul>
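**USAGE**

All functions in this module take a client map, an input map mirroring the request
shape of the corresponding AWS JSON API action, and an optional list of HTTPoison
options. A hypothetical call (the client construction is an assumption; it must
provide the region, endpoint, and credential fields this library expects):

    AWS.GameLift.list_fleets(client, %{})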
"""
@doc """
Creates an alias and sets a target fleet. A fleet alias can be used in
place of a fleet ID, such as when calling `CreateGameSession` from a game
client or game service or adding destinations to a game session queue. By
changing an alias's target fleet, you can switch your players to the new
fleet without changing any other component. In production, this feature is
particularly useful to redirect your player base seamlessly to the latest
game server update.
Amazon GameLift supports two types of routing strategies for aliases:
simple and terminal. Use a simple alias to point to an active fleet. Use a
terminal alias to display a message to incoming traffic instead of routing
players to an active fleet. This option is useful when a game server is no
longer supported but you want to provide better messaging than a standard
404 error.
To create a fleet alias, specify an alias name, routing strategy, and
optional description. If successful, a new alias record is returned,
including an alias ID, which you can reference when creating a game
session. To reassign the alias to another fleet ID, call `UpdateAlias`.
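A hypothetical request sketch (field names follow the AWS GameLift API; values
are placeholders):

    AWS.GameLift.create_alias(client, %{
      "Name" => "my-alias",
      "RoutingStrategy" => %{"Type" => "SIMPLE", "FleetId" => "fleet-id"}
    })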
"""
def create_alias(client, input, options \\ []) do
request(client, "CreateAlias", input, options)
end
@doc """
Creates a new Amazon GameLift build from a set of game server binary files
stored in an Amazon Simple Storage Service (Amazon S3) location. When using
this API call, you must create a `.zip` file containing all of the build
files and store it in an Amazon S3 bucket under your AWS account. For help
on packaging your build files and creating a build, see [Uploading Your
Game to Amazon
GameLift](http://docs.aws.amazon.com/gamelift/latest/developerguide/gamelift-build-intro.html).
<important> Use this API action ONLY if you are storing your game build
files in an Amazon S3 bucket in your AWS account. To create a build using
files stored in a directory, use the CLI command [ `upload-build`
](http://docs.aws.amazon.com/cli/latest/reference/gamelift/upload-build.html),
which uploads the build files from a file location you specify and creates
a build.
</important> To create a new build using `CreateBuild`, identify the
storage location and operating system of your game build. You also have the
option of specifying a build name and version. If successful, this action
creates a new build record with a unique build ID in `INITIALIZED`
status. Use the API call `DescribeBuild` to check the status of your build.
A build must be in `READY` status before it can be used to create fleets to
host your game.
"""
def create_build(client, input, options \\ []) do
request(client, "CreateBuild", input, options)
end
@doc """
Creates a new fleet to run your game servers. A fleet is a set of Amazon
Elastic Compute Cloud (Amazon EC2) instances, each of which can run
multiple server processes to host game sessions. You configure a fleet to
create instances with certain hardware specifications (see [Amazon EC2
Instance Types](http://aws.amazon.com/ec2/instance-types/) for more
information), and deploy a specified game build to each instance. A newly
created fleet passes through several statuses; once it reaches the `ACTIVE`
status, it can begin hosting game sessions.
To create a new fleet, provide a fleet name, an EC2 instance type, and a
build ID of the game build to deploy. You can also configure the new fleet
with the following settings: (1) a runtime configuration describing what
server processes to run on each instance in the fleet (required to create
fleet), (2) access permissions for inbound traffic, (3) fleet-wide game
session protection, and (4) the location of default log files for Amazon
GameLift to upload and store.
If the CreateFleet call is successful, Amazon GameLift performs the
following tasks:
<ul> <li> Creates a fleet record and sets the status to `NEW` (followed by
other statuses as the fleet is activated).
</li> <li> Sets the fleet's capacity to 1 "desired", which causes Amazon
GameLift to start one new EC2 instance.
</li> <li> Starts launching server processes on the instance. If the fleet
is configured to run multiple server processes per instance, Amazon
GameLift staggers each launch by a few seconds.
</li> <li> Begins writing events to the fleet event log, which can be
accessed in the Amazon GameLift console.
</li> <li> Sets the fleet's status to `ACTIVE` once one server process in
the fleet is ready to host a game session.
</li> </ul> After a fleet is created, use the following actions to change
fleet properties and configuration:
<ul> <li> `UpdateFleetAttributes` -- Update fleet metadata, including name
and description.
</li> <li> `UpdateFleetCapacity` -- Increase or decrease the number of
instances you want the fleet to maintain.
</li> <li> `UpdateFleetPortSettings` -- Change the IP address and port
ranges that allow access to incoming traffic.
</li> <li> `UpdateRuntimeConfiguration` -- Change how server processes are
launched in the fleet, including launch path, launch parameters, and the
number of concurrent processes.
</li> <li> `PutScalingPolicy` -- Create or update rules that are used to
set the fleet's capacity (autoscaling).
</li> </ul>
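A hypothetical request sketch (field names follow the AWS GameLift API; values
are placeholders):

    AWS.GameLift.create_fleet(client, %{
      "Name" => "my-fleet",
      "BuildId" => "build-id",
      "EC2InstanceType" => "c4.large",
      "RuntimeConfiguration" => %{
        "ServerProcesses" => [
          %{"LaunchPath" => "/local/game/my_server", "ConcurrentExecutions" => 1}
        ]
      }
    })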
"""
def create_fleet(client, input, options \\ []) do
request(client, "CreateFleet", input, options)
end
@doc """
Creates a multiplayer game session for players. This action creates a game
session record and assigns an available server process in the specified
fleet to host the game session. A fleet must have an `ACTIVE` status before
a game session can be created in it.
To create a game session, specify either fleet ID or alias ID, and indicate
a maximum number of players to allow in the game session. You can also
provide a name and game-specific properties for this game session. If
successful, a `GameSession` object is returned containing session
properties, including an IP address. By default, newly created game
sessions allow new players to join. Use `UpdateGameSession` to change the
game session's player session creation policy.
When creating a game session on a fleet with a resource limit creation
policy, the request should include a creator ID. If none is provided,
Amazon GameLift does not evaluate the fleet's resource limit creation
policy.
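A hypothetical request sketch (field names follow the AWS GameLift API; values
are placeholders):

    AWS.GameLift.create_game_session(client, %{
      "FleetId" => "fleet-id",
      "MaximumPlayerSessionCount" => 4,
      "Name" => "my-game-session"
    })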
"""
def create_game_session(client, input, options \\ []) do
request(client, "CreateGameSession", input, options)
end
@doc """
Establishes a new queue for processing requests for new game sessions. A
queue identifies where new game sessions can be hosted--by specifying a
list of fleet destinations--and how long a request can remain in the queue
waiting to be placed before timing out. Requests for new game sessions are
added to a queue by calling `StartGameSessionPlacement` and referencing the
queue name.
When processing a request for a game session, Amazon GameLift tries each
destination in order until it finds one with available resources to host
the new game session. A queue's default order is determined by how
destinations are listed. This default order can be overridden in a game
session placement request.
To create a new queue, provide a name, timeout value, and a list of
destinations. If successful, a new queue object is returned.
"""
def create_game_session_queue(client, input, options \\ []) do
request(client, "CreateGameSessionQueue", input, options)
end
@doc """
Adds a player to a game session and creates a player session record. Before
a player can be added, a game session must have an `ACTIVE` status, have a
creation policy of `ALLOW_ALL`, and have an open player slot. To add a
group of players to a game session, use `CreatePlayerSessions`.
To create a player session, specify a game session ID, player ID, and
optionally a string of player data. If successful, the player is added to
the game session and a new `PlayerSession` object is returned. Player
sessions cannot be updated.
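A hypothetical request sketch (field names follow the AWS GameLift API; values
are placeholders):

    AWS.GameLift.create_player_session(client, %{
      "GameSessionId" => "game-session-id",
      "PlayerId" => "player-1"
    })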
"""
def create_player_session(client, input, options \\ []) do
request(client, "CreatePlayerSession", input, options)
end
@doc """
Adds a group of players to a game session. This action is useful with a
team matching feature. Before players can be added, a game session must
have an `ACTIVE` status, have a creation policy of `ALLOW_ALL`, and have an
open player slot. To add a single player to a game session, use
`CreatePlayerSession`.
To create player sessions, specify a game session ID, a list of player IDs,
and optionally a set of player data strings. If successful, the players are
added to the game session and a set of new `PlayerSession` objects is
returned. Player sessions cannot be updated.
"""
def create_player_sessions(client, input, options \\ []) do
request(client, "CreatePlayerSessions", input, options)
end
@doc """
Deletes a fleet alias. This action removes all record of the alias. Game
clients attempting to access a server process using the deleted alias
receive an error. To delete an alias, specify the alias ID to be deleted.
"""
def delete_alias(client, input, options \\ []) do
request(client, "DeleteAlias", input, options)
end
@doc """
Deletes a build. This action permanently deletes the build record and any
uploaded build files.
To delete a build, specify its ID. Deleting a build does not affect the
status of any active fleets using the build, but you can no longer create
new fleets with the deleted build.
"""
def delete_build(client, input, options \\ []) do
request(client, "DeleteBuild", input, options)
end
@doc """
Deletes everything related to a fleet. Before deleting a fleet, you must
set the fleet's desired capacity to zero. See `UpdateFleetCapacity`.
This action removes the fleet's resources and the fleet record. Once a
fleet is deleted, you can no longer use that fleet.
"""
def delete_fleet(client, input, options \\ []) do
request(client, "DeleteFleet", input, options)
end
@doc """
Deletes a game session queue. This action means that any
`StartGameSessionPlacement` requests that reference this queue will fail.
To delete a queue, specify the queue name.
"""
def delete_game_session_queue(client, input, options \\ []) do
request(client, "DeleteGameSessionQueue", input, options)
end
@doc """
Deletes a fleet scaling policy. This action means that the policy is no
longer in force and removes all record of it. To delete a scaling policy,
specify both the scaling policy name and the fleet ID it is associated
with.
"""
def delete_scaling_policy(client, input, options \\ []) do
request(client, "DeleteScalingPolicy", input, options)
end
@doc """
Retrieves properties for a fleet alias. This operation returns all alias
metadata and settings. To get just the fleet ID an alias is currently
pointing to, use `ResolveAlias`.
To get alias properties, specify the alias ID. If successful, an `Alias`
object is returned.
"""
def describe_alias(client, input, options \\ []) do
request(client, "DescribeAlias", input, options)
end
@doc """
Retrieves properties for a build. To get a build record, specify a build
ID. If successful, an object containing the build properties is returned.
"""
def describe_build(client, input, options \\ []) do
request(client, "DescribeBuild", input, options)
end
@doc """
Retrieves the following information for the specified EC2 instance type:
<ul> <li> maximum number of instances allowed per AWS account (service
limit)
</li> <li> current usage level for the AWS account
</li> </ul> Service limits vary depending on region. Available regions for
Amazon GameLift can be found in the AWS Management Console for Amazon
GameLift (see the drop-down list in the upper right corner).
"""
def describe_e_c2_instance_limits(client, input, options \\ []) do
request(client, "DescribeEC2InstanceLimits", input, options)
end
@doc """
Retrieves fleet properties, including metadata, status, and configuration,
for one or more fleets. You can request attributes for all fleets, or
specify a list of one or more fleet IDs. When requesting multiple fleets,
use the pagination parameters to retrieve results as a set of sequential
pages. If successful, a `FleetAttributes` object is returned for each
requested fleet ID. When specifying a list of fleet IDs, attribute objects
are returned only for fleets that currently exist.
<note> Some API actions may limit the number of fleet IDs allowed in one
request. If a request exceeds this limit, the request fails and the error
message includes the maximum allowed.
</note>
"""
def describe_fleet_attributes(client, input, options \\ []) do
request(client, "DescribeFleetAttributes", input, options)
end
@doc """
Retrieves the current status of fleet capacity for one or more fleets. This
information includes the number of instances that have been requested for
the fleet and the number currently active. You can request capacity for all
fleets, or specify a list of one or more fleet IDs. When requesting
multiple fleets, use the pagination parameters to retrieve results as a set
of sequential pages. If successful, a `FleetCapacity` object is returned
for each requested fleet ID. When specifying a list of fleet IDs, attribute
objects are returned only for fleets that currently exist.
<note> Some API actions may limit the number of fleet IDs allowed in one
request. If a request exceeds this limit, the request fails and the error
message includes the maximum allowed.
</note>
"""
def describe_fleet_capacity(client, input, options \\ []) do
request(client, "DescribeFleetCapacity", input, options)
end
@doc """
Retrieves entries from the specified fleet's event log. You can specify a
time range to limit the result set. Use the pagination parameters to
retrieve results as a set of sequential pages. If successful, a collection
of event log entries matching the request are returned.
"""
def describe_fleet_events(client, input, options \\ []) do
request(client, "DescribeFleetEvents", input, options)
end
@doc """
Retrieves the inbound connection permissions for a fleet. Connection
permissions include a range of IP addresses and port settings that incoming
traffic can use to access server processes in the fleet. To get a fleet's
inbound connection permissions, specify a fleet ID. If successful, a
collection of `IpPermission` objects is returned for the requested fleet
ID. If the requested fleet has been deleted, the result set is empty.
"""
def describe_fleet_port_settings(client, input, options \\ []) do
request(client, "DescribeFleetPortSettings", input, options)
end
@doc """
Retrieves utilization statistics for one or more fleets. You can request
utilization data for all fleets, or specify a list of one or more fleet
IDs. When requesting multiple fleets, use the pagination parameters to
retrieve results as a set of sequential pages. If successful, a
`FleetUtilization` object is returned for each requested fleet ID. When
specifying a list of fleet IDs, utilization objects are returned only for
fleets that currently exist.
<note> Some API actions may limit the number of fleet IDs allowed in one
request. If a request exceeds this limit, the request fails and the error
message includes the maximum allowed.
</note>
"""
def describe_fleet_utilization(client, input, options \\ []) do
request(client, "DescribeFleetUtilization", input, options)
end
@doc """
Retrieves properties, including the protection policy in force, for one or
more game sessions. This action can be used in two ways: (1) provide a
`GameSessionId` to request details for a specific game session; (2) provide
either a `FleetId` or an `AliasId` to request properties for all game
sessions running on a fleet.
To get game session record(s), specify just one of the following: game
session ID, fleet ID, or alias ID. You can filter this request by game
session status. Use the pagination parameters to retrieve results as a set
of sequential pages. If successful, a `GameSessionDetail` object is
returned for each session matching the request.
"""
def describe_game_session_details(client, input, options \\ []) do
request(client, "DescribeGameSessionDetails", input, options)
end
@doc """
Retrieves properties and current status of a game session placement
request. To get game session placement details, specify the placement ID.
If successful, a `GameSessionPlacement` object is returned.
"""
def describe_game_session_placement(client, input, options \\ []) do
request(client, "DescribeGameSessionPlacement", input, options)
end
@doc """
Retrieves the properties for one or more game session queues. When
requesting multiple queues, use the pagination parameters to retrieve
results as a set of sequential pages. If successful, a `GameSessionQueue`
object is returned for each requested queue. When specifying a list of
queues, objects are returned only for queues that currently exist in the
region.
"""
def describe_game_session_queues(client, input, options \\ []) do
request(client, "DescribeGameSessionQueues", input, options)
end
@doc """
Retrieves a set of one or more game sessions. Request a specific game
session or request all game sessions on a fleet. Alternatively, use
`SearchGameSessions` to request a set of active game sessions that are
filtered by certain criteria. To retrieve protection policy settings for
game sessions, use `DescribeGameSessionDetails`.
To get game sessions, specify one of the following: game session ID, fleet
ID, or alias ID. You can filter this request by game session status. Use
the pagination parameters to retrieve results as a set of sequential pages.
If successful, a `GameSession` object is returned for each game session
matching the request.
"""
def describe_game_sessions(client, input, options \\ []) do
request(client, "DescribeGameSessions", input, options)
end
@doc """
Retrieves information about a fleet's instances, including instance IDs.
Use this action to get details on all instances in the fleet or get details
on one specific instance.
To get a specific instance, specify fleet ID and instance ID. To get all
instances in a fleet, specify a fleet ID only. Use the pagination
parameters to retrieve results as a set of sequential pages. If successful,
an `Instance` object is returned for each result.
"""
def describe_instances(client, input, options \\ []) do
request(client, "DescribeInstances", input, options)
end
@doc """
Retrieves properties for one or more player sessions. This action can be
used in several ways: (1) provide a `PlayerSessionId` parameter to request
properties for a specific player session; (2) provide a `GameSessionId`
parameter to request properties for all player sessions in the specified
game session; (3) provide a `PlayerId` parameter to request properties for
all player sessions of a specified player.
To get game session record(s), specify only one of the following: a player
session ID, a game session ID, or a player ID. You can filter this request
by player session status. Use the pagination parameters to retrieve results
as a set of sequential pages. If successful, a `PlayerSession` object is
returned for each session matching the request.
"""
def describe_player_sessions(client, input, options \\ []) do
request(client, "DescribePlayerSessions", input, options)
end
@doc """
Retrieves the current runtime configuration for the specified fleet. The
runtime configuration tells Amazon GameLift how to launch server processes
on instances in the fleet.
"""
def describe_runtime_configuration(client, input, options \\ []) do
request(client, "DescribeRuntimeConfiguration", input, options)
end
@doc """
Retrieves all scaling policies applied to a fleet.
To get a fleet's scaling policies, specify the fleet ID. You can filter
this request by policy status, such as to retrieve only active scaling
policies. Use the pagination parameters to retrieve results as a set of
sequential pages. If successful, a set of `ScalingPolicy` objects is returned
for the fleet.
"""
def describe_scaling_policies(client, input, options \\ []) do
request(client, "DescribeScalingPolicies", input, options)
end
@doc """
Retrieves the location of stored game session logs for a specified game
session. When a game session is terminated, Amazon GameLift automatically
stores the logs in Amazon S3. Use this URL to download the logs.
<note> See the [AWS Service
Limits](http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html#limits_gamelift)
page for maximum log file sizes. Log files that exceed this limit are not
saved.
</note>
"""
def get_game_session_log_url(client, input, options \\ []) do
request(client, "GetGameSessionLogUrl", input, options)
end
@doc """
Requests remote access to a fleet instance. Remote access is useful for
debugging, gathering benchmarking data, or watching activity in real time.
Access requires credentials that match the operating system of the
instance. For a Windows instance, Amazon GameLift returns a user name and
password as strings for use with a Windows Remote Desktop client. For a
Linux instance, Amazon GameLift returns a user name and RSA private key,
also as strings, for use with an SSH client. The private key must be saved
in the proper format to a `.pem` file before using. If you're making this
request using the AWS CLI, saving the secret can be handled as part of the
GetInstanceAccess request. (See the example later in this topic). For more
information on remote access, see [Remotely Accessing an
Instance](http://docs.aws.amazon.com/gamelift/latest/developerguide/fleets-remote-access.html).
To request access to a specific instance, specify the IDs of the instance
and the fleet it belongs to. If successful, an `InstanceAccess` object is
returned containing the instance's IP address and a set of credentials.
"""
def get_instance_access(client, input, options \\ []) do
request(client, "GetInstanceAccess", input, options)
end
@doc """
Retrieves a collection of alias records for this AWS account. You can
filter the result set by alias name and/or routing strategy type. Use the
pagination parameters to retrieve results in sequential pages.
<note> Aliases are not listed in any particular order.
</note>
"""
def list_aliases(client, input, options \\ []) do
request(client, "ListAliases", input, options)
end
@doc """
Retrieves build records for all builds associated with the AWS account in
use. You can limit results to builds that are in a specific status by using
the `Status` parameter. Use the pagination parameters to retrieve results
in a set of sequential pages.
<note> Build records are not listed in any particular order.
</note>
"""
def list_builds(client, input, options \\ []) do
request(client, "ListBuilds", input, options)
end
@doc """
Retrieves a collection of fleet records for this AWS account. You can
filter the result set by build ID. Use the pagination parameters to
retrieve results in sequential pages.
<note> Fleet records are not listed in any particular order.
</note>
"""
def list_fleets(client, input, options \\ []) do
request(client, "ListFleets", input, options)
end
@doc """
Creates or updates a scaling policy for a fleet. An active scaling policy
prompts Amazon GameLift to track a certain metric for a fleet and
automatically change the fleet's capacity in specific circumstances. Each
scaling policy contains one rule statement. Fleets can have multiple
scaling policies in force simultaneously.
A scaling policy rule statement has the following structure:
If `[MetricName]` is `[ComparisonOperator]` `[Threshold]` for
`[EvaluationPeriods]` minutes, then `[ScalingAdjustmentType]` to/by
`[ScalingAdjustment]`.
For example, this policy: "If the number of idle instances exceeds 20 for
more than 15 minutes, then reduce the fleet capacity by 10 instances" could
be implemented as the following rule statement:
If [IdleInstances] is [GreaterThanOrEqualToThreshold] [20] for [15]
minutes, then [ChangeInCapacity] by [-10].
To create or update a scaling policy, specify a unique combination of name
and fleet ID, and set the rule values. All parameters for this action are
required. If successful, the policy name is returned. Scaling policies
cannot be suspended or made inactive. To stop enforcing a scaling policy,
call `DeleteScalingPolicy`.
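The example policy above could be expressed as the following request sketch
(field names mirror the placeholders in the rule statement; the client is
assumed):

    AWS.GameLift.put_scaling_policy(client, %{
      "Name" => "reduce-idle-capacity",
      "FleetId" => "fleet-id",
      "MetricName" => "IdleInstances",
      "ComparisonOperator" => "GreaterThanOrEqualToThreshold",
      "Threshold" => 20,
      "EvaluationPeriods" => 15,
      "ScalingAdjustmentType" => "ChangeInCapacity",
      "ScalingAdjustment" => -10
    })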
"""
def put_scaling_policy(client, input, options \\ []) do
request(client, "PutScalingPolicy", input, options)
end
@doc """
*This API call is not currently in use.* Retrieves a fresh set of upload
credentials and the assigned Amazon S3 storage location for a specific
build. Valid credentials are required to upload your game build files to
Amazon S3.
"""
def request_upload_credentials(client, input, options \\ []) do
request(client, "RequestUploadCredentials", input, options)
end
@doc """
Retrieves the fleet ID that a specified alias is currently pointing to.
"""
def resolve_alias(client, input, options \\ []) do
request(client, "ResolveAlias", input, options)
end
@doc """
Retrieves a set of game sessions that match a set of search criteria and
sorts them in a specified order. Currently a game session search is limited
to a single fleet. Search results include only game sessions that are in
`ACTIVE` status. If you need to retrieve game sessions with a status other
than active, use `DescribeGameSessions`. If you need to retrieve the
protection policy for each game session, use `DescribeGameSessionDetails`.
You can search or sort by the following game session attributes:
<ul> <li> **gameSessionId** -- ID value assigned to a game session. This
unique value is returned in a `GameSession` object when a new game session
is created.
</li> <li> **gameSessionName** -- Name assigned to a game session. This
value is set when requesting a new game session with `CreateGameSession` or
updating with `UpdateGameSession`. Game session names do not need to be
unique to a game session.
</li> <li> **creationTimeMillis** -- Value indicating when a game session
was created. It is expressed in Unix time as milliseconds.
</li> <li> **playerSessionCount** -- Number of players currently connected
to a game session. This value changes rapidly as players join the session
or drop out.
</li> <li> **maximumSessions** -- Maximum number of player sessions allowed
for a game session. This value is set when requesting a new game session
with `CreateGameSession` or updating with `UpdateGameSession`.
</li> <li> **hasAvailablePlayerSessions** -- Boolean value indicating
whether or not a game session has reached its maximum number of players.
When searching with this attribute, the search value must be `true` or
`false`. It is highly recommended that all search requests include this
filter attribute to optimize search performance and return only sessions
that players can join.
</li> </ul> To search or sort, specify either a fleet ID or an alias ID,
and provide a search filter expression, a sort expression, or both. Use the
pagination parameters to retrieve results as a set of sequential pages. If
successful, a collection of `GameSession` objects matching the request is
returned.
<note> Returned values for `playerSessionCount` and
`hasAvailablePlayerSessions` change quickly as players join sessions and
others drop out. Results should be considered a snapshot in time. Be sure
to refresh search results often, and handle sessions that fill up before a
player can join.
</note>
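A hypothetical search sketch using the attributes listed above (the filter and
sort expression syntax follows the AWS GameLift API):

    AWS.GameLift.search_game_sessions(client, %{
      "FleetId" => "fleet-id",
      "FilterExpression" => "hasAvailablePlayerSessions=true",
      "SortExpression" => "creationTimeMillis ASC"
    })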
"""
def search_game_sessions(client, input, options \\ []) do
request(client, "SearchGameSessions", input, options)
end
@doc """
Places a request for a new game session in a queue (see
`CreateGameSessionQueue`). When processing a placement request, Amazon
GameLift attempts to create a new game session on one of the fleets
associated with the queue. If no resources are available, Amazon GameLift
tries the next destination, and so on, until resources are found or the
placement request times out. A game session placement request can also
request player sessions. When a new game session is successfully created,
Amazon GameLift creates a player session for each player included in the
request.
When placing a game session, by default Amazon GameLift tries each fleet in
the order they are listed in the queue configuration. Ideally, a queue's
destinations are listed in preference order. Alternatively, when requesting
a game session with players, you can also provide latency data for each
player in relevant regions. Latency data indicates the performance lag a
player experiences when connected to a fleet in the region. Amazon GameLift
uses latency data to reorder the list of destinations to place the game
session in a region with minimal lag. If latency data is provided for
multiple players, Amazon GameLift calculates each region's average lag for
all players and reorders to get the best game play across all players.
To place a new game session request, specify the queue name and a set of
game session properties and settings. Also provide a unique ID (such as a
UUID) for the placement. You'll use this ID to track the status of the
placement request. Optionally, provide a set of IDs and player data for
each player you want to join to the new game session. To optimize game play
for the players, also provide latency data for all players. If successful,
a new game session placement is created. To track the status of a placement
request, call `DescribeGameSessionPlacement` and check the request's
status. If the status is Fulfilled, a new game session has been created and
a game session ARN and region are referenced. If the placement request
times out, you have the option of resubmitting the request or retrying it
with a different queue.
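A hypothetical placement request sketch (field names follow the AWS GameLift
API; the placement ID is a caller-generated placeholder):

    AWS.GameLift.start_game_session_placement(client, %{
      "PlacementId" => "e4b1f2c0-0000-0000-0000-000000000000",
      "GameSessionQueueName" => "my-queue",
      "MaximumPlayerSessionCount" => 4
    })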
"""
def start_game_session_placement(client, input, options \\ []) do
request(client, "StartGameSessionPlacement", input, options)
end
@doc """
Cancels a game session placement that is in Pending status. To stop a
placement, provide the placement ID values. If successful, the placement is
moved to Cancelled status.
"""
def stop_game_session_placement(client, input, options \\ []) do
request(client, "StopGameSessionPlacement", input, options)
end
@doc """
Updates properties for a fleet alias. To update properties, specify the
alias ID to be updated and provide the information to be changed. To
reassign an alias to another fleet, provide an updated routing strategy. If
successful, the updated alias record is returned.
"""
def update_alias(client, input, options \\ []) do
request(client, "UpdateAlias", input, options)
end
@doc """
Updates metadata in a build record, including the build name and version.
To update the metadata, specify the build ID to update and provide the new
values. If successful, a build object containing the updated metadata is
returned.
"""
def update_build(client, input, options \\ []) do
request(client, "UpdateBuild", input, options)
end
@doc """
Updates fleet properties, including name and description, for a fleet. To
update metadata, specify the fleet ID and the property values you want to
change. If successful, the fleet ID for the updated fleet is returned.
"""
def update_fleet_attributes(client, input, options \\ []) do
request(client, "UpdateFleetAttributes", input, options)
end
@doc """
Updates capacity settings for a fleet. Use this action to specify the
number of EC2 instances (hosts) that you want this fleet to contain. Before
calling this action, you may want to call `DescribeEC2InstanceLimits` to
get the maximum capacity based on the fleet's EC2 instance type.
If you're using autoscaling (see `PutScalingPolicy`), you may want to
specify a minimum and/or maximum capacity. If you don't provide these,
autoscaling can set capacity anywhere between zero and the [service
limits](http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html#limits_gamelift).
To update fleet capacity, specify the fleet ID and the number of instances
you want the fleet to host. If successful, Amazon GameLift starts or
terminates instances so that the fleet's active instance count matches the
desired instance count. You can view a fleet's current capacity information
by calling `DescribeFleetCapacity`. If the desired instance count is higher
than the instance type's limit, the "Limit Exceeded" exception occurs.
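A hypothetical request sketch (field names follow the AWS GameLift API; values
are placeholders):

    AWS.GameLift.update_fleet_capacity(client, %{
      "FleetId" => "fleet-id",
      "DesiredInstances" => 2,
      "MinSize" => 0,
      "MaxSize" => 5
    })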
"""
def update_fleet_capacity(client, input, options \\ []) do
request(client, "UpdateFleetCapacity", input, options)
end
@doc """
Updates port settings for a fleet. To update settings, specify the fleet ID
to be updated and list the permissions you want to update. List the
permissions you want to add in `InboundPermissionAuthorizations`, and
permissions you want to remove in `InboundPermissionRevocations`.
Permissions to be removed must match existing fleet permissions. If
successful, the fleet ID for the updated fleet is returned.
"""
def update_fleet_port_settings(client, input, options \\ []) do
request(client, "UpdateFleetPortSettings", input, options)
end
@doc """
Updates game session properties. This includes the session name, maximum
player count, protection policy, which controls whether or not an active
game session can be terminated during a scale-down event, and the player
session creation policy, which controls whether or not new players can join
the session. To update a game session, specify the game session ID and the
values you want to change. If successful, an updated `GameSession` object
is returned.
"""
def update_game_session(client, input, options \\ []) do
request(client, "UpdateGameSession", input, options)
end
@doc """
Updates settings for a game session queue, which determines how new game
session requests in the queue are processed. To update settings, specify
the queue name to be updated and provide the new settings. When updating
destinations, provide a complete list of destinations.
"""
def update_game_session_queue(client, input, options \\ []) do
request(client, "UpdateGameSessionQueue", input, options)
end
@doc """
Updates the current runtime configuration for the specified fleet, which
tells Amazon GameLift how to launch server processes on instances in the
fleet. You can update a fleet's runtime configuration at any time after the
fleet is created; it does not need to be in an `ACTIVE` status.
To update runtime configuration, specify the fleet ID and provide a
`RuntimeConfiguration` object with the updated collection of server process
configurations.
Each instance in an Amazon GameLift fleet checks regularly for an updated
runtime configuration and changes how it launches server processes to
comply with the latest version. Existing server processes are not affected
by the update; they continue to run until they end, while Amazon GameLift
simply adds new server processes to fit the current runtime configuration.
As a result, the runtime configuration changes are applied gradually as
existing processes shut down and new processes are launched in Amazon
GameLift's normal process recycling activity.
"""
def update_runtime_configuration(client, input, options \\ []) do
request(client, "UpdateRuntimeConfiguration", input, options)
end
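  # Signs the request with AWS Signature Version 4 and POSTs it to the GameLift
  # JSON API endpoint, decoding any JSON response body with Poison.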
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "gamelift"}
host = get_host("gamelift", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "GameLift.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
defmodule RDF.Query.Builder do
@moduledoc false
alias RDF.Query.BGP
alias RDF.{IRI, BlankNode, Literal, Namespace, PropertyMap}
import RDF.Utils.Guards
import RDF.Utils
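
  # Illustrative usage: atoms ending in "?" are query variables and are
  # stored with the "?" stripped (see variable/1 below).
  #
  #   Builder.bgp([{:s?, :p?, :o?}])
  #   #=> {:ok, %BGP{triple_patterns: [{:s, :p, :o}]}}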
def bgp(query, opts \\ []) do
property_map = if context = Keyword.get(opts, :context), do: PropertyMap.new(context)
with {:ok, triple_patterns} <- triple_patterns(query, property_map) do
{:ok, %BGP{triple_patterns: triple_patterns}}
end
end
def bgp!(query, opts \\ []) do
case bgp(query, opts) do
{:ok, bgp} -> bgp
{:error, error} -> raise error
end
end
defp triple_patterns(query, property_map) when is_list(query) or is_map(query) do
flat_map_while_ok(query, fn triple ->
with {:ok, triple_pattern} <- triple_patterns(triple, property_map) do
{:ok, List.wrap(triple_pattern)}
end
end)
end
defp triple_patterns({subject, predicate, objects}, property_map) do
with {:ok, subject_pattern} <- subject_pattern(subject) do
do_triple_patterns(subject_pattern, {predicate, objects}, property_map)
end
end
defp triple_patterns({subject, predications}, property_map) when is_map(predications) do
triple_patterns({subject, Map.to_list(predications)}, property_map)
end
defp triple_patterns({subject, predications}, property_map) do
with {:ok, subject_pattern} <- subject_pattern(subject) do
predications
|> List.wrap()
|> flat_map_while_ok(&do_triple_patterns(subject_pattern, &1, property_map))
end
end
defp do_triple_patterns(subject_pattern, {predicate, objects}, property_map) do
with {:ok, predicate_pattern} <- predicate_pattern(predicate, property_map) do
objects
|> List.wrap()
|> map_while_ok(fn object ->
with {:ok, object_pattern} <- object_pattern(object) do
{:ok, {subject_pattern, predicate_pattern, object_pattern}}
end
end)
end
end
defp subject_pattern(subject) do
value = variable(subject) || resource(subject)
if value do
{:ok, value}
else
{:error,
%RDF.Query.InvalidError{
message: "Invalid subject term in BGP triple pattern: #{inspect(subject)}"
}}
end
end
defp predicate_pattern(predicate, property_map) do
value = variable(predicate) || resource(predicate) || property(predicate, property_map)
if value do
{:ok, value}
else
{:error,
%RDF.Query.InvalidError{
message: "Invalid predicate term in BGP triple pattern: #{inspect(predicate)}"
}}
end
end
defp object_pattern(object) do
value = variable(object) || resource(object) || literal(object)
if value do
{:ok, value}
else
{:error,
%RDF.Query.InvalidError{
message: "Invalid object term in BGP triple pattern: #{inspect(object)}"
}}
end
end
defp variable(var) when is_atom(var) do
var_string = to_string(var)
if String.ends_with?(var_string, "?") do
var_string
|> String.slice(0..-2)
|> String.to_atom()
end
end
defp variable(_), do: nil
defp resource(%IRI{} = iri), do: iri
defp resource(%URI{} = uri), do: IRI.new(uri)
defp resource(%BlankNode{} = bnode), do: bnode
defp resource(var) when is_ordinary_atom(var) do
case to_string(var) do
"_" <> bnode ->
BlankNode.new(bnode)
_ ->
case Namespace.resolve_term(var) do
{:ok, iri} -> iri
_ -> nil
end
end
end
defp resource(_), do: nil
defp property(:a, _), do: RDF.type()
defp property(term, property_map) when is_atom(term) and not is_nil(property_map) do
PropertyMap.iri(property_map, term)
end
defp property(_, _), do: nil
defp literal(%Literal{} = literal), do: literal
defp literal(value), do: Literal.coerce(value)
def path(query, opts \\ [])
def path(query, _) when is_list(query) and length(query) < 3 do
{:error,
%RDF.Query.InvalidError{
message: "Invalid path expression: must have at least three elements"
}}
end
def path([subject | rest], opts) do
path_pattern(subject, rest, [], 0, Keyword.get(opts, :with_elements, false))
|> bgp(opts)
end
def path!(query, opts \\ []) do
case path(query, opts) do
{:ok, bgp} -> bgp
{:error, error} -> raise error
end
end
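
  # Illustrative: intermediate path steps are chained through generated
  # blank nodes (or :el0?, :el1?, ... variables when with_elements: true):
  #
  #   Builder.path!([:s?, :p1?, :p2?, :o?])
  #   # builds the patterns [{:s, :p1, b0}, {b0, :p2, :o}]
  #   # where b0 is the generated blank node RDF.bnode(0)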
defp path_pattern(subject, [predicate, object], triple_patterns, _, _) do
[{subject, predicate, object} | triple_patterns]
|> Enum.reverse()
end
defp path_pattern(subject, [predicate | rest], triple_patterns, count, with_elements) do
object = if with_elements, do: :"el#{count}?", else: RDF.bnode(count)
path_pattern(
object,
rest,
[{subject, predicate, object} | triple_patterns],
count + 1,
with_elements
)
end
end | lib/rdf/query/builder.ex | 0.722135 | 0.438184 | builder.ex | starcoder |
defmodule RobotSimulator do
@doc """
Create a Robot Simulator given an initial direction and position.
Valid directions are: `:north`, `:east`, `:south`, `:west`
"""
defmodule Robot do
defstruct direction: :north, position: {0, 0}
@type t :: %Robot{direction: atom, position: {integer, integer}}
end
defguard is_position(x, y) when is_integer(x) and is_integer(y)
@spec create(direction :: atom, position :: {integer, integer}) :: Robot.t() | {atom, String.t()}
def create(), do: %Robot{}
def create(direction, {x, y} = position) when is_position(x, y) do
if direction in [:north, :south, :east, :west] do
%Robot{direction: direction, position: position}
else
{:error, "invalid direction"}
end
end
def create(_direction, _position), do: {:error, "invalid position"}
@doc """
Simulate the robot's movement given a string of instructions.
  Valid instructions are: "R" (turn right), "L" (turn left), and "A" (advance).
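
  ## Example (illustrative)

      robot = RobotSimulator.create(:north, {0, 0})
      RobotSimulator.simulate(robot, "RAALAL")
      #=> %RobotSimulator.Robot{direction: :west, position: {2, 1}}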
"""
@spec simulate(Robot.t(), instructions :: String.t()) :: Robot.t() | {atom, String.t()}
def simulate(%Robot{} = robot, instructions) do
instructions
|> to_charlist
|> Enum.reduce(robot, &move/2)
end
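
  # One clause per (instruction, direction) pair: ?A advances one unit along
  # the current heading, while ?R and ?L rotate the heading without moving.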
defp move(?A, %Robot{direction: :north, position: {x, y}} = robot), do: %{robot | position: {x, y + 1}}
defp move(?A, %Robot{direction: :south, position: {x, y}} = robot), do: %{robot | position: {x, y - 1}}
defp move(?A, %Robot{direction: :east, position: {x, y}} = robot), do: %{robot | position: {x + 1, y}}
defp move(?A, %Robot{direction: :west, position: {x, y}} = robot), do: %{robot | position: {x - 1, y}}
defp move(?R, %Robot{direction: :north} = robot), do: %{robot | direction: :east}
defp move(?R, %Robot{direction: :south} = robot), do: %{robot | direction: :west}
defp move(?R, %Robot{direction: :east} = robot), do: %{robot | direction: :south}
defp move(?R, %Robot{direction: :west} = robot), do: %{robot | direction: :north}
defp move(?L, %Robot{direction: :north} = robot), do: %{robot | direction: :west}
defp move(?L, %Robot{direction: :south} = robot), do: %{robot | direction: :east}
defp move(?L, %Robot{direction: :east} = robot), do: %{robot | direction: :north}
defp move(?L, %Robot{direction: :west} = robot), do: %{robot | direction: :south}
  # Matches unknown instructions and non-robot accumulators (e.g. an error
  # tuple from a previous step), so errors carry through the reduce.
  defp move(_instruction, _maybe_robot), do: {:error, "invalid instruction"}
@doc """
Return the robot's direction.
Valid directions are: `:north`, `:east`, `:south`, `:west`
"""
@spec direction(robot :: Robot.t()) :: atom
def direction(%Robot{direction: direction}), do: direction
@doc """
Return the robot's position.
"""
@spec position(robot :: Robot.t()) :: {integer, integer}
def position(%Robot{position: position}), do: position
end | elixir/robot-simulator/lib/robot_simulator.ex | 0.905929 | 0.90813 | robot_simulator.ex | starcoder |
defmodule Rational do
@moduledoc """
Implements exact rational numbers. In its simplest form, Rational.new(3,4)
will produce an exact rational number representation for 3/4. The fraction
will be stored in the lowest terms (i.e., a reduced fraction) by dividing
numerator and denominator through by their greatest common divisor. For
example the fraction 8/12 will be reduced to 2/3.
  Integer arguments are stored directly; floats are first converted to a
  rational via `div/2`. The numerator defaults to 0 and the denominator
  defaults to 1, so that Rational.new(3) = 3/1 = 3 and Rational.new() =
  0/1 = 0.
## Examples
iex> Rational.new(3, 4)
#Rational<3/4>
iex> Rational.new(8,12)
#Rational<2/3>
"""
# Un-import Kernel functions to prevent name clashes. We're redefining these
# functions to work on rationals.
import Kernel, except: [abs: 1, div: 2]
@compile {:inline, maybe_unwrap: 1}
if Code.ensure_loaded?(:hipe) do
@compile [:native, {:hipe, [:o3]}]
end
defstruct num: 0, den: 1
@typedoc """
Rational numbers (num/den)
"""
@type rational :: %Rational{
num: integer,
    den: pos_integer}
@doc """
Finds the greatest common divisor of a pair of numbers. The greatest
common divisor (also known as greatest common factor, highest common
divisor or highest common factor) of two numbers is the largest positive
integer that divides the numbers without remainder. This function uses
the recursive Euclid's algorithm.
#### See also
[new/2](#new/2)
#### Examples
iex> Rational.gcd(42, 56)
14
iex> Rational.gcd(13, 13)
13
iex> Rational.gcd(37, 600)
1
iex> Rational.gcd(20, 100)
20
iex> Rational.gcd(624129, 2061517)
18913
"""
@spec gcd(integer, integer) :: integer
def gcd(m, 0), do: m
def gcd(m, n) do
gcd(n, rem(m, n))
end
@doc """
This function extracts the sign from the provided number. It returns 0 if
the supplied number is 0, -1 if it's less than zero, and +1 if it's greater
than 0.
#### See also
[gcd/2](#gcd/2)
#### Examples
iex> Rational.sign(3)
1
iex> Rational.sign(0)
0
iex> Rational.sign(-3)
-1
"""
@spec sign(rational | number) :: -1 | 0 | 1
def sign(%{num: num}), do: sign(num)
def sign(x) when x < 0, do: -1
def sign(x) when x > 0, do: +1
def sign(_), do: 0
@doc """
Returns a new rational with the specified numerator and denominator.
#### See also
[gcd/2](#gcd/2)
#### Examples
iex> Rational.new(3, 4)
#Rational<3/4>
iex> Rational.new(8,12)
#Rational<2/3>
iex> Rational.new()
0
iex> Rational.new(3)
3
iex> Rational.new(-3, 4)
#Rational<-3/4>
iex> Rational.new(3, -4)
#Rational<-3/4>
iex> Rational.new(-3, -4)
#Rational<3/4>
iex> Rational.new(0,0)
** (ArgumentError) cannot create nan (den=0)
"""
@spec new(rational | number, integer | number) :: rational | number
def new(numerator \\ 0, denominator \\ 1) # Bodyless clause to set defaults
# Handle NaN cases
def new(_, denominator) when denominator == 0 do
raise ArgumentError, message: "cannot create nan (den=0)"
end
def new(numerator, _) when numerator == 0, do: 0
def new(numerator, denominator) when is_integer(numerator) and is_integer(denominator) do
g = gcd(numerator, denominator)
# Want to form rational as (numerator/g, denominator/g). Force the
# sign to reside on the numerator.
n = Kernel.div(numerator, g)
d = Kernel.div(denominator, g)
sgn = sign(n)*sign(d)
%Rational{num: sgn*Kernel.abs(n), den: Kernel.abs(d)}
|> maybe_unwrap()
end
def new(numerator, denominator) do
div(numerator, denominator)
end
@doc """
Returns the floating point value of the rational
#### Examples
iex> Rational.value( Rational.new(3,4) )
0.75
iex> Rational.value( Rational.add(0.2, 0.3) )
0.5
iex> Rational.value( Rational.new(-3,4) )
-0.75
"""
@spec value(rational | number) :: number
def value(number) do
case maybe_wrap(number) do
%{den: 1, num: num} ->
num
%{den: den, num: num} ->
num / den
end
end
@doc """
Returns a new rational which is the sum of the specified rationals (a+b).
#### See also
[gcd/2](#gcd/2), [sub/2](#sub/2), [mult/2](#mult/2), [div/2](#div/2)
#### Examples
iex> Rational.add( Rational.new(3,4), Rational.new(5,8) )
#Rational<11/8>
iex> Rational.add( Rational.new(13,32), Rational.new(5,64) )
#Rational<31/64>
iex> Rational.add( Rational.new(-3,4), Rational.new(5,8) )
#Rational<-1/8>
"""
@spec add(rational | number, rational | number) :: rational | integer
def add(a, b) when a == 0, do: b
def add(a, b) when b == 0, do: a
def add(a, b) do
a = maybe_wrap(a)
b = maybe_wrap(b)
new(a.num * b.den + b.num * a.den, a.den * b.den)
end
@doc """
Returns a new rational which is the difference of the specified rationals
(a-b).
#### See also
[gcd/2](#gcd/2), [add/2](#add/2), [mult/2](#mult/2), [div/2](#div/2)
#### Examples
iex> Rational.sub( Rational.new(3,4), Rational.new(5,8) )
#Rational<1/8>
iex> Rational.sub( Rational.new(13,32), Rational.new(5,64) )
#Rational<21/64>
iex> Rational.sub( Rational.new(-3,4), Rational.new(5,8) )
#Rational<-11/8>
"""
@spec sub(rational | number, rational | number) :: rational | integer
def sub(a, b) when a == 0, do: neg(b)
def sub(a, b) when b == 0, do: a
def sub(a, b) do
a = maybe_wrap(a)
b = maybe_wrap(b)
new(a.num * b.den - b.num * a.den, a.den * b.den)
end
@doc """
Returns a new rational which is the product of the specified rationals
(a*b).
#### See also
[gcd/2](#gcd/2), [add/2](#add/2), [sub/2](#sub/2), [div/2](#div/2)
#### Examples
iex> Rational.mult( Rational.new(3,4), Rational.new(5,8) )
#Rational<15/32>
iex> Rational.mult( Rational.new(13,32), Rational.new(5,64) )
#Rational<65/2048>
iex> Rational.mult( Rational.new(-3,4), Rational.new(5,8) )
#Rational<-15/32>
"""
@spec mult(rational | number, rational | number) :: rational | integer
def mult(a, b) when a == 0 or b == 0 do
0
end
def mult(a, b) do
a = maybe_wrap(a)
b = maybe_wrap(b)
new(a.num * b.num, a.den * b.den)
end
@doc """
Returns a new rational which is the ratio of the specified rationals
(a/b).
#### See also
[gcd/2](#gcd/2), [add/2](#add/2), [sub/2](#sub/2), [mult/2](#mult/2)
#### Examples
iex> Rational.div( Rational.new(3,4), Rational.new(5,8) )
#Rational<6/5>
iex> Rational.div( Rational.new(13,32), Rational.new(5,64) )
#Rational<26/5>
iex> Rational.div( Rational.new(-3,4), Rational.new(5,8) )
#Rational<-6/5>
"""
@spec div(rational | number, rational | number) :: rational | integer
def div(a, _) when a == 0, do: 0
def div(_, b) when b == 0 do
raise ArgumentError, message: "cannot create nan (den=0)"
end
def div(a, b) do
a = maybe_wrap(a)
b = maybe_wrap(b)
new(a.num * b.den, a.den * b.num)
end
@doc """
  Compares two Rationals. If the first number (a) is greater than the second
  number (b), 1 is returned; if a is less than b, -1 is returned. Otherwise
  the numbers are equal and 0 is returned.
#### See also
  [gt?/2](#gt?/2), [le?/2](#le?/2)
#### Examples
iex> Rational.compare( Rational.new(3,4), Rational.new(5,8) )
1
iex> Rational.compare( Rational.new(-3,4), Rational.new(-5,8) )
-1
iex> Rational.compare( Rational.new(3,64), Rational.new(3,64) )
0
"""
@spec compare(rational | number, rational | number) :: (-1 | 0 | 1)
def compare(a, b) do
x = maybe_wrap(sub(a, b))
cond do
x.num == 0 -> 0
sign(x.num) < 0 -> -1
sign(x.num) > 0 -> 1
end
end
@doc """
Returns a boolean indicating whether parameter a is equal to parameter b.
#### See also
  [gt?/2](#gt?/2), [le?/2](#le?/2)
#### Examples
iex> Rational.equal?( Rational.new(), Rational.new(0,1) )
true
iex> Rational.equal?( Rational.new(3,4), Rational.new(5,8) )
false
iex> Rational.equal?( Rational.new(-3,4), Rational.new(-3,4) )
true
"""
@spec equal?(rational | number, rational | number) :: boolean
def equal?(a, b) do
compare(a, b) == 0
end
@doc """
Returns a boolean indicating whether the parameter a is less than parameter b.
#### See also
  [gt?/2](#gt?/2), [le?/2](#le?/2)
#### Examples
iex> Rational.lt?( Rational.new(13,32), Rational.new(5,64) )
false
iex> Rational.lt?( Rational.new(-3,4), Rational.new(-5,8) )
true
iex> Rational.lt?( Rational.new(-3,4), Rational.new(5,8) )
true
"""
@spec lt?(rational | number, rational | number) :: boolean
def lt?(a, b) do
compare(a, b) == -1
end
@doc """
Returns a boolean indicating whether the parameter a is less than or equal to
parameter b.
#### See also
  [ge?/2](#ge?/2), [lt?/2](#lt?/2)
#### Examples
iex> Rational.le?( Rational.new(13,32), Rational.new(5,64) )
false
iex> Rational.le?( Rational.new(-3,4), Rational.new(-5,8) )
true
iex> Rational.le?( Rational.new(-3,4), Rational.new(5,8) )
true
iex> Rational.le?( Rational.new(3,4), Rational.new(3,4) )
true
iex> Rational.le?( Rational.new(-3,4), Rational.new(-3,4) )
true
iex> Rational.le?( Rational.new(), Rational.new() )
true
"""
@spec le?(rational | number, rational | number) :: boolean
def le?(a, b) do
compare(a, b) != 1
end
@doc """
Returns a boolean indicating whether the parameter a is greater than
parameter b.
#### See also
  [lt?/2](#lt?/2), [le?/2](#le?/2)
#### Examples
iex> Rational.gt?( Rational.new(13,32), Rational.new(5,64) )
true
iex> Rational.gt?( Rational.new(-3,4), Rational.new(-5,8) )
false
iex> Rational.gt?( Rational.new(-3,4), Rational.new(5,8) )
false
"""
@spec gt?(rational | number, rational | number) :: boolean
def gt?(a, b), do: not le?(a,b)
@doc """
Returns a boolean indicating whether the parameter a is greater than or equal
to parameter b.
#### See also
  [le?/2](#le?/2), [gt?/2](#gt?/2)
#### Examples
iex> Rational.ge?( Rational.new(13,32), Rational.new(5,64) )
true
iex> Rational.ge?( Rational.new(-3,4), Rational.new(-5,8) )
false
iex> Rational.ge?( Rational.new(-3,4), Rational.new(5,8) )
false
iex> Rational.ge?( Rational.new(3,4), Rational.new(3,4) )
true
iex> Rational.ge?( Rational.new(-3,4), Rational.new(-3,4) )
true
iex> Rational.ge?( Rational.new(), Rational.new() )
true
"""
@spec ge?(rational | number, rational | number) :: boolean
def ge?(a, b), do: not lt?(a,b)
@doc """
Returns a new rational which is the negative of the specified rational (a).
#### See also
  [new/2](#new/2), [abs/1](#abs/1)
#### Examples
iex> Rational.neg( Rational.new(3,4) )
#Rational<-3/4>
iex> Rational.neg( Rational.new(-13,32) )
#Rational<13/32>
iex> Rational.neg( Rational.new() )
0
"""
@spec neg(rational | number) :: rational | integer
def neg(a) do
a = maybe_wrap(a)
new(-a.num, a.den)
end
@doc """
Returns a new rational which is the absolute value of the specified rational
(a).
#### See also
  [new/2](#new/2), [add/2](#add/2), [neg/1](#neg/1)
#### Examples
iex> Rational.abs( Rational.new(3,4) )
#Rational<3/4>
iex> Rational.abs( Rational.new(-13,32) )
#Rational<13/32>
iex> Rational.abs( Rational.new() )
0
"""
@spec abs(rational | number) :: rational | integer
def abs(a) do
a = maybe_wrap(a)
new(Kernel.abs(a.num), a.den)
end
@spec maybe_wrap(rational | number) :: rational
defp maybe_wrap(a) when is_integer(a) do
%__MODULE__{num: a, den: 1}
end
defp maybe_wrap(a) when is_float(a) do
from_float(a)
end
defp maybe_wrap(rational = %__MODULE__{}) do
rational
end
defp maybe_wrap(other) do
raise ArgumentError, message: "unsupported datatype #{inspect(other)}"
end
@spec maybe_unwrap(rational) :: rational | integer
defp maybe_unwrap(%{den: 1, num: num}) do
num
end
defp maybe_unwrap(rational) do
rational
end
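
  # Converts a float to a rational by scaling by powers of ten until the
  # value is integral (e.g. 0.25 -> 25/100), then reducing it via new/2.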
defp from_float(num, den \\ 1) do
truncated = trunc(num)
cond do
truncated == num ->
new(truncated, den)
true ->
from_float(num * 10, den * 10)
end
end
end
defimpl Inspect, for: Rational do
def inspect(%{num: num, den: den}, opts) do
"#Rational<#{Inspect.inspect(num, opts)}/#{Inspect.inspect(den, opts)}>"
end
end | lib/rational.ex | 0.861553 | 0.714659 | rational.ex | starcoder |
defmodule GenGossip.Server do
@moduledoc false
use GenServer
alias GenGossip.ClusterState
@default_limit {45, 10_000} # at most 45 gossip messages every 10 seconds
defstruct [:mod, :mod_state, :tokens, :max_tokens, :interval, :cluster_state]
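
  # `tokens` is a simple token bucket: it is refilled to `max_tokens` every
  # `interval` milliseconds (see handle_info(:reset_tokens, ...)), so at most
  # `max_tokens` gossip messages are sent per interval.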
def rejoin(node, mod, state) do
GenServer.cast({mod, node}, {:rejoin, state})
end
def distribute_gossip(mod) do
GenServer.cast({mod, node()}, {:distribute})
end
def send_gossip(to_node, mod), do: send_gossip(node(), to_node, mod)
  # Gossiping from a node to itself is a no-op.
  def send_gossip(node, node, _mod), do: :ok
def send_gossip(from_node, to_node, mod) do
GenServer.cast({mod, from_node}, {:send, to_node})
end
@doc false
def init([mod, args, opts]) do
{tokens, interval} = Keyword.get(opts, :gossip_limit, @default_limit)
cluster_state = ClusterState.new(mod)
state = struct(__MODULE__, [
mod: mod,
max_tokens: tokens,
tokens: tokens,
interval: interval,
cluster_state: cluster_state
])
schedule_next_reset(state)
case mod.init(args) do
{:ok, mod_state} ->
updated_state = %{state| mod_state: mod_state}
{:ok, updated_state}
{:stop, reason} ->
{:stop, reason}
end
end
@doc false
def handle_call({:set_my_cluster_state, cluster_state}, _from, state) do
case ClusterState.Manager.set(state.mod, cluster_state) do
:ok ->
{:reply, :ok, %{state| cluster_state: cluster_state}}
{:error, reason} ->
{:reply, {:error, reason}, state}
end
end
def handle_cast({:send, _to_node}, %{tokens: 0} = state) do
{:noreply, state}
end
  def handle_cast({:send, to_node}, state) do
    case state.mod.reconcile(to_node, state.mod_state) do
      {:ok, dump, mod_state} ->
        GenServer.cast({state.mod, to_node}, {:reconcile, state.cluster_state, dump})
        {:noreply, %{state | mod_state: mod_state, tokens: state.tokens - 1}}

      {:stop, reason} ->
        {:stop, reason, state}
    end
  end
  def handle_cast({:reconcile, _cluster_state, dump}, state) do
    case state.mod.handle_gossip({:reconcile, dump}, state.mod_state) do
      {:ok, mod_state} ->
        # TODO: compare the local and remote cluster states
        {:noreply, %{state | mod_state: mod_state}}

      {:stop, reason} ->
        {:stop, reason, state}
    end
  end
@doc false
def handle_info(:reset_tokens, %{max_tokens: tokens} = state) do
schedule_next_reset(state)
{:noreply, %{state| tokens: tokens}}
end
defp schedule_next_reset(%{interval: interval}) do
Process.send_after(self(), :reset_tokens, interval)
end
end | lib/gen_gossip/server.ex | 0.609757 | 0.443239 | server.ex | starcoder |
defmodule EctoMnesia.Adapter do
@moduledoc """
Ecto 2.X adapter for Mnesia Erlang term database.
## Run-Time Storage Options
* `:host` - Node hostname.
* `:dir` - Path where Mnesia should store DB data.
* `:storage_type` - Type of Mnesia storage.
### Mnesia Storage Types
  * `:disc_copies` - store data in both RAM and on disc. Recommended value for most cases.
* `:ram_copies` - store data only in RAM. Data will be lost on node restart.
Useful when working with large datasets that don't need to be persisted.
  * `:disc_only_copies` - store data only on disc. This will limit database size to 2GB and affect
adapter performance.
## Limitations
  There are some limitations when using Ecto with Mnesia that one
needs to be aware of.
### Transactions
  Right now all transactions are run in a dirty context.
### UUIDs
Mnesia does not support UUID types. Ecto emulates them by using `binary(16)`.
### DDL Transaction
  Mnesia migrations are DDLs by nature, so Ecto does not have transactional
  control over them and behavior may differ from other adapters.
### Types
Mnesia doesn't care about types, so all data will be stored as-is.
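
  ## Example Configuration

  An illustrative repo configuration (the application and repo names are
  hypothetical):

      config :my_app, MyApp.Repo,
        adapter: EctoMnesia.Adapter,
        host: Kernel.node(),
        storage_type: :disc_copies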
"""
# Adapter behavior
@behaviour Ecto.Adapter
@adapter_implementation EctoMnesia.Planner
@doc false
defmacro __before_compile__(_env), do: :ok
@doc false
defdelegate ensure_all_started(repo, type), to: @adapter_implementation
@doc false
defdelegate child_spec(repo, opts), to: @adapter_implementation
@doc false
defdelegate prepare(operation, query), to: @adapter_implementation
@doc false
defdelegate execute(repo, query_meta, query_cache, sources, preprocess, opts), to: @adapter_implementation
@doc false
defdelegate insert(repo, query_meta, sources, on_conflict, returning, opts), to: @adapter_implementation
@doc false
defdelegate insert_all(repo, query_meta, header, rows, on_conflict, returning, opts), to: @adapter_implementation
@doc false
defdelegate update(repo, query_meta, params, filter, autogen, opts), to: @adapter_implementation
@doc false
defdelegate delete(repo, query_meta, filter, opts), to: @adapter_implementation
@doc false
def stream(_, _, _, _, _, _),
do: raise ArgumentError, "stream/6 is not supported by adapter, use EctoMnesia.Table.Stream.new/2 instead"
@doc false
defdelegate transaction(repo, opts, fun), to: @adapter_implementation
@doc false
defdelegate in_transaction?(repo), to: @adapter_implementation
@doc false
defdelegate rollback(repo, tid), to: @adapter_implementation
@doc false
defdelegate autogenerate(type), to: @adapter_implementation
@doc false
defdelegate loaders(primitive, type), to: @adapter_implementation
@doc false
defdelegate dumpers(primitive, type), to: @adapter_implementation
# Storage behaviour for migrations
@behaviour Ecto.Adapter.Storage
@storage_implementation EctoMnesia.Storage
@migrator_implementation EctoMnesia.Storage.Migrator
@doc false
defdelegate storage_up(config), to: @storage_implementation
@doc false
defdelegate storage_down(config), to: @storage_implementation
@doc false
defdelegate execute_ddl(repo, ddl, opts), to: @migrator_implementation, as: :execute
@doc false
def supports_ddl_transaction?, do: false
end | lib/ecto_mnesia/adapter.ex | 0.773473 | 0.599251 | adapter.ex | starcoder |
defmodule Abit.Bitmask do
@moduledoc """
Functions for working with bits & integer bitmasks.
"""
import Bitwise
@doc """
  Returns the count of bits set to 1 in the given non-negative integer `int`.
## Examples
iex> Abit.Bitmask.set_bits_count(3)
2
iex> Abit.Bitmask.set_bits_count(0)
0
iex> Abit.Bitmask.set_bits_count(1024)
1
iex> Abit.Bitmask.set_bits_count(1023)
10
"""
  @spec set_bits_count(non_neg_integer) :: non_neg_integer
  def set_bits_count(int) when is_integer(int) and int >= 0 do
    do_set_bits_count(int, 0)
  end

  defp do_set_bits_count(0, acc), do: acc

  defp do_set_bits_count(int, acc) do
    # Add the lowest bit to the accumulator, then shift right and recurse.
    # The non-negative guard above matters: for a negative int, `int >>> 1`
    # never reaches 0 and the recursion would not terminate.
    new_acc = acc + (int &&& 1)

    do_set_bits_count(int >>> 1, new_acc)
  end
@doc """
Returns bit at `bit_index` in the given `integer`.
## Examples
iex> Abit.Bitmask.bit_at(2, 0)
0
iex> Abit.Bitmask.bit_at(2, 1)
1
iex> Abit.Bitmask.bit_at(1, 0)
1
iex> Abit.Bitmask.bit_at(0, 0)
0
"""
@spec bit_at(integer, non_neg_integer) :: 0 | 1
def bit_at(integer, bit_index) when is_integer(integer) and is_integer(bit_index) do
case integer &&& (1 <<< bit_index) do
0 -> 0
_else -> 1
end
end
@doc """
Sets the bit at `bit_index` in `integer` and returns it.
## Examples
iex> Abit.Bitmask.set_bit_at(1, 0, 0)
0
iex> Abit.Bitmask.set_bit_at(0, 0, 1)
1
iex> Abit.Bitmask.set_bit_at(0, 2, 1)
4
"""
@spec set_bit_at(integer, non_neg_integer, 0 | 1) :: integer
def set_bit_at(integer, bit_index, 0) do
integer &&& bnot(1 <<< bit_index)
end
def set_bit_at(integer, bit_index, 1) do
integer ||| 1 <<< bit_index
end
@doc """
Returns the bitwise hamming distance between the
given integers `int_l` and `int_r`.
## Examples
iex> Abit.Bitmask.hamming_distance(1, 1)
0
iex> Abit.Bitmask.hamming_distance(1, 0)
1
iex> Abit.Bitmask.hamming_distance(1, 1023)
9
iex> Abit.Bitmask.hamming_distance(1, 1024)
2
"""
@spec hamming_distance(integer, integer) :: non_neg_integer
def hamming_distance(int_l, int_r) when is_integer(int_l) and is_integer(int_r) do
(int_l ^^^ int_r) |> set_bits_count
end
@doc """
Converts the given `integer` to a list of bits.
`size` is the size of the bitstring you want the integer to be
converted to before creating a list from it.
## Examples
iex> Abit.Bitmask.to_list(1, 1)
[1]
iex> Abit.Bitmask.to_list(1, 2)
[0, 1]
iex> Abit.Bitmask.to_list(214311324231232211111, 64)
[1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1]
"""
@doc since: "0.2.3"
@spec to_list(integer, pos_integer) :: list(0 | 1)
def to_list(integer, size) when is_integer(integer) and is_integer(size) and size > 0 do
do_to_list(<<integer::size(size)>>)
end
defp do_to_list(<<bit::1, rest::bitstring>>) do
[bit | do_to_list(rest)]
end
defp do_to_list(<<>>), do: []
end | lib/abit/bitmask.ex | 0.879568 | 0.502563 | bitmask.ex | starcoder |
defmodule ExDataHoover.Nozzle do
@moduledoc """
Nozzle exposes a public API around the concept of event sourcing.
The `Nozzle` provides a basic server implementation that allows to absorb an event.
You have the possibility to use your own `Bag` to implements what happened after the absortion.
## Example
For example, the following nozzle will absorb an event to a simple bag:
ExDataHoover.Nozzle.start_link(:simple_noozle, ExDataHoover.Bag.Simple)
ExDataHoover.Nozzle.sync_absorb(
:simple_noozle,
trackee: %{"type" => "User", "id" => 1},
event: "authenticated",
props: %{"at" => "2018-11-14 10:00:00"}
)
#=> {:ok,
%{
event: "authenticated",
properties: %{"at" => "2018-11-14 10:00:00"},
trackee: %{"id" => 1, "type" => "User"},
trackee_id: "6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b",
traits: %{"id" => 1, "type" => "User"}
}
}
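
  ## Custom bags

  A bag module implements the `ExDataHoover.Bag` behaviour. A minimal sketch,
  with the callback shapes inferred from this module's usage (`trackee_id/1`
  returning `{:ok, id}` and `wrap/1` receiving the wrapped event):

      defmodule MyApp.LoggerBag do
        @behaviour ExDataHoover.Bag

        def trackee_id(%{"id" => id}), do: {:ok, id}
        def trackee_id(_trackee), do: {:error, :no_id}

        def wrap(event), do: {:ok, event}
      end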
"""
use GenServer
# Public interface
@doc """
Starts a nozzle linked to the current process.
  The expected argument is a bag module, which must implement the
  `ExDataHoover.Bag` behaviour.
"""
@spec start_link(module, name: atom, traits: (... -> any)) :: {:ok, pid}
def start_link(name \\ __MODULE__, bag, traits \\ & &1) do
GenServer.start_link(__MODULE__, %{bag: bag, traits: traits}, name: name)
end
@doc """
Absorb an `event`, `trackee`, and `props`.
  The implementation calls `bag.wrap/1` under the hood.
"""
@spec absorb(atom(), [{:event, any()} | {:props, any()} | {:trackee, any()}, ...]) :: :ok
def absorb(name \\ __MODULE__, trackee: trackee, event: event, props: props)
when is_atom(name) do
GenServer.cast(name, {:absorb, trackee: trackee, event: event, props: props})
end
@doc """
Absorb an `event`, `trackee`, and `props`.
  The call is made synchronously.

  The implementation is identical to `absorb/2` except that it returns
  the `bag.wrap` result.
"""
@spec sync_absorb(atom, trackee: any, event: String.t(), props: map) ::
{:ok, any} | {:error | any}
def sync_absorb(name \\ __MODULE__, trackee: trackee, event: event, props: props) do
GenServer.call(name, {:sync_absorb, trackee: trackee, event: event, props: props})
end
# GenServer implementation
def init(state = %{bag: _bag}) do
{:ok, state}
end
def handle_cast({:absorb, trackee: trackee, event: event, props: props}, state) do
do_wrap(
trackee: trackee,
event: event,
props: props,
bag: state[:bag],
traits: state[:traits]
)
{:noreply, state}
end
def handle_call({:sync_absorb, trackee: trackee, event: event, props: props}, _from, state) do
case do_wrap(
trackee: trackee,
event: event,
props: props,
bag: state[:bag],
traits: state[:traits]
) do
{:ok, results} -> {:reply, {:ok, results}, state}
{:error, results} -> {:reply, {:error, results}, state}
error -> {:reply, {:error, error}, state}
end
end
defp do_wrap(trackee: trackee, event: event, props: props, bag: bag, traits: traits) do
case bag.trackee_id(trackee) do
{:ok, trackee_id} ->
bag.wrap(
trackee_id: ExDataHoover.anonymize(trackee_id),
trackee: trackee,
traits: extract_traits(trackee, traits),
event: event,
properties: props
)
error ->
error
end
end
defp extract_traits(trackee, traits) do
trackee
|> traits.()
|> Enum.filter(fn {_, v} -> v end)
|> Enum.into(%{})
end
end | lib/ex_data_hoover/nozzle.ex | 0.884202 | 0.607197 | nozzle.ex | starcoder |
defmodule Cldr.Calendar.Behaviour do
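  @moduledoc """
  Helpers for defining `Calendar`/`Cldr.Calendar` implementations.

  A calendar module `use`s this behaviour, supplying at least its `:epoch`
  (see the options read in `__using__/1` below). A minimal, illustrative
  sketch (the module name is hypothetical; the using module must still
  define the conversions this behaviour relies on, such as
  `date_to_iso_days/3`, `date_from_iso_days/1` and `leap_year?/1`):

      defmodule MyCalendar do
        use Cldr.Calendar.Behaviour,
          epoch: ~D[0001-01-01],
          cldr_calendar_type: :gregorian

        # date_to_iso_days/3, date_from_iso_days/1, leap_year?/1, ...
      end
  """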
defmacro __using__(opts \\ []) do
epoch = Keyword.fetch!(opts, :epoch)
{date, []} = Code.eval_quoted(epoch)
epoch = Cldr.Calendar.date_to_iso_days(date)
epoch_day_of_week = Date.day_of_week(date)
days_in_week = Keyword.get(opts, :days_in_week, 7)
first_day_of_week = Keyword.get(opts, :first_day_of_week, 1)
cldr_calendar_type = Keyword.get(opts, :cldr_calendar_type, :gregorian)
cldr_calendar_base = Keyword.get(opts, :cldr_calendar_base, :month)
months_in_ordinary_year = Keyword.get(opts, :months_in_ordinary_year, 12)
months_in_leap_year = Keyword.get(opts, :months_in_leap_year, months_in_ordinary_year)
quote location: :keep do
import Cldr.Macros
@behaviour Calendar
@behaviour Cldr.Calendar
@after_compile Cldr.Calendar.Behaviour
@days_in_week unquote(days_in_week)
@quarters_in_year 4
@epoch unquote(epoch)
@epoch_day_of_week unquote(epoch_day_of_week)
@first_day_of_week unquote(first_day_of_week)
@last_day_of_week Cldr.Math.amod(@first_day_of_week + @days_in_week - 1, @days_in_week)
@months_in_ordinary_year unquote(months_in_ordinary_year)
@months_in_leap_year unquote(months_in_leap_year)
def epoch do
@epoch
end
def epoch_day_of_week do
@epoch_day_of_week
end
def first_day_of_week do
@first_day_of_week
end
def last_day_of_week do
@last_day_of_week
end
@doc """
Defines the CLDR calendar type for this calendar.
      This type is used in support of `Cldr.Calendar.localize/3`.
"""
@impl true
def cldr_calendar_type do
unquote(cldr_calendar_type)
end
@doc """
Identifies that this calendar is month based.
"""
@impl true
def calendar_base do
unquote(cldr_calendar_base)
end
@doc """
Determines if the `date` given is valid according to
this calendar.
"""
@impl true
def valid_date?(year, month, day) do
month <= months_in_year(year) && day <= days_in_month(year, month)
end
@doc """
Calculates the year and era from the given `year`.
"""
@era_module Cldr.Calendar.Era.era_module(unquote(cldr_calendar_type))
@spec year_of_era(Calendar.year) :: {year :: Calendar.year(), era :: Calendar.era()}
unless Code.ensure_loaded?(Calendar.ISO) && function_exported?(Calendar.ISO, :year_of_era, 3) do
@impl true
end
def year_of_era(year) do
iso_days = date_to_iso_days(year, 1, 1)
@era_module.year_of_era(iso_days, year)
end
@doc """
Calculates the year and era from the given `date`.
"""
@spec year_of_era(Calendar.year, Calendar.month, Calendar.day) ::
{year :: Calendar.year(), era :: Calendar.era()}
@impl true
def year_of_era(year, month, day) do
iso_days = date_to_iso_days(year, month, day)
@era_module.year_of_era(iso_days, year)
end
@doc """
Returns the calendar year as displayed
on rendered calendars.
"""
@spec calendar_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.year()
@impl true
      def calendar_year(year, _month, _day) do
year
end
@doc """
      Returns the related Gregorian year as displayed
on rendered calendars.
"""
@spec related_gregorian_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.year()
@impl true
      def related_gregorian_year(year, _month, _day) do
year
end
@doc """
Returns the extended year as displayed
on rendered calendars.
"""
@spec extended_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.year()
@impl true
      def extended_year(year, _month, _day) do
year
end
@doc """
Returns the cyclic year as displayed
on rendered calendars.
"""
@spec cyclic_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.year()
@impl true
      def cyclic_year(year, _month, _day) do
year
end
@doc """
Returns the quarter of the year from the given
`year`, `month`, and `day`.
"""
@spec quarter_of_year(Calendar.year, Calendar.month, Calendar.day) ::
Cldr.Calendar.quarter()
@impl true
      def quarter_of_year(year, month, _day) do
ceil(month / (months_in_year(year) / @quarters_in_year))
end
@doc """
Returns the month of the year from the given
`year`, `month`, and `day`.
"""
@spec month_of_year(Calendar.year, Calendar.month, Calendar.day) ::
Calendar.month() | {Calendar.month, Cldr.Calendar.leap_month?()}
@impl true
def month_of_year(_year, month, _day) do
month
end
@doc """
Calculates the week of the year from the given
`year`, `month`, and `day`.
By default this function always returns
`{:error, :not_defined}`.
"""
@spec week_of_year(Calendar.year, Calendar.month, Calendar.day) ::
{:error, :not_defined}
@impl true
def week_of_year(_year, _month, _day) do
{:error, :not_defined}
end
@doc """
Calculates the ISO week of the year from the
given `year`, `month`, and `day`.
By default this function always returns
`{:error, :not_defined}`.
"""
@spec iso_week_of_year(Calendar.year, Calendar.month, Calendar.day) ::
{:error, :not_defined}
@impl true
def iso_week_of_year(_year, _month, _day) do
{:error, :not_defined}
end
@doc """
Calculates the week of the year from the given
`year`, `month`, and `day`.
By default this function always returns
`{:error, :not_defined}`.
"""
@spec week_of_month(Calendar.year, Calendar.month, Calendar.day) ::
{pos_integer(), pos_integer()} | {:error, :not_defined}
@impl true
def week_of_month(_year, _month, _day) do
{:error, :not_defined}
end
@doc """
Calculates the day and era from the given
`year`, `month`, and `day`.
By default we consider on two eras: before the epoch
and on-or-after the epoch.
"""
@spec day_of_era(Calendar.year, Calendar.month, Calendar.day) ::
{day :: Calendar.day, era :: Calendar.era}
@impl true
def day_of_era(year, month, day) do
iso_days = date_to_iso_days(year, month, day)
@era_module.day_of_era(iso_days)
end
@doc """
Calculates the day of the year from the given
`year`, `month`, and `day`.
"""
@spec day_of_year(Calendar.year, Calendar.month, Calendar.day) :: Calendar.day()
@impl true
def day_of_year(year, month, day) do
first_day = date_to_iso_days(year, 1, 1)
this_day = date_to_iso_days(year, month, day)
this_day - first_day + 1
end
      if (Code.ensure_loaded?(Date) && function_exported?(Date, :day_of_week, 2)) do
        @impl true
        @spec day_of_week(Calendar.year, Calendar.month, Calendar.day, :default | atom()) ::
                {Calendar.day_of_week(), first_day_of_week :: non_neg_integer(),
                 last_day_of_week :: non_neg_integer()}
        def day_of_week(year, month, day, :default) do
          days = date_to_iso_days(year, month, day)
          day_of_week = Cldr.Math.amod(days - 1, @days_in_week)

          {day_of_week, @first_day_of_week, @last_day_of_week}
        end

        defoverridable day_of_week: 4
      else
        # Elixir versions without Date.day_of_week/2 only know the 3-arity
        # callback, so compute the day directly rather than delegating to
        # the 4-arity version (which is not defined in this branch).
        @impl true
        @spec day_of_week(Calendar.year, Calendar.month, Calendar.day) :: 1..7
        def day_of_week(year, month, day) do
          days = date_to_iso_days(year, month, day)

          Cldr.Math.amod(days - 1, @days_in_week)
        end

        defoverridable day_of_week: 3
      end
@doc """
Returns the number of periods in a given
`year`. A period corresponds to a month
in month-based calendars and a week in
week-based calendars.
"""
@impl true
def periods_in_year(year) do
months_in_year(year)
end
@doc """
Returns the number of months in a
given `year`.
"""
@impl true
def months_in_year(year) do
if leap_year?(year), do: @months_in_leap_year, else: @months_in_ordinary_year
end
@doc """
Returns the number of weeks in a
given `year`.
"""
@impl true
def weeks_in_year(_year) do
{:error, :not_defined}
end
@doc """
Returns the number days in a given year.
The year is the number of years since the
epoch.
"""
@impl true
      def days_in_year(year) do
        this_year = date_to_iso_days(year, 1, 1)
        next_year = date_to_iso_days(year + 1, 1, 1)

        # The count is the difference between the first days of consecutive
        # years, mirroring the subtraction used in days_in_month/2.
        next_year - this_year
      end
@doc """
Returns how many days there are in the given year
and month.
"""
@spec days_in_month(Calendar.year, Calendar.month) :: Calendar.month()
@impl true
def days_in_month(year, month) do
start_of_this_month =
date_to_iso_days(year, month, 1)
start_of_next_month =
if month == months_in_year(year) do
date_to_iso_days(year + 1, 1, 1)
else
date_to_iso_days(year, month + 1, 1)
end
start_of_next_month - start_of_this_month
end
@doc """
Returns the number days in a a week.
"""
def days_in_week do
@days_in_week
end
@doc """
Returns a `Date.Range.t` representing
a given year.
"""
@impl true
def year(year) do
last_month = months_in_year(year)
days_in_last_month = days_in_month(year, last_month)
with {:ok, start_date} <- Date.new(year, 1, 1, __MODULE__),
{:ok, end_date} <- Date.new(year, last_month, days_in_last_month, __MODULE__) do
Date.range(start_date, end_date)
end
end
@doc """
Returns a `Date.Range.t` representing
a given quarter of a year.
"""
@impl true
def quarter(_year, _quarter) do
{:error, :not_defined}
end
@doc """
Returns a `Date.Range.t` representing
a given month of a year.
"""
@impl true
def month(year, month) do
starting_day = 1
ending_day = days_in_month(year, month)
with {:ok, start_date} <- Date.new(year, month, starting_day, __MODULE__),
{:ok, end_date} <- Date.new(year, month, ending_day, __MODULE__) do
Date.range(start_date, end_date)
end
end
@doc """
Returns a `Date.Range.t` representing
a given week of a year.
"""
@impl true
def week(_year, _week) do
{:error, :not_defined}
end
@doc """
Adds an `increment` number of `date_part`s
to a `year-month-day`.
`date_part` can be `:months` only.
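
      With `coerce: true` the resulting day is clamped to the number of days
      in the target month; otherwise the day is passed through unchanged.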
"""
@impl true
def plus(year, month, day, date_part, increment, options \\ [])
def plus(year, month, day, :months, months, options) do
months_in_year = months_in_year(year)
{year_increment, new_month} = Cldr.Math.div_amod(month + months, months_in_year)
new_year = year + year_increment
new_day =
if Keyword.get(options, :coerce, false) do
max_new_day = days_in_month(new_year, new_month)
min(day, max_new_day)
else
day
end
{new_year, new_month, new_day}
end
@doc """
Returns the `t:Calendar.iso_days` format of
the specified date.
"""
@impl true
@spec naive_datetime_to_iso_days(
Calendar.year(),
Calendar.month(),
Calendar.day(),
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
) :: Calendar.iso_days()
def naive_datetime_to_iso_days(year, month, day, hour, minute, second, microsecond) do
{date_to_iso_days(year, month, day), time_to_day_fraction(hour, minute, second, microsecond)}
end
@doc """
Converts the `t:Calendar.iso_days` format to the
datetime format specified by this calendar.
"""
@spec naive_datetime_from_iso_days(Calendar.iso_days()) :: {
Calendar.year(),
Calendar.month(),
Calendar.day(),
Calendar.hour(),
Calendar.minute(),
Calendar.second(),
Calendar.microsecond()
}
@impl true
def naive_datetime_from_iso_days({days, day_fraction}) do
{year, month, day} = date_from_iso_days(days)
{hour, minute, second, microsecond} = time_from_day_fraction(day_fraction)
{year, month, day, hour, minute, second, microsecond}
end
@doc false
calendar_impl()
def parse_date(string) do
Cldr.Calendar.Parse.parse_date(string, __MODULE__)
end
@doc false
calendar_impl()
def parse_utc_datetime(string) do
Cldr.Calendar.Parse.parse_utc_datetime(string, __MODULE__)
end
@doc false
calendar_impl()
def parse_naive_datetime(string) do
Cldr.Calendar.Parse.parse_naive_datetime(string, __MODULE__)
end
@doc false
@impl Calendar
defdelegate parse_time(string), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate day_rollover_relative_to_midnight_utc, to: Calendar.ISO
@doc false
@impl Calendar
defdelegate time_from_day_fraction(day_fraction), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate time_to_day_fraction(hour, minute, second, microsecond), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate date_to_string(year, month, day), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond,
time_zone,
zone_abbr,
utc_offset,
std_offset
),
to: Calendar.ISO
@doc false
@impl Calendar
defdelegate naive_datetime_to_string(
year,
month,
day,
hour,
minute,
second,
microsecond
),
to: Calendar.ISO
@doc false
@impl Calendar
defdelegate time_to_string(hour, minute, second, microsecond), to: Calendar.ISO
@doc false
@impl Calendar
defdelegate valid_time?(hour, minute, second, microsecond), to: Calendar.ISO
defoverridable valid_date?: 3
defoverridable valid_time?: 4
defoverridable naive_datetime_to_string: 7
defoverridable date_to_string: 3
defoverridable time_to_day_fraction: 4
defoverridable time_from_day_fraction: 1
defoverridable day_rollover_relative_to_midnight_utc: 0
defoverridable parse_time: 1
defoverridable parse_naive_datetime: 1
defoverridable parse_utc_datetime: 1
defoverridable parse_date: 1
defoverridable naive_datetime_from_iso_days: 1
defoverridable naive_datetime_to_iso_days: 7
defoverridable year_of_era: 1
defoverridable quarter_of_year: 3
defoverridable month_of_year: 3
defoverridable week_of_year: 3
defoverridable iso_week_of_year: 3
defoverridable week_of_month: 3
defoverridable day_of_era: 3
defoverridable day_of_year: 3
defoverridable periods_in_year: 1
defoverridable months_in_year: 1
defoverridable weeks_in_year: 1
defoverridable days_in_year: 1
defoverridable days_in_month: 2
defoverridable days_in_week: 0
defoverridable year: 1
defoverridable quarter: 2
defoverridable month: 2
defoverridable week: 2
defoverridable plus: 5
defoverridable plus: 6
defoverridable epoch: 0
defoverridable cldr_calendar_type: 0
defoverridable calendar_base: 0
defoverridable calendar_year: 3
defoverridable extended_year: 3
defoverridable related_gregorian_year: 3
defoverridable cyclic_year: 3
end
end
def __after_compile__(env, _bytecode) do
Cldr.Calendar.Era.define_era_module(env.module)
end
end | lib/cldr/calendar/behaviour.ex | 0.864182 | 0.511107 | behaviour.ex | starcoder |
defmodule Monzo.Transaction do
@moduledoc """
[Monzo API reference](https://monzo.com/docs/#transactions)
"""
@endpoint "transactions"
defstruct account_balance: nil, amount: nil, attachments: nil, category: nil,
created: nil, currency: nil, decline_reason: nil, description: nil,
id: nil, is_load: nil, local_amount: nil, local_currency: nil,
merchant: nil, metadata: nil, notes: nil, settled: nil
@type t :: %__MODULE__{
account_balance: integer,
amount: integer,
attachments: list,
category: String.t,
created: String.t,
currency: String.t,
decline_reason: String.t,
description: String.t,
id: String.t,
is_load: boolean,
local_amount: String.t,
local_currency: String.t,
merchant: Monzo.Merchant.t,
metadata: map,
notes: String.t,
settled: boolean
}
@doc """
  List transactions for the given account.

  Pass `merchant: true` to expand the merchant details on each transaction.
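
  ## Example (illustrative; the account id is hypothetical)

      {:ok, transactions} =
        Monzo.Transaction.list(client, "acc_123", merchant: true)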
"""
  @spec list(Monzo.Client.t, String.t, Keyword.t) :: {:ok, [Monzo.Transaction.t]} | {:error, Monzo.Error.t}
def list(client, account_id, opts \\ []) do
{params, as} = Keyword.get(opts, :merchant, false) |> with_merchant(%{"account_id" => account_id})
with {:ok, body} <- Monzo.Client.get(client, @endpoint, params),
{:ok, %{"transactions" => transactions}} <- Poison.decode(body, as: %{"transactions" => [as]}),
do: {:ok, transactions}
end
@doc """
  Get a single transaction by its id.

  Pass `merchant: true` to expand the merchant details.
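
  ## Example (illustrative; the transaction id is hypothetical)

      {:ok, transaction} =
        Monzo.Transaction.get(client, "tx_123", merchant: true)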
"""
  @spec get(Monzo.Client.t, String.t, Keyword.t) :: {:ok, Monzo.Transaction.t} | {:error, Monzo.Error.t}
def get(client, transaction_id, opts \\ []) do
{params, as} = Keyword.get(opts, :merchant, false) |> with_merchant(%{})
with {:ok, body} <- Monzo.Client.get(client, @endpoint <> "/" <> transaction_id, params),
{:ok, %{"transaction" => transaction}} <- Poison.decode(body, as: %{"transaction" => as}),
do: {:ok, transaction}
end
@doc false
@spec with_merchant(boolean, map) :: {map, Monzo.Transaction.t}
defp with_merchant(true, params) do
params = Map.put(params, :expand, ["merchant"])
as = %Monzo.Transaction{merchant: %Monzo.Merchant{address: %Monzo.Address{}}}
{params, as}
end
defp with_merchant(_, params) do
as = %Monzo.Transaction{}
{params, as}
end
end | lib/monzo/transaction.ex | 0.811713 | 0.461017 | transaction.ex | starcoder |
defmodule Litmus.Type.String do
@moduledoc """
This type validates and converts values to strings It converts boolean and
number values to strings.
## Options
* `:default` - Setting `:default` will populate a field with the provided
value, assuming that it is not present already. If a field already has a
value present, it will not be altered.
* `:min_length` - Specifies the minimum number of characters allowed in the
string. Allowed values are non-negative integers.
* `:max_length` - Specifies the maximum number of characters allowed in the
string. Allowed values are non-negative integers.
* `:length` - Specifies the exact number of characters allowed in the
string. Allowed values are non-negative integers.
* `:regex` - Specifies a Regular expression that a string must match. Use
the `Litmus.Type.String.Regex` struct with the options:
* `:pattern` - A `Regex.t()` to match
* `:error_message` - An error message to use when the pattern does not match
  * `:replace` - Replaces occurrences of a pattern with a string. Use the
`Litmus.Type.String.Replace` struct with the options:
* `:pattern` - A `Regex.t()`, `String.t()`, or compiled pattern to match
* `:replacement` - A `String.t()` to replace
* `:global` - When `true`, all occurences of the pattern are replaced.
When `false`, only the first occurence is replaced. Defaults to `true`.
* `:required` - Setting `:required` to `true` will cause a validation error
when a field is not present or the value is `nil`. Allowed values for
required are `true` and `false`. The default is `false`.
* `:trim` - Removes additional whitespace at the front and end of a string.
Allowed values are `true` and `false`. The default is `false`.
## Examples
iex> schema = %{
...> "username" => %Litmus.Type.String{
...> min_length: 3,
...> max_length: 10,
...> trim: true
...> },
...> "password" => %Litmus.Type.String{
...> length: 6,
...> regex: %Litmus.Type.String.Regex{
...> pattern: ~r/^[a-zA-Z0-9_]*$/,
...> error_message: "password must be alphanumeric"
...> }
...> }
...> }
iex> params = %{"username" => " user123 ", "password" => "<PASSWORD>"}
iex> Litmus.validate(params, schema)
{:ok, %{"username" => "user123", "password" => "<PASSWORD>"}}
iex> Litmus.validate(%{"password" => "<PASSWORD>"}, schema)
{:error, "password must be alphanumeric"}
iex> schema = %{
...> "username" => %Litmus.Type.String{
...> replace: %Litmus.Type.String.Replace{
...> pattern: ~r/\_/,
...> replacement: ""
...> }
...> }
...> }
iex> Litmus.validate(%{"username" => "one_two_three"}, schema)
{:ok, %{"username" => "onetwothree"}}
iex> schema = %{
...> "username" => %Litmus.Type.String{
...> default: "anonymous"
...> }
...> }
iex> Litmus.validate(%{}, schema)
{:ok, %{"username" => "anonymous"}}
"""
alias Litmus.{Default, Required}
alias Litmus.Type
defstruct [
:min_length,
:max_length,
:length,
default: Litmus.Type.Any.NoDefault,
regex: %Type.String.Regex{},
replace: %Type.String.Replace{},
trim: false,
required: false
]
@type t :: %__MODULE__{
default: any,
min_length: non_neg_integer | nil,
max_length: non_neg_integer | nil,
length: non_neg_integer | nil,
regex: Type.String.Regex.t(),
replace: Type.String.Replace.t(),
trim: boolean,
required: boolean
}
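
  # Validation runs as a pipeline: Required.validate/3 short-circuits with
  # :ok_not_present for absent optional fields, so that only the :default
  # rule is applied to them.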
@spec validate_field(t, term, map) :: {:ok, map} | {:error, String.t()}
def validate_field(type, field, data) do
with {:ok, data} <- Required.validate(type, field, data),
{:ok, data} <- convert(type, field, data),
{:ok, data} <- trim(type, field, data),
{:ok, data} <- min_length_validate(type, field, data),
{:ok, data} <- max_length_validate(type, field, data),
{:ok, data} <- length_validate(type, field, data),
{:ok, data} <- regex_validate(type, field, data),
{:ok, data} <- replace(type, field, data) do
{:ok, data}
else
{:ok_not_present, data} -> Default.validate(type, field, data)
{:error, msg} -> {:error, msg}
end
end
@spec convert(t, term, map) :: {:ok, map} | {:error, String.t()}
defp convert(%__MODULE__{}, field, params) do
cond do
params[field] == nil ->
{:ok, params}
is_binary(params[field]) ->
{:ok, params}
is_number(params[field]) or is_boolean(params[field]) ->
{:ok, Map.update!(params, field, &to_string/1)}
true ->
{:error, "#{field} must be a string"}
end
end
@spec min_length_validate(t, term, map) :: {:ok, map} | {:error, String.t()}
defp min_length_validate(%__MODULE__{min_length: min_length}, field, params)
when is_integer(min_length) and min_length > 0 do
if params[field] == nil or String.length(params[field]) < min_length do
{:error, "#{field} length must be greater than or equal to #{min_length} characters"}
else
{:ok, params}
end
end
defp min_length_validate(%__MODULE__{}, _field, params) do
{:ok, params}
end
@spec max_length_validate(t, term, map) :: {:ok, map} | {:error, String.t()}
defp max_length_validate(%__MODULE__{max_length: nil}, _field, params) do
{:ok, params}
end
defp max_length_validate(%__MODULE__{max_length: max_length}, field, params)
when is_integer(max_length) and max_length >= 0 do
if Map.get(params, field) && String.length(params[field]) > max_length do
{:error, "#{field} length must be less than or equal to #{max_length} characters"}
else
{:ok, params}
end
end
@spec length_validate(t, term, map) :: {:ok, map} | {:error, String.t()}
defp length_validate(%__MODULE__{length: nil}, _field, params) do
{:ok, params}
end
defp length_validate(%__MODULE__{length: 0}, field, params) do
if params[field] in [nil, ""] do
{:ok, params}
else
{:error, "#{field} length must be 0 characters"}
end
end
defp length_validate(%__MODULE__{length: len}, field, params) when is_integer(len) do
if params[field] == nil || String.length(params[field]) != len do
{:error, "#{field} length must be #{len} characters"}
else
{:ok, params}
end
end
@spec replace(t, term, map) :: {:ok, map}
defp replace(%__MODULE__{replace: %__MODULE__.Replace{pattern: nil}}, _field, params) do
{:ok, params}
end
defp replace(%__MODULE__{replace: replace}, field, params) do
new_string =
String.replace(params[field], replace.pattern, replace.replacement, global: replace.global)
{:ok, Map.put(params, field, new_string)}
end
@spec regex_validate(t, term, map) :: {:ok, map} | {:error, String.t()}
defp regex_validate(%__MODULE__{regex: %__MODULE__.Regex{pattern: nil}}, _field, params) do
{:ok, params}
end
defp regex_validate(%__MODULE__{regex: regex}, field, params) do
if params[field] == nil or !Regex.match?(regex.pattern, params[field]) do
error_message = regex.error_message || "#{field} must be in a valid format"
{:error, error_message}
else
{:ok, params}
end
end
@spec trim(t, term, map) :: {:ok, map}
defp trim(%__MODULE__{trim: true}, field, params) do
if Map.get(params, field) do
trimmed_value = String.trim(params[field])
trimmed_params = Map.put(params, field, trimmed_value)
{:ok, trimmed_params}
else
{:ok, params}
end
end
defp trim(%__MODULE__{trim: false}, _field, params) do
{:ok, params}
end
defimpl Litmus.Type do
alias Litmus.Type
@spec validate(Type.t(), term, map) :: {:ok, map} | {:error, String.t()}
def validate(type, field, data), do: Type.String.validate_field(type, field, data)
end
end | lib/litmus/type/string.ex | 0.9255 | 0.664037 | string.ex | starcoder |
defmodule ExCommons.Map do
@moduledoc """
Helpers for Maps and Structs.
"""
@exceptions [NaiveDateTime, DateTime]
@doc """
  Strips selected keys from maps, which can be inside a list or embedded
  within other maps.

  Will not strip keys from `NaiveDateTime` and `DateTime` structs, unless
  one is given directly as the top-level value.
## Examples
iex> ExCommons.Map.strip_keys(%{}, [])
%{}
iex> ExCommons.Map.strip_keys([%{key: :val}], [:key])
[%{}]
iex> ExCommons.Map.strip_keys(%{embed: %{layered: %{key: :val}}}, [:key])
%{embed: %{layered: %{}}}
"""
@spec strip_keys(Map.t() | [Map.t()], [Atom.t()]) :: Map.t() | [Map.t()]
def strip_keys(list, keys) when is_list(list) do
Enum.map(list, &strip_keys(&1, keys))
end
def strip_keys(map, keys) when is_map(map) do
Map.take(map, Map.keys(map) -- keys) |> Enum.map(&strip_keys(&1, keys)) |> Enum.into(%{})
end
def strip_keys({key, %{__struct__: struct} = val}, _keys)
when struct in @exceptions,
do: {key, val}
def strip_keys({key, val}, keys)
when is_map(val) or is_list(val),
do: {key, strip_keys(val, keys)}
def strip_keys(data, _keys), do: data
@doc """
Atomize keys deeply in a map.
## Examples
iex> ExCommons.Map.atomize_keys(%{atom: %{"string" => true}})
%{atom: %{string: true}}
iex> ExCommons.Map.atomize_keys(%{"string" => %{"string" => true}})
%{string: %{string: true}}
"""
def atomize_keys(map) when is_map(map) do
Map.new(map, fn
{k, v} when is_map(v) -> {String.to_atom(to_string(k)), atomize_keys(v)}
{k, v} -> {String.to_atom(to_string(k)), v}
end)
end
@doc """
Takes all keys from first maps and mirrors them with corresponding values in second map.
Warning: Strips the `__struct__` key.
## Examples
iex> ExCommons.Map.mirror(%{}, %{nested: %{value: false}})
%{}
iex> ExCommons.Map.mirror(%{nested: %{value: true}}, %{nested: %{value: false}})
%{nested: %{value: false}}
iex> ExCommons.Map.mirror(%{nested: %{value: true}}, %{nested: %{value: false, other: nil}})
%{nested: %{value: false}}
iex> ExCommons.Map.mirror(%{nested: %{value: true}}, %{other: "test", nested: %{value: false, other: nil}})
%{nested: %{value: false}}
"""
def mirror(base, mirrored) when is_map(base) and is_map(mirrored) do
for {k, v} <- strip_keys(base, [:__struct__]), into: %{} do
{k, mirror(v, Map.get(mirrored, k))}
end
end
def mirror(_v, value) do
value
end
end | lib/ex_commons/map.ex | 0.872734 | 0.497376 | map.ex | starcoder |
defmodule Cqrs.DomainEvent do
@moduledoc """
Defines a new domain event struct
## Options
* `:from` _optional_ - a struct to derive fields from.
* `:with` _optional_ - a list of `atom` field names to add.
* `:drop` _optional_ - a list of `atom` field names to remove from any field derived from the struct in the `:from` option.
* `:version` _optional_ - a version value. Defaults to `1`
## Example
defmodule DeleteUser do
use Cqrs.Command
field :id, :integer
def handle_dispatch(command, _opts) do
{:ok, :no_impl}
end
end
defmodule UserDeleted do
use Cqrs.DomainEvent,
from: DeleteUser,
with: [:from],
version: 2
end
iex> cmd = DeleteUser.new!(id: 668)
...> event = UserDeleted.new(cmd, from: "chris")
...> %{id: event.id, from: event.from, version: event.version}
%{id: 668, from: "chris", version: 2}
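
  Fields derived via `:from` can also be dropped (illustrative):

      defmodule UserAnonymized do
        use Cqrs.DomainEvent, from: DeleteUser, drop: [:id]
      end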
"""
alias Cqrs.{DomainEvent, Guards}
defmacro __using__(opts) do
create_jason_encoders = Application.get_env(:cqrs_tools, :create_jason_encoders, true)
quote generated: true, location: :keep do
version = Keyword.get(unquote(opts), :version, 1)
explicit_keys = DomainEvent.explicit_keys(unquote(opts))
inherited_keys = DomainEvent.inherit_keys(unquote(opts))
keys =
Keyword.merge(inherited_keys, explicit_keys, fn
_key, nil, nil -> nil
_key, nil, value -> value
_key, value, nil -> value
end)
if unquote(create_jason_encoders) and Code.ensure_loaded?(Jason), do: @derive(Jason.Encoder)
defstruct keys
|> DomainEvent.drop_keys(unquote(opts))
|> Kernel.++([{:created_at, nil}, {:version, version}])
def new(source \\ [], attrs \\ []) do
DomainEvent.new(__MODULE__, source, attrs)
end
end
end
@doc false
def drop_keys(keys, opts) do
keys_to_drop = Keyword.get(opts, :drop, []) |> List.wrap()
Enum.reject(keys, fn
{:__struct__, _} -> true
{:created_at, _} -> true
{name, _default_value} -> Enum.member?(keys_to_drop, name)
name -> Enum.member?(keys_to_drop, name)
end)
end
@doc false
def inherit_keys(opts) do
case Keyword.get(opts, :from) do
nil ->
[]
source when is_atom(source) ->
Guards.ensure_is_struct!(source)
source
|> struct()
|> Map.to_list()
      source ->
        raise ArgumentError,
              "#{inspect(source)} should be a valid struct to use with DomainEvent"
end
end
@doc false
def explicit_keys(opts) do
opts
|> Keyword.get(:with, [])
|> List.wrap()
|> Enum.map(fn
field when is_tuple(field) -> field
field when is_atom(field) or is_binary(field) -> {field, nil}
end)
end
@doc false
def new(module, source, attrs) when is_atom(module) do
fields =
source
|> normalize()
|> Map.merge(normalize(attrs))
|> Map.put(:created_at, Cqrs.Clock.utc_now(module))
struct(module, fields)
end
defp normalize(values) when is_struct(values), do: Map.from_struct(values)
defp normalize(values) when is_map(values), do: values
defp normalize(values) when is_list(values), do: Enum.into(values, %{})
end | lib/cqrs/domain_event.ex | 0.845017 | 0.555978 | domain_event.ex | starcoder |
defmodule Faker.Cat.PtBr do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for Cat names and breeds in Brazilian Portuguese
"""
@doc """
Returns a Cat female name string
## Examples
iex> Faker.Cat.PtBr.female_name()
"Samy"
iex> Faker.Cat.PtBr.female_name()
"Linda"
iex> Faker.Cat.PtBr.female_name()
"Úrsula"
iex> Faker.Cat.PtBr.female_name()
"Florinda"
"""
@spec female_name() :: String.t()
sampler(:female_name, [
"Amber",
"Amelie",
"Amora",
"Amy",
"Ariel",
"Babi",
"Barbie",
"Bombom",
"Cacau",
"Charlotte",
"Chiquinha",
"Cindy",
"Cristal",
"Dalila",
"Dama",
"Dora",
"Dori",
"Estrela",
"Felícia",
"Fibi",
"Filipa",
"Filomena",
"Filó",
"Fiona",
"Florinda",
"Florisbela",
"Fofuxa",
"Frida",
"Gaia",
"Gertrudes",
"Gina",
"Hazel",
"Jabuticaba",
"Jade",
"Jasmin",
"Kaila",
"Kibana",
"Kim",
"Kindy",
"Lila",
"Lili",
"Linda",
"Lizi",
"Lola",
"Lolita",
"Lua",
"Lulu",
"Luna",
"Luzi",
"Madonna",
"Mafalda",
"Magali",
"Malu",
"Mel",
"Merida",
"Mia",
"Mica",
"Mimi",
"Moana",
"Moli",
"Nala",
"Nanny",
"Nairóbi",
"Nikita",
"Nina",
"Pandora",
"Paçoca",
"Pipoca",
"Pituca",
"Safira",
"Samy",
"Sandi",
"Selena",
"Soneca",
"Tina",
"Úrsula",
"Vanellope",
"Wendy",
"Xica",
"Zoe"
])
@doc """
Returns a Cat male name string
## Examples
iex> Faker.Cat.PtBr.male_name()
"Soneca"
iex> Faker.Cat.PtBr.male_name()
"Loui"
iex> Faker.Cat.PtBr.male_name()
"Ton"
iex> Faker.Cat.PtBr.male_name()
"Dante"
"""
@spec male_name() :: String.t()
sampler(
:male_name,
[
"Aladim",
"Algodão",
"Apolo",
"Amendoim",
"Amendupã",
"Aristóteles",
"Bambi",
"Banguela",
"Bartolomeu",
"Batman",
"Bigode",
"Biscoito",
"Bob",
"Bolota",
"Bombom",
"Boris",
"Boyle",
"Brutus",
"Cadu",
"Calvin",
"Chewie",
"Chico",
"Clemente",
"Clovis",
"Dante",
"Elvis",
"Fidélix",
"Frajola",
"Fred",
"Freud",
"Félix",
"Galeão",
"Garfield",
"Genóvio",
"Gepeto",
"Holt",
"Homer",
"Joca",
"Joey",
"Juca",
"Justin",
"Loui",
"Malvin",
"Merlin",
"Mingau",
"Naruto",
"Nemo",
"Nicolau",
"Nilo",
"Nino",
"Olaf",
"Oliver",
"Oreo",
"Oliver",
"Peralta",
"Peter",
"Picasso",
"Pingo",
"Pipoca",
"Pirulito",
"Platão",
"Pluma",
"Pomposo",
"Pongo",
"Romeu",
"Ross",
"Ruffus",
"Russo",
"Simba",
"Singer",
"Soneca",
"Spike",
"Tenente",
"Thor",
"Tommy",
"Ton",
"Tonico",
"Tufão",
"Venon",
"Yoda"
]
)
@doc """
Returns a Cat breed string
## Examples
iex> Faker.Cat.PtBr.breed()
"<NAME>"
iex> Faker.Cat.PtBr.breed()
"<NAME>"
iex> Faker.Cat.PtBr.breed()
"Pelo Curto Brasileiro"
iex> Faker.Cat.PtBr.breed()
"Pelo Curto Americano"
"""
@spec breed() :: String.t()
sampler(:breed, [
"<NAME>",
"<NAME>",
"Bombaim",
"Himalaio",
"Pelo Curto Americano",
"Pelo Curto Brasileiro",
"Pelo Curto Europeu",
"Persa",
"Siamês",
"Vira-lata"
])
end | lib/faker/cat/pt_br.ex | 0.602763 | 0.435721 | pt_br.ex | starcoder |
defmodule Solana.Key do
@moduledoc """
Functions for creating and validating Solana
[keys](https://docs.solana.com/terminology#public-key-pubkey) and
[keypairs](https://docs.solana.com/terminology#keypair).
"""
@typedoc "Solana public or private key"
@type t :: Ed25519.key()
@typedoc "a public/private keypair"
@type pair :: {t(), t()}
@spec pair() :: pair
@doc """
Generates a public/private key pair in the format `{private_key, public_key}`
"""
defdelegate pair, to: Ed25519, as: :generate_key_pair
@doc """
Reads a public/private key pair from a [file system
wallet](https://docs.solana.com/wallet-guide/file-system-wallet) in the format
`{private_key, public_key}`. Returns `{:ok, pair}` if successful, or `{:error,
reason}` if not.
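A usage sketch (the path below is illustrative):
{:ok, {_secret_key, public_key}} = Solana.Key.pair_from_file("/path/to/id.json")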
"""
@spec pair_from_file(String.t()) :: {:ok, pair} | {:error, term}
def pair_from_file(path) do
with {:ok, contents} <- File.read(path),
{:ok, list} when is_list(list) <- Jason.decode(contents),
<<sk::binary-size(32), pk::binary-size(32)>> <- :erlang.list_to_binary(list) do
{:ok, {sk, pk}}
else
{:error, _} = error -> error
_contents -> {:error, "invalid wallet format"}
end
end
@doc """
Decodes a base58-encoded key, returning it in an `{:ok, key}` tuple.
If decoding fails, returns an error tuple.
"""
@spec decode(encoded :: binary) :: {:ok, t} | {:error, binary}
def decode(encoded) when is_binary(encoded) do
case B58.decode58(encoded) do
{:ok, decoded} -> check(decoded)
_ -> {:error, "invalid public key"}
end
end
def decode(_), do: {:error, "invalid public key"}
@doc """
Decodes a base58-encoded key and returns it.
Raises an `ArgumentError` if it fails.
"""
@spec decode!(encoded :: binary) :: t
def decode!(encoded) when is_binary(encoded) do
case decode(encoded) do
{:ok, key} ->
key
{:error, _} ->
raise ArgumentError, "invalid public key input: #{encoded}"
end
end
@doc """
Checks to see if a `t:Solana.Key.t/0` is valid.
"""
@spec check(key :: binary) :: {:ok, t} | {:error, binary}
def check(key)
def check(<<key::binary-32>>), do: {:ok, key}
def check(_), do: {:error, "invalid public key"}
@doc """
Derive a public key from another key, a seed, and a program ID.
The program ID will also serve as the owner of the public key, giving it
permission to write data to the account.
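A usage sketch (`base_pubkey` and `program_id` are placeholder keys):
{:ok, derived} = Solana.Key.with_seed(base_pubkey, "stake:0", program_id)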
"""
@spec with_seed(base :: t, seed :: binary, program_id :: t) ::
{:ok, t} | {:error, binary}
def with_seed(base, seed, program_id) do
with {:ok, base} <- check(base),
{:ok, program_id} <- check(program_id) do
[base, seed, program_id]
|> hash()
|> check()
else
err -> err
end
end
@doc """
Derives a program address from seeds and a program ID.
"""
@spec derive_address(seeds :: [binary], program_id :: t) ::
{:ok, t} | {:error, term}
def derive_address(seeds, program_id) do
with {:ok, program_id} <- check(program_id),
true <- Enum.all?(seeds, &is_valid_seed?/1) do
[seeds, program_id, "ProgramDerivedAddress"]
|> hash()
|> verify_off_curve()
else
err = {:error, _} -> err
false -> {:error, :invalid_seeds}
end
end
defp is_valid_seed?(seed) do
(is_binary(seed) && byte_size(seed) <= 32) || seed in 0..255
end
defp hash(data), do: :crypto.hash(:sha256, data)
defp verify_off_curve(hash) do
if Ed25519.on_curve?(hash), do: {:error, :invalid_seeds}, else: {:ok, hash}
end
@doc """
Finds a valid program address.
Valid addresses must fall off the ed25519 curve; generate a series of nonces,
then combine each one with the given seeds and program ID until a valid
address is found. If a valid address is found, return the address and the
nonce in a tuple. Otherwise, return an error tuple.
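A usage sketch (the seeds and `program_id` are placeholders):
{:ok, _address, _nonce} = Solana.Key.find_address(["vault", owner_pubkey], program_id)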
"""
@spec find_address(seeds :: [binary], program_id :: t) ::
{:ok, t, nonce :: byte} | {:error, :no_nonce}
def find_address(seeds, program_id) do
case check(program_id) do
{:ok, program_id} ->
Enum.reduce_while(255..1, {:error, :no_nonce}, fn nonce, acc ->
case derive_address(List.flatten([seeds, nonce]), program_id) do
{:ok, address} -> {:halt, {:ok, address, nonce}}
_err -> {:cont, acc}
end
end)
error ->
error
end
end
end | lib/solana/key.ex | 0.840701 | 0.590573 | key.ex | starcoder |
defmodule Scenic.Component do
@moduledoc """
A Component is simply a Scene that is optimized to be referenced by another scene.
All you need to do to create a Component is call
use Scenic.Component
instead of
use Scenic.Scene
At the top of your module definition.
## Standard Components
Scenic includes a small number of standard components that you can simply reuse in your
scenes. These were chosen to be in the main library because a) they are used frequently,
and b) their use promotes a certain amount of "common" look and feel.
All of these components are typically added/modified via the helper functions in the
[`Scenic.Components`](Scenic.Components.html) module.
* [`Button`](Scenic.Component.Button.html) a simple button.
* [`Checkbox`](Scenic.Component.Input.Checkbox.html) a checkbox input field.
* [`Dropdown`](Scenic.Component.Input.Dropdown.html) a dropdown / select input field.
* [`RadioGroup`](Scenic.Component.Input.RadioGroup.html) a group of radio button inputs.
* [`Slider`](Scenic.Component.Input.Slider.html) a slider input.
* [`TextField`](Scenic.Component.Input.TextField.html) a text / password input field.
* [`Toggle`](Scenic.Component.Input.Toggle.html) an on/off toggle input.
## Other Components
For completeness, Scenic also includes the following standard components. They are used
by the components above, although you are free to use them as well if they fit your needs.
* [`Caret`](Scenic.Component.Input.Caret.html) the vertical, blinking, caret line in a text field.
* [`RadioButton`](Scenic.Component.Input.RadioButton.html) a single radio button in a radio group.
## Verifiers
One of the main differences between a Component and a Scene is the two extra callbacks
that are used to verify incoming data. Since Components are meant to be reused, you
should do some basic validation of the data being passed in, then provide
feedback if it isn't valid.
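For example, a component that expects a string could implement `verify/1` along these lines (a minimal sketch):
def verify(text) when is_binary(text), do: {:ok, text}
def verify(_), do: :invalid_data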
## Optional: No Children
There is an optimization you can use. If you know for certain that your component
will not attempt to use any components, you can set `has_children` to `false` like this.
use Scenic.Component, has_children: false
Setting `has_children` to `false` will do two things. First, it won't create
a dynamic supervisor for this scene, which saves some resources. Second,
`push_graph/1` goes through a fast pass that doesn't scan the graph for dynamic children.
For example, the Button component sets `has_children` to `false`.
This option is available for any Scene, not just components.
"""
alias Scenic.Primitive
@callback add_to_graph(map, any, list) :: map
@callback verify(any) :: any
@callback info(data :: any) :: String.t()
# import IEx
# ===========================================================================
defmodule Error do
@moduledoc false
defexception message: nil, error: nil, data: nil
end
# ===========================================================================
defmacro __using__(opts) do
quote do
@behaviour Scenic.Component
use Scenic.Scene, unquote(opts)
@spec add_to_graph(graph :: Scenic.Graph.t(), data :: any, opts :: list) :: Scenic.Graph.t()
def add_to_graph(graph, data \\ nil, opts \\ [])
def add_to_graph(%Scenic.Graph{} = graph, data, opts) do
verify!(data)
Primitive.SceneRef.add_to_graph(graph, {__MODULE__, data}, opts)
end
@doc false
@spec info(data :: any) :: String.t()
def info(data) do
"""
#{inspect(__MODULE__)} invalid add_to_graph data
Received: #{inspect(data)}
"""
end
@doc false
@spec verify!(data :: any) :: any
def verify!(data) do
case verify(data) do
{:ok, data} -> data
err -> raise Error, message: info(data), error: err, data: data
end
end
# --------------------------------------------------------
defoverridable add_to_graph: 3,
info: 1
end
# quote
end
# defmacro
end | lib/scenic/component.ex | 0.882671 | 0.64461 | component.ex | starcoder |
defmodule Tensorex.Operator do
@moduledoc """
Functions for basic arithmetic operations with tensors.
"""
@doc """
Adds two tensors.
iex> Tensorex.Operator.add(
...> Tensorex.from_list([[0, 1 , 2 ],
...> [3, -4 , -5.5]]),
...> Tensorex.from_list([[3, -2 , -2 ],
...> [6, -8.1, 12 ]]))
%Tensorex{data: %{[0, 0] => 3, [0, 1] => -1,
[1, 0] => 9, [1, 1] => -12.1, [1, 2] => 6.5}, shape: [2, 3]}
iex> Tensorex.Operator.add(
...> Tensorex.from_list([[0 , 1 , 2 ],
...> [3 , -4 , -5.5]]),
...> Tensorex.from_list([[0.0, -1 , -2 ],
...> [6 , -8.1, 12 ]]))
%Tensorex{data: %{[1, 0] => 9, [1, 1] => -12.1, [1, 2] => 6.5}, shape: [2, 3]}
iex> Tensorex.Operator.add(
...> Tensorex.from_list([[ 0, 6],
...> [-3, 0]]),
...> Tensorex.from_list([[ 8, 0],
...> [ 0, 9]]))
%Tensorex{data: %{[0, 0] => 8, [0, 1] => 6,
[1, 0] => -3, [1, 1] => 9}, shape: [2, 2]}
"""
@spec add(Tensorex.t(), Tensorex.t()) :: Tensorex.t()
def add(%Tensorex{data: left, shape: shape} = tensor, %Tensorex{data: right, shape: shape}) do
{small_store, large_store} =
if map_size(left) < map_size(right), do: {left, right}, else: {right, left}
new_store =
Enum.reduce(small_store, large_store, fn {index, value2}, acc ->
case Map.fetch(acc, index) do
{:ok, value1} when value1 + value2 == 0 -> Map.delete(acc, index)
{:ok, value1} -> Map.put(acc, index, value1 + value2)
:error -> Map.put(acc, index, value2)
end
end)
%{tensor | data: new_store}
end
@doc """
Subtracts a tensor from another.
iex> Tensorex.Operator.subtract(
...> Tensorex.from_list([[0, 1, 2], [3, -4, -5.5]]),
...> Tensorex.from_list([[3, -2, -2], [6, -8.1, 12 ]]))
%Tensorex{data: %{[0, 0] => -3, [0, 1] => 3 , [0, 2] => 4 ,
[1, 0] => -3, [1, 1] => 4.1, [1, 2] => -17.5}, shape: [2, 3]}
iex> Tensorex.Operator.subtract(
...> Tensorex.from_list([[0, 1, 2], [3, -4, -5.5]]),
...> Tensorex.from_list([[0.0, 1, 2], [6, -8.1, 12 ]]))
%Tensorex{data: %{[1, 0] => -3, [1, 1] => 4.1, [1, 2] => -17.5}, shape: [2, 3]}
"""
@spec subtract(Tensorex.t(), Tensorex.t()) :: Tensorex.t()
def subtract(%Tensorex{data: left, shape: shape} = tensor, %Tensorex{data: right, shape: shape}) do
new_store =
Enum.reduce(right, left, fn {index, value2}, acc ->
case Map.fetch(acc, index) do
{:ok, value1} when value1 - value2 == 0 -> Map.delete(acc, index)
{:ok, value1} -> Map.put(acc, index, value1 - value2)
:error -> Map.put(acc, index, -value2)
end
end)
%{tensor | data: new_store}
end
@doc """
Negates a tensor.
iex> Tensorex.Operator.negate(
...> Tensorex.from_list([[ 2 , 3.5, -4 , 0 ],
...> [-2.2, 6 , 0.0, 5.5]]))
%Tensorex{data: %{[0, 0] => -2 , [0, 1] => -3.5, [0, 2] => 4,
[1, 0] => 2.2, [1, 1] => -6 , [1, 3] => -5.5}, shape: [2, 4]}
"""
@spec negate(Tensorex.t()) :: Tensorex.t()
def negate(%Tensorex{data: store} = tensor) do
%{tensor | data: Enum.into(store, %{}, fn {index, value} -> {index, -value} end)}
end
@doc """
Makes a product of tensors.
If both arguments are tensors, it returns a tensor product of them. When one of the arguments is a
`t:number/0`, all elements of the tensor are scaled by that scalar.
iex> Tensorex.Operator.multiply(
...> Tensorex.from_list([2, 5.2, -4 , 0 ]),
...> Tensorex.from_list([2, 3.5, -1.6, 8.2]))
%Tensorex{data: %{[0, 0] => 4 , [0, 1] => 7.0, [0, 2] => -3.2 , [0, 3] => 16.4 ,
[1, 0] => 10.4, [1, 1] => 18.2, [1, 2] => -8.32, [1, 3] => 42.64,
[2, 0] => -8 , [2, 1] => -14.0, [2, 2] => 6.4 , [2, 3] => -32.8}, shape: [4, 4]}
iex> Tensorex.Operator.multiply(3.5,
...> Tensorex.from_list([[2 , 3.5, -1.5, 8.0],
...> [4.12, -2 , 1 , 0 ]]))
%Tensorex{data: %{[0, 0] => 7.0 , [0, 1] => 12.25, [0, 2] => -5.25, [0, 3] => 28.0,
[1, 0] => 14.42, [1, 1] => -7.0 , [1, 2] => 3.5 }, shape: [2, 4]}
"""
@spec multiply(Tensorex.t() | number, Tensorex.t() | number) :: Tensorex.t()
def multiply(
%Tensorex{data: left_store, shape: left_shape},
%Tensorex{data: right_store, shape: right_shape}
) do
new_store =
Stream.map(left_store, fn {left_index, left_value} ->
Stream.map(right_store, fn {right_index, right_value} ->
{left_index ++ right_index, left_value * right_value}
end)
end)
|> Stream.concat()
|> Enum.into(%{})
%Tensorex{data: new_store, shape: left_shape ++ right_shape}
end
def multiply(scalar, %Tensorex{data: store} = tensor) when is_number(scalar) do
%{tensor | data: Enum.into(store, %{}, fn {index, value} -> {index, scalar * value} end)}
end
def multiply(%Tensorex{data: store} = tensor, scalar) when is_number(scalar) do
%{tensor | data: Enum.into(store, %{}, fn {index, value} -> {index, scalar * value} end)}
end
@doc """
Makes a dot product of tensors.
Components specified by the `axes` will be summed up (or contracted).
iex> Tensorex.Operator.multiply(
...> Tensorex.from_list([0, 0.0, 0.0, 0 ]),
...> Tensorex.from_list([2, 3.5, -1.6, 8.2]), [{0, 0}])
0.0
iex> Tensorex.Operator.multiply(
...> Tensorex.from_list([[2 , 3.5, -1.6, 8.2],
...> [1.1, 3.0, 8 , -12.1]]),
...> Tensorex.from_list([[0 , 0.0],
...> [0.0, 0 ],
...> [0.0, 0 ],
...> [0 , 0 ]]), [{0, 1}, {1, 0}])
0.0
iex> Tensorex.Operator.multiply(
...> Tensorex.from_list([2, 5.2, -4 , 0 ]),
...> Tensorex.from_list([2, 3.5, -1.6, 8.2]), [{0, 0}])
28.6
iex> Tensorex.Operator.multiply(
...> Tensorex.from_list([[ 2 , 5.5, -4 , 0 ],
...> [ 4.12, -2 , 1 , 0 ]]),
...> Tensorex.from_list([[ 2 , 3.5],
...> [-1.6 , 8.2],
...> [ 2 , -3.5],
...> [-1.5 , 8.0]]), [{0, 1}])
%Tensorex{data: %{[0, 0] => 18.42, [0, 1] => 30.584, [0, 2] => -10.42, [0, 3] => 29.96,
[1, 0] => 4.0 , [1, 1] => -25.2 , [1, 2] => 18.0 , [1, 3] => -24.25,
[2, 0] => -4.5 , [2, 1] => 14.6 , [2, 2] => -11.5 , [2, 3] => 14.0 }, shape: [4, 4]}
"""
@spec multiply(Tensorex.t(), Tensorex.t(), [{non_neg_integer, non_neg_integer}]) ::
Tensorex.t() | number
def multiply(
%Tensorex{data: left, shape: left_shape},
%Tensorex{data: right, shape: right_shape},
axes
)
when is_list(axes) do
shape =
Enum.reduce(axes, [left_shape, right_shape], fn
{left_axis, right_axis}, [left_acc, right_acc] ->
[List.replace_at(left_acc, left_axis, nil), List.replace_at(right_acc, right_axis, nil)]
end)
|> Stream.concat()
|> Enum.filter(& &1)
{left_axes, right_axes} = Enum.unzip(axes)
left_group = group_by_contraction(left, left_axes)
right_group = group_by_contraction(right, right_axes)
store = multiply_with_contraction(left_group, right_group, length(axes))
cond do
Enum.empty?(store) and shape == [] ->
0.0
shape == [] ->
Enum.fetch!(store, 0) |> elem(1)
true ->
%Tensorex{data: store |> Enum.into(%{}), shape: shape}
end
end
@typep contraction_map ::
%{non_neg_integer => contraction_map} | %{optional([non_neg_integer, ...]) => number}
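# Recursively groups tensor components by their indices along the contracted
# axes, producing nested maps keyed by those indices; the innermost maps key
# the remaining (free) indices to their values.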
@spec group_by_contraction(Enum.t(), [{non_neg_integer, non_neg_integer}]) :: contraction_map
defp group_by_contraction(elements, []) do
Enum.into(elements, %{}, fn {index, value} -> {Enum.filter(index, & &1), value} end)
end
defp group_by_contraction(store, [axis | axes]) do
Enum.group_by(
store,
fn {index, _} -> Enum.fetch!(index, axis) end,
fn {index, value} -> {List.replace_at(index, axis, nil), value} end
)
|> Enum.into(%{}, fn {grouped_axis, elements} ->
{grouped_axis, group_by_contraction(elements, axes)}
end)
end
@spec multiply_with_contraction(Enum.t(), Enum.t(), non_neg_integer) :: Enum.t()
defp multiply_with_contraction(left, right, 0) do
Stream.map(left, fn {left_index, left_value} ->
Stream.map(right, fn {right_index, right_value} ->
{left_index ++ right_index, left_value * right_value}
end)
end)
|> Stream.concat()
end
defp multiply_with_contraction(left, right, depth) do
Stream.map(left, fn {contract_index, left_elements} ->
case Map.fetch(right, contract_index) do
{:ok, right_elements} ->
multiply_with_contraction(left_elements, right_elements, depth - 1)
:error ->
[]
end
end)
|> Stream.concat()
|> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
|> Stream.flat_map(fn {index, values} ->
case Enum.sum(values) do
value when value == 0 -> []
value -> [{index, value}]
end
end)
end
@doc """
Returns a transposed tensor.
iex> Tensorex.Operator.transpose(
...> Tensorex.from_list([[[ 2 , 5.5, -4, 0 ],
...> [ 4.12, -2 , 1, 0 ]],
...> [[ 3 , 1.2, 5, 8.9],
...> [ 1 , 6 , 7, 1.3]]]), [{0, 2}])
%Tensorex{data: %{[0, 0, 0] => 2 , [0, 0, 1] => 3 ,
[0, 1, 0] => 4.12, [0, 1, 1] => 1 ,
[1, 0, 0] => 5.5 , [1, 0, 1] => 1.2,
[1, 1, 0] => -2 , [1, 1, 1] => 6 ,
[2, 0, 0] => -4 , [2, 0, 1] => 5 ,
[2, 1, 0] => 1 , [2, 1, 1] => 7 ,
[3, 0, 1] => 8.9,
[3, 1, 1] => 1.3}, shape: [4, 2, 2]}
"""
@spec transpose(Tensorex.t(), [{non_neg_integer, non_neg_integer}, ...]) :: Tensorex.t()
def transpose(%Tensorex{data: store, shape: shape}, axes) when is_list(axes) do
new_store =
Enum.into(store, %{}, fn {index, value} ->
new_index =
Enum.reduce(axes, index, fn {left_axis, right_axis}, acc ->
acc
|> List.replace_at(left_axis, Enum.fetch!(acc, right_axis))
|> List.replace_at(right_axis, Enum.fetch!(acc, left_axis))
end)
{new_index, value}
end)
new_shape =
Enum.reduce(axes, shape, fn {left_axis, right_axis}, acc ->
acc
|> List.replace_at(left_axis, Enum.fetch!(acc, right_axis))
|> List.replace_at(right_axis, Enum.fetch!(acc, left_axis))
end)
%Tensorex{data: new_store, shape: new_shape}
end
@doc """
Divides all elements of the tensor by the scalar.
iex> Tensorex.Operator.divide(
...> Tensorex.from_list([[2 , 3.5, -1.6, 8.2],
...> [1.1, 3.0, 0 , -12.1]]), 4)
%Tensorex{data: %{[0, 0] => 0.5 , [0, 1] => 0.875, [0, 2] => -0.4, [0, 3] => 2.05 ,
[1, 0] => 0.275, [1, 1] => 0.75 , [1, 3] => -3.025}, shape: [2, 4]}
"""
@spec divide(Tensorex.t(), number) :: Tensorex.t()
def divide(%Tensorex{data: store} = tensor, scalar) when is_number(scalar) do
%{tensor | data: Enum.into(store, %{}, fn {index, value} -> {index, value / scalar} end)}
end
@doc """
Returns the determinant of the given tensor.
iex> Tensorex.Operator.determinant(
...> Tensorex.from_list([[13, 1, 2, 3],
...> [ 4, 14, 5, 6],
...> [ 7, 8, 15, 9],
...> [10, 11, 12, 16]])
...> )
14416
iex> Tensorex.Operator.determinant(
...> Tensorex.from_list([[0, 0],
...> [0, 0]])
...> )
0
iex> Tensorex.Operator.determinant(
...> Tensorex.from_list([[2.5, 0 , 0],
...> [0 , 1.8, 0],
...> [0 , 0 , 3]])
...> )
13.5
iex> Tensorex.Operator.determinant(
...> Tensorex.from_list([[[13, 1, 2, 3],
...> [ 4, 14, 5, 6],
...> [ 7, 8, 15, 9],
...> [10, 11, 12, 16]],
...> [[33, 21, 22, 23],
...> [24, 34, 25, 26],
...> [27, 28, 35, 29],
...> [30, 31, 32, 36]],
...> [[53, 41, 42, 43],
...> [44, 54, 45, 46],
...> [47, 48, 55, 49],
...> [50, 51, 52, 56]],
...> [[73, 61, 62, 63],
...> [64, 74, 65, 66],
...> [67, 68, 75, 69],
...> [70, 71, 72, 76]]])
...> )
1567104
"""
@spec determinant(Tensorex.t()) :: number
def determinant(%Tensorex{data: store, shape: [dimension | _]}) do
e = Tensorex.permutation(dimension)
Map.keys(store)
|> List.duplicate(dimension)
|> Stream.with_index()
|> Stream.map(fn {indices, first_index} ->
Stream.filter(indices, &(List.first(&1) === first_index))
end)
|> Enum.reduce([[]], fn indices, acc ->
Stream.map(acc, fn acc_indices ->
Stream.reject(indices, fn index ->
Enum.any?(acc_indices, fn acc_index ->
Stream.zip(acc_index, index) |> Enum.any?(&(elem(&1, 0) === elem(&1, 1)))
end)
end)
|> Stream.map(fn index ->
[index | acc_indices]
end)
end)
|> Stream.concat()
end)
|> Stream.map(fn indices ->
Stream.zip(indices)
|> Stream.map(&Tuple.to_list/1)
|> Stream.map(&e[&1])
|> Stream.concat(Stream.map(indices, &store[&1]))
|> Enum.reduce(&(&1 * &2))
end)
|> Enum.sum()
end
@doc """
Performs a self contraction on the given tensor.
It is known as the trace for 2nd rank tensors.
iex> Tensorex.Operator.contract(
...> Tensorex.from_list([[1, 2, 3],
...> [4, 5, 6],
...> [7, 8, 9]]), [0, 1])
15
iex> Tensorex.Operator.contract(
...> Tensorex.from_list([[[1, 2, 3],
...> [4, 5, 6],
...> [7, 8, 9]],
...> [[2, 3, 4],
...> [5, 6, 7],
...> [8, 9, 1]],
...> [[3, 4, 5],
...> [6, 7, 8],
...> [9, 1, 2]]]), [0, 2])
%Tensorex{data: %{[0] => 9, [1] => 18, [2] => 18}, shape: [3]}
"""
@spec contract(Tensorex.t(), [non_neg_integer]) :: Tensorex.t() | number
def contract(%Tensorex{data: store, shape: shape}, axes) when is_list(axes) do
Stream.flat_map(store, fn {index, value} ->
{contract_indices, remaining_indices} =
Stream.with_index(index) |> Enum.split_with(&(elem(&1, 1) in axes))
if Stream.uniq_by(contract_indices, &elem(&1, 0)) |> Stream.drop(1) |> Enum.empty?() do
[{Enum.map(remaining_indices, &elem(&1, 0)), value}]
else
[]
end
end)
|> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
|> Enum.into(%{}, fn {index, values} -> {index, Enum.sum(values)} end)
|> case do
%{[] => value} ->
value
new_store ->
new_shape = Enum.reduce(axes, shape, &List.replace_at(&2, &1, nil)) |> Enum.filter(& &1)
%Tensorex{data: new_store, shape: new_shape}
end
end
end | lib/tensorex/operator.ex | 0.727782 | 0.661445 | operator.ex | starcoder |
defmodule Scenic.Scrollable.Components do
alias Scenic.Graph
alias Scenic.Scrollable
alias Scenic.Scrollable.ScrollBars
alias Scenic.Scrollable.ScrollBar
alias Scenic.Primitive
alias Scenic.Primitive.SceneRef
@moduledoc """
This module contains helper functions for adding scrollable components to, or modifying scrollable components in a graph.
Using the `Scenic.Scrollable` component will set up scrollbars and controls for you, and is recommended. However, in special cases it might be preferable to directly use a `Scenic.Scrollable.ScrollBars` or `Scenic.Scrollable.ScrollBar` component.
"""
@doc """
Add a `Scenic.Scrollable` to a graph.
The `Scenic.Scrollable` component offers a way to show part of a content group bounded by a fixed rectangle or frame, and change the visible part of the content without displacing the bounded rectangle by scrolling.
The scrollable component offers three ways to scroll, which can be used in conjunction:
- The content can be clicked and dragged directly using a mouse.
- Hotkeys can be set for up, down, left and right scroll directions.
- A horizontal and a vertical scroll bar can be set up.
Note that for the hotkeys to work, the scrollable component first has to gain focus, which is done by clicking it once with the left mouse button.
## Data
`t:Scenic.Scrollable.settings/0`
To initialize a scrollable component, a map containing `frame` and `content` elements, and a builder function are required. Further customization can be provided with optional styles.
### Frame
The frame contains information about the size of the fixed rectangle shaped bounding box. It is a tuple containing the width as first element, and height as second element.
### Content
The content contains information about the size and offset of the content. The offset can be used to adjust the limits of where the content can be scrolled to, and can, for example, be used when the content position looks off in its {0, 0} starting position. If no offset is required, the content can be passed as a tuple containing the width as first element, and height as second element. If an offset is used, the content can be passed as a `t:Scenic.Scrollable.rect/0`, which is a map containing `x`, `y`, `width` and `height` elements.
## Builder
`t:Scenic.Scrollable.builder/0`
In addition to the required data, a scrollable component requires a builder, similar to the `Scenic.Primitive.Group` primitive. The builder is a function that takes a graph, and should return a graph with the necessary components attached to it that form the content of the scrollable component.
## Styles
`t:Scenic.Scrollable.styles/0`
Similar to the `Scenic.Primitive.Group` primitive, any style can be passed to the scrollable component, which will be passed on to the underlying components. In addition, the following styles specific to the scrollable component can be provided.
### scroll_position
`t:Scenic.Scrollable.v2/0`
The starting position of the scrollable content. This does not influence the limits to where the content can be scrolled to.
### scroll_acceleration
`t:Scenic.Scrollable.Acceleration.settings/0`
Settings regarding sensitivity of the scroll functionality. The settings are passed in a map with the following elements:
- acceleration: number
- mass: number
- counter_pressure: number
The higher the acceleration value, the faster the scroll movement gains speed. The default value is 20.
The higher the mass value, the slower the scroll movement gains speed, and the faster it loses speed. The default value is 1.
The higher the counter_pressure value, the lower the maximum scroll speed, and the faster the scroll movement loses speed after user input has stopped. The default value is 0.1.
### scroll_hotkeys
`t:Scenic.Scrollable.Hotkeys.settings/0`
A hotkey can be provided for every scroll direction to enable scrolling using the keyboard. The hotkey settings can be passed in a map with the following elements.
- up: `t:String.t/0`
- down: `t:String.t/0`
- left: `t:String.t/0`
- right: `t:String.t/0`
The passed string can be the letter of the intended key, such as "w" or "s", or the description of a special key, such as the arrow keys "up", "down", "left" or "right".
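For instance, WASD-style scrolling could be configured as follows:
scroll_hotkeys: %{up: "w", down: "s", left: "a", right: "d"}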
### scroll_fps
number
Specifies the times per second the scroll content position is recalculated when it is scrolling. For environments with limited resources, it might be prudent to set a lower value than the default 30.
### scroll_drag
`t:Scenic.Scrollable.Drag.settings/0`
Options for enabling scrolling by directly dragging the content using a mouse. Button events on the scrollable content will take precedence over the drag functionality. Drag settings are passed in a map with the following elements:
- mouse_buttons: [`t:Scenic.Scrollable.Drag.mouse_button/0`]
The list of mouse buttons specifies with which mouse button the content can be dragged. Available mouse buttons are `:left`, `:right` and `:middle`. By default, the drag functionality is disabled.
### scroll_bar_thickness
number
Specify the thickness of both scroll bars.
### scroll_bar
`t:Scenic.Scrollable.ScrollBar.styles/0`
Specify the styles for both horizontal and vertical scroll bars. If different styles for each scroll bar are desired, use the `vertical_scroll_bar` and `horizontal_scroll_bar` options instead. The following styles are supported:
- scroll_buttons: boolean
- scroll_bar_theme: map
- scroll_bar_radius: number
- scroll_bar_border: number
- scroll_drag: `t:Scenic.Scrollable.Drag.settings/0`
The scroll_buttons boolean can be used to specify whether the scroll bar should contain buttons for scrolling, in addition to the scroll bar slider. The scroll buttons are not shown by default.
A theme can be passed using the scroll_bar_theme element to provide a set of colors for the scroll bar. For more information on themes, see the `Scenic.Primitive.Style.Theme` module. The default theme is `:light`.
The scroll bar's rounding and border can be adjusted using the scroll_bar_radius and scroll_bar_border elements respectively. The default values are 3 and 1.
The scroll_drag settings can be provided in the same form as the scrollable component's scroll_drag style, and can be used to specify by which mouse button the scroll bar slider can be dragged. By default, the `:left`, `:right` and `:middle` buttons are all enabled.
### horizontal_scroll_bar
`t:Scenic.Scrollable.ScrollBar.styles/0`
Specify styles for the horizontal scroll bar only. The available styles are exactly the same as explained in the above scroll_bar style section.
### vertical_scroll_bar
`t:Scenic.Scrollable.ScrollBar.styles/0`
Specify styles for the vertical scroll bar only. The available styles are exactly the same as explained in the above scroll_bar style section.
## Examples
iex> graph = Scenic.Scrollable.Components.scrollable(
...> Scenic.Graph.build(),
...> %{
...> frame: {200, 400},
...> content: %{x: 0, y: 10, width: 400, height: 800}
...> },
...> fn graph ->
...> Scenic.Primitives.text(graph, "scrollable text")
...> end,
...> [id: :scrollable_component_1]
...> )
...> graph.primitives[1].id
:scrollable_component_1
iex> graph = Scenic.Scrollable.Components.scrollable(
...> Scenic.Graph.build(),
...> %{
...> frame: {200, 400},
...> content: %{x: 0, y: 10, width: 400, height: 800}
...> },
...> fn graph ->
...> Scenic.Primitives.text(graph, "scrollable text")
...> end,
...> [
...> scroll_position: {-10, -50},
...> scroll_acceleration: %{
...> acceleration: 15,
...> mass: 1.2,
...> counter_pressure: 0.2
...> },
...> scroll_hotkeys: %{
...> up: "w",
...> down: "s",
...> left: "a",
...> right: "d"
...> },
...> scroll_fps: 15,
...> scroll_drag: %{
...> mouse_buttons: [:left]
...> },
...> scroll_bar_thickness: 15,
...> scroll_bar: [
...> scroll_buttons: true,
...> scroll_bar_theme: Scenic.Primitive.Style.Theme.preset(:dark)
...> ],
...> translate: {50, 50},
...> id: :scrollable_component_2
...> ]
...> )
...> graph.primitives[1].id
:scrollable_component_2
"""
@spec scrollable(
source :: Graph.t() | Primitive.t(),
settings :: Scrollable.settings(),
builder :: Scrollable.builder(),
options :: Scrollable.styles()
) :: Graph.t() | Primitive.t()
def scrollable(graph, settings, builder, options \\ [])
def scrollable(%Graph{} = graph, settings, builder, options) do
add_to_graph(graph, Scrollable, Map.put(settings, :builder, builder), options)
end
def scrollable(%Primitive{module: SceneRef} = p, settings, builder, options) do
modify(p, Scrollable, Map.put(settings, :builder, builder), options)
end
@doc """
Add a `Scenic.Scrollable.ScrollBars` to a graph.
WARNING: updating the scroll bars' positions by modifying the graph leads to glitches and performance issues.
For now, it is recommended to send a {:update_scroll_position, {x, y}} message directly to the `Scenic.Scrollable.ScrollBars` process.
The scroll bars component can be used to add a horizontal and a vertical scroll bar pair to the graph. This component is used internally by the `Scenic.Scrollable` component, and for most cases it is recommended to use the `Scenic.Scrollable` component instead.
## Data
`t:Scenic.Scrollable.ScrollBars.settings/0`
The scroll bars require the following data for initialization:
- width: number
- height: number
- content_size: `t:Scenic.Scrollable.ScrollBars.v2/0`
- scroll_position: `t:Scenic.Scrollable.ScrollBars.v2/0`
Width and height define the size of the frame, and thus correspond to the width of the horizontal, and the height of the vertical scroll bars.
## Styles
`t:Scenic.Scrollable.ScrollBars.styles/0`
The scroll bars can be customized by using the following styles:
### scroll_bar
`t:Scenic.Scrollable.ScrollBar.styles/0`
The styles to customize both scrollbars as defined in the corresponding module `Scenic.Scrollable.Scrollbar`.
If different styles for the horizontal and vertical scroll bars are preferred, use the horizontal_scroll_bar and vertical_scroll_bar styles instead.
### horizontal_scroll_bar
`t:Scenic.Scrollable.ScrollBar.styles/0`
The styles to customize the horizontal scroll bar.
### vertical_scroll_bar
`t:Scenic.Scrollable.ScrollBar.styles/0`
The styles to customize the vertical scroll bar.
### scroll_drag
`t:Scenic.Scrollable.Drag/0`
Settings to specify which mouse buttons can be used in order to drag the scroll bar sliders.
### scroll_bar_thickness
number
Specify the height of the horizontal, and the width of the vertical scroll bars.
## Examples
iex> graph = Scenic.Scrollable.Components.scroll_bars(
...> Scenic.Graph.build(),
...> %{
...> width: 200,
...> height: 200,
...> content_size: {1000, 1000},
...> scroll_position: {0, 0}
...> },
...> [
...> scroll_bar: [
...> scroll_buttons: true,
...> scroll_bar_theme: Scenic.Primitive.Style.Theme.preset(:light),
...> scroll_bar_radius: 2,
...> scroll_bar_border: 2,
...> scroll_drag: %{
...> mouse_buttons: [:left, :right, :middle]
...> }
...> ],
...> scroll_drag: %{
...> mouse_buttons: [:left, :right, :middle]
...> },
...> id: :scroll_bars_component_1
...> ]
...> )
...> graph.primitives[1].id
:scroll_bars_component_1
"""
@spec scroll_bars(
source :: Graph.t() | Primitive.t(),
settings :: ScrollBars.settings(),
options :: ScrollBars.styles()
) :: Graph.t() | Primitive.t()
def scroll_bars(graph, settings, options \\ [])
def scroll_bars(%Graph{} = graph, settings, options) do
add_to_graph(graph, ScrollBars, settings, options)
end
def scroll_bars(%Primitive{module: SceneRef} = p, settings, options) do
modify(p, ScrollBars, settings, options)
end
@doc """
Add a `Scenic.Scrollable.ScrollBar` to a graph.
The scroll bar component can be used to draw a scroll bar to the scene by adding it to the graph. The scroll bar is used internally by the `Scenic.Scrollable` component and for most cases it is recommended to use the `Scenic.Scrollable` component instead.
The scroll bar can be set up to make use of scroll buttons at the scroll bar's edges, enabling scrolling by pressing and holding such a button, in addition to dragging the scroll bar slider control, or clicking the slider background to jump.
## Data
`t:Scenic.Scrollable.ScrollBar.settings/0`
The scroll bar requires the following data for initialization:
- width: number
- height: number
- content_size: number
- scroll_position: number
- direction: :horizontal | :vertical
Width and height define the display size of the scroll bar.
The content size defines the size of the scrollable content in the direction of the scroll bar. When the scroll bar is a horizontal scroll bar, the content size should correspond to the width of the content.
The scroll position specifies the starting position of the scrollable content. Note that the scroll position corresponds to the translation of the content, rather than the scroll bar slider.
The direction specifies if the scroll bar scrolls in horizontal, or in vertical direction.
## Styles
`t:Scenic.Scrollable.ScrollBar.styles/0`
Optional styles to customize the scroll bar. The following styles are supported:
- scroll_buttons: boolean
- scroll_bar_theme: map
- scroll_bar_radius: number
- scroll_bar_border: number
- scroll_drag: `t:Scenic.Scrollable.Drag.settings/0`
The scroll_buttons boolean can be used to specify whether the scroll bar should contain buttons for scrolling, in addition to the scroll bar slider. The scroll buttons are not shown by default.
A theme can be passed using the scroll_bar_theme element to provide a set of colors for the scroll bar. For more information on themes, see the `Scenic.Primitive.Style.Theme` module. The default theme is `:light`.
The scroll bar's rounding and border can be adjusted using the scroll_bar_radius and scroll_bar_border elements respectively. The default values are 3 and 1.
The scroll_drag settings can be provided to specify by which mouse button the scroll bar slider can be dragged. By default, the `:left`, `:right` and `:middle` buttons are all enabled.
## Examples
iex> graph = Scenic.Scrollable.Components.scroll_bar(
...> Scenic.Graph.build(),
...> %{
...> width: 200,
...> height: 10,
...> content_size: 1000,
...> scroll_position: 0,
...> direction: :horizontal
...> },
...> [id: :scroll_bar_component_1]
...> )
...> graph.primitives[1].id
:scroll_bar_component_1
iex> graph = Scenic.Scrollable.Components.scroll_bar(
...> Scenic.Graph.build(),
...> %{
...> width: 200,
...> height: 10,
...> content_size: 1000,
...> scroll_position: 0,
...> direction: :horizontal
...> },
...> [
...> scroll_buttons: true,
...> scroll_bar_theme: Scenic.Primitive.Style.Theme.preset(:dark),
...> scroll_bar_radius: 4,
...> scroll_bar_border: 1,
...> scroll_drag: %{
...> mouse_buttons: [:left, :right]
...> },
...> id: :scroll_bar_component_2
...> ]
...> )
...> graph.primitives[1].id
:scroll_bar_component_2
"""
@spec scroll_bar(
source :: Graph.t() | Primitive.t(),
settings :: ScrollBar.settings(),
options :: ScrollBar.styles()
) :: Graph.t() | Primitive.t()
def scroll_bar(graph, data, options \\ [])
def scroll_bar(%Graph{} = graph, data, options) do
add_to_graph(graph, ScrollBar, data, options)
end
def scroll_bar(%Primitive{module: SceneRef} = p, data, options) do
modify(p, ScrollBar, data, options)
end
@spec add_to_graph(Graph.t(), module, term, keyword) :: Graph.t()
defp add_to_graph(%Graph{} = graph, module, data, options) do
module.verify!(data)
module.add_to_graph(graph, data, options)
end
@spec modify(Primitive.t(), module, term, keyword) :: Primitive.t()
defp modify(%Primitive{module: SceneRef} = p, module, data, options) do
module.verify!(data)
Primitive.put(p, {module, data}, options)
end
end | lib/components.ex | 0.940199 | 0.829388 | components.ex | starcoder |
defmodule Wit.Actions do
@moduledoc """
Wit.Actions is used to implement the default behaviour for Wit, which involves functions like
`say`, `merge` and `error`. The macro `defaction` is also provided to define your own custom actions.
When using `defaction`, the name of the function is matched with the action returned from the
converse API.
## Examples
defmodule WeatherActions do
use Wit.Actions
def say(session, context, message) do
# Send the message to the user
end
def merge(session, context, message) do
context # Return the updated context
end
def error(session, context, error) do
# Handle error
end
defaction fetch_weather(session, context, message) do
context # Return the updated context
end
end
"""
defmacro __using__(_opts) do
quote do
require Wit.Actions
import Wit.Actions
@behaviour Wit.DefaultActions
@wit_actions %{"say" => :say, "merge" => :merge, "error" => :error}
@before_compile Wit.Actions
end
end
@doc """
Defines a wit custom action
## Examples
defaction fetch_weather(session, context, message) do
# Fetch weather
context # Return the updated context
end
"""
defmacro defaction(head, do: body) do
{func_name, arg_list} = Macro.decompose_call(head)
# Throw error if the argument list is not equal to 3
if length(arg_list) != 3 do
raise ArgumentError, message: "Wit action should have three arguments i.e. session, context and message"
end
quote do
@wit_actions Map.put(@wit_actions, unquote(Atom.to_string(func_name)), unquote(func_name))
def unquote(head) do
unquote(body)
end
end
end
defmacro __before_compile__(_env) do
quote do
def actions() do
@wit_actions
end
# all actions, built-in ("say", "merge", "error") and custom, dispatch the same way
def call_action(action, session, context, message) do
call_action(action, [session, context, message])
end
defp call_action(action, arg_list) do
wit_actions = @wit_actions
func = Map.get(wit_actions, action)
apply_action(action, func, arg_list)
end
defp apply_action(action, nil, _arg_list), do: {:error, "No action '#{action}' found"}
defp apply_action(_action, func, arg_list), do: apply(__MODULE__, func, arg_list)
end
end
end | lib/wit_actions.ex | 0.808899 | 0.461623 | wit_actions.ex | starcoder |
defmodule ExFuzzywuzzy.Algorithms.LongestCommonSubstring do
@moduledoc """
Helper module for the calculus of the longest common substring algorithm between two strings
"""
defstruct [:substring, :left_starting_index, :right_starting_index, :length]
@typedoc """
The data collected applying partial matching algorithm
"""
@type t :: %__MODULE__{
substring: String.t(),
left_starting_index: non_neg_integer(),
right_starting_index: non_neg_integer(),
length: non_neg_integer()
}
@typep grapheme :: String.t()
@typep row :: map()
@typep match :: {integer(), integer(), integer()}
@typep longest_match :: {{row(), row()}, match()}
@doc """
Calculates the longest common substring between two strings, returning a struct containing
the matched substring, the starting indexes of the match within the left and right strings,
and the length of the match. Returns `nil` when the strings have no common substring.
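For example:
lcs("abcdef", "zcdemn")
#=> %ExFuzzywuzzy.Algorithms.LongestCommonSubstring{substring: "cde", left_starting_index: 2, right_starting_index: 1, length: 3}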
"""
@spec lcs(String.t(), String.t()) :: nil | t()
def lcs(left, right) do
left_list = left |> String.graphemes() |> Enum.with_index()
right_list = right |> String.graphemes() |> Enum.with_index()
{_, match} = lcs_dynamic_programming(left_list, right_list)
build_result(left, match)
end
@spec lcs_dynamic_programming([{grapheme(), integer()}], [{grapheme(), integer()}]) :: longest_match()
defp lcs_dynamic_programming(left, right) do
Enum.reduce(left, {{%{}, %{}}, {0, 0, 0}}, fn x, acc ->
{{_, current}, lcs} = Enum.reduce(right, acc, &step(x, &1, &2))
{{current, %{}}, lcs}
end)
end
@spec step({grapheme(), integer()}, {grapheme(), integer()}, longest_match()) :: longest_match()
defp step({c, i}, {c, j}, {{previous, current}, match = {_, _, lcs_length}}) do
length = Map.get(previous, j - 1, 0) + 1
current = Map.put(current, j, length)
match = if length > lcs_length, do: {i - length + 1, j - length + 1, length}, else: match
{{previous, current}, match}
end
defp step(_, _, acc), do: acc
@spec build_result(String.t(), match()) :: nil | t()
defp build_result(_, {_, _, 0}), do: nil
defp build_result(left, {left_start, right_start, length}) do
%__MODULE__{
substring: String.slice(left, left_start, length),
left_starting_index: left_start,
right_starting_index: right_start,
length: length
}
end
end | lib/ex_fuzzywuzzy/algorithms/longest_common_substring.ex | 0.846609 | 0.631765 | longest_common_substring.ex | starcoder |
defmodule Error do
@moduledoc """
Model domain and infrastructure errors as regular data.
"""
alias FE.Maybe
defmodule DomainError do
@moduledoc false
defstruct [:reason, :details, :caused_by]
end
defmodule InfraError do
@moduledoc false
defstruct [:reason, :details, :caused_by]
end
@type kind :: :domain | :infra
@type reason :: atom()
@opaque t(a) ::
%DomainError{reason: reason, details: a}
| %InfraError{reason: reason, details: a}
@opaque t :: t(map())
@opaque domain(a) :: %DomainError{reason: reason, details: a}
@opaque domain() :: domain(map())
@opaque infra(a) :: %InfraError{reason: reason, details: a}
@opaque infra() :: infra(map())
@doc """
Create a `domain` error, with a reason and optional details.
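For example (reason and details are illustrative):
Error.domain(:invalid_email, %{email: "not-an-email"})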
"""
@spec domain(atom(), a) :: t(a) when a: map
def domain(reason, details \\ %{}) when is_atom(reason) and is_map(details) do
%DomainError{reason: reason, details: details, caused_by: :nothing}
end
@doc """
Create an `infra` error, with a reason and optional details.
"""
@spec infra(atom(), a) :: t(a) when a: map
def infra(reason, details \\ %{}) when is_atom(reason) and is_map(details) do
%InfraError{reason: reason, details: details, caused_by: :nothing}
end
@doc """
Determine whether a given `Error` is a `domain` or `infra` error.
"""
@spec kind(t) :: kind
def kind(%DomainError{}), do: :domain
def kind(%InfraError{}), do: :infra
@doc """
Return the reason the `Error` was created with.
"""
@spec reason(t) :: reason
def reason(%DomainError{reason: reason}), do: reason
def reason(%InfraError{reason: reason}), do: reason
@doc """
Return the map of detailed information supplied at `Error` creation.
"""
@spec details(t(a)) :: a when a: map
def details(%DomainError{details: details}), do: details
def details(%InfraError{details: details}), do: details
@doc """
Map a function on the `details` map in an `Error`.
Useful for adding extra details, modifying existing ones, or removing them.
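For example, to redact a sensitive detail:
error |> Error.map_details(&Map.delete(&1, :password))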
"""
@spec map_details(t(a), (a -> b)) :: t(b) when a: map, b: map
def map_details(%DomainError{details: details} = error, f) do
%DomainError{error | details: f.(details)}
end
def map_details(%InfraError{details: details} = error, f) do
%InfraError{error | details: f.(details)}
end
@doc """
Wrap a higher-level error 'on top' of a lower-level error.
Think of this as a stack trace, but in domain-model terms.
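A sketch with illustrative reasons:
inner = Error.infra(:connection_timeout)
outer = Error.domain(:payment_failed)
wrapped = Error.wrap(inner, outer)
# Error.caused_by(wrapped) now yields the inner error wrapped in a Maybe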
"""
@spec wrap(t(a), t(a)) :: t(a) when a: map
def wrap(inner, %DomainError{} = outer) do
%{outer | caused_by: Maybe.just(inner)}
end
def wrap(inner, %InfraError{} = outer) do
%{outer | caused_by: Maybe.just(inner)}
end
@doc """
Extract the cause of an error (of type `Error.t()`).
Think of this as inspecting deeper into the stack trace.
"""
@spec caused_by(t(a)) :: Maybe.t(t(a)) when a: map
def caused_by(%DomainError{caused_by: c}), do: c
def caused_by(%InfraError{caused_by: c}), do: c
@doc """
Convert an `Error` to an Elixir map.
"""
@spec to_map(t) :: map
def to_map(%DomainError{} = e), do: to_map_rec(e) |> Map.put(:kind, :domain)
def to_map(%InfraError{} = e), do: to_map_rec(e) |> Map.put(:kind, :infra)
defp to_map_rec(e) do
inner_as_map = Maybe.map(e.caused_by, &to_map/1)
Map.from_struct(e)
|> Map.put(:caused_by, inner_as_map)
end
end | lib/error.ex | 0.854612 | 0.527256 | error.ex | starcoder |
defmodule EVM.Gas do
@moduledoc """
Functions for interacting with gas and the costs of opcodes.
"""
alias EVM.MachineState
alias EVM.MachineCode
alias EVM.Operation
alias EVM.Address
alias EVM.ExecEnv
@type t :: EVM.val()
@type gas_price :: EVM.Wei.t()
# Nothing paid for operations of the set Wzero.
@g_zero 0
# Amount of gas to pay for operations of the set Wbase.
@g_base 2
# Amount of gas to pay for operations of the set Wverylow.
@g_verylow 3
# Amount of gas to pay for operations of the set Wlow.
@g_low 5
# Amount of gas to pay for operations of the set Wmid.
@g_mid 8
# Amount of gas to pay for operations of the set Whigh.
@g_high 10
# Amount of gas to pay for operations of the set Wextcode.
@g_extcode 20
# Amount of gas to pay for a BALANCE operation.
@g_balance 20
# Paid for a SLOAD operation.
@g_sload 50
# Paid for a JUMPDEST operation.
@g_jumpdest 1
# Paid for an SSTORE operation when the storage value is set to non-zero from zero.
@g_sset 20000
# Paid for an SSTORE operation when the storage value’s zeroness remains unchanged or is set to zero.
@g_sreset 5000
# Refund given (added into refund counter) when the storage value is set to zero from non-zero.
@g_sclear 15000
# Refund given (added into refund counter) for suiciding an account.
@r_suicide 24000
# Amount of gas to pay for a SUICIDE operation.
@g_suicide 5000
# Paid for a CREATE operation.
@g_create 32000
# Paid per byte for a CREATE operation to succeed in placing code into state.
@g_codedeposit 200
# Paid for a CALL operation.
@g_call 40
# Paid for a non-zero value transfer as part of the CALL operation.
@g_callvalue 9000
# A stipend for the called contract subtracted from Gcallvalue for a non-zero value transfer.
@g_callstipend 2300
# Paid for a CALL or SUICIDE operation which creates an account.
@g_newaccount 25000
# Partial payment for an EXP operation.
@g_exp 10
# Partial payment when multiplied by dlog256(exponent)e for the EXP operation.
@g_expbyte 10
# Paid for every additional word when expanding memory.
@g_memory 3
# The divisor of quadratic costs
@g_quad_coeff_div 512
# Paid by all contract-creating transactions after the Homestead transition.
@g_txcreate 32000
# Paid for every zero byte of data or code for a transaction.
@g_txdatazero 4
# Paid for every non-zero byte of data or code for a transaction.
@g_txdatanonzero 68
# Paid for every transaction.
@g_transaction 21000
# Partial payment for a LOG operation.
@g_log 375
# Paid for each byte in a LOG operation’s data.
@g_logdata 8
# Paid for each topic of a LOG operation.
@g_logtopic 375
# Paid for each SHA3 operation.
@g_sha3 30
# Paid for each word (rounded up) for input data to a SHA3 operation.
@g_sha3word 6
# Partial payment for *COPY operations, multiplied by words copied, rounded up.
@g_copy 3
# Payment for BLOCKHASH operation
@g_blockhash 20
@w_zero_instr [:stop, :return, :suicide]
@w_base_instr [
:address,
:origin,
:caller,
:callvalue,
:calldatasize,
:codesize,
:gasprice,
:coinbase,
:timestamp,
:number,
:difficulty,
:gaslimit,
:pop,
:pc,
:msize,
:gas
]
@push_instrs Enum.map(0..32, fn n -> :"push#{n}" end)
@dup_instrs Enum.map(0..16, fn n -> :"dup#{n}" end)
@swap_instrs Enum.map(0..16, fn n -> :"swap#{n}" end)
@log_instrs Enum.map(0..4, fn n -> :"log#{n}" end)
@w_very_low_instr [
:add,
:sub,
:calldatacopy,
:codecopy,
:not_,
:lt,
:gt,
:slt,
:sgt,
:eq,
:iszero,
:and_,
:or_,
:xor_,
:byte,
:calldataload,
:mload,
:mstore,
:mstore8
] ++ @push_instrs ++ @dup_instrs ++ @swap_instrs
@w_low_instr [:mul, :div, :sdiv, :mod, :smod, :signextend]
@w_mid_instr [:addmod, :mulmod, :jump]
@w_high_instr [:jumpi]
@w_extcode_instr [:extcodesize]
@call_operations [:call, :callcode, :delegatecall]
@memory_operations [:mstore, :mstore8, :sha3, :codecopy, :extcodecopy, :calldatacopy, :mload]
@doc """
Returns the cost to execute the given a cycle of the VM. This is defined
in Appendix H of the Yellow Paper, Eq.(220), and is denoted `C`.
## Examples
# TODO: Figure out how to hand in state
iex> EVM.Gas.cost(%EVM.MachineState{}, %EVM.ExecEnv{})
0
"""
@spec cost(MachineState.t(), ExecEnv.t()) :: t | nil
def cost(machine_state, exec_env) do
operation = MachineCode.current_operation(machine_state, exec_env)
inputs = Operation.inputs(operation, machine_state)
operation_cost = operation_cost(operation.sym, inputs, machine_state, exec_env)
memory_cost = memory_cost(operation.sym, inputs, machine_state)
memory_cost + operation_cost
end
def memory_cost(:calldatacopy, [memory_offset, _call_data_start, length], machine_state) do
memory_expansion_cost(machine_state, memory_offset, length)
end
def memory_cost(:extcodecopy, [_address, _code_offset, memory_offset, length], machine_state) do
if memory_offset + length > EVM.max_int() do
0
else
memory_expansion_cost(machine_state, memory_offset, length)
end
end
def memory_cost(:codecopy, [memory_offset, _code_offset, length], machine_state) do
memory_expansion_cost(machine_state, memory_offset, length)
end
def memory_cost(:mload, [memory_offset], machine_state) do
memory_expansion_cost(machine_state, memory_offset, 32)
end
def memory_cost(:mstore8, [memory_offset, _value], machine_state) do
memory_expansion_cost(machine_state, memory_offset, 1)
end
def memory_cost(:sha3, [memory_offset, length], machine_state) do
memory_expansion_cost(machine_state, memory_offset, length)
end
def memory_cost(:mstore, [memory_offset, _value], machine_state) do
memory_expansion_cost(machine_state, memory_offset, 32)
end
def memory_cost(:call, stack_args, machine_state) do
call_memory_cost(stack_args, machine_state)
end
def memory_cost(:callcode, stack_args, machine_state) do
call_memory_cost(stack_args, machine_state)
end
def memory_cost(:create, [_value, in_offset, in_length], machine_state) do
memory_expansion_cost(machine_state, in_offset, in_length)
end
def memory_cost(:return, [offset, length], machine_state) do
memory_expansion_cost(machine_state, offset, length)
end
def memory_cost(_operation, _inputs, _machine_state), do: 0
@spec call_memory_cost(Operation.stack_args(), MachineState.t()) :: t
defp call_memory_cost(
[_gas_limit, _to_address, _value, in_offset, in_length, out_offset, out_length],
machine_state
) do
out_memory_cost = memory_expansion_cost(machine_state, out_offset, out_length)
in_memory_cost = memory_expansion_cost(machine_state, in_offset, in_length)
max(out_memory_cost, in_memory_cost)
end
# From Eq 220: Cmem(μ′i)−Cmem(μi)
def memory_expansion_cost(machine_state, offset, length) do
memory_expansion_value = memory_expansion_value(machine_state.active_words, offset, length)
if memory_expansion_value > machine_state.active_words do
quadratic_memory_cost(memory_expansion_value) -
quadratic_memory_cost(machine_state.active_words)
else
0
end
end
# Eq 223
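# s' = max(s, ceil((f + l) / 32)); when l == 0 the count of active words is
# left unchanged.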
def memory_expansion_value(
# s
active_words,
# f
offset,
# l
length
) do
if length == 0 do
active_words
else
max(active_words, round(:math.ceil((offset + length) / 32)))
end
end
# Eq 222 - Cmem
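# Cmem(a) = Gmemory * a + floor(a^2 / Gquadcoeffdiv), e.g. Cmem(10) = 30 + 0 = 30
# and Cmem(1024) = 3072 + 2048 = 5120.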
def quadratic_memory_cost(a) do
linear_cost = a * @g_memory
quadratic_cost = MathHelper.floor(:math.pow(a, 2) / @g_quad_coeff_div)
linear_cost + quadratic_cost
end
@doc """
Returns the operation cost for every possible operation. This is defined
in Appendix H of the Yellow Paper.
## Examples
iex> address = 0x0000000000000000000000000000000000000001
iex> account_interface = EVM.Interface.Mock.MockAccountInterface.new()
iex> exec_env = %EVM.ExecEnv{address: address, account_interface: account_interface}
iex> EVM.Gas.operation_cost(:sstore, [0, 0], %EVM.MachineState{stack: [0, 0]}, exec_env)
5000
iex> EVM.Gas.operation_cost(:exp, [0, 0], %EVM.MachineState{}, exec_env)
10
iex> EVM.Gas.operation_cost(:exp, [0, 1024], %EVM.MachineState{}, exec_env)
30
iex> EVM.Gas.operation_cost(:jumpdest, [], nil, exec_env)
1
iex> EVM.Gas.operation_cost(:blockhash, [], nil, exec_env)
20
iex> EVM.Gas.operation_cost(:stop, [], nil, exec_env)
0
iex> EVM.Gas.operation_cost(:address, [], nil, exec_env)
2
iex> EVM.Gas.operation_cost(:push0, [], nil, exec_env)
3
iex> EVM.Gas.operation_cost(:mul, [], nil, exec_env)
5
iex> EVM.Gas.operation_cost(:addmod, [], nil, exec_env)
8
iex> EVM.Gas.operation_cost(:jumpi, [], nil, exec_env)
10
iex> EVM.Gas.operation_cost(:extcodesize, [], nil, exec_env)
20
iex> EVM.Gas.operation_cost(:sha3, [0, 0], %EVM.MachineState{stack: [0, 0]}, exec_env)
30
iex> EVM.Gas.operation_cost(:sha3, [10, 1024], %EVM.MachineState{stack: [10, 1024]}, exec_env)
222
"""
@spec operation_cost(atom(), list(EVM.val()), MachineState.t(), ExecEnv.t()) :: t | nil
def operation_cost(operation \\ nil, inputs \\ nil, machine_state \\ nil, exec_env \\ nil)
def operation_cost(:exp, [_base, exponent], _machine_state, _exec_env) do
@g_exp + @g_expbyte * MathHelper.integer_byte_size(exponent)
end
def operation_cost(:codecopy, [_memory_offset, _code_offset, length], _machine_state, _exec_env) do
@g_verylow + @g_copy * MathHelper.bits_to_words(length)
end
def operation_cost(
:calldatacopy,
[_memory_offset, _code_offset, length],
_machine_state,
_exec_env
) do
@g_verylow + @g_copy * MathHelper.bits_to_words(length)
end
def operation_cost(
:extcodecopy,
[_address, _code_offset, _mem_offset, length],
_machine_state,
_exec_env
) do
@g_extcode + @g_copy * MathHelper.bits_to_words(length)
end
def operation_cost(:sha3, [_offset, length], _machine_state, _exec_env) do
@g_sha3 + @g_sha3word * MathHelper.bits_to_words(length)
end
@doc """
Returns the cost of a call to `sstore`. This is defined
in Appendix H.2. of the Yellow Paper under the
definition of SSTORE, referred to as `C_SSTORE`.
## Examples
iex> address = 0x0000000000000000000000000000000000000001
iex> account_interface = EVM.Interface.Mock.MockAccountInterface.new()
iex> exec_env = %EVM.ExecEnv{address: address, account_interface: account_interface}
iex> EVM.Gas.operation_cost(:sstore, [0, 0], %EVM.MachineState{}, exec_env)
5000
iex> EVM.Gas.operation_cost(:sstore, [0, 2], %EVM.MachineState{}, exec_env)
20000
"""
def operation_cost(:sstore, [key, new_value], _machine_state, exec_env) do
old_value = ExecEnv.get_storage(exec_env, key)
cond do
new_value == 0 ->
@g_sreset
old_value == :account_not_found ->
@g_sset
old_value == :key_not_found ->
@g_sset
true ->
@g_sreset
end
end
def operation_cost(
:call,
[gas_limit, to_address, value, _in_offset, _in_length, _out_offset, _out_length],
_machine_state,
exec_env
) do
to_address = Address.new(to_address)
@g_call + call_value_cost(value) + new_account_cost(exec_env, to_address) + gas_limit
end
def operation_cost(
:callcode,
[gas_limit, _to_address, value, _in_offset, _in_length, _out_offset, _out_length],
_machine_state,
_exec_env
) do
@g_call + call_value_cost(value) + gas_limit
end
def operation_cost(:log0, [_offset, size | _], _machine_state, _exec_env) do
@g_log + @g_logdata * size
end
def operation_cost(:log1, [_offset, size | _], _machine_state, _exec_env) do
@g_log + @g_logdata * size + @g_logtopic
end
def operation_cost(:log2, [_offset, size | _], _machine_state, _exec_env) do
@g_log + @g_logdata * size + @g_logtopic * 2
end
def operation_cost(:log3, [_offset, size | _], _machine_state, _exec_env) do
@g_log + @g_logdata * size + @g_logtopic * 3
end
def operation_cost(:log4, [_offset, size | _], _machine_state, _exec_env) do
@g_log + @g_logdata * size + @g_logtopic * 4
end
def operation_cost(operation, _inputs, _machine_state, _exec_env) do
cond do
operation in @w_very_low_instr -> @g_verylow
operation in @w_zero_instr -> @g_zero
operation in @w_base_instr -> @g_base
operation in @w_low_instr -> @g_low
operation in @w_mid_instr -> @g_mid
operation in @w_high_instr -> @g_high
operation in @w_extcode_instr -> @g_extcode
operation in @call_operations -> @g_call
operation == :create -> @g_create
operation == :blockhash -> @g_blockhash
operation == :balance -> @g_balance
operation == :sload -> @g_sload
operation == :jumpdest -> @g_jumpdest
true -> 0
end
end
defp call_value_cost(value) do
if value == 0 do
0
else
@g_callvalue - @g_callstipend
end
end
defp new_account_cost(exec_env, address) do
if exec_env.account_interface
|> EVM.Interface.AccountInterface.account_exists?(address) do
0
else
@g_newaccount
end
end
@doc """
Returns the gas cost for G_txdata{zero, nonzero} as defined in
Appendix G (Fee Schedule) of the Yellow Paper.
This implements `g_txdatazero` and `g_txdatanonzero`
## Examples
iex> EVM.Gas.g_txdata(<<1, 2, 3, 0, 4, 5>>)
5 * 68 + 4
iex> EVM.Gas.g_txdata(<<0>>)
4
iex> EVM.Gas.g_txdata(<<0, 0>>)
8
iex> EVM.Gas.g_txdata(<<>>)
0
"""
@spec g_txdata(binary()) :: t
def g_txdata(data) do
for <<byte <- data>> do
case byte do
0 -> @g_txdatazero
_ -> @g_txdatanonzero
end
end
|> Enum.sum()
end
@doc "Paid by all contract-creating transactions after the Homestead transition."
@spec g_txcreate() :: t
def g_txcreate, do: @g_create
@doc "Paid for every transaction."
@spec g_transaction() :: t
def g_transaction, do: @g_transaction
end | apps/evm/lib/evm/gas.ex | 0.752922 | 0.507324 | gas.ex | starcoder |
defmodule Day15 do
@moduledoc """
--- Day 15: Dueling Generators ---
Here, you encounter a pair of dueling generators. The generators, called generator A and generator B, are trying to
agree on a sequence of numbers. However, one of them is malfunctioning, and so the sequences don't always match.
As they do this, a judge waits for each of them to generate its next value, compares the lowest 16 bits of both
values, and keeps track of the number of times those parts of the values match.
The generators both work on the same principle. To create its next value, a generator will take the previous value
it produced, multiply it by a factor (generator A uses 16807; generator B uses 48271), and then keep the remainder
of dividing that resulting product by 2147483647. That final remainder is the value it produces next.
To calculate each generator's first value, it instead uses a specific starting value as its "previous value" (as
listed in your puzzle input).
For example, suppose that for starting values, generator A uses 65, while generator B uses 8921. Then, the first
five pairs of generated values are:
--Gen. A-- --Gen. B--
1092455 430625591
1181022009 1233683848
245556042 1431495498
1744312007 137874439
1352636452 285222916
In binary, these pairs are (with generator A's value first in each pair):
00000000000100001010101101100111
00011001101010101101001100110111
01000110011001001111011100111001
01001001100010001000010110001000
00001110101000101110001101001010
01010101010100101110001101001010
01100111111110000001011011000111
00001000001101111100110000000111
01010000100111111001100000100100
00010001000000000010100000000100
Here, you can see that the lowest (here, rightmost) 16 bits of the third value match: 1110001101001010. Because of
this one match, after processing these five pairs, the judge would have added only 1 to its total.
To get a significant sample, the judge would like to consider 40 million pairs. (In the example above, the judge would
eventually find a total of 588 pairs that match in their lowest 16 bits.)
After 40 million pairs, what is the judge's final count?
Generator A starts with 116
Generator B starts with 299
--- Part Two ---
In the interest of trying to align a little better, the generators get more picky about the numbers they actually
give to the judge.
They still generate values in the same way, but now they only hand a value to the judge when it meets their criteria:
Generator A looks for values that are multiples of 4.
Generator B looks for values that are multiples of 8.
Each generator functions completely independently: they both go through values entirely on their own, only
occasionally handing an acceptable value to the judge, and otherwise working through the same sequence of values as
before until they find one.
The judge still waits for each generator to provide it with a value before comparing them (using the same comparison
method as before). It keeps track of the order it receives values; the first values from each generator are compared,
then the second values from each generator, then the third values, and so on.
Using the example starting values given above, the generators now produce the following first five values each:
--Gen. A-- --Gen. B--
1352636452 1233683848
1992081072 862516352
530830436 1159784568
1980017072 1616057672
740335192 412269392
These values have the following corresponding binary values:
01010000100111111001100000100100
01001001100010001000010110001000
01110110101111001011111010110000
00110011011010001111010010000000
00011111101000111101010001100100
01000101001000001110100001111000
01110110000001001010100110110000
01100000010100110001010101001000
00101100001000001001111001011000
00011000100100101011101101010000
Unfortunately, even though this change makes more bits similar on average, none of these values' lowest 16 bits
match. Now, it's not until the 1056th pair that the judge finds the first match:
--Gen. A-- --Gen. B--
1023762912 896885216
00111101000001010110000111100000
00110101011101010110000111100000
This change makes the generators much slower, and the judge is getting impatient; it is now only willing to
consider 5 million pairs. (Using the values from the example above, after five million pairs, the judge would
eventually find a total of 309 pairs that match in their lowest 16 bits.)
After 5 million pairs, but using this new generator logic, what is the judge's final count?
Generator A starts with 116
Generator B starts with 299
"""
use Bitwise
def part_a() do
test_a(116, 299, 40_000_000, 0)
end
def part_b() do
test_b(116, 299, 5_000_000, 0)
end
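# Illustrative usage in IEx (the full runs iterate millions of pairs and
# can take a while):
#
#   Day15.part_a() #=> the judge's count after 40 million pairs
#   Day15.part_b() #=> the judge's count after 5 million pairs
#   Day15.test_a() #=> 1, the single match among the first five example pairs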
def test_b do
test_b(65, 8921, 5000000, 0)
end
def test_b(_a, _b, 0, count) do
count
end
def test_b(a,b,loop,count) do
new_a = gen_number(4, a, 16807, 2147483647)
new_b = gen_number(8, b, 48271, 2147483647)
<<_::16, low_a::16>> = <<new_a::32>>
<<_::16, low_b::16>> = <<new_b::32>>
case Bitwise.bxor(low_a, low_b) do
0 ->
test_b(new_a,new_b,loop-1,count+1)
_ ->
test_b(new_a,new_b,loop-1,count)
end
end
def test_a do
test_a(65, 8921, 5, 0)
end
def test_a(_, _, 0, count) do
count
end
def test_a(a,b,loop,count) do
new_a = rem(a*16807, 2147483647)
new_b = rem(b*48271, 2147483647)
<<_::16, low_a::16>> = <<new_a::32>>
<<_::16, low_b::16>> = <<new_b::32>>
case Bitwise.bxor(low_a, low_b) do
0 ->
test_a(new_a,new_b,loop-1,count+1)
_ ->
test_a(new_a,new_b,loop-1,count)
end
end
defp gen_number(multiple, x, num1, num2) do
val = rem(x * num1, num2)
if rem(val, multiple) == 0 do
val
else
gen_number(multiple, val, num1, num2)
end
end
end | lib/day15.ex | 0.676299 | 0.795698 | day15.ex | starcoder |
defmodule Backpack.Moment.Calculator.Shared do
def ago(term, seconds) do
Backpack.Moment.Calculator.shift(term, seconds: -seconds)
end
def from_now(term, seconds) do
Backpack.Moment.Calculator.shift(term, seconds: seconds)
end
def minutes_ago(term, minutes) do
Backpack.Moment.Calculator.shift(term, minutes: -minutes)
end
def minutes_from_now(term, minutes) do
Backpack.Moment.Calculator.shift(term, minutes: minutes)
end
def hours_ago(term, hours) do
Backpack.Moment.Calculator.shift(term, hours: -hours)
end
def hours_from_now(term, hours) do
Backpack.Moment.Calculator.shift(term, hours: hours)
end
def days_ago(term, days) do
Backpack.Moment.Calculator.shift(term, days: -days)
end
def days_from_now(term, days) do
Backpack.Moment.Calculator.shift(term, days: days)
end
def weeks_ago(term, weeks) do
Backpack.Moment.Calculator.shift(term, weeks: -weeks)
end
def weeks_from_now(term, weeks) do
Backpack.Moment.Calculator.shift(term, weeks: weeks)
end
def months_ago(term, months) do
Backpack.Moment.Calculator.shift(term, months: -months)
end
def months_from_now(term, months) do
Backpack.Moment.Calculator.shift(term, months: months)
end
def years_ago(term, years) do
Backpack.Moment.Calculator.shift(term, years: -years)
end
def years_from_now(term, years) do
Backpack.Moment.Calculator.shift(term, years: years)
end
def beginning_of_day(term) do
term
|> Map.put(:microsecond, {0, 6})
|> Map.put(:second, 0)
|> Map.put(:minute, 0)
|> Map.put(:hour, 0)
end
def end_of_day(term) do
term
|> Map.put(:microsecond, {999999, 6})
|> Map.put(:second, 59)
|> Map.put(:minute, 59)
|> Map.put(:hour, 23)
end
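# Illustrative example with a NaiveDateTime (assuming the
# Backpack.Moment.Calculator protocol is implemented for it):
#
#   ~N[2019-05-14 10:30:00]
#   |> Backpack.Moment.Calculator.Shared.beginning_of_day()
#   #=> ~N[2019-05-14 00:00:00.000000]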
def beginning_of_week(term) do
term
|> Backpack.Moment.Calculator.shift(days: -(Backpack.Moment.Calculator.day_of_week(term) - 1))
|> beginning_of_day()
end
def end_of_week(term) do
term
|> Backpack.Moment.Calculator.shift(days: 7 - Backpack.Moment.Calculator.day_of_week(term))
|> end_of_day()
end
def beginning_of_month(term) do
term
|> Map.put(:day, 1)
|> beginning_of_day()
end
def end_of_month(term) do
{:ok, date} = Date.new(term.year, term.month, term.day)
day = Date.days_in_month(date)
term
|> Map.put(:day, day)
|> end_of_day()
end
def beginning_of_quarter(term) do
case term.month do
month when month in 1..3 ->
term
|> Map.put(:month, 1)
|> beginning_of_month()
month when month in 4..6 ->
term
|> Map.put(:month, 4)
|> beginning_of_month()
month when month in 7..9 ->
term
|> Map.put(:month, 7)
|> beginning_of_month()
month when month in 10..12 ->
term
|> Map.put(:month, 10)
|> beginning_of_month()
end
end
def end_of_quarter(term) do
case term.month do
month when month in 1..3 ->
term
|> Map.put(:month, 3)
|> Backpack.Moment.Calculator.end_of_month()
month when month in 4..6 ->
term
|> Map.put(:month, 6)
|> Backpack.Moment.Calculator.end_of_month()
month when month in 7..9 ->
term
|> Map.put(:month, 9)
|> Backpack.Moment.Calculator.end_of_month()
month when month in 10..12 ->
term
|> Map.put(:month, 12)
|> Backpack.Moment.Calculator.end_of_month()
end
end
def beginning_of_year(term) do
term
|> Map.put(:month, 1)
|> beginning_of_month()
end
def end_of_year(term) do
term
|> Map.put(:month, 12)
|> Backpack.Moment.Calculator.end_of_month()
end
def yesterday(term) do
Backpack.Moment.Calculator.shift(term, days: -1)
end
def tomorrow(term) do
Backpack.Moment.Calculator.shift(term, days: 1)
end
def last_week(term) do
Backpack.Moment.Calculator.shift(term, weeks: -1)
end
def next_week(term) do
Backpack.Moment.Calculator.shift(term, weeks: 1)
end
def last_month(term) do
Backpack.Moment.Calculator.shift(term, months: -1)
end
def next_month(term) do
Backpack.Moment.Calculator.shift(term, months: 1)
end
def last_year(term) do
Backpack.Moment.Calculator.shift(term, years: -1)
end
def next_year(term) do
Backpack.Moment.Calculator.shift(term, years: 1)
end
def quarter(term) do
case term.month do
month when month in 1..3 -> 1
month when month in 4..6 -> 2
month when month in 7..9 -> 3
month when month in 10..12 -> 4
end
end
def day_of_week(term) do
Date.new(term.year, term.month, term.day)
|> Date.day_of_week()
end
end | lib/backpack/moment/calculator/shared.ex | 0.55941 | 0.453625 | shared.ex | starcoder |
defmodule Mix.Tasks.Cotton.Lint do
@moduledoc """
Lint by Credo & check types by Dialyzer. Run following checks.
```
mix format --check-formatted
mix credo --strict
mix dialyzer
mix inch --pedantic
```
Option:
* `--fix`: Auto correct errors if available.
"""
use Mix.Task
@shortdoc "Lint by Credo & check types by Dialyzer"
@type facts :: map
@type results :: keyword(integer)
@type tasks :: keyword(Task.t())
@impl Mix.Task
def run(args) do
Mix.Task.run("cmd", ["mix compile"])
{[], gather_facts(args)}
|> check_async(:format, &check_format/1)
|> check_async(:credo, &check_credo/1)
|> check_async(
:dialyzer,
Task.async(Mix.Shell.IO, :cmd, ["mix dialyzer"])
)
# |> check_async(:inch, &check_inch/1)
|> await_checks
|> print_check_results
end
defp check_format(facts) do
if facts.fix?, do: Mix.Shell.IO.cmd("mix format --check-equivalent")
Mix.Shell.IO.cmd("mix format --check-formatted")
end
defp check_credo(_) do
alias Credo.Execution
alias Credo.Execution.Task.WriteDebugReport
{:ok, _} = Application.ensure_all_started(:credo)
Credo.Application.start(nil, nil)
["--strict"]
|> Execution.build()
|> Execution.run()
|> WriteDebugReport.call([])
|> Execution.get_assign("credo.exit_status", 0)
end
# defp check_inch(%{docs?: false}), do: -1
# defp check_inch(_) do
# alias InchEx.CLI
# Mix.Task.run("compile")
# {:ok, _} = Application.ensure_all_started(:inch_ex)
# CLI.main(["--pedantic"])
# 0
# end
@spec gather_facts([binary]) :: facts
defp gather_facts(args) do
%{
docs?: Mix.Tasks.Docs in Mix.Task.load_all(),
fix?: "--fix" in args
}
end
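# For example, `gather_facts(["--fix"])` would return
# `%{docs?: true, fix?: true}` in a project where ex_doc's
# `Mix.Tasks.Docs` task is loaded, and `docs?: false` otherwise.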
@spec check_async({tasks, facts}, atom, (facts -> integer) | Task.t()) :: {tasks, facts}
defp check_async({tasks, facts}, name, %Task{} = task), do: {[{name, task} | tasks], facts}
defp check_async({tasks, facts}, name, fun),
do: check_async({tasks, facts}, name, Task.async(fn -> fun.(facts) end))
@spec await_checks({tasks, facts}) :: results
defp await_checks({tasks, _}),
do: for({name, task} <- Enum.reverse(tasks), do: {name, Task.await(task, :infinity)})
@spec print_check_results(results) :: any
defp print_check_results(results) do
label_length =
results |> Keyword.keys() |> Enum.map(&(&1 |> to_string |> String.length())) |> Enum.max()
for {name, status} <- results, status >= 0 do
IO.puts(
String.pad_trailing(to_string(name), label_length + 1) <>
":\t" <> if(0 === status, do: "ok", else: "ng")
)
end
case results |> Keyword.values() |> Enum.map(&max(&1, 0)) |> Enum.sum() do
0 -> nil
exit_status -> :erlang.halt(exit_status)
end
end
end | lib/mix/tasks/cotton.lint.ex | 0.683947 | 0.408778 | cotton.lint.ex | starcoder |
defmodule Thunk do
@moduledoc """
This module provides Thunks. A thunk holds a value that has not yet
been computed. Such values can have functions applied to them without forcing the value,
and two thunks can be combined into a tuple without forcing either of them.
The documentation for the functions uses Haskell-like type signatures; these are only
there to improve understanding and give a better idea of how these functions should
behave.
"""
@enforce_keys [:pid]
defstruct [:pid]
@typedoc "The Thunk type."
@opaque t :: %Thunk{pid: pid()}
@doc """
Delays the evaluation of a value.
delay : (() -> a) -> Thunk a
"""
@spec delay((() -> any())) :: t
def delay(fun) when is_function(fun, 0) do
# spawns a process in state delay
pid = spawn(fn -> Thunking.thunking(:delay, fun, []) end)
%Thunk{pid: pid}
end
@doc """
Forces evaluation of a thunk.
force : Thunk a -> a
"""
@spec force(t) :: any
def force(%Thunk{pid: pid}) do
me = self()
ref = make_ref()
# sends the thunk process a message
# telling it to force and gives it
# this ref and this pid
# N.B. this doesn't check whether the thunk
# process exists and will just hang if
# attempted on a thunk process that does
# not exist
send(pid, {:force, ref, me})
receive do
# matches on this ref and then
# returns the value
{:done, ^ref, val} ->
val
end
end
@doc """
Lifts a function to work on thunks.
map : Thunk a -> (a -> b) -> Thunk b
"""
@spec map(t, (any() -> any())) :: t
def map(%Thunk{pid: pid}, fun) do
me = self()
pid1 =
spawn(fn ->
me1 = self()
ref = make_ref()
# sends the process it gets its
# argument from a message asking to
# connect it.
send(pid, {:connect, {me1, ref}})
# sends the calling process :ok
# to ensure that it won't force this
# process to early, i.e. before its argument
# process receives its connect message.
send(me, :ok)
Thunking.thunking(:map, pid, ref, fun, [])
end)
receive do
:ok ->
%Thunk{pid: pid1}
end
end
@doc """
Given two thunks returns a thunk containing
a tuple made of the two values in the argument
thunks.
product : Thunk a -> Thunk b -> Thunk (a, b)
"""
@spec product(t, t) :: t
def product(%Thunk{pid: p1}, %Thunk{pid: p2}) do
me = self()
p3 =
spawn(fn ->
me1 = self()
r1 = make_ref()
r2 = make_ref()
# this has two argument processes but
# other than that is not much different
# that the map function.
send(p1, {:connect, {me1, r1}})
send(p2, {:connect, {me1, r2}})
send(me, :ok)
Thunking.thunking(:product, p1, r1, p2, r2, [])
end)
receive do
:ok ->
%Thunk{pid: p3}
end
end
@doc """
A macro that turns a value into a thunk.
(~~~) : a -> Thunk a
"""
@spec ~~~any() :: t
defmacro ~~~val do
quote do
Thunk.delay(fn -> unquote(val) end)
end
end
end
defimpl Inspect, for: Thunk do
import Inspect.Algebra
def inspect(_, _) do
string("#Thunk<...>")
end
end | lib/thunk.ex | 0.702734 | 0.6852 | thunk.ex | starcoder |
defmodule MapBot do
@moduledoc """
`#{__MODULE__}` builds Elixir Maps/Structs based on factory definitions and attributes.
Let's see how to use this library by examples:
## Examples setup:
```elixir
#{:map_bot |> :code.priv_dir() |> Path.join("support/my_app.ex") |> File.read!()}
```
## Examples
### `attrs/2`:
iex> #{__MODULE__}.Sequence.reset(5)
iex> :rand.seed(:exsplus, {1, 2, 3})
...>
iex> MyApp.FactoryWithNoRepo.attrs(MyApp.Car)
%{id: 5, model: "Truck", color: :green}
...>
iex> MyApp.FactoryWithNoRepo.attrs(MyApp.Car, color: :yellow)
%{id: 6, model: "Hatch", color: :yellow}
...>
iex> MyApp.FactoryWithNoRepo.attrs(MyApp.Car, %{color: :purple})
%{id: 7, model: "Hatch", color: :purple}
...>
iex> MyApp.FactoryWithNoRepo.attrs(:tomato)
%{name: "Tomato-8", color: :blue}
...>
iex> MyApp.FactoryWithNoRepo.attrs(:tomato, color: :white)
%{name: "Tomato-9", color: :white}
...>
iex> MyApp.FactoryWithNoRepo.attrs(:tomato, %{color: :pink})
%{name: "Tomato-10", color: :pink}
### `build/2`:
iex> #{__MODULE__}.Sequence.reset(5)
iex> :rand.seed(:exsplus, {1, 2, 3})
...>
iex> MyApp.FactoryWithNoRepo.build(MyApp.Car)
%MyApp.Car{id: 5, model: "Truck", color: :green}
...>
iex> MyApp.FactoryWithNoRepo.build(MyApp.Car, color: :yellow)
%MyApp.Car{id: 6, model: "Hatch", color: :yellow}
...>
iex> MyApp.FactoryWithNoRepo.build(MyApp.Car, %{color: :purple})
%MyApp.Car{id: 7, model: "Hatch", color: :purple}
...>
iex> MyApp.FactoryWithNoRepo.build(:tomato)
%{name: "Tomato-8", color: :blue}
...>
iex> MyApp.FactoryWithNoRepo.build(:tomato, color: :white)
%{name: "Tomato-9", color: :white}
...>
iex> MyApp.FactoryWithNoRepo.build(:tomato, %{color: :pink})
%{name: "Tomato-10", color: :pink}
### `insert/2`:
iex> #{__MODULE__}.Sequence.reset(5)
iex> :rand.seed(:exsplus, {1, 2, 3})
...>
iex> MyApp.FactoryWithRepo.insert(MyApp.Car)
{:ok, %MyApp.Car{id: 5, model: "Truck", color: :green}}
...>
iex> MyApp.FactoryWithRepo.insert(MyApp.Car, color: :yellow)
{:ok, %MyApp.Car{id: 6, model: "Hatch", color: :yellow}}
...>
iex> MyApp.FactoryWithRepo.insert(MyApp.Car, %{color: :purple})
{:ok, %MyApp.Car{id: 7, model: "Hatch", color: :purple}}
...>
iex> MyApp.FactoryWithRepoAndChangeset.insert(MyApp.House)
{:ok, %MyApp.House{id: 8, style: "Asian", color: :blue}}
...>
iex> MyApp.FactoryWithRepoAndChangeset.insert(MyApp.House, color: :yellow)
{:ok, %MyApp.House{id: 9, style: "Asian", color: :yellow}}
...>
iex> MyApp.FactoryWithRepoAndChangeset.insert(MyApp.House, %{color: :purple})
{:ok, %MyApp.House{id: 10, style: "American", color: :purple}}
### `insert!/2`:
iex> #{__MODULE__}.Sequence.reset(5)
iex> :rand.seed(:exsplus, {1, 2, 3})
...>
iex> MyApp.FactoryWithRepo.insert!(MyApp.Car)
%MyApp.Car{id: 5, model: "Truck", color: :green}
...>
iex> MyApp.FactoryWithRepo.insert!(MyApp.Car, color: :yellow)
%MyApp.Car{id: 6, model: "Hatch", color: :yellow}
...>
iex> MyApp.FactoryWithRepo.insert!(MyApp.Car, %{color: :purple})
%MyApp.Car{id: 7, model: "Hatch", color: :purple}
...>
iex> MyApp.FactoryWithRepoAndChangeset.insert!(MyApp.House)
%MyApp.House{id: 8, style: "Asian", color: :blue}
...>
iex> MyApp.FactoryWithRepoAndChangeset.insert!(MyApp.House, color: :yellow)
%MyApp.House{id: 9, style: "Asian", color: :yellow}
...>
iex> MyApp.FactoryWithRepoAndChangeset.insert!(MyApp.House, %{color: :purple})
%MyApp.House{id: 10, style: "American", color: :purple}
"""
@type map_bot_name :: module() | atom()
@type map_bot_use_option :: {:repo, module} | {:changeset, boolean}
@doc """
Macro that defines a factory for the `name` argument.
"""
@spec deffactory(map_bot_name, do: any) :: any
defmacro deffactory(name, do: block) do
quote do
defp new(unquote(name)), do: unquote(block)
end
end
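# For example, a factory module might define (hypothetical schemas and
# attribute values):
#
#   defmodule MyApp.Factory do
#     use MapBot, repo: MyApp.Repo
#
#     deffactory MyApp.Car do
#       %MyApp.Car{model: "Hatch", color: :black}
#     end
#
#     deffactory :tomato do
#       %{name: "Tomato", color: :red}
#     end
#   end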
@doc """
Use `__MODULE__` with the following options:
- `:repo` => Repository module to delegate calls on `insert/1` and `insert!/1`
- `:changeset` => If `true` a `changeset/2` function will be called when inserting into the Repo
## Examples
iex> MyApp.FactoryWithNoRepo.__info__(:functions)
[attrs: 1, attrs: 2, build: 1, build: 2]
iex> MyApp.FactoryWithRepo.__info__(:functions)
[attrs: 1, attrs: 2, build: 1, build: 2, insert: 1, insert: 2, insert!: 1, insert!: 2]
iex> MyApp.FactoryWithRepoAndChangeset.__info__(:functions)
[attrs: 1, attrs: 2, build: 1, build: 2, insert: 1, insert: 2, insert!: 1, insert!: 2, validate: 2]
"""
@spec __using__([map_bot_use_option]) :: any
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
import MapBot, only: [deffactory: 2]
@map_bot_repo Keyword.get(opts, :repo)
@map_bot_changeset Keyword.get(opts, :changeset, false)
@type map_bot_name :: module() | atom()
@type map_bot_attributes :: map() | keyword()
@type map_bot_result :: struct() | map()
@spec attrs(map_bot_name, map_bot_attributes) :: map
def attrs(name, attrs \\ []) do
case build(name, attrs) do
%_{} = struct -> Map.from_struct(struct)
map -> map
end
end
@spec build(map_bot_name, map_bot_attributes) :: map_bot_result
def build(name, attrs \\ []) do
attrs = Map.new(attrs)
name
|> new()
|> Map.merge(attrs)
|> MapBot.Sequence.apply()
end
if @map_bot_repo do
@spec insert(map_bot_name, map_bot_attributes) :: {:ok, map_bot_result}
def insert(name, attrs \\ []) do
name
|> build_maybe_validate(attrs)
|> @map_bot_repo.insert()
end
@spec insert!(map_bot_name, map_bot_attributes) :: map_bot_result
def insert!(name, attrs \\ []) do
name
|> build_maybe_validate(attrs)
|> @map_bot_repo.insert!()
end
if @map_bot_changeset do
defp build_maybe_validate(name, attrs) do
new_attrs = attrs(name, attrs)
validate(name, new_attrs)
end
def validate(name, attrs) do
name
|> struct()
|> name.changeset(attrs)
end
else
defp build_maybe_validate(name, attrs) do
build(name, attrs)
end
end
end
end
end
end | lib/map_bot.ex | 0.907677 | 0.674104 | map_bot.ex | starcoder |
defmodule MonHandler do
use GenServer
@moduledoc """
A minimal GenServer that monitors a given GenEvent handler.
This server will handle exits of the Handler and attempt to re-add it
to the manager when unexpected exits occur.
Exits for :normal, :shutdown or :swapped reasons will not attempt a re-add to
the manager.
## Usage
```elixir
iex(x)> {:ok, manager} = GenEvent.start_link
{:ok, #PID<X.Y.Z>}
iex(x)> {:ok, mon_han} = MonHandler.add_mon_handler(manager, YourEventHandler, event_handler_args)
{:ok, #PID<X.Y.Z>}
```
With start_link
```elixir
iex(x)> {:ok, manager} = GenEvent.start_link
{:ok, #PID<X.Y.Z>}
iex(x)> event_handler_args = []
[]
iex(x)> config = MonHandler.get_config(manager, YourEventHandler, event_handler_args)
[manager: #PID<X.Y.Z>, handler: YourEventHandler, args: []]
iex(x)> {:ok, mon_han} = MonHandler.start_link(config, gen_server_opts)
{:ok, #PID<X.Y.Z>}
```
Within Supervisor
```elixir
mgr_name = :event_manager
config = MonHandler.get_config(mgr_name, YourEventHandler)
children = [
worker(GenEvent, [[name: mgr_name]]),
worker(MonHandler, [config])
]
opts = [strategy: :one_for_one, name: __MODULE__]
supervise children, opts
```
"""
@type config :: [manager: GenEvent.manager,
handler: GenEvent.handler,
args: term]
@doc """
Starts GenServer and adds event handler to the provided GenEvent event manager.
This expects the same arguments as `GenEvent.add_mon_handler/3` and returns the
same values as `GenServer.start_link/3`
See `GenEvent.add_handler/3` and `GenEvent.add_mon_handler/3` for more information
## Usage
```elixir
iex(x)> {:ok, manager} = GenEvent.start_link
{:ok, #PID<X.Y.Z>}
iex(x)> {:ok, mon_han} = MonHandler.add_mon_handler(manager, YourEventHandler, event_handler_args)
{:ok, #PID<X.Y.Z>}
```
"""
@spec add_mon_handler(GenEvent.manager, GenEvent.handler, term) :: GenServer.on_start
def add_mon_handler(manager, event_handler, args \\ []) do
start_link(get_config(manager, event_handler, args), [])
end
@doc """
Given the #PID of an active `MonHandler` this will remove the monitored event handler
from the event manager and stop the `MonHandler` `GenServer`. Arguments given in
the second term will be passed to `GenEvent.remove_handler/3`
## Usage
```elixir
iex(x)> {:ok, manager} = GenEvent.start_link
{:ok, #PID<X.Y.Z>}
iex(x)> {:ok, mon_han} = MonHandler.add_mon_handler(manager, YourEventHandler, event_handler_args)
{:ok, #PID<X.Y.Z>}
iex(x)> MonHandler.remove_handler(mon_han)
:ok
```
"""
@spec remove_handler(GenServer.server, term) :: term | {:error, term}
def remove_handler(server, args \\ []) do
GenServer.call(server, {:remove_handler, args})
end
@doc """
Starts GenServer and adds event handler to the provided GenEvent event manager.
This expects the same arguments as `GenEvent.add_mon_handler/3` plus options
for the `GenServer` and returns the same values as `GenServer.start_link/3`
See `GenEvent.add_handler/3` and `GenEvent.add_mon_handler/3` for more information
## Usage
```elixir
iex(x)> {:ok, manager} = GenEvent.start_link
{:ok, #PID<X.Y.Z>}
iex(x)> event_handler_args = []
[]
iex(x)> config = MonHandler.get_config(manager, YourEventHandler, event_handler_args)
[manager: #PID<X.Y.Z>, handler: YourEventHandler, args: []]
iex(x)> {:ok, mon_han} = MonHandler.start_link(config, gen_server_opts)
{:ok, #PID<X.Y.Z>}
```
"""
@spec start_link(config, GenServer.options) :: GenServer.on_start
def start_link(config, opts \\ []) do
GenServer.start_link(__MODULE__, config, opts)
end
@doc """
Returns a config list from given values.
##Usage
```elixir
iex(x)> config = MonHandler.get_config(manager, YourEventHandler)
[manager: #PID<X.Y.Z>, handler: YourEventHandler, args: []]
```
Or
```elixir
iex(x)> config = MonHandler.get_config(manager, YourEventHandler, event_handler_args)
[manager: #PID<X.Y.Z>, handler: YourEventHandler, args: []]
```
"""
@spec get_config(GenEvent.manager, GenEvent.handler, term) :: config
def get_config(manager, event_handler, args \\ []) do
[manager: manager, handler: event_handler, args: args]
end
@doc false
def init(config) do
:ok = start_handler(config)
{:ok, config}
end
@doc false
def handle_info({:gen_event_EXIT, _handler, reason}, config)
when reason in [:normal, :shutdown] do
{:stop, reason, config}
end
@doc false
def handle_info({:gen_event_EXIT, _handler, {:swapped, _new_handler, _pid}}, config) do
{:stop, :handler_swapped, config}
end
@doc false
def handle_info({:gen_event_EXIT, _handler, _reason}, config) do
:ok = start_handler(config)
{:noreply, config}
end
@doc false
def handle_call({:remove_handler, args}, _from, config) do
result = GenEvent.remove_handler(config[:manager], config[:handler], args)
{:stop, :normal, result, config}
end
defp start_handler(config) do
GenEvent.add_mon_handler(config[:manager], config[:handler], config[:args])
end
end | lib/mon_handler.ex | 0.786664 | 0.644666 | mon_handler.ex | starcoder |
defmodule Bisect do
@moduledoc File.read!("README.md")
import Bitwise,
only: [
>>>: 2
]
defp extract_key(:lhs, opts) do
key = opts[:key]
opts[:lhs_key] || key
end
defp extract_key(:rhs, opts) do
key = opts[:key]
opts[:rhs_key] || key
end
defp access_value(term, key)
when key in [nil, []] do
term
end
defp access_value(term, key)
when not is_list(key) do
access_value(term, [key])
end
defp access_value(term, key) do
get_in(term, key)
end
@doc ~S"""
Executes binary search in list `enumerable` by passing list elements
to the `function` for comparison, assuming the list is sorted.
### Options
- `key` or `lhs_key`: Path of the value to be compared,
by being passed to `function` while iteration.
See `Kernel.get_in/2`
### Examples
iex> Bisect.search([1, 2, 4], fn x ->
...> x == 4
...> end)
2
iex> Bisect.search([1, 2, 4, 8], fn x ->
...> x == 7
...> end)
4
iex> Bisect.search([1, 2], fn x ->
...> x >= 1
...> end)
0
iex> Bisect.search([1, 2], fn x ->
...> x > 1
...> end)
1
iex> Bisect.search([2, 1], fn x ->
...> x < 0
...> end)
2
iex> Bisect.search(
...> [%{value: 1}, %{value: 2}],
...> fn x ->
...> x > 1
...> end,
...> lhs_key: [:value]
...> )
1
"""
@doc since: "0.4.0"
@spec search(Enum.t(), (term -> boolean), keyword) :: non_neg_integer
def search(enumerable, function, opts \\ []) do
do_search(enumerable, function, 0, length(enumerable), opts)
end
defp do_search(enumerable, function, low, high, opts)
when low < high do
middle = (low + high) >>> 0x1
lhs = Enum.at(enumerable, middle)
lhs_key = extract_key(:lhs, opts)
lhs_value = access_value(lhs, lhs_key)
case apply(function, [lhs_value]) do
true ->
do_search(enumerable, function, low, middle, opts)
false ->
do_search(enumerable, function, middle + 1, high, opts)
end
end
defp do_search(_enumerable, _function, low, _high, _opts) do
low
end
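# Note: `Enum.at/2` walks the list on every probe, so each bisection step
# is O(n) on (linked) lists; converting large inputs to a tuple and reading
# with `elem/2` would preserve the textbook O(log n) behaviour.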
@doc ~S"""
Returns the leftmost index where to insert `term` in list `enumerable`,
assuming the list is sorted.
### Examples
iex> Bisect.bisect_left([1, 2], 1)
0
iex> Bisect.bisect_left([1, 2], 2)
1
iex> Bisect.bisect_left([1, 2], 4)
2
### Options
- `rhs_key`: Path of the value of `term` to be compared.
See `Kernel.get_in/2`
See `Bisect.search/3` for more options.
"""
@doc since: "0.1.0"
@spec bisect_left(Enum.t(), term, keyword) :: non_neg_integer
def bisect_left(enumerable, term, opts \\ []) do
rhs_key = extract_key(:rhs, opts)
rhs_value = access_value(term, rhs_key)
search(
enumerable,
fn x ->
x >= rhs_value
end,
opts
)
end
@doc ~S"""
Returns the rightmost index where to insert `term` in list `enumerable`,
assuming the list is sorted.
### Examples
iex> Bisect.bisect_right([1, 2], 1)
1
iex> Bisect.bisect_right([1, 2, 2, 4], 4)
4
iex> Bisect.bisect_right([2, 4], 0)
0
### Options
- `rhs_key`: Path of the value of `term` to be compared.
See `Kernel.get_in/2`
See `Bisect.search/3` for more options.
"""
@doc since: "0.1.0"
@spec bisect_right(Enum.t(), term, keyword) :: non_neg_integer
def bisect_right(enumerable, term, opts \\ []) do
rhs_key = extract_key(:rhs, opts)
rhs_value = access_value(term, rhs_key)
search(
enumerable,
fn x ->
x > rhs_value
end,
opts
)
end
@doc ~S"""
Inserts `term` into list `enumerable`, and keeps it sorted
assuming the list is already sorted.
If `term` is already in `enumerable`, inserts it to the left of the leftmost `term`.
### Examples
iex> Bisect.insort_left([1, 2], 1)
[1, 1, 2]
iex> Bisect.insort_left([1, 2, 2, 4], 4)
[1, 2, 2, 4, 4]
iex> Bisect.insort_left([2, 4], 0)
[0, 2, 4]
iex> Bisect.insort_left(
...> [%{value: 2}, %{value: 4}],
...> %{value: 0},
...> key: [:value]
...> )
[%{value: 0}, %{value: 2}, %{value: 4}]
### Options
See `Bisect.bisect_left/3`
"""
@doc since: "0.1.0"
@spec insort_left(Enum.t(), term, keyword) :: Enum.t()
def insort_left(enumerable, term, opts \\ []) do
index = bisect_left(enumerable, term, opts)
List.insert_at(enumerable, index, term)
end
@doc ~S"""
Inserts `term` into list `enumerable`, and keeps it sorted
assuming the list is already sorted.
If `term` is already in `enumerable`, inserts it to the right of the rightmost `term`.
### Examples
iex> Bisect.insort_right([1, 2], 1)
[1, 1, 2]
iex> Bisect.insort_right([1, 2, 2, 4], 4)
[1, 2, 2, 4, 4]
iex> Bisect.insort_right([2, 4], 0)
[0, 2, 4]
iex> Bisect.insort_right(
...> [%{value: 2}, %{value: 4}],
...> %{value: 0},
...> key: [:value]
...> )
[%{value: 0}, %{value: 2}, %{value: 4}]
### Options
See `Bisect.bisect_right/3`
"""
@doc since: "0.1.0"
@spec insort_right(Enum.t(), term, keyword) :: Enum.t()
def insort_right(enumerable, term, opts \\ []) do
index = bisect_right(enumerable, term, opts)
List.insert_at(enumerable, index, term)
end
end | lib/bisect.ex | 0.873404 | 0.565179 | bisect.ex | starcoder |
defmodule Scrivener.Headers do
@moduledoc """
Helpers for paginating API responses with [Scrivener](https://github.com/drewolson/scrivener) and HTTP headers. Implements [RFC-5988](https://mnot.github.io/I-D/rfc5988bis/), the proposed standard for Web linking.
Use `paginate/2` to set the pagination headers:
def index(conn, params) do
page = MyApp.Person
|> where([p], p.age > 30)
|> order_by([p], desc: p.age)
|> preload(:friends)
|> MyApp.Repo.paginate(params)
conn
|> Scrivener.Headers.paginate(page)
|> render("index.json", people: page.entries)
end
"""
import Plug.Conn, only: [put_resp_header: 3, get_req_header: 2]
@default_header_keys %{
link: "link",
total: "total",
per_page: "per-page",
total_pages: "total-pages",
page_number: "page-number"
}
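# The header names above can be overridden per call, e.g.:
#
#   Scrivener.Headers.paginate(conn, page,
#     header_keys: [total: "x-total", per_page: "x-per-page"]
#   )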
@doc """
Add HTTP headers for a `Scrivener.Page`.
"""
@spec paginate(Plug.Conn.t(), Scrivener.Page.t(), opts :: keyword()) :: Plug.Conn.t()
def paginate(conn, page, opts \\ [])
def paginate(conn, page, opts) do
use_x_forwarded = Keyword.get(opts, :use_x_forwarded, false)
header_keys = generate_header_keys(opts)
uri = generate_uri(conn, use_x_forwarded)
do_paginate(conn, page, uri, header_keys)
end
defp generate_uri(conn, true) do
%URI{
scheme: get_x_forwarded_or_conn(conn, :scheme, "proto", &Atom.to_string/1),
host: get_x_forwarded_or_conn(conn, :host, "host"),
port: get_x_forwarded_or_conn(conn, :port, "port", & &1, &String.to_integer/1),
path: conn.request_path,
query: conn.query_string
}
end
defp generate_uri(conn, false) do
%URI{
scheme: Atom.to_string(conn.scheme),
host: conn.host,
port: conn.port,
path: conn.request_path,
query: conn.query_string
}
end
defp do_paginate(conn, page, uri, header_keys) do
conn
|> put_resp_header(header_keys.link, build_link_header(uri, page))
|> put_resp_header(header_keys.total, Integer.to_string(page.total_entries))
|> put_resp_header(header_keys.per_page, Integer.to_string(page.page_size))
|> put_resp_header(header_keys.total_pages, Integer.to_string(page.total_pages))
|> put_resp_header(header_keys.page_number, Integer.to_string(page.page_number))
end
defp get_x_forwarded_or_conn(
conn,
conn_prop,
header_name,
parse_conn \\ & &1,
parse_header \\ & &1
) do
case get_req_header(conn, "x-forwarded-#{header_name}") do
[] -> conn |> Map.get(conn_prop) |> parse_conn.()
[value | _] -> parse_header.(value)
end
end
@spec build_link_header(URI.t(), Scrivener.Page.t()) :: String.t()
defp build_link_header(uri, page) do
[link_str(uri, 1, "first"), link_str(uri, page.total_pages, "last")]
|> maybe_add_prev(uri, page.page_number, page.total_pages)
|> maybe_add_next(uri, page.page_number, page.total_pages)
|> Enum.join(", ")
end
defp link_str(%{query: req_query} = uri, page_number, rel) do
query =
req_query
|> URI.decode_query()
|> Map.put("page", page_number)
|> URI.encode_query()
uri_str =
%URI{uri | query: query}
|> URI.to_string()
~s(<#{uri_str}>; rel="#{rel}")
end
defp maybe_add_prev(links, uri, page_number, total_pages)
when 1 < page_number and page_number <= total_pages do
[link_str(uri, page_number - 1, "prev") | links]
end
defp maybe_add_prev(links, _uri, _page_number, _total_pages) do
links
end
defp maybe_add_next(links, uri, page_number, total_pages)
when 1 <= page_number and page_number < total_pages do
[link_str(uri, page_number + 1, "next") | links]
end
defp maybe_add_next(links, _uri, _page_number, _total_pages) do
links
end
defp generate_header_keys(opts) do
case Keyword.get(opts, :header_keys) do
nil -> @default_header_keys
header_keys -> Map.merge(@default_header_keys, Map.new(header_keys))
end
end
end | lib/scrivener/headers.ex | 0.788013 | 0.447762 | headers.ex | starcoder |
defmodule Sanbase.Signal.FileHandler do
@moduledoc false
defmodule Helper do
import Sanbase.DateTimeUtils, only: [interval_to_str: 1]
alias Sanbase.TemplateEngine
require Sanbase.Break, as: Break
# The selected field is required by default
# A missing required field will result in a compile time error
def name_to_field_map(map, field, opts \\ []) do
Break.if_kw_invalid?(opts, valid_keys: [:transform_fn, :required?])
transform_fn = Keyword.get(opts, :transform_fn, &Function.identity/1)
required? = Keyword.get(opts, :required?, true)
map
|> Enum.into(%{}, fn
%{"name" => name, ^field => value} ->
{name, transform_fn.(value)}
%{"name" => name} ->
if required? do
Break.break("The field \"#{field}\" in the #{Jason.encode!(name)} signal is required")
else
{name, nil}
end
end)
end
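# For example (hypothetical signal entry):
#
#   name_to_field_map([%{"name" => "nvt", "table" => "daily_metrics"}], "table")
#   #=> %{"nvt" => "daily_metrics"}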
def fields_to_name_map(map, fields) do
map
|> Enum.into(
%{},
fn %{"name" => name} = elem ->
{Map.take(elem, fields), name}
end
)
end
def resolve_timebound_signals(signal_map, timebound_values) do
%{
"name" => name,
"signal" => signal,
"human_readable_name" => human_readable_name
} = signal_map
timebound_values
|> Enum.map(fn timebound ->
%{
signal_map
| "name" => TemplateEngine.run(name, %{timebound: timebound}),
"signal" => TemplateEngine.run(signal, %{timebound: timebound}),
"human_readable_name" =>
TemplateEngine.run(
human_readable_name,
%{timebound_human_readable: interval_to_str(timebound)}
)
}
end)
end
def expand_timebound_signals(signals_json_pre_timebound_expand) do
Enum.flat_map(
signals_json_pre_timebound_expand,
fn signal ->
case Map.get(signal, "timebound") do
nil ->
[signal]
timebound_values ->
resolve_timebound_signals(signal, timebound_values)
end
end
)
end
def atomize_access_level_value(access) when is_binary(access),
do: String.to_existing_atom(access)
def atomize_access_level_value(access) when is_map(access) do
Enum.into(access, %{}, fn {k, v} -> {k, String.to_existing_atom(v)} end)
end
def resolve_access_level(access) when is_atom(access), do: access
def resolve_access_level(access) when is_map(access) do
case access do
%{"historical" => :free, "realtime" => :free} -> :free
_ -> :restricted
end
end
end
# Structure
# This JSON file contains a list of signals available in ClickHouse.
# For every signal we have:
# - signal - the name of the signal
# - access - whether the signal is completely free or some time restrictions
# should be applied
# - aggregation - the default aggregation that is applied to combine the values
# if the data is queried with interval bigger than 'min_interval'
# - min_interval - the minimal interval the data is available for
# - table - the table name in ClickHouse where the signal is stored
@signals_file "signal_files/available_signals.json"
@external_resource available_signals_file = Path.join(__DIR__, @signals_file)
@signals_json_pre_timebound_expand File.read!(available_signals_file) |> Jason.decode!()
@signals_json Helper.expand_timebound_signals(@signals_json_pre_timebound_expand)
@aggregations [:none] ++ Sanbase.Metric.SqlQuery.Helper.aggregations()
@signal_map Helper.name_to_field_map(@signals_json, "signal", required?: true)
@name_to_signal_map @signal_map
@signal_to_name_map Map.new(@name_to_signal_map, fn {k, v} -> {v, k} end)
@access_map Helper.name_to_field_map(@signals_json, "access",
transform_fn: &Helper.atomize_access_level_value/1
)
@table_map Helper.name_to_field_map(@signals_json, "table", required?: true)
@aggregation_map Helper.name_to_field_map(@signals_json, "aggregation",
transform_fn: &String.to_atom/1
)
@min_interval_map Helper.name_to_field_map(@signals_json, "min_interval", required?: true)
@human_readable_name_map Helper.name_to_field_map(@signals_json, "human_readable_name")
@data_type_map Helper.name_to_field_map(@signals_json, "data_type",
transform_fn: &String.to_atom/1
)
@signals_list @signals_json |> Enum.map(fn %{"name" => name} -> name end)
@signals_mapset MapSet.new(@signals_list)
@min_plan_map Helper.name_to_field_map(@signals_json, "min_plan",
transform_fn: fn plan_map ->
Enum.into(plan_map, %{}, fn {k, v} -> {k, String.to_atom(v)} end)
end
)
@signals_data_type_map Helper.name_to_field_map(@signals_json, "data_type",
transform_fn: &String.to_atom/1
)
@selectors_map Helper.name_to_field_map(@signals_json, "selectors",
transform_fn: fn list ->
Enum.map(list, &String.to_atom/1)
end
)
require Sanbase.Break, as: Break

Enum.group_by(
@signals_json_pre_timebound_expand,
fn signal -> {signal["signal"], signal["data_type"]} end
)
|> Map.values()
|> Enum.filter(fn group -> Enum.count(group) > 1 end)
|> Enum.each(fn duplicate_signals ->
Break.break("""
Duplicate signals found: #{inspect(duplicate_signals)}
""")
end)
def aggregations(), do: @aggregations
def aggregation_map(), do: @aggregation_map
def access_map(), do: @access_map
def signal_map(), do: @signal_map
def signals_mapset(), do: @signals_mapset
def min_interval_map(), do: @min_interval_map
def human_readable_name_map(), do: @human_readable_name_map
def table_map(), do: @table_map
def data_type_map(), do: @data_type_map
def min_plan_map(), do: @min_plan_map
def selectors_map(), do: @selectors_map
def name_to_signal_map(), do: @name_to_signal_map
def signal_to_name_map(), do: @signal_to_name_map
def signals_with_access(level) when level in [:free, :restricted] do
@access_map
|> Enum.filter(fn {_signal, restrictions} ->
Helper.resolve_access_level(restrictions) === level
end)
|> Enum.map(&elem(&1, 0))
end
def signals_with_data_type(type) do
@signals_data_type_map
|> Enum.filter(fn {_signal, data_type} -> data_type == type end)
|> Enum.map(&elem(&1, 0))
end
end | lib/sanbase/signal/file_handler.ex | 0.639398 | 0.41567 | file_handler.ex | starcoder |
defmodule MyEnum do
# require MyMacros
@doc """
MyEnum.all?(list, pred) returns true if pred is true for every element in list, false otherwise
## Examples
iex> MyEnum.all? [1, 2, 3, 4, 5], &(&1 > 0)
true
iex> MyEnum.all? [1, 2, 3], &(&1 < 3)
false
iex> MyEnum.all?(["Abracadabra", "Tumetai", "nokogiri" ], &(String.length(&1) > 3))
true
"""
def all?([], _), do: true
def all?([h | t], pred), do: pred.(h) && all?(t, pred)
@doc """
MyEnum.any?(list, pred) returns false if pred is false for every element of list and true otherwise
## Examples
iex> MyEnum.any? [1, 2, 3, 4, 5], &(&1 > 0)
true
iex> MyEnum.any? [1, 2, 3], &(&1 > 4)
false
iex> MyEnum.any?(["Abracadabra", "Tumetai", "nokogiri" ], &(String.length(&1) > 3))
true
"""
def any?([], _), do: false
def any?([h | t], pred), do: pred.(h) || any?(t, pred)
@doc """
MyEnum.filter(list, filt) returns the list of elements satisfying filt
## Examples
iex> MyEnum.filter [1, 2, 3, 4, 5], &(&1 > 0)
[1, 2, 3, 4, 5]
iex> MyEnum.filter [1, 2, 3, 4, 5], &(&1 < 0)
[]
iex> MyEnum.filter [1, 2, 3, 4, 5], &(&1 == 10)
[]
iex> MyEnum.filter [], &(&1 >= 0)
[]
iex> MyEnum.filter [1, 2, 3, 4, 5], fn _ -> true end
[1, 2, 3, 4, 5]
"""
def filter(list, filt), do: _filter(list, [], filt)
defp _filter(list, lr, filt, way \\ :in)
defp _filter([], lr, _, _), do: lr
# NO, it won't work, how do we generate functions iff they are not already generated
# defp _filter(list, lr, filt, way) do
# MyMacros.gen_defp("_filter_#{way}", func, if kw == :in, do: "" else: "not")
# case way do
# :in -> _filter_in(list, lr, filt)
# :out -> _filter_out(list, lr, filt)
# end
# end
defp _filter([h | t], lr, filt, way) do
case way do
:in -> _filter_in([h | t], lr, filt)
:out -> _filter_out([h | t], lr, filt)
end
end
def _filter_in([], lr, _), do: reverse(lr)
def _filter_in([h | t], lr, filt) do
case filt.(h) do
val when val in [false, nil] -> _filter_in(t, lr, filt)
_ -> _filter_in(t, [h | lr], filt)
end
end
def _filter_out([], lr, _), do: reverse(lr)
def _filter_out([h | t], lr, filt) do
case !filt.(h) do
val when val in [false, nil] -> _filter_out(t, lr, filt)
_ -> _filter_out(t, [h | lr], filt)
end
end
# defp _filter([h | t], lr, filt, way) do
# bool = MyMacros.on(way, :in, filt.(h))
# case bool do
# val when val in [false, nil] -> _filter(t, lr, filt, way)
# _ -> _filter(t, [h | lr], filt, way) # truthy
# end
# end
@doc """
MyEnum.reject(list, filt) returns a list of the elements that do not satisfy filt (the converse of MyEnum.filter)
## Examples
iex> MyEnum.reject [1, 2, 3, 4, 5], &(&1 > 0)
[]
iex> MyEnum.reject [1, -2, 3, -4, 5], &(&1 < 0)
[1, 3, 5]
iex> MyEnum.reject [1, 2, 3, 4, 5], fn _ -> false end
[1, 2, 3, 4, 5]
iex> MyEnum.reject [1, 2, 3, 4, 5], &(&1 == 10)
[1, 2, 3, 4, 5]
"""
def reject(list, filt), do: _filter(list, [], filt, :out)
@doc """
MyEnum.split(list, n) returns a list of lists whose lengths are <= n
## Examples
iex> MyEnum.split([1, 2, 3, 4, 5], 3)
[[1, 2, 3], [4, 5]]
iex> MyEnum.split([], 3)
[]
iex> MyEnum.split([1, 2, 3, 4, 5], -2)
[]
iex> MyEnum.split([1, 2, 3, 4], 5)
[[1, 2, 3, 4]]
"""
def split(list, n), do: _split(list, [[]], n)
defp _split([], _, n) when n < 0 do
[]
end
defp _split([], lr, _) do
if lr == [[]] do
[]
else
reverse(lr, true)
end
end
defp _split([h | t], [h1 | t1], n) do
if (Kernel.length(h1) + 1) <= n do
_split(t, [ [h | h1] | t1], n)
else
_split(t, [[h] | [h1 | t1]], n)
end
end
# TODO: each
@doc """
MyEnum.take(list, n) takes the first n elements of list; returns [] when list has fewer than n elements
## Examples
iex> MyEnum.take([], 3)
[]
iex> MyEnum.take([1, 2], 3)
[]
iex> MyEnum.take([1, 2, 3, 4, 5, 6], 3)
[1, 2, 3]
iex> MyEnum.take([2, 4, 6, 8], 4)
[2, 4, 6, 8]
iex> MyEnum.take([0, 2, 4, 6, 8, 10], 4)
[0, 2, 4, 6]
iex> MyEnum.take([0, 2, 4, 6, 8, 10], -4)
[]
iex> MyEnum.take([0, 2, 4, 6, 8, 10], 0)
[]
iex> MyEnum.take([0, 2, 4, 6, 8, 10], 1)
[0]
"""
def take(list, n), do: _take(list, [], n, 0)
defp _take(_, _, n, _) when n < 0 do
[]
end
defp _take(_, lr, n, n), do: reverse(lr)
defp _take([], _, n, m) when m < n do
[]
end
defp _take([h | t], lr, n, m), do: _take(t, [h | lr], n, m + 1)
@doc """
MyEnum.flatten(list) returns a one-level list by flattening all nested lists from the original list
## Examples
iex> MyEnum.flatten([[1, 2], [3, [4, 6], 5]])
[1, 2, 3, 4, 6, 5]
iex> MyEnum.flatten([[1, 2], [3, 5]])
[1, 2, 3, 5]
iex> MyEnum.flatten([2, 1, 3, 5])
[2, 1, 3, 5]
iex> MyEnum.flatten([[1, 2], [3, 5]])
[1, 2, 3, 5]
iex> MyEnum.flatten([])
[]
iex> MyEnum.flatten([[[[]]]])
[]
iex> MyEnum.flatten([1, [2, [3, [5, [4, [6]], 8], 7], 9], 10])
[1, 2, 3, 5, 4, 6, 8, 7, 9, 10]
"""
def flatten(list), do: _flatten(list, [])
defp _flatten([], lr), do: reverse(lr)
defp _flatten([h | t], lr) do
if is_list(h) do
_flatten(t, reverse(_flatten(h, lr)))
else
_flatten(t, [h | lr])
end
end
@doc """
MyEnum.reverse(list) returns a list whose elements (at the first level) are in reverse order (from the original list)
## Examples
iex> MyEnum.reverse [1, 2, 3, 4, 5]
[5, 4, 3, 2, 1]
iex> MyEnum.reverse [[1, 2], [3, 4], [5]]
[[5], [3, 4], [1, 2]]
iex> MyEnum.reverse([[1, 2], [3, 4], [5]], true)
[[5], [4, 3], [2, 1]]
iex> MyEnum.reverse([[1, 2], [3, 4], 5, [6, 7]], true)
[[7, 6], 5, [4, 3], [2, 1]]
iex> MyEnum.reverse [1]
[1]
iex> MyEnum.reverse []
[]
"""
def reverse(list, nested \\ false)
def reverse(list, nested), do: _reverse(list, nested, [])
defp _reverse([], _, lr), do: lr
defp _reverse([h | t], nested, lr) do
case nested do
true ->
if is_list(h) do
_reverse(t, nested, [_reverse(h, nested, []) | lr])
else
_reverse(t, nested, [h | lr])
end
false -> _reverse(t, nested, [h | lr])
end
end
end | my_enum/lib/my_enum.ex | 0.539711 | 0.412294 | my_enum.ex | starcoder |
defmodule Edeliver.Relup.RunnableInstruction do
@moduledoc """
This module can be used to provide custom instructions executed during the upgrade.
They can be used in implementations of the `Edeliver.Relup.Modification` behaviours.
A runnable instruction must implement a `c:Edeliver.Relup.RunnableInstruction.run/1` function which will be executed
during the upgrade on the nodes.
Example:
defmodule Acme.Relup.PingNodeInstruction do
use Edeliver.Relup.RunnableInstruction
def modify_relup(instructions = %Instructions{up_instructions: up_instructions}, _config = %{}) do
node_name = :"node@host"
%{instructions|
up_instructions: [call_this([node_name]) | instructions.up_instructions],
down_instructions: [call_this([node_name]) | instructions.down_instructions]
}
end
# executed during hot code upgrade from relup file
def run(_options = [node_name]) do
:net_adm.ping(node_name)
end
# actually implemented already in this module
def call_this(arguments) do
# creates a relup instruction to call `run/1` of this module
{:apply, {__MODULE__, :run, arguments}}
end
end
# using the instruction
defmodule Acme.Relup.Modification do
use Edeliver.Relup.Modification
def modify_relup(instructions = %Instructions{}, config = %{}) do
instructions |> Edeliver.Relup.DefaultModification.modify_relup(config) # use default modifications
|> Acme.Relup.PingNodeInstruction.modify_relup(config) # apply also custom instructions
end
end
"""
require Logger
import Edeliver.Relup.ShiftInstruction, only: [
ensure_module_loaded_before_first_runnable_instructions: 3,
ensure_module_unloaded_after_last_runnable_instruction: 3,
]
alias Edeliver.Relup.Instructions
@doc """
The function to run during hot code upgrade on nodes.
If it throws an error before the `point_of_no_return` the
upgrade is aborted. If it throws an error and was executed
after that point, the release is restarted
"""
@callback run(options::[term]) :: :ok
@doc """
Returns a function which inserts the relup instruction
that calls the `c:Edeliver.Relup.RunnableInstruction.run/1` function of this module.
Default is inserting it at the end of the instructions
"""
@callback insert_where() :: ((%Edeliver.Relup.Instructions{}, Edeliver.Relup.Instruction.instruction) -> %Edeliver.Relup.Instructions{})
@doc """
Returns the arguments which will be passed the `c:Edeliver.Relup.RunnableInstruction.run/1` function during the upgrade.
Default is an empty list.
"""
@callback arguments(instructions::%Edeliver.Relup.Instructions{}, config::Edeliver.Relup.Config.t) :: [term]
@doc """
Returns a list of module names which implement the behaviour `Edeliver.Relup.RunnableInstruction`
and are used / referenced by this runnable instruction. These modules must be loaded before this instruction
is executed for upgrades and unloaded after this instruction for downgrades. Default is an empty list.
"""
@callback dependencies() :: [instruction_module::atom]
@doc """
Logs the message of the given type on the node
which executes the upgrade and displays it as output of
the `$APP/bin/$APP upgrade $RELEASE` command. The message is
prefixed with a string derived from the message type.
"""
@spec log_in_upgrade_script(type:: :error|:warning|:info|:debug, message::String.t) :: no_return
def log_in_upgrade_script(type, message) do
message = String.to_char_list(message)
prefix = case type do
:error -> '---> X '
:warning -> '---> ! '
:info -> '---> '
_ -> '----> ' # debug
end
format_in_upgrade_script('~s~s~n', [prefix, message])
end
@doc """
Formats and prints the message on the node
running the upgrade script which was started by the
`$APP/bin/$APP upgrade $RELEASE` command.
"""
@spec format_in_upgrade_script(format::char_list, arguments::[term]) :: no_return
def format_in_upgrade_script(format, arguments) do
:erlang.nodes |> Enum.filter(fn node ->
Regex.match?(~r/upgrader_\d+/, Atom.to_string(node))
end) |> Enum.each(fn node ->
:rpc.cast(node, :io, :format, [:user, format, arguments])
end)
end
@doc """
Logs an error using the `Logger` on the running node which is upgraded.
In addition the same error message is logged on the node which executes
the upgrade and is displayed as output of the
`$APP/bin/$APP upgrade $RELEASE` command.
"""
@spec error(message::String.t) :: no_return
def error(message) do
Logger.error message
log_in_upgrade_script(:error, message)
end
@doc """
Logs a warning using the `Logger` on the running node which is upgraded.
In addition the same warning message is logged on the node which executes
the upgrade and is displayed as output of the
`$APP/bin/$APP upgrade $RELEASE` command.
"""
@spec warn(message::String.t) :: no_return
def warn(message) do
Logger.warn message
log_in_upgrade_script(:warning, message)
end
@doc """
Logs an info message using the `Logger` on the running node which is upgraded.
In addition the same info message is logged on the node which executes
the upgrade and is displayed as output of the
`$APP/bin/$APP upgrade $RELEASE` command.
"""
@spec info(message::String.t) :: no_return
def info(message) do
Logger.info message
log_in_upgrade_script(:info, message)
end
@doc """
Logs a debug message using the `Logger` on the running node which is upgraded.
In addition the same debug message is logged on the node which executes
the upgrade and is displayed as output of the
`$APP/bin/$APP upgrade $RELEASE` command.
"""
@spec debug(message::String.t) :: no_return
def debug(message) do
Logger.debug message
log_in_upgrade_script(:debug, message)
end
@doc """
Ensures that all `Edeliver.Relup.RunnableInstruction` modules used / referenced by this instruction
and returned by the `c:Edeliver.Relup.RunnableInstruction.dependencies/0` callback are loaded before this instruction is executed
during the upgrade.
"""
@spec ensure_dependencies_loaded_before_instruction_for_upgrade(instructions::Instructions.t, runnable_instruction::{:apply, {module::atom, :run, arguments::[term]}}, dependencies::[instruction_module::atom]) :: Instructions.t
def ensure_dependencies_loaded_before_instruction_for_upgrade(instructions = %Instructions{}, call_this_instruction, dependencies) do
dependencies |> Enum.reduce(instructions, fn(dependency, instructions_acc = %Instructions{up_instructions: up_instructions}) ->
%{instructions_acc| up_instructions: ensure_module_loaded_before_first_runnable_instructions(up_instructions, call_this_instruction, dependency)}
end)
end
@doc """
Ensures that all `Edeliver.Relup.RunnableInstruction` modules used / referenced by this instruction
and returned by the `c:Edeliver.Relup.RunnableInstruction.dependencies/0` callback are unloaded after this instruction is executed
during the downgrade.
"""
@spec ensure_dependencies_unloaded_after_instruction_for_downgrade(instructions::Instructions.t, runnable_instruction::{:apply, {module::atom, :run, arguments::[term]}}, dependencies::[instruction_module::atom]) :: Instructions.t
def ensure_dependencies_unloaded_after_instruction_for_downgrade(instructions = %Instructions{}, call_this_instruction, dependencies) do
dependencies |> Enum.reduce(instructions, fn(dependency, instructions_acc = %Instructions{down_instructions: down_instructions}) ->
%{instructions_acc| down_instructions: ensure_module_unloaded_after_last_runnable_instruction(down_instructions, call_this_instruction, dependency)}
end)
end
@doc """
Assumes that the pattern matches or throws an error with the given error message.
The error message is logged as error to the logfile
using the `Logger` and displayed as error output by the
`$APP/bin/$APP upgrade $RELEASE` task using the
`$APP/ebin/install_upgrade.escript` script. If the pattern matches
the variables from the matching are assigned.
"""
defmacro assume({:=, _, [left, right]} = assertion, error_message) do
code = Macro.escape(assertion)
left = Macro.expand(left, __CALLER__)
vars = collect_vars_from_pattern(left)
quote do
right = unquote(right)
expr = unquote(code)
unquote(vars) =
case right do
unquote(left) ->
unquote(vars)
_ ->
error unquote(error_message)
# error is shown as erlang term in the upgrade script
# `$APP/ebin/install_upgrade.escript`. so use an erlang
# string as error message
throw {:error, String.to_char_list(unquote(error_message))}
end
right
end
end
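# For example, inside a `run/1` implementation (hypothetical usage):
#
#   assume {:ok, _} = Application.ensure_all_started(:my_app),
#     "failed to start my_app during upgrade"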
# Used by the assume macro for pattern assignment
defp collect_vars_from_pattern(expr) do
{_, vars} =
Macro.prewalk(expr, [], fn
{:::, _, [left, _]}, acc ->
{[left], acc}
{skip, _, [_]}, acc when skip in [:^, :@] ->
{:ok, acc}
{:_, _, context}, acc when is_atom(context) ->
{:ok, acc}
{name, _, context}, acc when is_atom(name) and is_atom(context) ->
{:ok, [{name, [generated: true], context}|acc]}
node, acc ->
{node, acc}
end)
Enum.uniq(vars)
end
@doc false
defmacro __using__(_opts) do
quote do
use Edeliver.Relup.Instruction
import Edeliver.Relup.RunnableInstruction
@behaviour Edeliver.Relup.RunnableInstruction
alias Edeliver.Relup.Instructions
require Logger
def modify_relup(instructions = %Instructions{}, config = %{}) do
call_this_instruction = call_this(arguments(instructions, config))
insert_where_fun = insert_where()
instructions |> insert_where_fun.(call_this_instruction)
|> ensure_module_loaded_before_instruction(call_this_instruction, __MODULE__)
|> ensure_dependencies_loaded_before_instruction_for_upgrade(call_this_instruction, dependencies())
|> ensure_dependencies_unloaded_after_instruction_for_downgrade(call_this_instruction, dependencies())
end
@spec arguments(%Edeliver.Relup.Instructions{}, Edeliver.Relup.Config.t) :: term
def arguments(%Edeliver.Relup.Instructions{}, %{}), do: []
@spec insert_where()::Instruction.insert_fun
def insert_where, do: &append/2
@spec dependencies() :: [instruction_module::atom]
def dependencies, do: []
defoverridable [modify_relup: 2, insert_where: 0, arguments: 2, dependencies: 0]
@doc """
Calls the `run/1` function of this module
from the relup file during hot code upgrade
"""
@spec call_this(arguments::[term]) :: Instruction.instruction|Instruction.instructions
def call_this(arguments \\ []) do
{:apply, {__MODULE__, :run, [arguments]}}
end
end # quote
end # defmacro __using__
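# Example of a custom runnable instruction built on this module (an
# illustrative sketch; the module name and log message are hypothetical,
# while `run/1` and `debug/1` come from the behaviour and imports above):
#
#     defmodule Acme.Relup.LogUpgradeInstruction do
#       use Edeliver.Relup.RunnableInstruction
#
#       # Executed on the upgraded node when the relup instruction runs.
#       def run(_arguments) do
#         debug "custom upgrade instruction was executed"
#       end
#     end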
end | lib/edeliver/relup/runnable_instruction.ex | 0.848863 | 0.461927 | runnable_instruction.ex | starcoder |
defmodule Cldr.Number.Backend.Rbnf do
@moduledoc false
def define_number_modules(config) do
backend = config.backend
root_locale = Cldr.Config.root_locale_name()
quote location: :keep do
defmodule Rbnf.NumberSystem do
@moduledoc false
if Cldr.Config.include_module_docs?(unquote(config.generate_docs)) do
@moduledoc """
Functions to implement the number system rule-based-number-format rules of CLDR.
These rules are defined only on the "und" locale and represent specialised
number formatting.
The standard public API for RBNF is via the `Cldr.Number.to_string/2` function.
The functions on this module are defined at compile time based upon the RBNF rules
defined in the Unicode CLDR data repository. Available rules are identified by:
iex> #{inspect(__MODULE__)}.rule_sets(#{inspect(unquote(root_locale))})
[:tamil, :roman_upper, :roman_lower, :hebrew_item,
:hebrew, :greek_upper, :greek_lower, :georgian,
:ethiopic, :cyrillic_lower, :armenian_upper, :armenian_lower]
A rule can then be invoked on an available rule_set. For example
iex> #{inspect(__MODULE__)}.roman_upper(123, #{inspect(unquote(root_locale))})
"CXXIII"
This particular call is equivalent to the call through the public API of:
iex> #{inspect(unquote(backend))}.Number.to_string(123, format: :roman)
{:ok, "CXXIII"}
"""
end
import Kernel, except: [and: 2]
use Cldr.Rbnf.Processor, backend: unquote(backend)
define_rules(:NumberingSystemRules, unquote(backend), __ENV__)
end
defmodule Rbnf.Spellout do
@moduledoc false
if Cldr.Config.include_module_docs?(unquote(config.generate_docs)) do
@moduledoc """
Functions to implement the spellout rule-based-number-format rules of CLDR.
As CLDR notes, the data is incomplete or non-existent for many languages. It
is, however, considered complete for English.
The standard public API for RBNF is via the `Cldr.Number.to_string/2` function.
The functions on this module are defined at compile time based upon the RBNF rules
defined in the Unicode CLDR data repository. Available rules are identified by:
iex> #{inspect(__MODULE__)}.rule_sets("en")
[:spellout_ordinal_verbose, :spellout_ordinal, :spellout_numbering_year,
:spellout_numbering_verbose, :spellout_numbering, :spellout_cardinal_verbose,
:spellout_cardinal]
A rule can then be invoked on an available rule_set. For example:
iex> #{inspect(__MODULE__)}.spellout_ordinal(123, "en")
"one hundred twenty-third"
This call is equivalent to the call through the public API of:
iex> #{inspect(unquote(backend))}.Number.to_string(123, format: :spellout)
{:ok, "one hundred twenty-three"}
"""
end
import Kernel, except: [and: 2]
use Cldr.Rbnf.Processor, backend: unquote(backend)
define_rules(:SpelloutRules, unquote(backend), __ENV__)
end
defmodule Rbnf.Ordinal do
@moduledoc false
if Cldr.Config.include_module_docs?(unquote(config.generate_docs)) do
@moduledoc """
Functions to implement the ordinal rule-based-number-format rules of CLDR.
As CLDR notes, the data is incomplete or non-existent for many languages. It
is, however, considered complete for English.
The standard public API for RBNF is via the `Cldr.Number.to_string/2` function.
The functions on this module are defined at compile time based upon the RBNF rules
defined in the Unicode CLDR data repository. Available rules are identified by:
iex> #{inspect(__MODULE__)}.rule_sets(:en)
[:digits_ordinal]
iex> #{inspect(__MODULE__)}.rule_sets("fr")
[
:digits_ordinal_masculine_plural,
:digits_ordinal_masculine,
:digits_ordinal_feminine_plural,
:digits_ordinal_feminine,
:digits_ordinal
]
A rule can then be invoked on an available rule_set. For example
iex> #{inspect(__MODULE__)}.digits_ordinal(123, :en)
"123rd"
This call is equivalent to the call through the public API of:
iex> #{inspect(unquote(backend))}.Number.to_string(123, format: :ordinal)
{:ok, "123rd"}
"""
end
import Kernel, except: [and: 2]
use Cldr.Rbnf.Processor, backend: unquote(backend)
define_rules(:OrdinalRules, unquote(backend), __ENV__)
end
end
end
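# Example: once a backend module is defined, the RBNF modules above are
# generated at compile time and reachable through the backend
# (illustrative; `MyApp.Cldr` is a hypothetical backend module):
#
#     defmodule MyApp.Cldr do
#       use Cldr, locales: ["en"], default_locale: "en"
#     end
#
#     MyApp.Cldr.Rbnf.Spellout.spellout_cardinal(123, "en")
#     #=> "one hundred twenty-three"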
end | lib/cldr/number/backend/rbnf.ex | 0.7181 | 0.436682 | rbnf.ex | starcoder |
defmodule Spandex.Plug.StartTrace do
@moduledoc """
Starts a trace, skipping ignored routes or methods.
Store info in Conn assigns if we actually trace the request.
"""
@behaviour Plug
alias Spandex.Plug.Utils
alias Spandex.SpanContext
@init_opts Optimal.schema(
opts: [
ignored_methods: {:list, :string},
ignored_routes: {:list, [:regex, :string]},
tracer: :atom,
tracer_opts: :keyword,
span_name: :string
],
defaults: [
ignored_methods: [],
ignored_routes: [],
tracer_opts: [],
span_name: "request"
],
required: [:tracer],
describe: [
ignored_methods:
"A list of strings representing methods to ignore. A good example would be `[\"OPTIONS\"]`",
ignored_routes: "A list of strings or regexes. If it is a string, it must match exactly.",
tracer: "The tracing module to be used to start the trace.",
tracer_opts: "Any opts to be passed to the tracer when starting or continuing the trace.",
span_name: "The name to be used for the top level span."
]
)
@doc """
Accepts and validates opts for the plug, and underlying tracer.
#{Optimal.Doc.document(@init_opts)}
"""
@spec init(opts :: Keyword.t()) :: Keyword.t()
def init(opts), do: Optimal.validate!(opts, @init_opts)
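# Example pipeline usage (an illustrative sketch; `MyApp.Tracer` is a
# hypothetical tracer module defined with `use Spandex.Tracer`):
#
#     plug Spandex.Plug.StartTrace,
#       tracer: MyApp.Tracer,
#       ignored_methods: ["OPTIONS"],
#       ignored_routes: [~r/health/, "/metrics"]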
@spec call(conn :: Plug.Conn.t(), opts :: Keyword.t()) :: Plug.Conn.t()
def call(conn, opts) do
if ignoring_request?(conn, opts) do
Utils.trace(conn, false)
else
begin_tracing(conn, opts)
end
end
@spec begin_tracing(conn :: Plug.Conn.t(), Keyword.t()) :: Plug.Conn.t()
defp begin_tracing(conn, opts) do
tracer = opts[:tracer]
tracer_opts = opts[:tracer_opts]
case tracer.distributed_context(conn, tracer_opts) do
{:ok, %SpanContext{} = span_context} ->
tracer.continue_trace("request", span_context, tracer_opts)
Utils.trace(conn, true)
{:error, :no_distributed_trace} ->
tracer.start_trace(opts[:span_name], tracer_opts)
Utils.trace(conn, true)
_ ->
conn
end
end
@spec ignoring_request?(conn :: Plug.Conn.t(), Keyword.t()) :: boolean
defp ignoring_request?(conn, opts) do
ignored_method?(conn, opts) || ignored_route?(conn, opts)
end
@spec ignored_method?(conn :: Plug.Conn.t(), Keyword.t()) :: boolean
defp ignored_method?(conn, opts) do
conn.method in opts[:ignored_methods]
end
@spec ignored_route?(conn :: Plug.Conn.t(), Keyword.t()) :: boolean
defp ignored_route?(conn, opts) do
Enum.any?(opts[:ignored_routes], fn ignored_route ->
match_route?(conn.request_path, ignored_route)
end)
end
@spec match_route?(route :: String.t(), ignore :: %Regex{} | String.t()) :: boolean
defp match_route?(ignore, ignore) when is_bitstring(ignore), do: true
defp match_route?(_, ignore) when is_bitstring(ignore), do: false
defp match_route?(route, ignore) do
String.match?(route, ignore)
end
end | lib/plug/start_trace.ex | 0.761982 | 0.418786 | start_trace.ex | starcoder |