# Copyright (c) 2014-2015 Matthias Geier
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""JACK Client for Python.
http://jackclient-python.readthedocs.io/
"""
__version__ = '0.4.3'
from ctypes.util import find_library as _find_library
import errno as _errno
import platform as _platform
import warnings as _warnings
from _jack import ffi as _ffi
if _platform.system() == 'Windows':
if _platform.architecture()[0] == '64bit':
_libname = _find_library('libjack64')
else:
_libname = _find_library('libjack')
else:
_libname = _find_library('jack')
if _libname is None:
raise OSError('JACK library not found')
_lib = _ffi.dlopen(_libname)
_AUDIO = b'32 bit float mono audio'
_MIDI = b'8 bit raw midi'
STOPPED = _lib.JackTransportStopped
"""Transport halted."""
ROLLING = _lib.JackTransportRolling
"""Transport playing."""
STARTING = _lib.JackTransportStarting
"""Waiting for sync ready."""
NETSTARTING = _lib.JackTransportNetStarting
"""Waiting for sync ready on the network."""
_SUCCESS = 0
_FAILURE = 1
class Client(object):
"""A client that can connect to the JACK audio server."""
def __init__(self, name, use_exact_name=False, no_start_server=False,
servername=None, session_id=None):
"""Create a new JACK client.
A client object is a *context manager*, i.e. it can be used in a
*with statement* to automatically call `activate()` in the
beginning of the statement and `deactivate()` and `close()` on
exit.
Parameters
----------
name : str
The desired client name of at most `client_name_size()`
characters. The name scope is local to each server.
Unless forbidden by the *use_exact_name* option, the server
will modify this name to create a unique variant, if needed.
Other Parameters
----------------
use_exact_name : bool
Whether an error should be raised if *name* is not unique.
See `Status.name_not_unique`.
no_start_server : bool
Do not automatically start the JACK server when it is not
already running. This option is always selected if
``JACK_NO_START_SERVER`` is defined in the calling process
environment.
servername : str
Selects from among several possible concurrent server
instances.
Server names are unique to each user. If unspecified, use
``'default'`` unless ``JACK_DEFAULT_SERVER`` is defined in
the process environment.
session_id : str
Pass a SessionID token. This allows the session manager to
identify the client again.
"""
status = _ffi.new('jack_status_t*')
options = _lib.JackNullOption
optargs = []
if use_exact_name:
options |= _lib.JackUseExactName
if no_start_server:
options |= _lib.JackNoStartServer
if servername:
options |= _lib.JackServerName
optargs.append(_ffi.new('char[]', servername.encode()))
if session_id:
options |= _lib.JackSessionID
optargs.append(_ffi.new('char[]', session_id.encode()))
self._ptr = _lib.jack_client_open(name.encode(), options, status,
*optargs)
self._status = Status(status[0])
if not self._ptr:
raise JackError('Error initializing "{0}": {1}'.format(
name, self.status))
self._inports = Ports(self, _AUDIO, _lib.JackPortIsInput)
self._outports = Ports(self, _AUDIO, _lib.JackPortIsOutput)
self._midi_inports = Ports(self, _MIDI, _lib.JackPortIsInput)
self._midi_outports = Ports(self, _MIDI, _lib.JackPortIsOutput)
self._keepalive = []
self._position = _ffi.new('jack_position_t*')
# Avoid confusion if something goes wrong before opening the client:
_ptr = _ffi.NULL
def __enter__(self):
self.activate()
return self
def __exit__(self, *args):
self.deactivate()
self.close()
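# Illustrative usage sketch (editor's note, not part of the library): creating a
# client and letting the context manager handle activate()/deactivate()/close(),
# assuming a JACK server is running.
#
#     import jack
#     client = jack.Client('Example')
#     print(client.name, client.samplerate, client.blocksize)
#     with client:      # activate() on entry, deactivate() and close() on exit
#         pass          # do useful work here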
def __del__(self):
"""Close JACK client on garbage collection."""
self.close()
@property
def name(self):
"""The name of the JACK client (read-only)."""
return _ffi.string(_lib.jack_get_client_name(self._ptr)).decode()
@property
def samplerate(self):
"""The sample rate of the JACK system (read-only)."""
return _lib.jack_get_sample_rate(self._ptr)
@property
def blocksize(self):
"""The JACK block size (must be a power of two).
The current maximum size that will ever be passed to the process
callback. It should only be queried *before* `activate()` has
been called. This size may change; clients that depend on it
must register a callback with `set_blocksize_callback()` so they
will be notified if it does.
Changing the blocksize stops the JACK engine process cycle, then
calls all registered callback functions (see
`set_blocksize_callback()`) before restarting the process
cycle. This will cause a gap in the audio flow, so it should
only be done at appropriate stopping points.
"""
return _lib.jack_get_buffer_size(self._ptr)
@blocksize.setter
def blocksize(self, blocksize):
_check(_lib.jack_set_buffer_size(self._ptr, blocksize),
'Error setting JACK blocksize')
@property
def status(self):
"""JACK client status. See `Status`."""
return self._status
@property
def realtime(self):
"""Whether JACK is running with ``-R`` (``--realtime``)."""
return bool(_lib.jack_is_realtime(self._ptr))
@property
def frames_since_cycle_start(self):
"""Time since start of audio block.
The estimated time in frames that has passed since the JACK
server began the current process cycle.
"""
return _lib.jack_frames_since_cycle_start(self._ptr)
@property
def frame_time(self):
"""The estimated current time in frames.
This is intended for use in other threads (not the process
callback). The return value can be compared with the value of
`last_frame_time` to relate time in other threads to JACK time.
"""
return _lib.jack_frame_time(self._ptr)
@property
def last_frame_time(self):
"""The precise time at the start of the current process cycle.
This may only be used from the process callback (see
`set_process_callback()`), and can be used to interpret
timestamps generated by `frame_time` in other threads with
respect to the current process cycle.
This is the only jack time function that returns exact time:
when used during the process callback it always returns the same
value (until the next process callback, where it will return
that value + `blocksize`, etc). The return value is guaranteed
to be monotonic and linear in this fashion unless an xrun occurs
(see `set_xrun_callback()`). If an xrun occurs, clients must
check this value again, as time may have advanced in a
non-linear way (e.g. cycles may have been skipped).
"""
return _lib.jack_last_frame_time(self._ptr)
@property
def inports(self):
"""A list of audio input `Ports`.
New ports can be created and added to this list with
`inports.register() <Ports.register>`.
When :meth:`~OwnPort.unregister` is called on one of the items
in this list, this port is removed from the list.
`inports.clear() <Ports.clear>` can be used to unregister all
audio input ports at once.
See Also
--------
Ports, OwnPort
"""
return self._inports
@property
def outports(self):
"""A list of audio output :class:`Ports`.
New ports can be created and added to this list with
`outports.register() <Ports.register>`.
When :meth:`~OwnPort.unregister` is called on one of the items
in this list, this port is removed from the list.
`outports.clear() <Ports.clear>` can be used to unregister all
audio output ports at once.
See Also
--------
Ports, OwnPort
"""
return self._outports
@property
def midi_inports(self):
"""A list of MIDI input :class:`Ports`.
New MIDI ports can be created and added to this list with
`midi_inports.register() <Ports.register>`.
When :meth:`~OwnPort.unregister` is called on one of the items
in this list, this port is removed from the list.
`midi_inports.clear() <Ports.clear>` can be used to unregister
all MIDI input ports at once.
See Also
--------
Ports, OwnMidiPort
"""
return self._midi_inports
@property
def midi_outports(self):
"""A list of MIDI output :class:`Ports`.
New MIDI ports can be created and added to this list with
`midi_outports.register() <Ports.register>`.
When :meth:`~OwnPort.unregister` is called on one of the items
in this list, this port is removed from the list.
`midi_outports.clear() <Ports.clear>` can be used to unregister
all MIDI output ports at once.
See Also
--------
Ports, OwnMidiPort
"""
return self._midi_outports
def owns(self, port):
"""Check if a given port belongs to *self*.
Parameters
----------
port : str or Port
Full port name or `Port`, `MidiPort`, `OwnPort` or
`OwnMidiPort` object.
"""
port = self._get_port_ptr(port)
return bool(_lib.jack_port_is_mine(self._ptr, port))
def activate(self):
"""Activate JACK client.
Tell the JACK server that the program is ready to start
processing audio.
"""
_check(_lib.jack_activate(self._ptr), 'Error activating JACK client')
def deactivate(self, ignore_errors=True):
"""De-activate JACK client.
Tell the JACK server to remove *self* from the process graph.
Also, disconnect all ports belonging to it, since inactive
clients have no port connections.
"""
err = _lib.jack_deactivate(self._ptr)
if not ignore_errors:
_check(err, 'Error deactivating JACK client')
def cpu_load(self):
"""Return the current CPU load estimated by JACK.
This is a running average of the time it takes to execute a full
process cycle for all clients as a percentage of the real time
available per cycle determined by `blocksize` and `samplerate`.
"""
return _lib.jack_cpu_load(self._ptr)
def close(self, ignore_errors=True):
"""Close the JACK client."""
if self._ptr:
err = _lib.jack_client_close(self._ptr)
self._ptr = _ffi.NULL
if not ignore_errors:
_check(err, 'Error closing JACK client')
def connect(self, source, destination):
"""Establish a connection between two ports.
When a connection exists, data written to the source port will
be available to be read at the destination port.
Audio ports cannot be connected to MIDI ports.
Parameters
----------
source : str or Port
One end of the connection. Must be an output port.
destination : str or Port
The other end of the connection. Must be an input port.
See Also
--------
OwnPort.connect, disconnect
"""
if isinstance(source, Port):
source = source.name
if isinstance(destination, Port):
destination = destination.name
err = _lib.jack_connect(self._ptr, source.encode(),
destination.encode())
if err == _errno.EEXIST:
raise JackError('Connection {0!r} -> {1!r} '
'already exists'.format(source, destination))
_check(err,
'Error connecting {0!r} -> {1!r}'.format(source, destination))
def disconnect(self, source, destination):
"""Remove a connection between two ports.
Parameters
----------
source, destination : str or Port
See `connect()`.
"""
if isinstance(source, Port):
source = source.name
if isinstance(destination, Port):
destination = destination.name
_check(_lib.jack_disconnect(
self._ptr, source.encode(), destination.encode()),
"Couldn't disconnect {0!r} -> {1!r}".format(source, destination))
def transport_start(self):
"""Start JACK transport."""
_lib.jack_transport_start(self._ptr)
def transport_stop(self):
"""Stop JACK transport."""
_lib.jack_transport_stop(self._ptr)
@property
def transport_state(self):
"""JACK transport state.
This is one of `STOPPED`, `ROLLING`, `STARTING`, `NETSTARTING`.
See Also
--------
transport_query
"""
return TransportState(_lib.jack_transport_query(self._ptr, _ffi.NULL))
@property
def transport_frame(self):
"""Get/set current JACK transport frame.
Return an estimate of the current transport frame, including any
time elapsed since the last transport positional update.
Assigning a frame number repositions the JACK transport.
"""
return _lib.jack_get_current_transport_frame(self._ptr)
@transport_frame.setter
def transport_frame(self, frame):
_check(_lib.jack_transport_locate(self._ptr, frame),
'Error locating JACK transport')
def transport_locate(self, frame):
"""
.. deprecated:: 0.4.1
Use `transport_frame` instead
"""
_warnings.warn(
'transport_locate() is deprecated, use transport_frame',
DeprecationWarning)
self.transport_frame = frame
def transport_query(self):
"""Query the current transport state and position.
This is a convenience function that does the same as
`transport_query_struct()`, but it only returns the valid fields
in an easy-to-use ``dict``.
Returns
-------
state : TransportState
The transport state can take following values:
`STOPPED`, `ROLLING`, `STARTING` and `NETSTARTING`.
position : dict
A dictionary containing only the valid fields of the
structure returned by `transport_query_struct()`.
See Also
--------
:attr:`transport_state`, transport_query_struct
"""
state, pos = self.transport_query_struct()
return TransportState(state), position2dict(pos)
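# Illustrative sketch (not part of the library): polling and repositioning the
# transport, assuming an open client object.
#
#     state, pos = client.transport_query()
#     print('transport state:', state)
#     print('current frame:', pos['frame'])
#     client.transport_frame = 0      # reposition to the start
#     client.transport_start()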
def transport_query_struct(self):
"""Query the current transport state and position.
This function is realtime-safe, and can be called from any
thread. If called from the process thread, the returned
position corresponds to the first frame of the current cycle and
the state returned is valid for the entire cycle.
Returns
-------
state : int
The transport state can take following values: `STOPPED`,
`ROLLING`, `STARTING` and `NETSTARTING`.
position : jack_position_t
See the `JACK transport documentation`__ for the available
fields.
__ http://jackaudio.org/files/docs/html/
structjack__position__t.html
See Also
--------
transport_query, transport_reposition_struct
"""
state = _lib.jack_transport_query(self._ptr, self._position)
return state, self._position
def transport_reposition_struct(self, position):
"""Request a new transport position.
May be called at any time by any client. The new position takes
effect in two process cycles. If there are slow-sync clients
and the transport is already rolling, it will enter the
`STARTING` state and begin invoking their sync callbacks
(see `jack_set_sync_callback()`__) until ready.
This function is realtime-safe.
__ http://jackaudio.org/files/docs/html/group__TransportControl.html
Parameters
----------
position : jack_position_t
Requested new transport position. This is the same
structure as returned by `transport_query_struct()`.
See Also
--------
transport_query_struct, transport_locate
"""
_check(_lib.jack_transport_reposition(self._ptr, position),
'Error re-positioning transport')
def set_freewheel(self, onoff):
"""Start/Stop JACK's "freewheel" mode.
When in "freewheel" mode, JACK no longer waits for any external
event to begin the start of the next process cycle.
As a result, freewheel mode causes "faster than realtime"
execution of a JACK graph. If possessed, real-time scheduling is
dropped when entering freewheel mode, and if appropriate it is
reacquired when stopping.
IMPORTANT: on systems using capabilities to provide real-time
scheduling (i.e. Linux kernel 2.4), if onoff is zero, this
function must be called from the thread that originally called
`activate()`. This restriction does not apply to other systems
(e.g. Linux kernel 2.6 or OS X).
Parameters
----------
onoff : bool
If ``True``, freewheel mode starts. Otherwise freewheel mode
ends.
See Also
--------
set_freewheel_callback
"""
_check(_lib.jack_set_freewheel(self._ptr, onoff),
'Error setting freewheel mode')
def set_shutdown_callback(self, callback):
"""Register shutdown callback.
Register a function (and optional argument) to be called if and
when the JACK server shuts down the client thread.
The function must be written as if it were an asynchronous POSIX
signal handler -- use only async-safe functions, and remember
that it is executed from another thread.
A typical function might set a flag or write to a pipe so that
the rest of the application knows that the JACK client thread
has shut down.
.. note:: Clients do not need to call this. It exists only to
help more complex clients understand what is going on. It
should be called before `activate()`.
Parameters
----------
callback : callable
User-supplied function that is called whenever the JACK
daemon is shutdown. It must have this signature::
callback(status: Status, reason: str) -> None
The argument *status* is of type `jack.Status`.
.. note:: The *callback* should typically signal another
thread to correctly finish cleanup by calling `close()`
(since it cannot be called directly in the context of the
thread that calls the shutdown callback).
After server shutdown, the client is *not* deallocated by
JACK; the user (that's you!) is responsible for properly
calling `close()` to release client resources.
Alternatively, the `Client` object can be used as a
*context manager* in a *with statement*, which takes care
of activating, deactivating and closing the client
automatically.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
"""
@self._callback('JackInfoShutdownCallback')
def callback_wrapper(code, reason, _):
callback(Status(code), _ffi.string(reason).decode())
_lib.jack_on_info_shutdown(self._ptr, callback_wrapper, _ffi.NULL)
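# Illustrative sketch (not part of the library): letting the main thread react
# to a server shutdown via a threading.Event, assuming an open client object.
# The callback only sets a flag, as recommended above; cleanup happens in the
# main thread.
#
#     import threading
#     stop_event = threading.Event()
#
#     def on_shutdown(status, reason):
#         stop_event.set()
#
#     client.set_shutdown_callback(on_shutdown)   # before activate()
#     client.activate()
#     stop_event.wait()
#     client.close()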
def set_process_callback(self, callback):
"""Register process callback.
Tell the JACK server to call *callback* whenever there is work
to be done.
The code in the supplied function must be suitable for real-time
execution. That means that it cannot call functions that might
block for a long time. This includes malloc, free, printf,
pthread_mutex_lock, sleep, wait, poll, select, pthread_join,
pthread_cond_wait, etc, etc.
.. warning:: Most Python interpreters use a `global interpreter
lock (GIL)`__, which violates the above real-time
requirement. Furthermore, Python's `garbage collector`__
might become active at an inconvenient time and block the
process callback for some time.
Because of this, Python is not really suitable for real-time
processing. If you want to implement a *reliable* real-time
audio/MIDI application, you should use a different
programming language, such as C or C++.
If you can live with some random audio drop-outs now and
then, feel free to continue using Python!
__ https://en.wikipedia.org/wiki/Global_Interpreter_Lock
__ https://en.wikipedia.org/wiki/Garbage_collection_(computer_science)
.. note:: This function cannot be called while the client is
activated (after `activate()` has been called).
Parameters
----------
callback : callable
User-supplied function that is called by the engine anytime
there is work to be done. It must have this signature::
callback(frames: int) -> None
The argument *frames* specifies the number of frames that
have to be processed in the current audio block.
It will be the same number as `blocksize` and it will be a
power of two.
As long as the client is active, the *callback* will be
called once in each process cycle. However, if an exception
is raised inside of a *callback*, it will not be called
anymore. The exception `CallbackExit` can be used to
silently prevent further callback invocations, all other
exceptions will print an error message to *stderr*.
"""
@self._callback('JackProcessCallback', error=_FAILURE)
def callback_wrapper(frames, _):
try:
callback(frames)
except CallbackExit:
return _FAILURE
return _SUCCESS
_check(_lib.jack_set_process_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting process callback')
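# Illustrative sketch (not part of the library): a minimal pass-through client
# built on set_process_callback(), assuming a running JACK server.
#
#     import jack
#     client = jack.Client('thru')
#     inport = client.inports.register('in_1')
#     outport = client.outports.register('out_1')
#
#     def process(frames):
#         outport.get_buffer()[:] = inport.get_buffer()
#
#     client.set_process_callback(process)   # must happen before activate()
#     with client:
#         input('press Return to quit: ')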
def set_freewheel_callback(self, callback):
"""Register freewheel callback.
Tell the JACK server to call *callback* whenever we enter or
leave "freewheel" mode.
The argument to the callback will be ``True`` if JACK is
entering freewheel mode, and ``False`` otherwise.
All "notification events" are received in a separated non RT
thread, the code in the supplied function does not need to be
suitable for real-time execution.
.. note:: This function cannot be called while the client is
activated (after `activate()` has been called).
Parameters
----------
callback : callable
User-supplied function that is called whenever JACK starts
or stops freewheeling. It must have this signature::
callback(starting: bool) -> None
The argument *starting* is ``True`` if we start to
freewheel, ``False`` otherwise.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
See Also
--------
set_freewheel
"""
@self._callback('JackFreewheelCallback')
def callback_wrapper(starting, _):
callback(bool(starting))
_check(_lib.jack_set_freewheel_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting freewheel callback')
def set_blocksize_callback(self, callback):
"""Register blocksize callback.
Tell JACK to call *callback* whenever the size of the buffer
that will be passed to the process callback is about to change.
Clients that depend on knowing the buffer size must supply a
*callback* before activating themselves.
All "notification events" are received in a separated non RT
thread, the code in the supplied function does not need to be
suitable for real-time execution.
.. note:: This function cannot be called while the client is
activated (after `activate()` has been called).
Parameters
----------
callback : callable
User-supplied function that is invoked whenever the JACK
engine buffer size changes. It must have this signature::
callback(blocksize: int) -> None
The argument *blocksize* is the new buffer size.
The *callback* is supposed to raise `CallbackExit` on error.
.. note:: Although this function is called in the JACK
process thread, the normal process cycle is suspended
during its operation, causing a gap in the audio flow.
So, the *callback* can allocate storage, touch memory not
previously referenced, and perform other operations that
are not realtime safe.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
See Also
--------
:attr:`blocksize`
"""
@self._callback('JackBufferSizeCallback', error=_FAILURE)
def callback_wrapper(blocksize, _):
try:
callback(blocksize)
except CallbackExit:
return _FAILURE
return _SUCCESS
_check(_lib.jack_set_buffer_size_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting blocksize callback')
def set_samplerate_callback(self, callback):
"""Register samplerate callback.
Tell the JACK server to call *callback* whenever the system
sample rate changes.
All "notification events" are received in a separated non RT
thread, the code in the supplied function does not need to be
suitable for real-time execution.
.. note:: This function cannot be called while the client is
activated (after `activate()` has been called).
Parameters
----------
callback : callable
User-supplied function that is called when the engine sample
rate changes. It must have this signature::
callback(samplerate: int) -> None
The argument *samplerate* is the new engine sample rate.
The *callback* is supposed to raise `CallbackExit` on error.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
See Also
--------
:attr:`samplerate`
"""
@self._callback('JackSampleRateCallback', error=_FAILURE)
def callback_wrapper(samplerate, _):
try:
callback(samplerate)
except CallbackExit:
return _FAILURE
return _SUCCESS
_check(_lib.jack_set_sample_rate_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting samplerate callback')
def set_client_registration_callback(self, callback):
"""Register client registration callback.
Tell the JACK server to call *callback* whenever a client is
registered or unregistered.
All "notification events" are received in a separated non RT
thread, the code in the supplied function does not need to be
suitable for real-time execution.
.. note:: This function cannot be called while the client is
activated (after `activate()` has been called).
Parameters
----------
callback : callable
User-supplied function that is called whenever a client is
registered or unregistered. It must have this signature::
callback(name: str, register: bool) -> None
The first argument contains the client name, the second
argument is ``True`` if the client is being registered and
``False`` if the client is being unregistered.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
"""
@self._callback('JackClientRegistrationCallback')
def callback_wrapper(name, register, _):
callback(_ffi.string(name).decode(), bool(register))
_check(_lib.jack_set_client_registration_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting client registration callback')
def set_port_registration_callback(self, callback=None,
only_available=True):
"""Register port registration callback.
Tell the JACK server to call *callback* whenever a port is
registered or unregistered.
All "notification events" are received in a separated non RT
thread, the code in the supplied function does not need to be
suitable for real-time execution.
.. note:: This function cannot be called while the client is
activated (after `activate()` has been called).
.. note:: Due to JACK 1 behavior, it is not possible to get
the pointer to an unregistering JACK Port if it already
existed before `activate()` was called. This will cause
the callback not to be called if *only_available* is
``True``, or called with ``None`` as first argument (see
below).
To avoid this, call `Client.get_ports()` just after
`activate()`, allowing the module to store pointers to
already existing ports and always receive a `Port`
argument for this callback.
Parameters
----------
callback : callable
User-supplied function that is called whenever a port is
registered or unregistered. It must have this signature::
callback(port: Port, register: bool) -> None
The first argument is a `Port`, `MidiPort`, `OwnPort` or
`OwnMidiPort` object, the second argument is ``True`` if the
port is being registered, ``False`` if the port is being
unregistered.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
only_available : bool, optional
If ``True``, the *callback* is not called if the port in
question is not available anymore (after another JACK client
has unregistered it).
If ``False``, it is called nonetheless, but the first
argument of the *callback* will be ``None`` if the port is
not available anymore.
See Also
--------
Ports.register
"""
if callback is None:
return lambda cb: self.set_port_registration_callback(
cb, only_available)
@self._callback('JackPortRegistrationCallback')
def callback_wrapper(port_id, register, _):
port_ptr = _lib.jack_port_by_id(self._ptr, port_id)
if port_ptr:
port = self._wrap_port_ptr(port_ptr)
elif only_available:
return
else:
port = None
callback(port, bool(register))
_check(_lib.jack_set_port_registration_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting port registration callback')
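# Illustrative sketch (not part of the library): logging port registrations from
# the non-RT notification thread, assuming an open, not-yet-activated client.
#
#     def on_port_registration(port, register):
#         action = 'registered' if register else 'unregistered'
#         print('port', port, action)
#
#     client.set_port_registration_callback(on_port_registration)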
def set_port_connect_callback(self, callback=None, only_available=True):
"""Register port connect callback.
Tell the JACK server to call *callback* whenever a port is
connected or disconnected.
All "notification events" are received in a separated non RT
thread, the code in the supplied function does not need to be
suitable for real-time execution.
.. note:: This function cannot be called while the client is
activated (after `activate()` has been called).
.. note:: Due to JACK 1 behavior, it is not possible to get
the pointer to an unregistering JACK Port if it already
existed before `activate()` was called. This will cause
the callback not to be called if *only_available* is
``True``, or called with ``None`` as first argument (see
below).
To avoid this, call `Client.get_ports()` just after
`activate()`, allowing the module to store pointers to
already existing ports and always receive a `Port`
argument for this callback.
Parameters
----------
callback : callable
User-supplied function that is called whenever a port is
connected or disconnected. It must have this signature::
callback(a: Port, b: Port, connect: bool) -> None
The first and second arguments contain `Port`, `MidiPort`,
`OwnPort` or `OwnMidiPort` objects of the ports which are
connected or disconnected. The third argument is ``True``
if the ports were connected and ``False`` if the ports were
disconnected.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
only_available : bool, optional
See `set_port_registration_callback()`.
If ``False``, the first and/or the second argument to the
*callback* may be ``None``.
See Also
--------
Client.connect, OwnPort.connect
"""
if callback is None:
return lambda cb: self.set_port_connect_callback(
cb, only_available)
@self._callback('JackPortConnectCallback')
def callback_wrapper(a, b, connect, _):
port_ids = a, b
ports = [None, None]
for idx in 0, 1:
ptr = _lib.jack_port_by_id(self._ptr, port_ids[idx])
if ptr:
ports[idx] = self._wrap_port_ptr(ptr)
elif only_available:
return
else:
pass # Do nothing, port is already None
callback(ports[0], ports[1], bool(connect))
_check(_lib.jack_set_port_connect_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting port connect callback')
def set_port_rename_callback(self, callback=None, only_available=True):
"""Register port rename callback.
Tell the JACK server to call *callback* whenever a port is
renamed.
All "notification events" are received in a separated non RT
thread, the code in the supplied function does not need to be
suitable for real-time execution.
.. note:: This function cannot be called while the client is
activated (after `activate()` has been called).
Parameters
----------
callback : callable
User-supplied function that is called whenever the port name
has been changed. It must have this signature::
callback(port: Port, old: str, new: str) -> None
The first argument is the port that has been renamed (a
`Port`, `MidiPort`, `OwnPort` or `OwnMidiPort` object); the
second and third argument is the old and new name,
respectively. The *callback* is supposed to raise
`CallbackExit` on error.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
only_available : bool, optional
See `set_port_registration_callback()`.
See Also
--------
:attr:`Port.shortname`
Notes
-----
The port rename callback is not available in JACK 1!
See `this mailing list posting`__ and `this commit message`__.
__ http://comments.gmane.org/gmane.comp.audio.jackit/28888
__ https://github.com/jackaudio/jack1/commit/
94c819accfab2612050e875c24cf325daa0fd26d
"""
if callback is None:
return lambda cb: self.set_port_rename_callback(cb, only_available)
@self._callback('JackPortRenameCallback', error=_FAILURE)
def callback_wrapper(port_id, old_name, new_name, _):
port_ptr = _lib.jack_port_by_id(self._ptr, port_id)
if port_ptr:
port = self._wrap_port_ptr(port_ptr)
elif only_available:
return
else:
port = None
try:
callback(port, _ffi.string(old_name).decode(),
_ffi.string(new_name).decode())
except CallbackExit:
return _FAILURE
return _SUCCESS
_check(_lib.jack_set_port_rename_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting port rename callback')
def set_graph_order_callback(self, callback):
"""Register graph order callback.
Tell the JACK server to call *callback* whenever the processing
graph is reordered.
All "notification events" are received in a separated non RT
thread, the code in the supplied function does not need to be
suitable for real-time execution.
.. note:: This function cannot be called while the client is
activated (after :meth:`activate` has been called).
Parameters
----------
callback : callable
User-supplied function that is called whenever the
processing graph is reordered.
It must have this signature::
callback() -> None
The *callback* is supposed to raise `CallbackExit` on error.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
"""
@self._callback('JackGraphOrderCallback', error=_FAILURE)
def callback_wrapper(_):
try:
callback()
except CallbackExit:
return _FAILURE
return _SUCCESS
_check(_lib.jack_set_graph_order_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting graph order callback')
def set_xrun_callback(self, callback):
"""Register xrun callback.
Tell the JACK server to call *callback* whenever there is an
xrun.
All "notification events" are received in a separated non RT
thread, the code in the supplied function does not need to be
suitable for real-time execution.
.. note:: This function cannot be called while the client is
activated (after `activate()` has been called).
Parameters
----------
callback : callable
User-supplied function that is called whenever an xrun has
occurred. It must have this signature::
callback(delayed_usecs: float) -> None
The callback argument is the delay in microseconds due to
the most recent XRUN occurrence.
The *callback* is supposed to raise `CallbackExit` on error.
.. note:: Same as with most callbacks, no functions that
interact with the JACK daemon should be used here.
"""
@self._callback('JackXRunCallback', error=_FAILURE)
def callback_wrapper(_):
try:
callback(_lib.jack_get_xrun_delayed_usecs(self._ptr))
except CallbackExit:
return _FAILURE
return _SUCCESS
_check(_lib.jack_set_xrun_callback(
self._ptr, callback_wrapper, _ffi.NULL),
'Error setting xrun callback')
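# Illustrative sketch (not part of the library): collecting xrun delays for
# later inspection, assuming an open, not-yet-activated client.
#
#     xruns = []
#
#     def on_xrun(delayed_usecs):
#         xruns.append(delayed_usecs)
#
#     client.set_xrun_callback(on_xrun)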
def set_timebase_callback(self, callback=None, conditional=False):
"""Register as timebase master for the JACK subsystem.
The timebase master registers a callback that updates extended
position information such as beats or timecode whenever
necessary. Without this extended information, there is no need
for this function.
There is never more than one master at a time. When a new
client takes over, the former callback is no longer called.
Taking over the timebase may be done conditionally, so that
*callback* is not registered if there was a master already.
Parameters
----------
callback : callable
Realtime function that returns extended position
information. Its output affects all of the following
process cycle. This realtime function must not wait.
It is called immediately after the process callback (see
`set_process_callback()`) in the same thread whenever the
transport is rolling, or when any client has requested a new
position in the previous cycle. The first cycle after
`set_timebase_callback()` is also treated as a new position,
or the first cycle after `activate()` if the client had been
inactive. The *callback* must have this signature::
callback(state: int, blocksize: int, pos: jack_position_t, new_pos: bool) -> None
state
The current transport state. See `transport_state`.
blocksize
The number of frames in the current period.
See `blocksize`.
pos
The position structure for the next cycle; ``pos.frame``
will be its frame number. If *new_pos* is ``False``,
this structure contains extended position information
from the current cycle. If *new_pos* is ``True``, it
contains whatever was set by the requester.
The *callback*'s task is to update the extended
information here. See `transport_query_struct()`
for details about ``jack_position_t``.
new_pos
``True`` for a newly requested *pos*, or for the first
cycle after the timebase callback is defined.
.. note:: The *pos* argument must not be used to set
``pos.frame``. To change position, use
`transport_reposition_struct()` or `transport_locate()`.
These functions are realtime-safe, the timebase callback
can call them directly.
conditional : bool
Set to ``True`` for a conditional request.
Returns
-------
bool
``True`` if the timebase callback was registered.
``False`` if a conditional request failed because another
timebase master is already registered.
"""
if callback is None:
return lambda cb: self.set_timebase_callback(cb, conditional)
@self._callback('JackTimebaseCallback')
def callback_wrapper(state, blocksize, pos, new_pos, _):
callback(state, blocksize, pos, bool(new_pos))
err = _lib.jack_set_timebase_callback(self._ptr, conditional,
callback_wrapper, _ffi.NULL)
# Because of a bug in JACK2 version <= 1.9.10, we also check for -1.
# See https://github.com/jackaudio/jack2/pull/123
if conditional and err in (_errno.EBUSY, -1):
return False
_check(err, 'Error setting timebase callback')
return True
def get_uuid_for_client_name(self, name):
"""Get the session ID for a client name.
The session manager needs this to reassociate a client name to
the session ID.
"""
uuid = _ffi.gc(_lib.jack_get_uuid_for_client_name(
self._ptr, name.encode()), _lib.jack_free)
if not uuid:
raise JackError('Unable to get session ID for {0!r}'.format(name))
return _ffi.string(uuid).decode()
def get_client_name_by_uuid(self, uuid):
"""Get the client name for a session ID.
In order to snapshot the graph connections, the session manager
needs to map session IDs to client names.
"""
name = _ffi.gc(_lib.jack_get_client_name_by_uuid(
self._ptr, uuid.encode()), _lib.jack_free)
if not name:
raise JackError('Unable to get client name for {0!r}'.format(uuid))
return _ffi.string(name).decode()
def get_port_by_name(self, name):
"""Get port by name.
Given a full port name, this returns a `Port`, `MidiPort`,
`OwnPort` or `OwnMidiPort` object.
"""
port_ptr = _lib.jack_port_by_name(self._ptr, name.encode())
if not port_ptr:
raise JackError('Port {0!r} not available'.format(name))
return self._wrap_port_ptr(port_ptr)
def get_all_connections(self, port):
"""Return a list of ports which the given port is connected to.
This differs from `OwnPort.connections` (also available on
`OwnMidiPort`) in two important respects:
1) You may not call this function from code that is executed in
response to a JACK event. For example, you cannot use it in a
graph order callback.
2) You need not be the owner of the port to get information
about its connections.
"""
port = self._get_port_ptr(port)
names = _ffi.gc(_lib.jack_port_get_all_connections(self._ptr, port),
_lib.jack_free)
return self._port_list_from_pointers(names)
def get_ports(self, name_pattern='', is_audio=False, is_midi=False,
is_input=False, is_output=False, is_physical=False,
can_monitor=False, is_terminal=False):
"""Return a list of selected ports.
Parameters
----------
name_pattern : str
A regular expression used to select ports by name. If
empty, no selection based on name will be carried out.
is_audio, is_midi : bool
Select audio/MIDI ports. If neither of them is ``True``,
both types of ports are selected.
is_input, is_output, is_physical, can_monitor, is_terminal : bool
Select ports by their flags. If none of them are ``True``,
no selection based on flags will be carried out.
Returns
-------
list of Port/MidiPort/OwnPort/OwnMidiPort
All ports that satisfy the given conditions.
"""
if is_audio and not is_midi:
type_pattern = _AUDIO
elif is_midi and not is_audio:
type_pattern = _MIDI
else:
type_pattern = b''
flags = 0x0
if is_input:
flags |= _lib.JackPortIsInput
if is_output:
flags |= _lib.JackPortIsOutput
if is_physical:
flags |= _lib.JackPortIsPhysical
if can_monitor:
flags |= _lib.JackPortCanMonitor
if is_terminal:
flags |= _lib.JackPortIsTerminal
names = _ffi.gc(_lib.jack_get_ports(
self._ptr, name_pattern.encode(), type_pattern, flags),
_lib.jack_free)
return self._port_list_from_pointers(names)
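# Illustrative sketch (not part of the library): wiring all physical capture
# ports to this client's inputs, assuming the client has been activated and has
# at least as many inports as there are capture ports.
#
#     capture = client.get_ports(is_physical=True, is_output=True, is_audio=True)
#     for src, dest in zip(capture, client.inports):
#         client.connect(src, dest)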
def _callback(self, cdecl, **kwargs):
"""Wrapper for ffi.callback() that keeps callback alive."""
def callback_decorator(python_callable):
function_ptr = _ffi.callback(cdecl, python_callable, **kwargs)
self._keepalive.append(function_ptr)
return function_ptr
return callback_decorator
def _register_port(self, name, porttype, is_terminal, is_physical, flags):
"""Create a new port."""
if is_terminal:
flags |= _lib.JackPortIsTerminal
if is_physical:
flags |= _lib.JackPortIsPhysical
port_ptr = _lib.jack_port_register(self._ptr, name.encode(), porttype,
flags, 0)
if not port_ptr:
raise JackError(
'{0!r}: port registration failed'.format(name))
return self._wrap_port_ptr(port_ptr)
def _port_list_from_pointers(self, names):
"""Get list of Port objects from char**."""
ports = []
if names:
idx = 0
while True:
name = names[idx]
if not name:
break
ports.append(self.get_port_by_name(_ffi.string(name).decode()))
idx += 1
return ports
def _get_port_ptr(self, port):
"""Get port pointer from Port object or string or port pointer."""
if isinstance(port, Port):
port = port._ptr
elif isinstance(port, str):
port = self.get_port_by_name(port)._ptr
return port
def _wrap_port_ptr(self, ptr):
"""Create appropriate port object for a given port pointer."""
porttype = _ffi.string(_lib.jack_port_type(ptr))
if porttype == _AUDIO:
port = OwnPort(ptr, self) if self.owns(ptr) else Port(ptr)
elif porttype == _MIDI:
port = OwnMidiPort(ptr, self) if self.owns(ptr) else MidiPort(ptr)
else:
assert False
return port
class Port(object):
"""A JACK audio port.
This class cannot be instantiated directly. Instead, instances of
this class are returned from `Client.get_port_by_name()`,
`Client.get_ports()`, `Client.get_all_connections()` and
`OwnPort.connections`.
In addition, instances of this class are available in the callbacks
which are set with `Client.set_port_registration_callback()`,
`Client.set_port_connect_callback()` or
`Client.set_port_rename_callback`.
Note, however, that if the `Client` being used owns the respective port,
instances of `OwnPort` (instead of `Port`) will be created. In case
of MIDI ports, instances of `MidiPort` or `OwnMidiPort` are created.
Besides being the type of non-owned JACK audio ports, this class
also serves as base class for all other port classes (`OwnPort`,
`MidiPort` and `OwnMidiPort`).
New JACK audio/MIDI ports can be created with the
:meth:`~Ports.register` method of `Client.inports`,
`Client.outports`, `Client.midi_inports` and `Client.midi_outports`.
"""
def __init__(self, port_ptr):
self._ptr = port_ptr
def __repr__(self):
return "jack.{0.__class__.__name__}('{0.name}')".format(self)
def __eq__(self, other):
"""Ports are equal if their underlying port pointers are."""
return self._ptr == other._ptr
def __ne__(self, other):
"""This should be implemented whenever __eq__() is implemented."""
return not self.__eq__(other)
@property
def name(self):
"""Full name of the JACK port (read-only)."""
return _ffi.string(_lib.jack_port_name(self._ptr)).decode()
@property
def shortname(self):
"""Short name of the JACK port, not including the client name.
Must be unique among all ports owned by a client.
May be modified at any time. If the resulting full name
(including the ``client_name:`` prefix) is longer than
`port_name_size()`, it will be truncated.
"""
return _ffi.string(_lib.jack_port_short_name(self._ptr)).decode()
@shortname.setter
def shortname(self, shortname):
_check(_lib.jack_port_set_name(self._ptr, shortname.encode()),
'Error setting port name')
@property
def aliases(self):
"""Returns a list of strings with the aliases for the JACK port."""
ctype = "char[{}]".format(_lib.jack_port_name_size())
aliases = [_ffi.new(ctype), _ffi.new(ctype)]
aliasesptr = _ffi.new("char *[]", aliases)
result = []
if _lib.jack_port_get_aliases(self._ptr, aliasesptr) > 0:
for i in 0, 1:
alias = _ffi.string(aliases[i]).decode()
if alias:
result.append(alias)
return result
def set_alias(self, alias):
"""Set an alias for the JACK port.
Ports can have up to two aliases. If both are already set,
this function will return an error.
"""
_check(_lib.jack_port_set_alias(self._ptr, alias.encode()),
'Error setting port alias')
def unset_alias(self, alias):
"""Remove an alias for the JACK port.
If the alias doesn't exist this function will return an error.
"""
_check(_lib.jack_port_unset_alias(self._ptr, alias.encode()),
'Error unsetting port alias')
@property
def uuid(self):
"""The UUID of the JACK port."""
return _lib.jack_port_uuid(self._ptr)
is_audio = property(lambda self: True, doc='This is always ``True``.')
is_midi = property(lambda self: False, doc='This is always ``False``.')
@property
def is_input(self):
"""Can the port receive data?"""
return self._hasflag(_lib.JackPortIsInput)
@property
def is_output(self):
"""Can data be read from this port?"""
return self._hasflag(_lib.JackPortIsOutput)
@property
def is_physical(self):
"""Does it correspond to some kind of physical I/O connector?"""
return self._hasflag(_lib.JackPortIsPhysical)
@property
def can_monitor(self):
"""Does a call to `request_monitor()` make sense?"""
return self._hasflag(_lib.JackPortCanMonitor)
@property
def is_terminal(self):
"""Is the data consumed/generated?"""
return self._hasflag(_lib.JackPortIsTerminal)
def request_monitor(self, onoff):
"""Set input monitoring.
If `can_monitor` is ``True``, turn input monitoring on or
off. Otherwise, do nothing.
Parameters
----------
onoff : bool
If ``True``, switch monitoring on; if ``False``, switch it
off.
"""
_check(_lib.jack_port_request_monitor(self._ptr, onoff),
'Unable to switch monitoring on/off')
def _hasflag(self, flag):
"""Helper method for is_*()."""
return bool(_lib.jack_port_flags(self._ptr) & flag)
class MidiPort(Port):
"""A JACK MIDI port.
This class is derived from `Port` and has exactly the same
attributes and methods.
This class cannot be instantiated directly (see `Port`).
New JACK audio/MIDI ports can be created with the
:meth:`~Ports.register` method of `Client.inports`,
`Client.outports`, `Client.midi_inports` and `Client.midi_outports`.
See Also
--------
Port, OwnMidiPort
"""
is_audio = property(lambda self: False, doc='This is always ``False``.')
is_midi = property(lambda self: True, doc='This is always ``True``.')
class OwnPort(Port):
"""A JACK audio port owned by a `Client`.
This class is derived from `Port`. `OwnPort` objects can do
everything that `Port` objects can, plus a lot more.
This class cannot be instantiated directly (see `Port`).
New JACK audio/MIDI ports can be created with the
:meth:`~Ports.register` method of `Client.inports`,
`Client.outports`, `Client.midi_inports` and `Client.midi_outports`.
"""
def __init__(self, port_ptr, client):
Port.__init__(self, port_ptr)
self._client = client
@property
def number_of_connections(self):
"""Number of connections to or from port."""
return _lib.jack_port_connected(self._ptr)
@property
def connections(self):
"""List of ports which the port is connected to."""
names = _ffi.gc(_lib.jack_port_get_connections(self._ptr),
_lib.jack_free)
return self._client._port_list_from_pointers(names)
def is_connected_to(self, port):
"""Am I *directly* connected to *port*?
Parameters
----------
port : str or Port
Full port name or port object.
"""
if isinstance(port, Port):
port = port.name
return bool(_lib.jack_port_connected_to(self._ptr, port.encode()))
def connect(self, port):
"""Connect to given port.
Parameters
----------
port : str or Port
Full port name or port object.
See Also
--------
Client.connect
"""
if not isinstance(port, Port):
port = self._client.get_port_by_name(port)
if self.is_output:
source = self
if not port.is_input:
raise ValueError('Input port expected')
destination = port
elif self.is_input:
destination = self
if not port.is_output:
raise ValueError('Output port expected')
source = port
else:
assert False
self._client.connect(source.name, destination.name)
def disconnect(self, other=None):
"""Disconnect this port.
Parameters
----------
other : str or Port
Port to disconnect from.
By default, disconnect from all connected ports.
"""
if other is None:
_check(_lib.jack_port_disconnect(self._client._ptr, self._ptr),
'Error disconnecting {0!r}'.format(self.name))
else:
if self.is_output:
args = self, other
elif self.is_input:
args = other, self
self._client.disconnect(*args)
def unregister(self):
"""Unregister port.
Remove the port from the client, disconnecting any existing
connections. This also removes the port from
`Client.inports`, `Client.outports`, `Client.midi_inports` or
`Client.midi_outports`.
"""
if self.is_audio:
listname = ''
elif self.is_midi:
listname = 'midi_'
if self.is_input:
listname += 'inports'
elif self.is_output:
listname += 'outports'
ports = getattr(self._client, listname)
ports._portlist.remove(self)
_check(_lib.jack_port_unregister(self._client._ptr, self._ptr),
'Error unregistering {0!r}'.format(self.name))
def get_buffer(self):
"""Get buffer for audio data.
This returns a buffer holding the memory area associated with
the specified port. For an output port, it will be a memory
area that can be written to; for an input port, it will be an
area containing the data from the port's connection(s), or
zero-filled. If there are multiple inbound connections, the
data will be mixed appropriately.
Caching output ports is DEPRECATED in JACK 2.0, due to some new
optimization (like "pipelining"). Port buffers have to be
retrieved in each callback for proper functioning.
This method shall only be called from within the process
callback (see `Client.set_process_callback()`).
"""
blocksize = self._client.blocksize
return _ffi.buffer(_lib.jack_port_get_buffer(self._ptr, blocksize),
blocksize * _ffi.sizeof('float'))
def get_array(self):
"""Get audio buffer as NumPy array.
Make sure to ``import numpy`` before calling this, otherwise the
first call might take a long time.
This method shall only be called from within the process
callback (see `Client.set_process_callback()`).
See Also
--------
get_buffer
"""
import numpy as np
return np.frombuffer(self.get_buffer(), dtype=np.float32)
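# Illustrative sketch (not part of the library): computing a peak level per
# block inside the process callback, assuming `inport` is an audio OwnPort of
# the client. Appending to a Python list is not realtime-safe, but suffices
# for a sketch.
#
#     import numpy as np
#     peaks = []
#
#     def process(frames):
#         samples = inport.get_array()      # float32 view of the input buffer
#         peaks.append(float(np.max(np.abs(samples))))
#
#     client.set_process_callback(process)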
class OwnMidiPort(MidiPort, OwnPort):
"""A JACK MIDI port owned by a `Client`.
This class is derived from `OwnPort` and `MidiPort`, which are
themselves derived from `Port`. It has the same attributes and
methods as `OwnPort`, but `get_buffer()` and `get_array()` are
disabled. Instead, it has methods for sending and receiving MIDI
events (to be used only from within the process callback -- see
`Client.set_process_callback()`).
This class cannot be instantiated directly (see `Port`).
New JACK audio/MIDI ports can be created with the
:meth:`~Ports.register` method of `Client.inports`,
`Client.outports`, `Client.midi_inports` and `Client.midi_outports`.
"""
def __init__(self, *args, **kwargs):
OwnPort.__init__(self, *args, **kwargs)
self._event = _ffi.new('jack_midi_event_t*')
def get_buffer(self):
"""Not available for MIDI ports."""
raise NotImplementedError('get_buffer() not available on MIDI ports')
def get_array(self):
"""Not available for MIDI ports."""
raise NotImplementedError('get_array() not available on MIDI ports')
@property
def max_event_size(self):
"""Get the size of the largest event that can be stored by the port.
This returns the current space available, taking into
account events already stored in the port.
"""
return _lib.jack_midi_max_event_size(
_lib.jack_port_get_buffer(self._ptr, self._client.blocksize))
@property
def lost_midi_events(self):
"""Get the number of events that could not be written to the port.
This being a non-zero value implies that the port is full.
Currently the only way this can happen is if events are lost on
port mixdown.
"""
return _lib.jack_midi_get_lost_event_count(
_lib.jack_port_get_buffer(self._ptr, self._client.blocksize))
def incoming_midi_events(self):
"""Return generator for incoming MIDI events.
JACK MIDI is normalised, so the MIDI events yielded by this
generator are guaranteed to be complete MIDI events (the status
byte will always be present, and no realtime events will be
interspersed with the events).
Yields
------
time : int
Time (in samples) relative to the beginning of the current
audio block.
event : buffer
The actual MIDI event data.
"""
event = self._event
buf = _lib.jack_port_get_buffer(self._ptr, self._client.blocksize)
for i in range(_lib.jack_midi_get_event_count(buf)):
err = _lib.jack_midi_event_get(event, buf, i)
# TODO: proper error handling if this ever happens:
assert not err, err
yield event.time, _ffi.buffer(event.buffer, event.size)
def clear_buffer(self):
"""Clear an event buffer.
This should be called at the beginning of each process cycle
before calling `reserve_midi_event()` or `write_midi_event()`.
This function may not be called on an input port.
"""
_lib.jack_midi_clear_buffer(
_lib.jack_port_get_buffer(self._ptr, self._client.blocksize))
def write_midi_event(self, time, event):
"""Create an outgoing MIDI event.
Clients must write normalised MIDI data to the port - no running
status and no (one-byte) realtime messages interspersed with
other messages (realtime messages are fine when they occur on
their own, like other messages).
Events must be written in order, sorted by their sample offsets.
JACK will not sort the events for you, and will refuse to store
out-of-order events.
Parameters
----------
time : int
Time (in samples) relative to the beginning of the current
audio block.
event : bytes or buffer or sequence of int
The actual MIDI event data.
.. note:: Buffer objects are only supported for CFFI >= 0.9.
Raises
------
JackError
If MIDI event couldn't be written.
"""
try:
event = _ffi.from_buffer(event)
except AttributeError:
pass # from_buffer() not supported
except TypeError:
pass # input is not a buffer
_check(_lib.jack_midi_event_write(
_lib.jack_port_get_buffer(self._ptr, self._client.blocksize),
time, event, len(event)), 'Error writing MIDI event')
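# Illustrative sketch (not part of the library): a MIDI pass-through process
# callback, assuming `midi_in` and `midi_out` are OwnMidiPorts of the client.
#
#     def process(frames):
#         midi_out.clear_buffer()
#         for time, event in midi_in.incoming_midi_events():
#             midi_out.write_midi_event(time, event)
#
#     client.set_process_callback(process)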
def reserve_midi_event(self, time, size):
"""Get a buffer where an outgoing MIDI event can be written to.
Clients must write normalised MIDI data to the port - no running
status and no (one-byte) realtime messages interspersed with
other messages (realtime messages are fine when they occur on
their own, like other messages).
Events must be written in order, sorted by their sample offsets.
JACK will not sort the events for you, and will refuse to store
out-of-order events.
Parameters
----------
time : int
Time (in samples) relative to the beginning of the current
audio block.
size : int
Number of bytes to reserve.
Returns
-------
buffer
A buffer object where MIDI data bytes can be written to.
If no space could be reserved, an empty buffer is returned.
"""
buf = _lib.jack_midi_event_reserve(
_lib.jack_port_get_buffer(self._ptr, self._client.blocksize),
time, size)
return _ffi.buffer(buf, size if buf else 0)
class Ports(object):
"""A list of input/output ports.
This class is not meant to be instantiated directly. It is only
used as `Client.inports`, `Client.outports`, `Client.midi_inports`
and `Client.midi_outports`.
The ports can be accessed by indexing or by iteration.
New ports can be added with `register()`, existing ports can be
removed by calling their :meth:`~OwnPort.unregister` method.
"""
def __init__(self, client, porttype, flag):
self._client = client
self._type = porttype
self._flag = flag
self._portlist = []
def __len__(self):
return self._portlist.__len__()
def __getitem__(self, name):
return self._portlist.__getitem__(name)
# No __setitem__!
def __iter__(self):
return self._portlist.__iter__()
def __repr__(self):
return self._portlist.__repr__()
def register(self, shortname, is_terminal=False, is_physical=False):
"""Create a new input/output port.
The new `OwnPort` or `OwnMidiPort` object is automatically added
to `Client.inports`, `Client.outports`, `Client.midi_inports` or
`Client.midi_outports`.
Parameters
----------
shortname : str
Each port has a short name. The port's full name contains
the name of the client concatenated with a colon (:)
followed by its short name. The `port_name_size()` is the
maximum length of this full name. Exceeding that will cause
the port registration to fail.
The port name must be unique among all ports owned by this
client.
If the name is not unique, the registration will fail.
is_terminal : bool
For an input port: If ``True``, the data received by the
port will not be passed on or made available at any other
port.
For an output port: If ``True``, the data available at the
            port does not originate from any other port.
Audio synthesizers, I/O hardware interface clients, HDR
systems are examples of clients that would set this flag for
their ports.
is_physical : bool
If ``True`` the port corresponds to some kind of physical
I/O connector.
Returns
-------
Port
A new `OwnPort` or `OwnMidiPort` instance.
"""
port = self._client._register_port(
shortname, self._type, is_terminal, is_physical, self._flag)
self._portlist.append(port)
return port
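    # Hedged usage sketch: registering a stereo pair of audio outputs on an
    # assumed `Client` instance; the returned `OwnPort` objects also appear
    # in ``client.outports``.
    #
    #     left = client.outports.register('out_L')
    #     right = client.outports.register('out_R')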
def clear(self):
"""Unregister all ports in the list.
See Also
--------
OwnPort.unregister
"""
while self._portlist:
self._portlist[0].unregister()
class RingBuffer(object):
"""JACK's lock-free ringbuffer."""
def __init__(self, size):
"""Create a lock-free ringbuffer.
A ringbuffer is a good way to pass data between threads
(e.g. between the main program and the process callback),
when streaming realtime data to slower media, like audio file
playback or recording.
The key attribute of a ringbuffer is that it can be safely
accessed by two threads simultaneously -- one reading from the
buffer and the other writing to it -- without using any
synchronization or mutual exclusion primitives. For this to
work correctly, there can only be a single reader and a single
writer thread. Their identities cannot be interchanged.
Parameters
----------
size : int
Size in bytes. JACK will allocate a buffer of at least this
size (rounded up to the next power of 2), but one byte is
reserved for internal use. Use `write_space` to
determine the actual size available for writing.
"""
ptr = _lib.jack_ringbuffer_create(size)
if not ptr:
raise JackError('Could not create RingBuffer')
self._ptr = _ffi.gc(ptr, _lib.jack_ringbuffer_free)
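    # Hedged usage sketch: a ringbuffer sized for roughly one second of mono
    # float32 audio at 48 kHz (the numbers are illustrative).  JACK rounds
    # the size up to the next power of two and keeps one byte for itself, so
    # `write_space` reports the actual usable space.
    #
    #     rb = jack.RingBuffer(48000 * 4)
    #     print(rb.size, rb.write_space)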
@property
def write_space(self):
"""The number of bytes available for writing."""
return _lib.jack_ringbuffer_write_space(self._ptr)
def write(self, data):
"""Write data into the ringbuffer.
Parameters
----------
data : buffer or bytes or iterable of int
Bytes to be written to the ringbuffer.
Returns
-------
int
The number of bytes written, which could be less than the
length of *data* if there was no more space left
(see `write_space`).
See Also
--------
:attr:`write_space`, :attr:`write_buffers`
"""
try:
data = _ffi.from_buffer(data)
except AttributeError:
pass # from_buffer() not supported
except TypeError:
pass # input is not a buffer
return _lib.jack_ringbuffer_write(self._ptr, data, len(data))
@property
def write_buffers(self):
"""Contains two buffer objects that can be written to directly.
Two are needed because the space available for writing may be
split across the end of the ringbuffer. Either of them could be
0 length.
This can be used as a no-copy version of `write()`.
When finished with writing, `write_advance()` should be used.
.. note:: After an operation that changes the write pointer
(`write()`, `write_advance()`, `reset()`), the buffers are no
longer valid and one should use this property again to get
new ones.
"""
vectors = _ffi.new('jack_ringbuffer_data_t[2]')
_lib.jack_ringbuffer_get_write_vector(self._ptr, vectors)
return (
_ffi.buffer(vectors[0].buf, vectors[0].len),
_ffi.buffer(vectors[1].buf, vectors[1].len)
)
def write_advance(self, size):
"""Advance the write pointer.
After data has been written to the ringbuffer using
`write_buffers`, use this method to advance the buffer pointer,
making the data available for future read operations.
Parameters
----------
size : int
The number of bytes to advance.
"""
_lib.jack_ringbuffer_write_advance(self._ptr, size)
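    # Hedged usage sketch of the no-copy write path: fill the two write
    # vectors directly, then advance the write pointer by exactly the number
    # of bytes written.  ``rb`` and ``payload`` (a bytes object) are assumed.
    #
    #     written = 0
    #     for chunk in rb.write_buffers:
    #         n = min(len(chunk), len(payload) - written)
    #         if n == 0:
    #             break
    #         chunk[:n] = payload[written:written + n]
    #         written += n
    #     rb.write_advance(written)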
@property
def read_space(self):
"""The number of bytes available for reading."""
return _lib.jack_ringbuffer_read_space(self._ptr)
def read(self, size):
"""Read data from the ringbuffer.
Parameters
----------
size : int
Number of bytes to read.
Returns
-------
buffer
A buffer object containing the requested data.
If no more data is left (see `read_space`), a smaller
(or even empty) buffer is returned.
See Also
--------
peek, :attr:`read_space`, :attr:`read_buffers`
"""
data = _ffi.new('unsigned char[]', size)
size = _lib.jack_ringbuffer_read(self._ptr, data, size)
return _ffi.buffer(data, size)
def peek(self, size):
"""Peek at data from the ringbuffer.
        As opposed to `read()`, this function does not move the read
pointer. Thus it's a convenient way to inspect data in the
ringbuffer in a continuous fashion.
The price is that the data is copied into a newly allocated
buffer. For "raw" non-copy inspection of the data in the
ringbuffer use `read_buffers`.
Parameters
----------
size : int
Number of bytes to peek.
Returns
-------
buffer
A buffer object containing the requested data.
If no more data is left (see `read_space`), a smaller
(or even empty) buffer is returned.
See Also
--------
read, :attr:`read_space`, :attr:`read_buffers`
"""
data = _ffi.new('unsigned char[]', size)
size = _lib.jack_ringbuffer_peek(self._ptr, data, size)
return _ffi.buffer(data, size)
@property
def read_buffers(self):
"""Contains two buffer objects that can be read directly.
Two are needed because the data to be read may be split across
the end of the ringbuffer. Either of them could be 0 length.
This can be used as a no-copy version of `peek()` or `read()`.
When finished with reading, `read_advance()` should be used.
.. note:: After an operation that changes the read pointer
(`read()`, `read_advance()`, `reset()`), the buffers are no
longer valid and one should use this property again to get
new ones.
"""
vectors = _ffi.new('jack_ringbuffer_data_t[2]')
_lib.jack_ringbuffer_get_read_vector(self._ptr, vectors)
return (
_ffi.buffer(vectors[0].buf, vectors[0].len),
_ffi.buffer(vectors[1].buf, vectors[1].len)
)
def read_advance(self, size):
"""Advance the read pointer.
After data has been read from the ringbuffer using
`read_buffers` or `peek()`, use this method to advance the
buffer pointers, making that space available for future write
operations.
Parameters
----------
size : int
The number of bytes to advance.
"""
_lib.jack_ringbuffer_read_advance(self._ptr, size)
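    # Hedged usage sketch of the no-copy read path, mirroring the write
    # example above: consume the two read vectors, then release the space
    # with `read_advance()`.  ``rb`` and ``handle_bytes`` are assumed.
    #
    #     consumed = 0
    #     for chunk in rb.read_buffers:
    #         handle_bytes(bytes(chunk))
    #         consumed += len(chunk)
    #     rb.read_advance(consumed)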
def mlock(self):
"""Lock a ringbuffer data block into memory.
Uses the ``mlock()`` system call. This prevents the
ringbuffer's memory from being paged to the swap area.
.. note:: This is not a realtime operation.
"""
_check(_lib.jack_ringbuffer_mlock(self._ptr),
'Error mlocking the RingBuffer data')
def reset(self, size=None):
"""Reset the read and write pointers, making an empty buffer.
.. note:: This is not thread safe.
Parameters
----------
size : int, optional
The new size for the ringbuffer.
            Must be less than the allocated size.
"""
if size is None:
_lib.jack_ringbuffer_reset(self._ptr)
else:
_lib.jack_ringbuffer_reset_size(self._ptr, size)
@property
def size(self):
"""The number of bytes in total used by the buffer.
See Also
--------
:attr:`read_space`, :attr:`write_space`
"""
return self._ptr.size
class Status(object):
"""Representation of the JACK status bits."""
__slots__ = '_code'
def __init__(self, code):
self._code = code
def __repr__(self):
flags = ', '.join(name for name in dir(self)
if not name.startswith('_') and getattr(self, name))
if not flags:
flags = 'no flags set'
return '<jack.Status 0x{0:X}: {1}>'.format(self._code, flags)
@property
def failure(self):
"""Overall operation failed."""
return self._hasflag(_lib.JackFailure)
@property
def invalid_option(self):
"""The operation contained an invalid or unsupported option."""
return self._hasflag(_lib.JackInvalidOption)
@property
def name_not_unique(self):
"""The desired client name was not unique.
With the *use_exact_name* option of `Client`, this situation is
fatal. Otherwise, the name is modified by appending a dash and
a two-digit number in the range "-01" to "-99". `Client.name`
will return the exact string that was used. If the specified
*name* plus these extra characters would be too long, the open
fails instead.
"""
return self._hasflag(_lib.JackNameNotUnique)
@property
def server_started(self):
"""The JACK server was started for this `Client`.
Otherwise, it was running already.
"""
return self._hasflag(_lib.JackServerStarted)
@property
def server_failed(self):
"""Unable to connect to the JACK server."""
return self._hasflag(_lib.JackServerFailed)
@property
def server_error(self):
"""Communication error with the JACK server."""
return self._hasflag(_lib.JackServerError)
@property
def no_such_client(self):
"""Requested client does not exist."""
return self._hasflag(_lib.JackNoSuchClient)
@property
def load_failure(self):
"""Unable to load internal client."""
return self._hasflag(_lib.JackLoadFailure)
@property
def init_failure(self):
"""Unable to initialize client."""
return self._hasflag(_lib.JackInitFailure)
@property
def shm_failure(self):
"""Unable to access shared memory."""
return self._hasflag(_lib.JackShmFailure)
@property
def version_error(self):
"""Client's protocol version does not match."""
return self._hasflag(_lib.JackVersionError)
@property
def backend_error(self):
"""Backend error."""
return self._hasflag(_lib.JackBackendError)
@property
def client_zombie(self):
"""Client zombified failure."""
return self._hasflag(_lib.JackClientZombie)
def _hasflag(self, flag):
"""Helper function for Status properties."""
return bool(self._code & flag)
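    # Hedged usage sketch: inspecting the status bits of an assumed
    # `Client` instance after it has been opened.
    #
    #     if client.status.server_started:
    #         print('JACK server was started for this client')
    #     if client.status.name_not_unique:
    #         print('assigned name:', client.name)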
class TransportState(object):
"""Representation of the JACK transport state.
See Also
--------
`Client.transport_state`, :meth:`Client.transport_query`
"""
__slots__ = '_code'
def __init__(self, code):
self._code = code
def __eq__(self, other):
return self._code == other
def __repr__(self):
return 'jack.' + {
_lib.JackTransportStopped: 'STOPPED',
_lib.JackTransportRolling: 'ROLLING',
_lib.JackTransportStarting: 'STARTING',
_lib.JackTransportNetStarting: 'NETSTARTING',
}[self._code]
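    # Hedged usage sketch: a `TransportState` compares equal to the
    # module-level constants (STOPPED, ROLLING, STARTING, NETSTARTING);
    # ``client`` is an assumed `Client` instance.
    #
    #     if client.transport_state == jack.ROLLING:
    #         print('transport is rolling')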
class JackError(Exception):
"""Exception for all kinds of JACK-related errors."""
pass
class CallbackExit(Exception):
"""To be raised in a callback function to signal failure.
See Also
--------
:meth:`Client.set_process_callback`
:meth:`Client.set_blocksize_callback`
:meth:`Client.set_samplerate_callback`
:meth:`Client.set_port_rename_callback`
:meth:`Client.set_graph_order_callback`
:meth:`Client.set_xrun_callback`
"""
pass
def position2dict(pos):
"""Convert CFFI position struct to a dict."""
assert pos.unique_1 == pos.unique_2
keys = ['usecs', 'frame_rate', 'frame']
if pos.valid & _lib.JackPositionBBT:
keys += ['bar', 'beat', 'tick', 'bar_start_tick', 'beats_per_bar',
'beat_type', 'ticks_per_beat', 'beats_per_minute']
if pos.valid & _lib.JackPositionTimecode:
keys += ['frame_time', 'next_time']
if pos.valid & _lib.JackBBTFrameOffset:
keys += ['bbt_offset']
if pos.valid & _lib.JackAudioVideoRatio:
keys += ['audio_frames_per_video_frame']
if pos.valid & _lib.JackVideoFrameOffset:
keys += ['video_offset']
return dict((k, getattr(pos, k)) for k in keys)
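# Hedged usage sketch: the dict built here is what `Client.transport_query()`
# returns alongside the transport state; ``client`` is an assumed `Client`
# instance.
#
#     state, pos = client.transport_query()
#     if state == jack.ROLLING and 'beats_per_minute' in pos:
#         print('tempo:', pos['beats_per_minute'])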
def version():
"""Get tuple of major/minor/micro/protocol version."""
v = _ffi.new('int[4]')
_lib.jack_get_version(v+0, v+1, v+2, v+3)
return tuple(v)
def version_string():
"""Get human-readable JACK version."""
return _ffi.string(_lib.jack_get_version_string()).decode()
def client_name_size():
"""Return the maximum number of characters in a JACK client name.
This includes the final NULL character. This value is a constant.
"""
return _lib.jack_client_name_size()
def port_name_size():
"""Maximum length of port names.
The maximum number of characters in a full JACK port name including
the final NULL character. This value is a constant.
A port's full name contains the owning client name concatenated with
a colon (:) followed by its short name and a NULL character.
"""
return _lib.jack_port_name_size()
def set_error_function(callback=None):
"""Set the callback for error message display.
Set it to ``None`` to restore the default error callback function
(which prints the error message plus a newline to stderr).
The *callback* function must have this signature::
callback(message: str) -> None
"""
_set_error_or_info_function(callback, _lib.jack_set_error_function)
def set_info_function(callback=None):
"""Set the callback for info message display.
    Set it to ``None`` to restore the default info callback function
(which prints the info message plus a newline to stderr).
The *callback* function must have this signature::
callback(message: str) -> None
"""
_set_error_or_info_function(callback, _lib.jack_set_info_function)
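# Hedged usage sketch: routing JACK's error and info messages through
# Python's logging module instead of stderr (configuring the logger is
# left to the application).
#
#     import logging
#     log = logging.getLogger('jack')
#     jack.set_error_function(log.error)
#     jack.set_info_function(log.info)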
def client_pid(name):
"""Return PID of a JACK client.
Parameters
----------
name : str
Name of the JACK client whose PID shall be returned.
Returns
-------
int
PID of *name*. If not available, 0 will be returned.
"""
return _lib.jack_get_client_pid(name.encode())
def _set_error_or_info_function(callback, setter):
"""Helper for set_error_function() and set_info_function()."""
if callback is None:
callback_wrapper = _ffi.NULL
else:
@_ffi.callback('void (*)(const char*)')
def callback_wrapper(msg):
callback(_ffi.string(msg).decode())
_keepalive[setter] = callback_wrapper
setter(callback_wrapper)
_keepalive = {}
def _check(error_code, msg):
"""Check error code and raise JackError if non-zero."""
if error_code:
raise JackError('{0} ({1})'.format(msg, error_code))
# yaxil/bids/__init__.py  (AMP-SCZ/yaxil, BSD-3-Clause)
import re
import os
import glob
import json
import string
import logging
import subprocess as sp
import yaxil.commons as commons
logger = logging.getLogger(__name__)
# bids legal characters for sub, ses, and task
legal = re.compile('[^a-zA-Z0-9]')
def bids_from_config(yaxil_session, scans_metadata, config, out_base):
'''
Create a BIDS output directory from configuration file
'''
# get session and subject labels from scan metadata
_item = next(iter(scans_metadata))
project,session,subject = _item['session_project'],_item['session_label'],_item['subject_label']
session_id,subject_id = _item['session_id'],_item['subject_id']
# check for dataset_description.json and create it if necessary
check_dataset_description(out_base)
# define bids and sourcedata base directories
sourcedata_base = os.path.join(
out_base,
'sourcedata',
'sub-{0}'.format(legal.sub('', subject)),
'ses-{0}'.format(legal.sub('', session))
)
bids_base = os.path.join(
out_base,
'sub-{0}'.format(legal.sub('', subject)),
'ses-{0}'.format(legal.sub('', session))
)
# put arguments in a struct for convenience
args = commons.struct(
xnat=yaxil_session,
subject=subject,
subject_id=subject_id,
session=session,
session_id=session_id,
project=project,
bids=bids_base,
sourcedata=sourcedata_base
)
# process func, anat, and fmap
func_refs = proc_func(config, args)
anat_refs = proc_anat(config, args)
dwi_refs = proc_dwi(config, args)
fmap_refs = proc_fmap(config, args, func_refs)
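# Hedged usage sketch of the inputs this function expects, inferred from the
# fields read above and from iterconfig()/proc_func() below; every concrete
# value (project, labels, scan numbers, task name, the xnat_session object)
# is an illustrative assumption.
#
#     scans_metadata = [{
#         'session_project': 'MyProject',
#         'session_label': 'SESS01',
#         'session_id': 'XNAT_E00001',
#         'subject_label': 'SUBJ01',
#         'subject_id': 'XNAT_S00001',
#     }]
#     config = {
#         'func': {'bold': [{'scan': 11, 'id': 'rest1', 'task': 'rest', 'run': 1}]},
#         'anat': {'T1w': [{'scan': 5, 'run': 1}]},
#     }
#     bids_from_config(xnat_session, scans_metadata, config, '/out/bids')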
def check_dataset_description(bids_dir, bids_version='1.4.0', ds_type='raw'):
if not os.path.exists(bids_dir):
os.makedirs(bids_dir)
ds_desc = os.path.join(bids_dir, 'dataset_description.json')
if not os.path.exists(ds_desc):
js = {
'Name': 'Made by YAXIL',
'BIDSVersion': bids_version,
'DatasetType': ds_type
}
with open(ds_desc, 'w') as fo:
fo.write(json.dumps(js))
def proc_func(config, args):
'''
Download functional data and convert to BIDS
'''
refs = dict()
for scan in iterconfig(config, 'func'):
ref = scan.get('id', None)
templ = 'sub-${sub}_ses-${ses}'
if 'task' in scan:
templ += '_task-${task}'
if 'acquisition' in scan:
templ += '_acq-${acquisition}'
if 'run' in scan:
templ += '_run-${run}'
if 'direction' in scan:
templ += '_dir-${direction}'
templ += '_${modality}'
templ = string.Template(templ)
fbase = templ.safe_substitute(
sub=legal.sub('', args.subject),
ses=legal.sub('', args.session),
task=scan.get('task', None),
acquisition=scan.get('acquisition', None),
run=scan.get('run', None),
direction=scan.get('direction', None),
modality=scan.get('modality', None)
)
# download data to bids sourcedata directory
sourcedata_dir = os.path.join(args.sourcedata, scan['type'])
if not os.path.exists(sourcedata_dir):
os.makedirs(sourcedata_dir)
dicom_dir = os.path.join(sourcedata_dir, '{0}.dicom'.format(fbase))
logger.info('downloading session=%s, scan=%s, loc=%s', args.session, scan['scan'], dicom_dir)
args.xnat.download(args.session, [scan['scan']], out_dir=dicom_dir)
# convert to nifti
fname = '{0}.nii.gz'.format(fbase)
refs[ref] = os.path.join(scan['type'], fname)
fullfile = os.path.join(args.bids, scan['type'], fname)
logger.info('converting %s to %s', dicom_dir, fullfile)
convert(dicom_dir, fullfile)
# add xnat source information to json sidecar
sidecar_file = os.path.join(args.bids, scan['type'], fbase + '.json')
with open(sidecar_file) as fo:
sidecarjs = json.load(fo)
sidecarjs['DataSource'] = {
'application/x-xnat': {
'url': args.xnat.url,
'project': args.project,
'subject': args.subject,
'subject_id': args.subject_id,
'experiment': args.session,
'experiment_id': args.session_id,
'scan': scan['scan']
}
}
# write out updated json sidecar
commons.atomic_write(sidecar_file, json.dumps(sidecarjs, indent=2))
return refs
def proc_anat(config, args):
'''
Download anatomical data and convert to BIDS
'''
refs = dict()
for scan in iterconfig(config, 'anat'):
ref = scan.get('id', None)
templ = 'sub-${sub}_ses-${ses}'
if 'acquisition' in scan:
templ += '_acq-${acquisition}'
if 'run' in scan:
templ += '_run-${run}'
templ += '_${modality}'
templ = string.Template(templ)
fbase = templ.safe_substitute(
sub=legal.sub('', args.subject),
ses=legal.sub('', args.session),
acquisition=scan.get('acquisition', None),
run=scan.get('run', None),
modality=scan.get('modality', None),
)
# download data to bids sourcedata directory
sourcedata_dir = os.path.join(args.sourcedata, scan['type'])
if not os.path.exists(sourcedata_dir):
os.makedirs(sourcedata_dir)
dicom_dir = os.path.join(sourcedata_dir, '{0}.dicom'.format(fbase))
logger.info('downloading session=%s, scan=%s, loc=%s', args.session, scan['scan'], dicom_dir)
args.xnat.download(args.session, [scan['scan']], out_dir=dicom_dir)
# convert to nifti (edge cases for T1w_vNav_setter)
fname = '{0}.nii.gz'.format(fbase)
refs[ref] = os.path.join(scan['type'], fname)
fullfile = os.path.join(args.bids, scan['type'], fname)
logger.info('converting %s to %s', dicom_dir, fullfile)
modality = scan.get('modality', None)
sidecar_files = list()
if modality == 'T1vnav':
fullfile = fullfile.replace('_T1vnav', '_split-%r_T1vnav')
for f in glob.glob(os.path.join(dicom_dir, '*.dcm')):
logger.debug('converting single file %s to %s', f, fullfile)
convert(f, fullfile, single_file=True)
ffbase = re.sub('.nii(.gz)?', '', fullfile)
expr = ffbase.replace('%r', '*') + '.json'
logger.debug('globbing for %s', expr)
sidecar_files = glob.glob(expr)
else:
convert(dicom_dir, fullfile)
sidecar_files = [
os.path.join(args.bids, scan['type'], fbase + '.json')
]
# add xnat source information to json sidecar files
for sidecar_file in sidecar_files:
logger.debug('adding provenance to %s', sidecar_file)
with open(sidecar_file) as fo:
sidecarjs = json.load(fo)
sidecarjs['DataSource'] = {
'application/x-xnat': {
'url': args.xnat.url,
'project': args.project,
'subject': args.subject,
'subject_id': args.subject_id,
'experiment': args.session,
'experiment_id': args.session_id,
'scan': scan['scan']
}
}
# write out updated json sidecar
commons.atomic_write(sidecar_file, json.dumps(sidecarjs, indent=2))
return refs
def proc_dwi(config, args):
'''
Download diffusion data and convert to BIDS
'''
refs = dict()
for scan in iterconfig(config, 'dwi'):
ref = scan.get('id', None)
templ = 'sub-${sub}_ses-${ses}'
if 'acquisition' in scan:
templ += '_acq-${acquisition}'
if 'direction' in scan:
templ += '_dir-${direction}'
if 'run' in scan:
templ += '_run-${run}'
templ += '_${modality}'
templ = string.Template(templ)
fbase = templ.safe_substitute(
sub=legal.sub('', args.subject),
ses=legal.sub('', args.session),
acquisition=scan.get('acquisition', None),
direction=scan.get('direction', None),
run=scan.get('run', None),
modality=scan.get('modality', None)
)
# download data to bids sourcedata directory
sourcedata_dir = os.path.join(args.sourcedata, scan['type'])
if not os.path.exists(sourcedata_dir):
os.makedirs(sourcedata_dir)
dicom_dir = os.path.join(sourcedata_dir, '{0}.dicom'.format(fbase))
logger.info('downloading session=%s, scan=%s, loc=%s', args.session, scan['scan'], dicom_dir)
args.xnat.download(args.session, [scan['scan']], out_dir=dicom_dir)
# convert to nifti
fname = '{0}.nii.gz'.format(fbase)
refs[ref] = os.path.join(scan['type'], fname)
fullfile = os.path.join(args.bids, scan['type'], fname)
logger.info('converting %s to %s', dicom_dir, fullfile)
modality = scan.get('modality', None)
convert(dicom_dir, fullfile)
sidecar_file = os.path.join(args.bids, scan['type'], fbase + '.json')
# add xnat source information to json sidecar files
logger.debug('adding provenance to %s', sidecar_file)
with open(sidecar_file) as fo:
sidecarjs = json.load(fo)
sidecarjs['DataSource'] = {
'application/x-xnat': {
'url': args.xnat.url,
'project': args.project,
'subject': args.subject,
'subject_id': args.subject_id,
'experiment': args.session,
'experiment_id': args.session_id,
'scan': scan['scan']
}
}
# write out updated json sidecar
commons.atomic_write(sidecar_file, json.dumps(sidecarjs, indent=2))
return refs
def proc_fmap(config, args, func_refs=None):
refs = dict()
for scan in iterconfig(config, 'fmap'):
ref = scan.get('id', None)
templ = 'sub-${sub}_ses-${ses}'
if 'acquisition' in scan:
templ += '_acq-${acquisition}'
if 'run' in scan:
templ += '_run-${run}'
if 'direction' in scan:
templ += '_dir-${direction}'
templ += '_${modality}'
templ = string.Template(templ)
fbase = templ.safe_substitute(
sub=legal.sub('', args.subject),
ses=legal.sub('', args.session),
acquisition=scan.get('acquisition', None),
run=scan.get('run', None),
direction=scan.get('direction', None),
modality=scan.get('modality', None),
)
# download data to bids sourcedata directory
sourcedata_dir = os.path.join(args.sourcedata, scan['type'])
if not os.path.exists(sourcedata_dir):
os.makedirs(sourcedata_dir)
dicom_dir = os.path.join(sourcedata_dir, '{0}.dicom'.format(fbase))
logger.info('downloading session=%s, scan=%s, loc=%s', args.session, scan['scan'], dicom_dir)
args.xnat.download(args.session, [scan['scan']], out_dir=dicom_dir)
# convert to nifti
fname = '{0}.nii.gz'.format(fbase)
refs[ref] = os.path.join(scan['type'], fname)
fullfile = os.path.join(args.bids, scan['type'], fname)
logger.info('converting %s to %s', dicom_dir, fullfile)
convert(dicom_dir, fullfile)
# rename fieldmap images to BIDS file naming convention
if scan['type'] == 'fmap':
if scan.get('modality', None) == 'magnitude':
rename_fmapm(args.bids, fbase)
elif scan.get('modality', None) == 'phase':
rename_fmapp(args.bids, fbase)
# add xnat source information to json sidecar
sidecar_file = os.path.join(args.bids, scan['type'], fbase + '.json')
with open(sidecar_file, 'r') as fo:
sidecarjs = json.load(fo)
sidecarjs['DataSource'] = {
'application/x-xnat': {
'url': args.xnat.url,
'project': args.project,
'subject': args.subject,
'subject_id': args.subject_id,
'experiment': args.session,
'experiment_id': args.session_id,
'scan': scan['scan']
}
}
# insert intended-for into json sidecar
if 'intended for' in scan and func_refs:
for intended in scan['intended for']:
if intended in func_refs:
logger.info('adding IntendedFor %s to %s', func_refs[intended], sidecar_file)
if 'IntendedFor' not in sidecarjs:
sidecarjs['IntendedFor'] = list()
if func_refs[intended] not in sidecarjs['IntendedFor']:
sidecarjs['IntendedFor'].append(func_refs[intended])
logger.info('writing file %s', sidecar_file)
# write out updated json sidecar
commons.atomic_write(sidecar_file, json.dumps(sidecarjs, indent=2))
return refs
def iterconfig(config, scan_type):
'''
Iterate over BIDS configuration file
'''
if scan_type in config:
for modality,scans in iter(config[scan_type].items()):
for scan in scans:
scan.update({
'type': scan_type,
'modality': modality
})
yield scan
def rename_fmapm(bids_base, basename):
'''
Rename magnitude fieldmap file to BIDS specification
'''
files = dict()
for ext in ['nii.gz', 'json']:
for echo in [1, 2]:
fname = '{0}_e{1}.{2}'.format(basename, echo, ext)
src = os.path.join(bids_base, 'fmap', fname)
if os.path.exists(src):
dst = src.replace(
'magnitude_e{0}'.format(echo),
'magnitude{0}'.format(echo)
)
logger.debug('renaming %s to %s', src, dst)
os.rename(src, dst)
files[ext] = dst
return files
def rename_fmapp(bids_base, basename):
'''
Rename phase fieldmap file to BIDS specification
'''
files = dict()
for ext in ['nii.gz', 'json']:
fname = '{0}_e2_ph.{1}'.format(basename, ext)
src = os.path.join(bids_base, 'fmap', fname)
if os.path.exists(src):
dst = src.replace(
'phase_e2_ph',
'phase'
)
logger.debug('renaming %s to %s', src, dst)
os.rename(src, dst)
files[ext] = dst
return files
def convert(input, output, single_file=False):
'''
Run dcm2niix on input file
'''
dirname = os.path.dirname(output)
if not os.path.exists(dirname):
os.makedirs(dirname)
basename = os.path.basename(output)
basename = re.sub('.nii(.gz)?', '', basename)
    # use the resolved dcm2niix path if available, otherwise fall back to PATH lookup
    dcm2niix = commons.which('dcm2niix') or 'dcm2niix'
    cmd = [
        dcm2niix
    ]
if single_file:
cmd.extend([
'-s', 'y'
])
cmd.extend([
'-b', 'y',
'-z', 'y',
'-f', basename,
'-o', dirname,
input
])
logger.debug(cmd)
sp.check_output(cmd)
import re
import os
import glob
import json
import string
import logging
import subprocess as sp
import yaxil.commons as commons
logger = logging.getLogger(__name__)
legal = re.compile('[^a-zA-Z0-9]')
def bids_from_config(yaxil_session, scans_metadata, config, out_base):
_item = next(iter(scans_metadata))
project,session,subject = _item['session_project'],_item['session_label'],_item['subject_label']
session_id,subject_id = _item['session_id'],_item['subject_id']
check_dataset_description(out_base)
sourcedata_base = os.path.join(
out_base,
'sourcedata',
'sub-{0}'.format(legal.sub('', subject)),
'ses-{0}'.format(legal.sub('', session))
)
bids_base = os.path.join(
out_base,
'sub-{0}'.format(legal.sub('', subject)),
'ses-{0}'.format(legal.sub('', session))
)
args = commons.struct(
xnat=yaxil_session,
subject=subject,
subject_id=subject_id,
session=session,
session_id=session_id,
project=project,
bids=bids_base,
sourcedata=sourcedata_base
)
func_refs = proc_func(config, args)
anat_refs = proc_anat(config, args)
dwi_refs = proc_dwi(config, args)
fmap_refs = proc_fmap(config, args, func_refs)
def check_dataset_description(bids_dir, bids_version='1.4.0', ds_type='raw'):
if not os.path.exists(bids_dir):
os.makedirs(bids_dir)
ds_desc = os.path.join(bids_dir, 'dataset_description.json')
if not os.path.exists(ds_desc):
js = {
'Name': 'Made by YAXIL',
'BIDSVersion': bids_version,
'DatasetType': ds_type
}
with open(ds_desc, 'w') as fo:
fo.write(json.dumps(js))
def proc_func(config, args):
refs = dict()
for scan in iterconfig(config, 'func'):
ref = scan.get('id', None)
templ = 'sub-${sub}_ses-${ses}'
if 'task' in scan:
templ += '_task-${task}'
if 'acquisition' in scan:
templ += '_acq-${acquisition}'
if 'run' in scan:
templ += '_run-${run}'
if 'direction' in scan:
templ += '_dir-${direction}'
templ += '_${modality}'
templ = string.Template(templ)
fbase = templ.safe_substitute(
sub=legal.sub('', args.subject),
ses=legal.sub('', args.session),
task=scan.get('task', None),
acquisition=scan.get('acquisition', None),
run=scan.get('run', None),
direction=scan.get('direction', None),
modality=scan.get('modality', None)
)
sourcedata_dir = os.path.join(args.sourcedata, scan['type'])
if not os.path.exists(sourcedata_dir):
os.makedirs(sourcedata_dir)
dicom_dir = os.path.join(sourcedata_dir, '{0}.dicom'.format(fbase))
logger.info('downloading session=%s, scan=%s, loc=%s', args.session, scan['scan'], dicom_dir)
args.xnat.download(args.session, [scan['scan']], out_dir=dicom_dir)
fname = '{0}.nii.gz'.format(fbase)
refs[ref] = os.path.join(scan['type'], fname)
fullfile = os.path.join(args.bids, scan['type'], fname)
logger.info('converting %s to %s', dicom_dir, fullfile)
convert(dicom_dir, fullfile)
sidecar_file = os.path.join(args.bids, scan['type'], fbase + '.json')
with open(sidecar_file) as fo:
sidecarjs = json.load(fo)
sidecarjs['DataSource'] = {
'application/x-xnat': {
'url': args.xnat.url,
'project': args.project,
'subject': args.subject,
'subject_id': args.subject_id,
'experiment': args.session,
'experiment_id': args.session_id,
'scan': scan['scan']
}
}
commons.atomic_write(sidecar_file, json.dumps(sidecarjs, indent=2))
return refs
def proc_anat(config, args):
refs = dict()
for scan in iterconfig(config, 'anat'):
ref = scan.get('id', None)
templ = 'sub-${sub}_ses-${ses}'
if 'acquisition' in scan:
templ += '_acq-${acquisition}'
if 'run' in scan:
templ += '_run-${run}'
templ += '_${modality}'
templ = string.Template(templ)
fbase = templ.safe_substitute(
sub=legal.sub('', args.subject),
ses=legal.sub('', args.session),
acquisition=scan.get('acquisition', None),
run=scan.get('run', None),
modality=scan.get('modality', None),
)
sourcedata_dir = os.path.join(args.sourcedata, scan['type'])
if not os.path.exists(sourcedata_dir):
os.makedirs(sourcedata_dir)
dicom_dir = os.path.join(sourcedata_dir, '{0}.dicom'.format(fbase))
logger.info('downloading session=%s, scan=%s, loc=%s', args.session, scan['scan'], dicom_dir)
args.xnat.download(args.session, [scan['scan']], out_dir=dicom_dir)
fname = '{0}.nii.gz'.format(fbase)
refs[ref] = os.path.join(scan['type'], fname)
fullfile = os.path.join(args.bids, scan['type'], fname)
logger.info('converting %s to %s', dicom_dir, fullfile)
modality = scan.get('modality', None)
sidecar_files = list()
if modality == 'T1vnav':
fullfile = fullfile.replace('_T1vnav', '_split-%r_T1vnav')
for f in glob.glob(os.path.join(dicom_dir, '*.dcm')):
logger.debug('converting single file %s to %s', f, fullfile)
convert(f, fullfile, single_file=True)
ffbase = re.sub('.nii(.gz)?', '', fullfile)
expr = ffbase.replace('%r', '*') + '.json'
logger.debug('globbing for %s', expr)
sidecar_files = glob.glob(expr)
else:
convert(dicom_dir, fullfile)
sidecar_files = [
os.path.join(args.bids, scan['type'], fbase + '.json')
]
for sidecar_file in sidecar_files:
logger.debug('adding provenance to %s', sidecar_file)
with open(sidecar_file) as fo:
sidecarjs = json.load(fo)
sidecarjs['DataSource'] = {
'application/x-xnat': {
'url': args.xnat.url,
'project': args.project,
'subject': args.subject,
'subject_id': args.subject_id,
'experiment': args.session,
'experiment_id': args.session_id,
'scan': scan['scan']
}
}
commons.atomic_write(sidecar_file, json.dumps(sidecarjs, indent=2))
return refs
def proc_dwi(config, args):
refs = dict()
for scan in iterconfig(config, 'dwi'):
ref = scan.get('id', None)
templ = 'sub-${sub}_ses-${ses}'
if 'acquisition' in scan:
templ += '_acq-${acquisition}'
if 'direction' in scan:
templ += '_dir-${direction}'
if 'run' in scan:
templ += '_run-${run}'
templ += '_${modality}'
templ = string.Template(templ)
fbase = templ.safe_substitute(
sub=legal.sub('', args.subject),
ses=legal.sub('', args.session),
acquisition=scan.get('acquisition', None),
direction=scan.get('direction', None),
run=scan.get('run', None),
modality=scan.get('modality', None)
)
sourcedata_dir = os.path.join(args.sourcedata, scan['type'])
if not os.path.exists(sourcedata_dir):
os.makedirs(sourcedata_dir)
dicom_dir = os.path.join(sourcedata_dir, '{0}.dicom'.format(fbase))
logger.info('downloading session=%s, scan=%s, loc=%s', args.session, scan['scan'], dicom_dir)
args.xnat.download(args.session, [scan['scan']], out_dir=dicom_dir)
fname = '{0}.nii.gz'.format(fbase)
refs[ref] = os.path.join(scan['type'], fname)
fullfile = os.path.join(args.bids, scan['type'], fname)
logger.info('converting %s to %s', dicom_dir, fullfile)
modality = scan.get('modality', None)
convert(dicom_dir, fullfile)
sidecar_file = os.path.join(args.bids, scan['type'], fbase + '.json')
logger.debug('adding provenance to %s', sidecar_file)
with open(sidecar_file) as fo:
sidecarjs = json.load(fo)
sidecarjs['DataSource'] = {
'application/x-xnat': {
'url': args.xnat.url,
'project': args.project,
'subject': args.subject,
'subject_id': args.subject_id,
'experiment': args.session,
'experiment_id': args.session_id,
'scan': scan['scan']
}
}
commons.atomic_write(sidecar_file, json.dumps(sidecarjs, indent=2))
return refs
def proc_fmap(config, args, func_refs=None):
refs = dict()
for scan in iterconfig(config, 'fmap'):
ref = scan.get('id', None)
templ = 'sub-${sub}_ses-${ses}'
if 'acquisition' in scan:
templ += '_acq-${acquisition}'
if 'run' in scan:
templ += '_run-${run}'
if 'direction' in scan:
templ += '_dir-${direction}'
templ += '_${modality}'
templ = string.Template(templ)
fbase = templ.safe_substitute(
sub=legal.sub('', args.subject),
ses=legal.sub('', args.session),
acquisition=scan.get('acquisition', None),
run=scan.get('run', None),
direction=scan.get('direction', None),
modality=scan.get('modality', None),
)
sourcedata_dir = os.path.join(args.sourcedata, scan['type'])
if not os.path.exists(sourcedata_dir):
os.makedirs(sourcedata_dir)
dicom_dir = os.path.join(sourcedata_dir, '{0}.dicom'.format(fbase))
logger.info('downloading session=%s, scan=%s, loc=%s', args.session, scan['scan'], dicom_dir)
args.xnat.download(args.session, [scan['scan']], out_dir=dicom_dir)
fname = '{0}.nii.gz'.format(fbase)
refs[ref] = os.path.join(scan['type'], fname)
fullfile = os.path.join(args.bids, scan['type'], fname)
logger.info('converting %s to %s', dicom_dir, fullfile)
convert(dicom_dir, fullfile)
if scan['type'] == 'fmap':
if scan.get('modality', None) == 'magnitude':
rename_fmapm(args.bids, fbase)
elif scan.get('modality', None) == 'phase':
rename_fmapp(args.bids, fbase)
sidecar_file = os.path.join(args.bids, scan['type'], fbase + '.json')
with open(sidecar_file, 'r') as fo:
sidecarjs = json.load(fo)
sidecarjs['DataSource'] = {
'application/x-xnat': {
'url': args.xnat.url,
'project': args.project,
'subject': args.subject,
'subject_id': args.subject_id,
'experiment': args.session,
'experiment_id': args.session_id,
'scan': scan['scan']
}
}
if 'intended for' in scan and func_refs:
for intended in scan['intended for']:
if intended in func_refs:
logger.info('adding IntendedFor %s to %s', func_refs[intended], sidecar_file)
if 'IntendedFor' not in sidecarjs:
sidecarjs['IntendedFor'] = list()
if func_refs[intended] not in sidecarjs['IntendedFor']:
sidecarjs['IntendedFor'].append(func_refs[intended])
logger.info('writing file %s', sidecar_file)
commons.atomic_write(sidecar_file, json.dumps(sidecarjs, indent=2))
return refs
def iterconfig(config, scan_type):
if scan_type in config:
        for modality, scans in config[scan_type].items():
for scan in scans:
scan.update({
'type': scan_type,
'modality': modality
})
yield scan
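# Illustrative sketch (not part of the original script): iterconfig() expects a
# mapping of the form config[scan_type][modality] -> list of scan dicts; the
# concrete on-disk format of that config is an assumption here, but the keys shown
# ('scan', 'id', 'acquisition', 'run', 'direction') are the ones read above.
#
#   example_config = {
#       'anat': {
#           'T1w': [
#               {'scan': '4', 'id': 'anat1', 'run': '01'},
#           ],
#       },
#   }
#   for scan in iterconfig(example_config, 'anat'):
#       print(scan['type'], scan['modality'], scan['scan'])   # -> anat T1w 4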
def rename_fmapm(bids_base, basename):
files = dict()
for ext in ['nii.gz', 'json']:
for echo in [1, 2]:
fname = '{0}_e{1}.{2}'.format(basename, echo, ext)
src = os.path.join(bids_base, 'fmap', fname)
if os.path.exists(src):
dst = src.replace(
'magnitude_e{0}'.format(echo),
'magnitude{0}'.format(echo)
)
logger.debug('renaming %s to %s', src, dst)
os.rename(src, dst)
files[ext] = dst
return files
def rename_fmapp(bids_base, basename):
files = dict()
for ext in ['nii.gz', 'json']:
fname = '{0}_e2_ph.{1}'.format(basename, ext)
src = os.path.join(bids_base, 'fmap', fname)
if os.path.exists(src):
dst = src.replace(
'phase_e2_ph',
'phase'
)
logger.debug('renaming %s to %s', src, dst)
os.rename(src, dst)
files[ext] = dst
return files
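# Illustrative note (assumes dcm2niix's usual echo/phase suffixes): dcm2niix emits
# multi-echo fieldmap outputs with suffixes such as '_e1', '_e2' and '_e2_ph'; the
# two helpers above map those onto the BIDS-style names used here, e.g.
#   sub-01_ses-01_magnitude_e1.nii.gz -> sub-01_ses-01_magnitude1.nii.gz
#   sub-01_ses-01_phase_e2_ph.nii.gz  -> sub-01_ses-01_phase.nii.gz
# (subject/session labels are hypothetical).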
def convert(input, output, single_file=False):
dirname = os.path.dirname(output)
if not os.path.exists(dirname):
os.makedirs(dirname)
basename = os.path.basename(output)
    basename = re.sub(r'\.nii(\.gz)?$', '', basename)
    dcm2niix = commons.which('dcm2niix')
    cmd = [
        dcm2niix
    ]
if single_file:
cmd.extend([
'-s', 'y'
])
cmd.extend([
'-b', 'y',
'-z', 'y',
'-f', basename,
'-o', dirname,
input
])
logger.debug(cmd)
sp.check_output(cmd)
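# For reference, the dcm2niix command assembled above looks roughly like this
# (paths and filenames are hypothetical):
#   dcm2niix -b y -z y -f sub-01_ses-01_T1w -o /path/to/bids/anat /path/to/sourcedata/anat/sub-01_ses-01_T1w.dicom
# with '-s y' inserted before the other flags when single_file=True.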
| true | true |
f701c99be4ff76447f67b9cbb2dd1f257ff3916e | 30,823 | py | Python | yahoo_fin/stock_info.py | rokdd/yahoo_fin | da55c89582bc8e858131581da1bd380d19d68bf2 | ["MIT"] | null | null | null | yahoo_fin/stock_info.py | rokdd/yahoo_fin | da55c89582bc8e858131581da1bd380d19d68bf2 | ["MIT"] | null | null | null | yahoo_fin/stock_info.py | rokdd/yahoo_fin | da55c89582bc8e858131581da1bd380d19d68bf2 | ["MIT"] | null | null | null |
import requests
import pandas as pd
import ftplib
import io
import re
import json
import datetime
try:
from requests_html import HTMLSession
except Exception:
print("""Warning - Certain functionality
requires requests_html, which is not installed.
Install using:
pip install requests_html
After installation, you may have to restart your Python session.""")
base_url = "https://query1.finance.yahoo.com/v8/finance/chart/"
def build_url(ticker, start_date = None, end_date = None, interval = "1d"):
if end_date is None:
end_seconds = int(pd.Timestamp("now").timestamp())
else:
end_seconds = int(pd.Timestamp(end_date).timestamp())
if start_date is None:
start_seconds = 7223400
else:
start_seconds = int(pd.Timestamp(start_date).timestamp())
site = base_url + ticker
params = {"period1": start_seconds, "period2": end_seconds,
"interval": interval.lower(), "events": "div,splits"}
return site, params
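# Example of what build_url() returns (ticker and dates are hypothetical; the
# epoch values are whatever pd.Timestamp resolves the dates to):
#   site, params = build_url("AAPL", "2021-01-01", "2021-02-01", "1d")
#   # site   -> "https://query1.finance.yahoo.com/v8/finance/chart/AAPL"
#   # params -> {"period1": <start epoch seconds>, "period2": <end epoch seconds>,
#   #            "interval": "1d", "events": "div,splits"}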
def force_float(elt):
try:
return float(elt)
    except (ValueError, TypeError):
return elt
def _convert_to_numeric(s):
if "M" in s:
s = s.strip("M")
return force_float(s) * 1_000_000
if "B" in s:
s = s.strip("B")
return force_float(s) * 1_000_000_000
return force_float(s)
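# Example behaviour of the two parsing helpers above:
#   force_float("3.14")         -> 3.14
#   force_float("N/A")          -> "N/A"            (returned unchanged)
#   _convert_to_numeric("1.5M") -> 1_500_000.0
#   _convert_to_numeric("2B")   -> 2_000_000_000.0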
def get_data(ticker, start_date = None, end_date = None, index_as_date = True,
interval = "1d", headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
):
'''Downloads historical stock price data into a pandas data frame. Interval
must be "1d", "1wk", "1mo", or "1m" for daily, weekly, monthly, or minute data.
Intraday minute data is limited to 7 days.
@param: ticker
@param: start_date = None
@param: end_date = None
@param: index_as_date = True
@param: interval = "1d"
'''
if interval not in ("1d", "1wk", "1mo", "1m"):
raise AssertionError("interval must be of of '1d', '1wk', '1mo', or '1m'")
# build and connect to URL
site, params = build_url(ticker, start_date, end_date, interval)
resp = requests.get(site, params = params, headers = headers)
if not resp.ok:
raise AssertionError(resp.json())
# get JSON response
data = resp.json()
# get open / high / low / close data
frame = pd.DataFrame(data["chart"]["result"][0]["indicators"]["quote"][0])
# get the date info
temp_time = data["chart"]["result"][0]["timestamp"]
if interval != "1m":
# add in adjclose
frame["adjclose"] = data["chart"]["result"][0]["indicators"]["adjclose"][0]["adjclose"]
frame.index = pd.to_datetime(temp_time, unit = "s")
frame.index = frame.index.map(lambda dt: dt.floor("d"))
frame = frame[["open", "high", "low", "close", "adjclose", "volume"]]
else:
frame.index = pd.to_datetime(temp_time, unit = "s")
frame = frame[["open", "high", "low", "close", "volume"]]
frame['ticker'] = ticker.upper()
if not index_as_date:
frame = frame.reset_index()
frame.rename(columns = {"index": "date"}, inplace = True)
return frame
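# Minimal usage sketch (ticker and dates are hypothetical; requires network access):
#   df = get_data("AAPL", start_date="2021-01-01", end_date="2021-06-30", interval="1wk")
#   df.head()   # columns: open, high, low, close, adjclose, volume, ticker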
def tickers_sp500(include_company_data = False):
'''Downloads list of tickers currently listed in the S&P 500 '''
# get list of all S&P 500 stocks
sp500 = pd.read_html("https://en.wikipedia.org/wiki/List_of_S%26P_500_companies")[0]
sp500["Symbol"] = sp500["Symbol"].str.replace(".", "-", regex=True)
if include_company_data:
return sp500
sp_tickers = sp500.Symbol.tolist()
sp_tickers = sorted(sp_tickers)
return sp_tickers
def tickers_nasdaq(include_company_data = False):
'''Downloads list of tickers currently listed in the NASDAQ'''
ftp = ftplib.FTP("ftp.nasdaqtrader.com")
ftp.login()
ftp.cwd("SymbolDirectory")
r = io.BytesIO()
ftp.retrbinary('RETR nasdaqlisted.txt', r.write)
if include_company_data:
r.seek(0)
data = pd.read_csv(r, sep = "|")
return data
info = r.getvalue().decode()
splits = info.split("|")
tickers = [x for x in splits if "\r\n" in x]
    tickers = [x.split("\r\n")[1] for x in tickers if "NASDAQ" not in x and x != "\r\n"]
tickers = [ticker for ticker in tickers if "File" not in ticker]
ftp.close()
return tickers
def tickers_other(include_company_data = False):
'''Downloads list of tickers currently listed in the "otherlisted.txt"
file on "ftp.nasdaqtrader.com" '''
ftp = ftplib.FTP("ftp.nasdaqtrader.com")
ftp.login()
ftp.cwd("SymbolDirectory")
r = io.BytesIO()
ftp.retrbinary('RETR otherlisted.txt', r.write)
if include_company_data:
r.seek(0)
data = pd.read_csv(r, sep = "|")
return data
info = r.getvalue().decode()
splits = info.split("|")
tickers = [x for x in splits if "\r\n" in x]
tickers = [x.split("\r\n")[1] for x in tickers]
tickers = [ticker for ticker in tickers if "File" not in ticker]
ftp.close()
return tickers
def tickers_dow(include_company_data = False):
'''Downloads list of currently traded tickers on the Dow'''
site = "https://en.wikipedia.org/wiki/Dow_Jones_Industrial_Average"
table = pd.read_html(site, attrs = {"id":"constituents"})[0]
if include_company_data:
return table
dow_tickers = sorted(table['Symbol'].tolist())
return dow_tickers
def tickers_ibovespa(include_company_data = False):
'''Downloads list of currently traded tickers on the Ibovespa, Brazil'''
table = pd.read_html("https://pt.wikipedia.org/wiki/Lista_de_companhias_citadas_no_Ibovespa")[0]
table.columns = ["Symbol", "Share", "Sector", "Type", "Site"]
if include_company_data:
return table
ibovespa_tickers = sorted(table.Symbol.tolist())
return ibovespa_tickers
def tickers_nifty50(include_company_data = False, headers = {'User-agent': 'Mozilla/5.0'}):
'''Downloads list of currently traded tickers on the NIFTY 50, India'''
site = "https://finance.yahoo.com/quote/%5ENSEI/components?p=%5ENSEI"
table = pd.read_html(requests.get(site, headers=headers).text)[0]
if include_company_data:
return table
nifty50 = sorted(table['Symbol'].tolist())
return nifty50
def tickers_niftybank():
''' Currently traded tickers on the NIFTY BANK, India '''
niftybank = ['AXISBANK', 'KOTAKBANK', 'HDFCBANK', 'SBIN', 'BANKBARODA', 'INDUSINDBK', 'PNB', 'IDFCFIRSTB', 'ICICIBANK', 'RBLBANK', 'FEDERALBNK', 'BANDHANBNK']
return niftybank
def tickers_ftse100(include_company_data = False):
'''Downloads a list of the tickers traded on the FTSE 100 index'''
table = pd.read_html("https://en.wikipedia.org/wiki/FTSE_100_Index", attrs = {"id": "constituents"})[0]
if include_company_data:
return table
return sorted(table.EPIC.tolist())
def tickers_ftse250(include_company_data = False):
'''Downloads a list of the tickers traded on the FTSE 250 index'''
table = pd.read_html("https://en.wikipedia.org/wiki/FTSE_250_Index", attrs = {"id": "constituents"})[0]
table.columns = ["Company", "Ticker"]
if include_company_data:
return table
return sorted(table.Ticker.tolist())
def get_quote_table(ticker , dict_result = True, headers = {'User-agent': 'Mozilla/5.0'}):
'''Scrapes data elements found on Yahoo Finance's quote page
of input ticker
@param: ticker
@param: dict_result = True
'''
site = "https://finance.yahoo.com/quote/" + ticker + "?p=" + ticker
tables = pd.read_html(requests.get(site, headers=headers).text)
    data = pd.concat([tables[0], tables[1]])
    data.columns = ["attribute" , "value"]
    quote_price = pd.DataFrame(["Quote Price", get_live_price(ticker)]).transpose()
    quote_price.columns = data.columns.copy()
    data = pd.concat([data, quote_price])
data = data.sort_values("attribute")
data = data.drop_duplicates().reset_index(drop = True)
data["value"] = data.value.map(force_float)
if dict_result:
result = {key : val for key,val in zip(data.attribute , data.value)}
return result
return data
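# Usage sketch (hypothetical ticker; the attribute names returned depend on what
# Yahoo Finance currently shows on the quote page):
#   info  = get_quote_table("AAPL")                      # dict of attribute -> value
#   table = get_quote_table("AAPL", dict_result=False)   # same data as a DataFrame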
def get_stats(ticker, headers = {'User-agent': 'Mozilla/5.0'}):
'''Scrapes information from the statistics tab on Yahoo Finance
for an input ticker
@param: ticker
'''
stats_site = "https://finance.yahoo.com/quote/" + ticker + \
"/key-statistics?p=" + ticker
tables = pd.read_html(requests.get(stats_site, headers=headers).text)
tables = [table for table in tables[1:] if table.shape[1] == 2]
table = tables[0]
for elt in tables[1:]:
        table = pd.concat([table, elt])
table.columns = ["Attribute" , "Value"]
table = table.reset_index(drop = True)
return table
def get_stats_valuation(ticker, headers = {'User-agent': 'Mozilla/5.0'}):
'''Scrapes Valuation Measures table from the statistics tab on Yahoo Finance
for an input ticker
@param: ticker
'''
stats_site = "https://finance.yahoo.com/quote/" + ticker + \
"/key-statistics?p=" + ticker
tables = pd.read_html(requests.get(stats_site, headers=headers).text)
tables = [table for table in tables if "Trailing P/E" in table.iloc[:,0].tolist()]
table = tables[0].reset_index(drop = True)
return table
def _parse_json(url, headers = {'User-agent': 'Mozilla/5.0'}):
html = requests.get(url=url, headers = headers).text
json_str = html.split('root.App.main =')[1].split(
'(this)')[0].split(';\n}')[0].strip()
try:
data = json.loads(json_str)[
'context']['dispatcher']['stores']['QuoteSummaryStore']
    except Exception:
return '{}'
else:
# return data
new_data = json.dumps(data).replace('{}', 'null')
new_data = re.sub(r'\{[\'|\"]raw[\'|\"]:(.*?),(.*?)\}', r'\1', new_data)
json_info = json.loads(new_data)
return json_info
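# Note on the regex rewrite above: the embedded JSON stores most numbers as
# {"raw": <number>, "fmt": "<display string>", ...}; the substitution keeps only
# the raw value, so a fragment like
#   {"totalRevenue": {"raw": 365817000000, "fmt": "365.82B"}}
# collapses to
#   {"totalRevenue": 365817000000}
# (field name and figures are made up for illustration).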
def _parse_table(json_info):
df = pd.DataFrame(json_info)
if df.empty:
return df
del df["maxAge"]
df.set_index("endDate", inplace=True)
df.index = pd.to_datetime(df.index, unit="s")
df = df.transpose()
df.index.name = "Breakdown"
return df
def get_income_statement(ticker, yearly = True):
'''Scrape income statement from Yahoo Finance for a given ticker
@param: ticker
'''
income_site = "https://finance.yahoo.com/quote/" + ticker + \
"/financials?p=" + ticker
json_info = _parse_json(income_site)
if yearly:
temp = json_info["incomeStatementHistory"]["incomeStatementHistory"]
else:
temp = json_info["incomeStatementHistoryQuarterly"]["incomeStatementHistory"]
return _parse_table(temp)
def get_balance_sheet(ticker, yearly = True):
'''Scrapes balance sheet from Yahoo Finance for an input ticker
@param: ticker
'''
balance_sheet_site = "https://finance.yahoo.com/quote/" + ticker + \
"/balance-sheet?p=" + ticker
json_info = _parse_json(balance_sheet_site)
try:
if yearly:
temp = json_info["balanceSheetHistory"]["balanceSheetStatements"]
else:
temp = json_info["balanceSheetHistoryQuarterly"]["balanceSheetStatements"]
    except (KeyError, TypeError):
temp = []
return _parse_table(temp)
def get_cash_flow(ticker, yearly = True):
'''Scrapes the cash flow statement from Yahoo Finance for an input ticker
@param: ticker
'''
cash_flow_site = "https://finance.yahoo.com/quote/" + \
ticker + "/cash-flow?p=" + ticker
json_info = _parse_json(cash_flow_site)
if yearly:
temp = json_info["cashflowStatementHistory"]["cashflowStatements"]
else:
temp = json_info["cashflowStatementHistoryQuarterly"]["cashflowStatements"]
return _parse_table(temp)
def get_financials(ticker, yearly = True, quarterly = True):
'''Scrapes financials data from Yahoo Finance for an input ticker, including
balance sheet, cash flow statement, and income statement. Returns dictionary
of results.
@param: ticker
@param: yearly = True
@param: quarterly = True
'''
if not yearly and not quarterly:
raise AssertionError("yearly or quarterly must be True")
financials_site = "https://finance.yahoo.com/quote/" + ticker + \
"/financials?p=" + ticker
json_info = _parse_json(financials_site)
result = {}
if yearly:
temp = json_info["incomeStatementHistory"]["incomeStatementHistory"]
table = _parse_table(temp)
result["yearly_income_statement"] = table
temp = json_info["balanceSheetHistory"]["balanceSheetStatements"]
table = _parse_table(temp)
result["yearly_balance_sheet"] = table
temp = json_info["cashflowStatementHistory"]["cashflowStatements"]
table = _parse_table(temp)
result["yearly_cash_flow"] = table
if quarterly:
temp = json_info["incomeStatementHistoryQuarterly"]["incomeStatementHistory"]
table = _parse_table(temp)
result["quarterly_income_statement"] = table
temp = json_info["balanceSheetHistoryQuarterly"]["balanceSheetStatements"]
table = _parse_table(temp)
result["quarterly_balance_sheet"] = table
temp = json_info["cashflowStatementHistoryQuarterly"]["cashflowStatements"]
table = _parse_table(temp)
result["quarterly_cash_flow"] = table
return result
def get_holders(ticker, headers = {'User-agent': 'Mozilla/5.0'}):
'''Scrapes the Holders page from Yahoo Finance for an input ticker
@param: ticker
'''
holders_site = "https://finance.yahoo.com/quote/" + \
ticker + "/holders?p=" + ticker
tables = pd.read_html(requests.get(holders_site, headers=headers).text)
table_names = ["Major Holders" , "Direct Holders (Forms 3 and 4)" ,
"Top Institutional Holders" , "Top Mutual Fund Holders"]
table_mapper = {key : val for key,val in zip(table_names , tables)}
return table_mapper
def get_analysts_info(ticker, headers = {'User-agent': 'Mozilla/5.0'}):
'''Scrapes the Analysts page from Yahoo Finance for an input ticker
@param: ticker
'''
analysts_site = "https://finance.yahoo.com/quote/" + ticker + \
"/analysts?p=" + ticker
tables = pd.read_html(requests.get(analysts_site, headers=headers).text)
table_names = [table.columns[0] for table in tables]
table_mapper = {key : val for key , val in zip(table_names , tables)}
return table_mapper
def get_live_price(ticker):
'''Gets the live price of input ticker
@param: ticker
'''
df = get_data(ticker, end_date = pd.Timestamp.today() + pd.DateOffset(10))
    return df.close.iloc[-1]
def _raw_get_daily_info(site):
session = HTMLSession()
resp = session.get(site)
tables = pd.read_html(resp.html.raw_html)
df = tables[0].copy()
df.columns = tables[0].columns
del df["52 Week Range"]
df["% Change"] = df["% Change"].map(lambda x: float(x.strip("%+").replace(",", "")))
fields_to_change = [x for x in df.columns.tolist() if "Vol" in x \
or x == "Market Cap"]
for field in fields_to_change:
        if isinstance(df[field][0], str):
df[field] = df[field].map(_convert_to_numeric)
session.close()
return df
def get_day_most_active(count: int = 100):
return _raw_get_daily_info(f"https://finance.yahoo.com/most-active?offset=0&count={count}")
def get_day_gainers(count: int = 100):
return _raw_get_daily_info(f"https://finance.yahoo.com/gainers?offset=0&count={count}")
def get_day_losers(count: int = 100):
return _raw_get_daily_info(f"https://finance.yahoo.com/losers?offset=0&count={count}")
def get_top_crypto():
'''Gets the top 100 Cryptocurrencies by Market Cap'''
session = HTMLSession()
resp = session.get("https://finance.yahoo.com/cryptocurrencies?offset=0&count=100")
tables = pd.read_html(resp.html.raw_html)
df = tables[0].copy()
df["% Change"] = df["% Change"].map(lambda x: float(str(x).strip("%").\
strip("+").\
replace(",", "")))
del df["52 Week Range"]
del df["1 Day Chart"]
fields_to_change = [x for x in df.columns.tolist() if "Volume" in x \
or x == "Market Cap" or x == "Circulating Supply"]
for field in fields_to_change:
        if isinstance(df[field][0], str):
df[field] = df[field].map(lambda x: _convert_to_numeric(str(x)))
session.close()
return df
def get_dividends(ticker, start_date = None, end_date = None, index_as_date = True,
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
):
'''Downloads historical dividend data into a pandas data frame.
@param: ticker
@param: start_date = None
@param: end_date = None
@param: index_as_date = True
'''
# build and connect to URL
site, params = build_url(ticker, start_date, end_date, "1d")
resp = requests.get(site, params = params, headers = headers)
if not resp.ok:
return pd.DataFrame()
# get JSON response
data = resp.json()
# check if there is data available for dividends
if "events" not in data["chart"]["result"][0] or "dividends" not in data["chart"]["result"][0]['events']:
return pd.DataFrame()
# get the dividend data
frame = pd.DataFrame(data["chart"]["result"][0]['events']['dividends'])
frame = frame.transpose()
frame.index = pd.to_datetime(frame.index, unit = "s")
frame.index = frame.index.map(lambda dt: dt.floor("d"))
# sort in chronological order
frame = frame.sort_index()
frame['ticker'] = ticker.upper()
# remove old date column
frame = frame.drop(columns='date')
frame = frame.rename({'amount': 'dividend'}, axis = 'columns')
if not index_as_date:
frame = frame.reset_index()
frame.rename(columns = {"index": "date"}, inplace = True)
return frame
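# Usage sketch (hypothetical ticker; requires network access):
#   divs = get_dividends("MSFT", start_date="2015-01-01")
#   # -> DataFrame indexed by date with 'dividend' and 'ticker' columns, or an
#   #    empty DataFrame when Yahoo returns no dividend events.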
def get_splits(ticker, start_date = None, end_date = None, index_as_date = True,
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
):
'''Downloads historical stock split data into a pandas data frame.
@param: ticker
@param: start_date = None
@param: end_date = None
@param: index_as_date = True
'''
# build and connect to URL
site, params = build_url(ticker, start_date, end_date, "1d")
resp = requests.get(site, params = params, headers = headers)
if not resp.ok:
raise AssertionError(resp.json())
# get JSON response
data = resp.json()
# check if there is data available for events
if "events" not in data["chart"]["result"][0]:
raise AssertionError("There is no data available on stock events, or none have occured")
# check if there is data available for splits
if "splits" not in data["chart"]["result"][0]['events']:
raise AssertionError("There is no data available on stock splits, or none have occured")
# get the split data
frame = pd.DataFrame(data["chart"]["result"][0]['events']['splits'])
frame = frame.transpose()
frame.index = pd.to_datetime(frame.index, unit = "s")
frame.index = frame.index.map(lambda dt: dt.floor("d"))
# sort in to chronological order
frame = frame.sort_index()
frame['ticker'] = ticker.upper()
# remove unnecessary columns
frame = frame.drop(columns=['date', 'denominator', 'numerator'])
if not index_as_date:
frame = frame.reset_index()
frame.rename(columns = {"index": "date"}, inplace = True)
return frame
def get_earnings(ticker):
'''Scrapes earnings data from Yahoo Finance for an input ticker
@param: ticker
'''
result = {
"quarterly_results": pd.DataFrame(),
"yearly_revenue_earnings": pd.DataFrame(),
"quarterly_revenue_earnings": pd.DataFrame()
}
financials_site = "https://finance.yahoo.com/quote/" + ticker + \
"/financials?p=" + ticker
json_info = _parse_json(financials_site)
if "earnings" not in json_info:
return result
temp = json_info["earnings"]
    if temp is None:
return result
result["quarterly_results"] = pd.DataFrame.from_dict(temp["earningsChart"]["quarterly"])
result["yearly_revenue_earnings"] = pd.DataFrame.from_dict(temp["financialsChart"]["yearly"])
result["quarterly_revenue_earnings"] = pd.DataFrame.from_dict(temp["financialsChart"]["quarterly"])
return result
### Earnings functions
def _parse_earnings_json(url, headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
):
resp = requests.get(url, headers = headers)
content = resp.content.decode(encoding='utf-8', errors='strict')
page_data = [row for row in content.split(
'\n') if row.startswith('root.App.main = ')][0][:-1]
page_data = page_data.split('root.App.main = ', 1)[1]
return json.loads(page_data)
def get_next_earnings_date(ticker):
base_earnings_url = 'https://finance.yahoo.com/quote'
new_url = base_earnings_url + "/" + ticker
parsed_result = _parse_earnings_json(new_url)
temp = parsed_result['context']['dispatcher']['stores']['QuoteSummaryStore']['calendarEvents']['earnings']['earningsDate'][0]['raw']
return datetime.datetime.fromtimestamp(temp)
def get_earnings_history(ticker):
'''Inputs: @ticker
Returns the earnings calendar history of the input ticker with
EPS actual vs. expected data.'''
url = 'https://finance.yahoo.com/calendar/earnings?symbol=' + ticker
result = _parse_earnings_json(url)
return result["context"]["dispatcher"]["stores"]["ScreenerResultsStore"]["results"]["rows"]
def get_earnings_for_date(date, offset = 0, count = 1):
'''Inputs: @date
Returns a dictionary of stock tickers with earnings expected on the
input date. The dictionary contains the expected EPS values for each
stock if available.'''
base_earnings_url = 'https://finance.yahoo.com/calendar/earnings'
if offset >= count:
return []
temp = pd.Timestamp(date)
date = temp.strftime("%Y-%m-%d")
dated_url = '{0}?day={1}&offset={2}&size={3}'.format(
base_earnings_url, date, offset, 100)
result = _parse_earnings_json(dated_url)
stores = result['context']['dispatcher']['stores']
earnings_count = stores['ScreenerCriteriaStore']['meta']['total']
new_offset = offset + 100
more_earnings = get_earnings_for_date(date, new_offset, earnings_count)
current_earnings = stores['ScreenerResultsStore']['results']['rows']
total_earnings = current_earnings + more_earnings
return total_earnings
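# Pagination sketch: the function above fetches the screener results in pages of
# 100 and recurses with a growing offset until offset >= the total reported by
# ScreenerCriteriaStore. A top-level call only needs the date (the date below is
# hypothetical, and the row field names are whatever Yahoo's screener returns):
#   rows = get_earnings_for_date("2021-04-27")
#   tickers = [row.get("ticker") for row in rows]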
def get_earnings_in_date_range(start_date, end_date):
'''Inputs: @start_date
@end_date
Returns the stock tickers with expected EPS data for all dates in the
    input range (inclusive of the start_date and end_date).'''
earnings_data = []
days_diff = pd.Timestamp(end_date) - pd.Timestamp(start_date)
days_diff = days_diff.days
current_date = pd.Timestamp(start_date)
dates = [current_date + datetime.timedelta(diff) for diff in range(days_diff + 1)]
dates = [d.strftime("%Y-%m-%d") for d in dates]
i = 0
while i < len(dates):
try:
earnings_data += get_earnings_for_date(dates[i])
except Exception:
pass
i += 1
return earnings_data
def get_currencies(headers = {'User-agent': 'Mozilla/5.0'}):
'''Returns the currencies table from Yahoo Finance'''
site = "https://finance.yahoo.com/currencies"
tables = pd.read_html(requests.get(site, headers=headers).text)
result = tables[0]
return result
def get_futures(headers = {'User-agent': 'Mozilla/5.0'}):
'''Returns the futures table from Yahoo Finance'''
site = "https://finance.yahoo.com/commodities"
tables = pd.read_html(requests.get(site, headers=headers).text)
result = tables[0]
return result
def get_undervalued_large_caps(headers = {'User-agent': 'Mozilla/5.0'}):
'''Returns the undervalued large caps table from Yahoo Finance'''
site = "https://finance.yahoo.com/screener/predefined/undervalued_large_caps?offset=0&count=100"
tables = pd.read_html(requests.get(site, headers=headers).text)
result = tables[0]
return result
def get_quote_data(ticker, headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
):
'''Inputs: @ticker
Returns a dictionary containing over 70 elements corresponding to the
input ticker, including company name, book value, moving average data,
pre-market / post-market price (when applicable), and more.'''
site = "https://query1.finance.yahoo.com/v7/finance/quote?symbols=" + ticker
    resp = requests.get(site, headers = headers)
if not resp.ok:
raise AssertionError("""Invalid response from server. Check if ticker is
valid.""")
json_result = resp.json()
info = json_result["quoteResponse"]["result"]
return info[0]
def get_market_status():
'''Returns the current state of the market - PRE, POST, OPEN, or CLOSED'''
quote_data = get_quote_data("^dji")
return quote_data["marketState"]
def get_premarket_price(ticker):
'''Inputs: @ticker
Returns the current pre-market price of the input ticker
    (returns value if pre-market price is available).'''
quote_data = get_quote_data(ticker)
if "preMarketPrice" in quote_data:
return quote_data["preMarketPrice"]
raise AssertionError("Premarket price not currently available.")
def get_postmarket_price(ticker):
'''Inputs: @ticker
Returns the current post-market price of the input ticker
    (returns value if post-market price is available).'''
quote_data = get_quote_data(ticker)
if "postMarketPrice" in quote_data:
return quote_data["postMarketPrice"]
raise AssertionError("Postmarket price not currently available.")
# Company Information Functions
def get_company_info(ticker):
'''Scrape the company information for a ticker
@param: ticker
'''
site = f"https://finance.yahoo.com/quote/{ticker}/profile?p={ticker}"
json_info = _parse_json(site)
json_info = json_info["assetProfile"]
info_frame = pd.DataFrame.from_dict(json_info,
orient="index",
columns=["Value"])
info_frame = info_frame.drop("companyOfficers", axis="index")
info_frame.index.name = "Breakdown"
return info_frame
def get_company_officers(ticker):
'''Scrape the company information and return a table of the officers
@param: ticker
'''
site = f"https://finance.yahoo.com/quote/{ticker}/profile?p={ticker}"
json_info = _parse_json(site)
json_info = json_info["assetProfile"]["companyOfficers"]
info_frame = pd.DataFrame.from_dict(json_info)
info_frame = info_frame.set_index("name")
return info_frame
| 29.666025 | 179 | 0.58677 |
| true | true |
f701caa049f13a466bcf42bb60f125bec4bf7b7c | 18 | py | Python | mc/__init__.py | munasaber/djlib | 2066353ff718a6fe30dd8897f635ac0f4616b948 | ["MIT"] | null | null | null | mc/__init__.py | munasaber/djlib | 2066353ff718a6fe30dd8897f635ac0f4616b948 | ["MIT"] | null | null | null | mc/__init__.py | munasaber/djlib | 2066353ff718a6fe30dd8897f635ac0f4616b948 | ["MIT"] | null | null | null |
from .mc import *
| 9 | 17 | 0.666667 |
| true | true |
f701cacdb2d327f91c1bbc2baa9f7012220e3c96 | 218 | py | Python | somebox/common/pathtool.py | ipkn/somebox | 1fedaa07236402269b8ad10dc9563f3d90aaead1 | ["MIT"] | 4 | 2017-12-25T10:36:15.000Z | 2018-01-01T10:42:34.000Z | somebox/common/pathtool.py | ipkn/somebox | 1fedaa07236402269b8ad10dc9563f3d90aaead1 | ["MIT"] | null | null | null | somebox/common/pathtool.py | ipkn/somebox | 1fedaa07236402269b8ad10dc9563f3d90aaead1 | ["MIT"] | null | null | null |
import os
def find_base(p, bases):
for base_name, base_path in bases.items():
r = os.path.relpath(p, base_path)
if r and (r == '.' or r[0] != '.'):
return base_name, r
return None
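# Usage sketch (hypothetical POSIX paths): find_base() returns the first base whose
# directory contains p, together with p's path relative to that base, or None if no
# base matches. Which base wins on overlap follows dict insertion order.
#   bases = {"home": "/home/user", "srv": "/srv/data"}
#   find_base("/srv/data/photos/a.jpg", bases)   # -> ("srv", "photos/a.jpg")
#   find_base("/tmp/other.txt", bases)           # -> None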
| 21.8 | 46 | 0.550459 |
| true | true |
f701cb9ade8c6b90ec926940c886e49ad6ab15a3 | 1,534 | py | Python | kinder/lambda_funcs.py | 42B/krampus | 738f00652abc93302db5b1205ab3ba45f2507463 | ["MIT"] | 54 | 2018-07-16T02:33:10.000Z | 2022-03-31T07:57:06.000Z | kinder/lambda_funcs.py | 42B/krampus | 738f00652abc93302db5b1205ab3ba45f2507463 | ["MIT"] | 12 | 2018-07-12T17:02:54.000Z | 2019-03-06T03:34:28.000Z | kinder/lambda_funcs.py | 42B/krampus | 738f00652abc93302db5b1205ab3ba45f2507463 | ["MIT"] | 10 | 2018-07-03T16:35:28.000Z | 2022-02-19T12:27:09.000Z |
###############################################################################
# Lambda kinder class
###############################################################################
# lambda is actionable dot ru
###############################################################################
# TODO:
###############################################################################
from lib.krampus_logging import KLog
class Lambda():
def __init__(self, func_name, region, sess):
try:
self.conn = sess.client("lambda", region_name=region)
except Exception as e:
KLog.log("issue connecting to AWS %s" % str(e), "critical")
exit("[!] issue connecting to AWS: %s" % str(e))
        # store the target function name
self.func = func_name
self.region = region
# save raw sess in case of instance actions
self.sess = sess
def disable(self):
KLog.log("no disable action for lambda function '%s', will delete instead" % self.func, "warning")
return self.kill()
def kill(self):
try:
# low level call, just pass the resp back
return self.conn.delete_function(FunctionName=self.func)
except Exception as e:
            if "ResourceNotFoundException" in str(e):
                KLog.log("could not find function '%s', dequeueing task" % self.func)
            else:
                KLog.log("could not delete function '%s', unknown error: %s" % (self.func, str(e)), "critical")
return None
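# Usage sketch (function name and region are hypothetical; assumes a boto3 session
# with permission to delete Lambda functions in the target region):
#   import boto3
#   sess = boto3.Session()
#   func = Lambda("my-function", "us-east-1", sess)
#   func.disable()   # Lambda has no disable action, so this deletes the function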
| 40.368421 | 106 | 0.470013 |
| true | true |
f701cc90df7c8af6299601fb9178eb9a859c130c | 307 | py | Python | codigos/Cap02/ex2.1.py | skunkworksdev/Ifes_Algoritmo | 458ef73a304573c12b45d8afae38e13ae0f3354f | ["MIT"] | null | null | null | codigos/Cap02/ex2.1.py | skunkworksdev/Ifes_Algoritmo | 458ef73a304573c12b45d8afae38e13ae0f3354f | ["MIT"] | null | null | null | codigos/Cap02/ex2.1.py | skunkworksdev/Ifes_Algoritmo | 458ef73a304573c12b45d8afae38e13ae0f3354f | ["MIT"] | null | null | null |
T = float(input("Entre com a temperatura que está agora: "))
if T >= 26.0 and T <= 36.0:
print("A temperatura está boa")
elif T > 36.0:
print("A temperatura está quente\n Tome bastante líquido")
elif T >= 15.0 and T < 26.0:
print("A temperatura está agradável")
else:
print("A temperatura esta fria")
| 27.909091 | 60 | 0.680782 |
| true | true |
f701cca3686904209fe1073f6449348e9c1f82a1 | 7,163 | py | Python | theano/sparse/sandbox/sp2.py | michaelosthege/aesara | 55c88832ba71f87c9612d573ede74a4c042ef570 | ["BSD-3-Clause"] | 1 | 2020-12-30T19:12:52.000Z | 2020-12-30T19:12:52.000Z | theano/sparse/sandbox/sp2.py | michaelosthege/aesara | 55c88832ba71f87c9612d573ede74a4c042ef570 | ["BSD-3-Clause"] | null | null | null | theano/sparse/sandbox/sp2.py | michaelosthege/aesara | 55c88832ba71f87c9612d573ede74a4c042ef570 | ["BSD-3-Clause"] | null | null | null |
import numpy as np
import scipy.sparse
import theano
from theano import gof, tensor
from theano.gof.op import Op
from theano.sparse.basic import (
Remove0,
SparseType,
_is_sparse,
as_sparse_variable,
remove0,
)
# Also for compatibility
from theano.tensor import discrete_dtypes, float_dtypes
# Probability Ops are currently back in sandbox, because they do not respect
# Theano's Op contract, as their behaviour is not reproducible: calling
# the perform() method twice with the same argument will yield different
# results.
# from theano.sparse.basic import (
# Multinomial, multinomial, Poisson, poisson,
# Binomial, csr_fbinomial, csc_fbinomial, csr_dbinomial, csc_dbinomial)
# Alias to maintain compatibility
EliminateZeros = Remove0
eliminate_zeros = remove0
# Probability
class Poisson(Op):
"""Return a sparse having random values from a Poisson density
with mean from the input.
WARNING: This Op is NOT deterministic, as calling it twice with the
same inputs will NOT give the same result. This is a violation of
Theano's contract for Ops
:param x: Sparse matrix.
:return: A sparse matrix of random integers of a Poisson density
with mean of `x` element wise.
"""
__props__ = ()
def make_node(self, x):
x = as_sparse_variable(x)
return gof.Apply(self, [x], [x.type()])
def perform(self, node, inputs, outputs):
(x,) = inputs
(out,) = outputs
assert _is_sparse(x)
assert x.format in ["csr", "csc"]
out[0] = x.copy()
out[0].data = np.asarray(np.random.poisson(out[0].data), dtype=x.dtype)
out[0].eliminate_zeros()
def grad(self, inputs, outputs_gradients):
comment = "No gradient exists for class Poisson in\
theano/sparse/sandbox/sp2.py"
return [
theano.gradient.grad_undefined(
op=self, x_pos=0, x=inputs[0], comment=comment
)
]
def infer_shape(self, fgraph, node, ins_shapes):
return ins_shapes
poisson = Poisson()
class Binomial(Op):
"""Return a sparse matrix having random values from a binomial
density having number of experiment `n` and probability of succes
`p`.
WARNING: This Op is NOT deterministic, as calling it twice with the
same inputs will NOT give the same result. This is a violation of
Theano's contract for Ops
:param n: Tensor scalar representing the number of experiment.
:param p: Tensor scalar representing the probability of success.
:param shape: Tensor vector for the output shape.
:return: A sparse matrix of integers representing the number
of success.
"""
__props__ = ("format", "dtype")
def __init__(self, format, dtype):
self.format = format
self.dtype = dtype
def make_node(self, n, p, shape):
n = tensor.as_tensor_variable(n)
p = tensor.as_tensor_variable(p)
shape = tensor.as_tensor_variable(shape)
assert n.dtype in discrete_dtypes
assert p.dtype in float_dtypes
assert shape.dtype in discrete_dtypes
return gof.Apply(
self, [n, p, shape], [SparseType(dtype=self.dtype, format=self.format)()]
)
def perform(self, node, inputs, outputs):
(n, p, shape) = inputs
(out,) = outputs
binomial = np.random.binomial(n, p, size=shape)
csx_matrix = getattr(scipy.sparse, self.format + "_matrix")
out[0] = csx_matrix(binomial, dtype=self.dtype)
def connection_pattern(self, node):
return [[True], [True], [False]]
def grad(self, inputs, gout):
(n, p, shape) = inputs
(gz,) = gout
comment_n = "No gradient exists for the number of samples in class\
Binomial of theano/sparse/sandbox/sp2.py"
comment_p = "No gradient exists for the prob of success in class\
Binomial of theano/sparse/sandbox/sp2.py"
return [
theano.gradient.grad_undefined(op=self, x_pos=0, x=n, comment=comment_n),
theano.gradient.grad_undefined(op=self, x_pos=1, x=p, comment=comment_p),
theano.gradient.disconnected_type(),
]
def infer_shape(self, fgraph, node, ins_shapes):
return [(node.inputs[2][0], node.inputs[2][1])]
csr_fbinomial = Binomial("csr", "float32")
csc_fbinomial = Binomial("csc", "float32")
csr_dbinomial = Binomial("csr", "float64")
csc_dbinomial = Binomial("csc", "float64")
class Multinomial(Op):
"""Return a sparse matrix having random values from a multinomial
density having number of experiment `n` and probability of succes
`p`.
WARNING: This Op is NOT deterministic, as calling it twice with the
same inputs will NOT give the same result. This is a violation of
Theano's contract for Ops
:param n: Tensor type vector or scalar representing the number of
experiment for each row. If `n` is a scalar, it will be
used for each row.
:param p: Sparse matrix of probability where each row is a probability
vector representing the probability of succes. N.B. Each row
must sum to one.
:return: A sparse matrix of random integers from a multinomial density
for each row.
:note: It will works only if `p` have csr format.
"""
__props__ = ()
def make_node(self, n, p):
n = tensor.as_tensor_variable(n)
p = as_sparse_variable(p)
assert p.format in ["csr", "csc"]
return gof.Apply(self, [n, p], [p.type()])
def perform(self, node, inputs, outputs):
(n, p) = inputs
(out,) = outputs
assert _is_sparse(p)
if p.format != "csr":
raise NotImplementedError
out[0] = p.copy()
if n.ndim == 0:
for i in range(p.shape[0]):
k, l = p.indptr[i], p.indptr[i + 1]
out[0].data[k:l] = np.random.multinomial(n, p.data[k:l])
elif n.ndim == 1:
if n.shape[0] != p.shape[0]:
raise ValueError(
"The number of element of n must be "
"the same as the number of row of p."
)
for i in range(p.shape[0]):
k, l = p.indptr[i], p.indptr[i + 1]
out[0].data[k:l] = np.random.multinomial(n[i], p.data[k:l])
def grad(self, inputs, outputs_gradients):
comment_n = "No gradient exists for the number of samples in class\
Multinomial of theano/sparse/sandbox/sp2.py"
comment_p = "No gradient exists for the prob of success in class\
Multinomial of theano/sparse/sandbox/sp2.py"
return [
theano.gradient.grad_undefined(
op=self, x_pos=0, x=inputs[0], comment=comment_n
),
theano.gradient.grad_undefined(
op=self, x_pos=1, x=inputs[1], comment=comment_p
),
]
def infer_shape(self, fgraph, node, ins_shapes):
return [ins_shapes[1]]
multinomial = Multinomial()
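# A minimal usage sketch (illustrative only; assumes Theano and SciPy are
# installed). It draws one sparse Poisson sample whose element-wise means come
# from a CSR input; values vary between runs since the Op is not deterministic.
if __name__ == "__main__":
    x = SparseType(dtype="float64", format="csr")()    # symbolic CSR input
    sample = theano.function([x], poisson(x))
    means = scipy.sparse.csr_matrix(np.array([[2.0, 0.0], [0.0, 6.0]]))
    print(sample(means).toarray())                     # e.g. [[1. 0.] [0. 7.]]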
| 32.265766
| 85
| 0.620969
|
import numpy as np
import scipy.sparse
import theano
from theano import gof, tensor
from theano.gof.op import Op
from theano.sparse.basic import (
Remove0,
SparseType,
_is_sparse,
as_sparse_variable,
remove0,
)
from theano.tensor import discrete_dtypes, float_dtypes
# the perform() method twice with the same argument will yield different
# results.
# from theano.sparse.basic import (
# Multinomial, multinomial, Poisson, poisson,
# Binomial, csr_fbinomial, csc_fbinomial, csr_dbinomial, csc_dbinomial)
# Alias to maintain compatibility
EliminateZeros = Remove0
eliminate_zeros = remove0
# Probability
class Poisson(Op):
__props__ = ()
def make_node(self, x):
x = as_sparse_variable(x)
return gof.Apply(self, [x], [x.type()])
def perform(self, node, inputs, outputs):
(x,) = inputs
(out,) = outputs
assert _is_sparse(x)
assert x.format in ["csr", "csc"]
out[0] = x.copy()
out[0].data = np.asarray(np.random.poisson(out[0].data), dtype=x.dtype)
out[0].eliminate_zeros()
def grad(self, inputs, outputs_gradients):
comment = "No gradient exists for class Poisson in\
theano/sparse/sandbox/sp2.py"
return [
theano.gradient.grad_undefined(
op=self, x_pos=0, x=inputs[0], comment=comment
)
]
def infer_shape(self, fgraph, node, ins_shapes):
return ins_shapes
poisson = Poisson()
class Binomial(Op):
__props__ = ("format", "dtype")
def __init__(self, format, dtype):
self.format = format
self.dtype = dtype
def make_node(self, n, p, shape):
n = tensor.as_tensor_variable(n)
p = tensor.as_tensor_variable(p)
shape = tensor.as_tensor_variable(shape)
assert n.dtype in discrete_dtypes
assert p.dtype in float_dtypes
assert shape.dtype in discrete_dtypes
return gof.Apply(
self, [n, p, shape], [SparseType(dtype=self.dtype, format=self.format)()]
)
def perform(self, node, inputs, outputs):
(n, p, shape) = inputs
(out,) = outputs
binomial = np.random.binomial(n, p, size=shape)
csx_matrix = getattr(scipy.sparse, self.format + "_matrix")
out[0] = csx_matrix(binomial, dtype=self.dtype)
def connection_pattern(self, node):
return [[True], [True], [False]]
def grad(self, inputs, gout):
(n, p, shape) = inputs
(gz,) = gout
comment_n = "No gradient exists for the number of samples in class\
Binomial of theano/sparse/sandbox/sp2.py"
comment_p = "No gradient exists for the prob of success in class\
Binomial of theano/sparse/sandbox/sp2.py"
return [
theano.gradient.grad_undefined(op=self, x_pos=0, x=n, comment=comment_n),
theano.gradient.grad_undefined(op=self, x_pos=1, x=p, comment=comment_p),
theano.gradient.disconnected_type(),
]
def infer_shape(self, fgraph, node, ins_shapes):
return [(node.inputs[2][0], node.inputs[2][1])]
csr_fbinomial = Binomial("csr", "float32")
csc_fbinomial = Binomial("csc", "float32")
csr_dbinomial = Binomial("csr", "float64")
csc_dbinomial = Binomial("csc", "float64")
class Multinomial(Op):
__props__ = ()
def make_node(self, n, p):
n = tensor.as_tensor_variable(n)
p = as_sparse_variable(p)
assert p.format in ["csr", "csc"]
return gof.Apply(self, [n, p], [p.type()])
def perform(self, node, inputs, outputs):
(n, p) = inputs
(out,) = outputs
assert _is_sparse(p)
if p.format != "csr":
raise NotImplementedError
out[0] = p.copy()
if n.ndim == 0:
for i in range(p.shape[0]):
k, l = p.indptr[i], p.indptr[i + 1]
out[0].data[k:l] = np.random.multinomial(n, p.data[k:l])
elif n.ndim == 1:
if n.shape[0] != p.shape[0]:
raise ValueError(
"The number of element of n must be "
"the same as the number of row of p."
)
for i in range(p.shape[0]):
k, l = p.indptr[i], p.indptr[i + 1]
out[0].data[k:l] = np.random.multinomial(n[i], p.data[k:l])
def grad(self, inputs, outputs_gradients):
comment_n = "No gradient exists for the number of samples in class\
Multinomial of theano/sparse/sandbox/sp2.py"
comment_p = "No gradient exists for the prob of success in class\
Multinomial of theano/sparse/sandbox/sp2.py"
return [
theano.gradient.grad_undefined(
op=self, x_pos=0, x=inputs[0], comment=comment_n
),
theano.gradient.grad_undefined(
op=self, x_pos=1, x=inputs[1], comment=comment_p
),
]
def infer_shape(self, fgraph, node, ins_shapes):
return [ins_shapes[1]]
multinomial = Multinomial()
| true
| true
|
f701ccda92c2322adfba38a2ac93489f41755a14
| 400
|
py
|
Python
|
getDoc/wsgi.py
|
srijannnd/GetDoc-API
|
33ba87f0941f7a8e4b83a4fa85ec83a52b57e3d0
|
[
"MIT"
] | null | null | null |
getDoc/wsgi.py
|
srijannnd/GetDoc-API
|
33ba87f0941f7a8e4b83a4fa85ec83a52b57e3d0
|
[
"MIT"
] | null | null | null |
getDoc/wsgi.py
|
srijannnd/GetDoc-API
|
33ba87f0941f7a8e4b83a4fa85ec83a52b57e3d0
|
[
"MIT"
] | null | null | null |
"""
WSGI config for getDoc project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'getDoc.settings.production')
application = get_wsgi_application()
| 23.529412
| 78
| 0.7875
|
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'getDoc.settings.production')
application = get_wsgi_application()
| true
| true
|
f701cdf08acc2234f9c12d8d349622d596e0778f
| 526
|
py
|
Python
|
d2/poker/main.py
|
blurgyy/summer2019
|
62e5530c74fb77721768f984b3cfbd67c28595ba
|
[
"Apache-2.0"
] | 3
|
2019-09-06T01:13:56.000Z
|
2020-03-18T03:10:53.000Z
|
d2/poker/main.py
|
blurgyy/summer2019
|
62e5530c74fb77721768f984b3cfbd67c28595ba
|
[
"Apache-2.0"
] | null | null | null |
d2/poker/main.py
|
blurgyy/summer2019
|
62e5530c74fb77721768f984b3cfbd67c28595ba
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
import deck
while(True):
p = deck.Deck()
p.shuffle()
pai = p.deal(5)
# assert len(pai) == 5, "??????"
del p
pai.sort(key=lambda x:x.figure)
x = True
for i in range(1, len(pai)):
if(pai[i].suit == pai[i-1].suit and (pai[i].figure == pai[i-1].figure + 1 or pai[i].figure == 10 and pai[i-1].figure == 1)):
continue
else:
x = False
break
if(x == True):
for i in pai:
print(i,end="\t")
print()
| 21.916667
| 132
| 0.471483
|
import deck
while(True):
p = deck.Deck()
p.shuffle()
pai = p.deal(5)
del p
pai.sort(key=lambda x:x.figure)
x = True
for i in range(1, len(pai)):
if(pai[i].suit == pai[i-1].suit and (pai[i].figure == pai[i-1].figure + 1 or pai[i].figure == 10 and pai[i-1].figure == 1)):
continue
else:
x = False
break
if(x == True):
for i in pai:
print(i,end="\t")
print()
| true
| true
|
f701ce4be73d3f3af3e6256b3bb71ac213ac3103
| 4,318
|
py
|
Python
|
eliot/dask.py
|
chenl/eliot
|
8469e98aee19b3bd210515487ca48d9ec97aac6d
|
[
"Apache-2.0"
] | null | null | null |
eliot/dask.py
|
chenl/eliot
|
8469e98aee19b3bd210515487ca48d9ec97aac6d
|
[
"Apache-2.0"
] | null | null | null |
eliot/dask.py
|
chenl/eliot
|
8469e98aee19b3bd210515487ca48d9ec97aac6d
|
[
"Apache-2.0"
] | null | null | null |
"""Support for Eliot tracing with Dask computations."""
from pyrsistent import PClass, field
from dask import compute, optimize
from dask.core import toposort, get_dependencies
from . import start_action, current_action, Action, Message
class _RunWithEliotContext(PClass):
"""
Run a callable within an Eliot context.
@ivar task_id: The serialized Eliot task ID.
@ivar func: The function that Dask wants to run.
@ivar key: The key in the Dask graph.
@ivar dependencies: The keys in the Dask graph this depends on.
"""
task_id = field(type=str)
func = field() # callable
key = field(type=str)
dependencies = field()
# Pretend to be underlying callable for purposes of equality; necessary for
# optimizer to be happy:
def __eq__(self, other):
return self.func == other
def __ne__(self, other):
return self.func != other
def __hash__(self):
return hash(self.func)
def __call__(self, *args, **kwargs):
with Action.continue_task(task_id=self.task_id):
Message.log(
message_type="dask:task",
key=self.key,
dependencies=self.dependencies
)
return self.func(*args, **kwargs)
def compute_with_trace(*args):
"""Do Dask compute(), but with added Eliot tracing.
Dask is a graph of tasks, but Eliot logs trees. So we need to emulate a
    graph using a tree. We do this by making an Eliot action for each task, but
having it list the tasks it depends on.
We use the following algorithm:
1. Create a top-level action.
2. For each entry in the dask graph, create a child with
serialize_task_id. Do this in likely order of execution, so that
       if B depends on A the task level of B is higher than the task level
of A.
3. Replace each function with a wrapper that uses the corresponding
task ID (with Action.continue_task), and while it's at it also
records which other things this function depends on.
Known issues:
1. Retries will confuse Eliot. Probably need different
distributed-tree mechanism within Eliot to solve that.
"""
# 1. Create top-level Eliot Action:
with start_action(action_type="dask:compute"):
# In order to reduce logging verbosity, add logging to the already
# optimized graph:
optimized = optimize(*args, optimizations=[_add_logging])
return compute(*optimized, optimize_graph=False)
def _add_logging(dsk, ignore=None):
"""
Add logging to a Dask graph.
@param dsk: The Dask graph.
@return: New Dask graph.
"""
ctx = current_action()
result = {}
# Use topological sort to ensure Eliot actions are in logical order of
# execution in Dask:
keys = toposort(dsk)
# Give each key a string name. Some keys are just aliases to other
# keys, so make sure we have underlying key available. Later on might
# want to shorten them as well.
def simplify(k):
if isinstance(k, str):
return k
return "-".join(str(o) for o in k)
key_names = {}
for key in keys:
value = dsk[key]
if not callable(value) and value in keys:
# It's an alias for another key:
key_names[key] = key_names[value]
else:
key_names[key] = simplify(key)
# 2. Create Eliot child Actions for each key, in topological order:
key_to_action_id = {
key: str(ctx.serialize_task_id(), "utf-8")
for key in keys
}
# 3. Replace function with wrapper that logs appropriate Action:
for key in keys:
func = dsk[key][0]
args = dsk[key][1:]
if not callable(func):
# This key is just an alias for another key, no need to add
# logging:
result[key] = dsk[key]
continue
wrapped_func = _RunWithEliotContext(
task_id=key_to_action_id[key],
func=func,
key=key_names[key],
dependencies=[key_names[k] for k in get_dependencies(dsk, key)],
)
result[key] = (wrapped_func, ) + tuple(args)
assert result.keys() == dsk.keys()
return result
__all__ = ["compute_with_trace"]
| 31.064748
| 79
| 0.629458
|
from pyrsistent import PClass, field
from dask import compute, optimize
from dask.core import toposort, get_dependencies
from . import start_action, current_action, Action, Message
class _RunWithEliotContext(PClass):
task_id = field(type=str)
    func = field()
    key = field(type=str)
dependencies = field()
def __eq__(self, other):
return self.func == other
def __ne__(self, other):
return self.func != other
def __hash__(self):
return hash(self.func)
def __call__(self, *args, **kwargs):
with Action.continue_task(task_id=self.task_id):
Message.log(
message_type="dask:task",
key=self.key,
dependencies=self.dependencies
)
return self.func(*args, **kwargs)
def compute_with_trace(*args):
with start_action(action_type="dask:compute"):
optimized = optimize(*args, optimizations=[_add_logging])
return compute(*optimized, optimize_graph=False)
def _add_logging(dsk, ignore=None):
ctx = current_action()
result = {}
keys = toposort(dsk)
def simplify(k):
if isinstance(k, str):
return k
return "-".join(str(o) for o in k)
key_names = {}
for key in keys:
value = dsk[key]
if not callable(value) and value in keys:
key_names[key] = key_names[value]
else:
key_names[key] = simplify(key)
# 2. Create Eliot child Actions for each key, in topological order:
key_to_action_id = {
key: str(ctx.serialize_task_id(), "utf-8")
for key in keys
}
# 3. Replace function with wrapper that logs appropriate Action:
for key in keys:
func = dsk[key][0]
args = dsk[key][1:]
if not callable(func):
# This key is just an alias for another key, no need to add
# logging:
result[key] = dsk[key]
continue
wrapped_func = _RunWithEliotContext(
task_id=key_to_action_id[key],
func=func,
key=key_names[key],
dependencies=[key_names[k] for k in get_dependencies(dsk, key)],
)
result[key] = (wrapped_func, ) + tuple(args)
assert result.keys() == dsk.keys()
return result
__all__ = ["compute_with_trace"]
| true
| true
|
f701cf88425ad59561c2f13828995c8ac4ce256c
| 4,582
|
py
|
Python
|
earo/event.py
|
meso5533/Laky-Earo
|
67fa4f2b45596fc3dcfe1b6a54e12202e62cd860
|
[
"Apache-2.0"
] | 1
|
2021-12-22T14:21:42.000Z
|
2021-12-22T14:21:42.000Z
|
earo/event.py
|
Everley1993/Laky-Earo
|
67fa4f2b45596fc3dcfe1b6a54e12202e62cd860
|
[
"Apache-2.0"
] | null | null | null |
earo/event.py
|
Everley1993/Laky-Earo
|
67fa4f2b45596fc3dcfe1b6a54e12202e62cd860
|
[
"Apache-2.0"
] | 1
|
2021-12-22T14:21:43.000Z
|
2021-12-22T14:21:43.000Z
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Copyright 2016 Everley #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
import copy
class Field(object):
"""
The field object of :class:`Event`.
"""
field_type = None
"""
The class of the field.
"""
default = None
"""
    The default value of the field.
"""
def __init__(self, field_type, default=None):
self.field_type = field_type
self.default = default
self.match(default)
def match(self, value):
"""
        Raise a :class:`TypeError` if `value` is not an instance of `self.field_type`.
:param value: The value to match.
"""
if value is not None and not isinstance(value, self.field_type):
raise TypeError('expect %s, not %s' %
(self.field_type, type(value)))
class EventMetaClass(type):
"""
    The metaclass that collects :class:`Field` attributes when creating new :class:`Event` classes.
"""
def __new__(cls, name, bases, attrs):
fields = []
mappings = {}
params = {}
new_attrs = {}
for k, v in attrs.items():
if isinstance(v, Field):
fields.append(v)
mappings[k] = v
params[k] = v.default
else:
new_attrs[k] = v
new_attrs['__fields__'] = fields
new_attrs['__mappings__'] = mappings
new_attrs['__params__'] = params
new_attrs['__actual_params__'] = None
new_attrs['__tag__'] = attrs['__tag__'] \
if '__tag__' in attrs else ''
new_attrs['__description__'] = attrs['__description__'] \
if '__description__' in attrs else ''
return super(EventMetaClass, cls).__new__(cls, name, bases, new_attrs)
class Event(object):
"""
    The base class for specific events.
"""
__metaclass__ = EventMetaClass
def __init__(self, **kwargs):
self.__actual_params__ = copy.deepcopy(self.__params__)
for k, v in kwargs.iteritems():
self.__setattr__(k, v)
def __getattr__(self, key):
if key in self.__actual_params__:
return self.__actual_params__[key]
else:
raise AttributeError(
"%s has no param `%s`" %
(type(self), key))
def __setattr__(self, key, value):
if key in ['__actual_params__']:
return super(Event, self).__setattr__(key, value)
if key in self.__actual_params__:
self.__mappings__[key].match(value)
self.__actual_params__[key] = value
else:
raise AttributeError(
"%s has no param `%s`" %
(type(self), key))
@property
def params(self):
"""
A `dict` which is a deep copy of the event's params.
"""
return copy.deepcopy(self.__actual_params__)
@classmethod
def tag(cls):
"""
The tag of the event.
"""
return cls.__tag__
@classmethod
def description(cls):
"""
The description of the event.
"""
return cls.__description__
@classmethod
def key(cls):
"""
A unique string for the event.
"""
return '%s.%s' % (cls.__module__, cls.__name__)
@property
def no_field(self):
"""
        Return True if the event doesn't have any field.
"""
return len(self.__params__) == 0
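# A minimal usage sketch (illustrative only; `UserRegistered` and its fields
# are made-up examples). Note that the module targets Python 2 (__metaclass__,
# iteritems), so the sketch assumes Python 2 as well.
if __name__ == '__main__':
    class UserRegistered(Event):
        __tag__ = 'user'
        __description__ = 'Fired when a new user registers.'
        username = Field(str, default='')
        age = Field(int, default=0)
    event = UserRegistered(username='alice', age=30)
    print(event.username)            # alice
    print(event.params)              # {'username': 'alice', 'age': 30}
    print(UserRegistered.key())      # __main__.UserRegistered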
| 30.751678
| 87
| 0.494326
|
import copy
class Field(object):
field_type = None
default = None
def __init__(self, field_type, default=None):
self.field_type = field_type
self.default = default
self.match(default)
def match(self, value):
if value is not None and not isinstance(value, self.field_type):
raise TypeError('expect %s, not %s' %
(self.field_type, type(value)))
class EventMetaClass(type):
def __new__(cls, name, bases, attrs):
fields = []
mappings = {}
params = {}
new_attrs = {}
for k, v in attrs.items():
if isinstance(v, Field):
fields.append(v)
mappings[k] = v
params[k] = v.default
else:
new_attrs[k] = v
new_attrs['__fields__'] = fields
new_attrs['__mappings__'] = mappings
new_attrs['__params__'] = params
new_attrs['__actual_params__'] = None
new_attrs['__tag__'] = attrs['__tag__'] \
if '__tag__' in attrs else ''
new_attrs['__description__'] = attrs['__description__'] \
if '__description__' in attrs else ''
return super(EventMetaClass, cls).__new__(cls, name, bases, new_attrs)
class Event(object):
__metaclass__ = EventMetaClass
def __init__(self, **kwargs):
self.__actual_params__ = copy.deepcopy(self.__params__)
for k, v in kwargs.iteritems():
self.__setattr__(k, v)
def __getattr__(self, key):
if key in self.__actual_params__:
return self.__actual_params__[key]
else:
raise AttributeError(
"%s has no param `%s`" %
(type(self), key))
def __setattr__(self, key, value):
if key in ['__actual_params__']:
return super(Event, self).__setattr__(key, value)
if key in self.__actual_params__:
self.__mappings__[key].match(value)
self.__actual_params__[key] = value
else:
raise AttributeError(
"%s has no param `%s`" %
(type(self), key))
@property
def params(self):
return copy.deepcopy(self.__actual_params__)
@classmethod
def tag(cls):
return cls.__tag__
@classmethod
def description(cls):
return cls.__description__
@classmethod
def key(cls):
return '%s.%s' % (cls.__module__, cls.__name__)
@property
def no_field(self):
return len(self.__params__) == 0
| true
| true
|
f701cfcda0e7aeba57d87044b5234012ec3d46b3
| 4,184
|
py
|
Python
|
dataloaders/combine_dbs.py
|
ZurMaD/DeepGrabCut-PyTorch
|
13d9e81e6e438ad3394fb3a78aca26c2cc63c825
|
[
"MIT"
] | 244
|
2018-06-16T07:52:56.000Z
|
2022-03-12T21:45:30.000Z
|
dataloaders/combine_dbs.py
|
ZurMaD/DeepGrabCut-PyTorch
|
13d9e81e6e438ad3394fb3a78aca26c2cc63c825
|
[
"MIT"
] | 7
|
2018-08-21T13:08:06.000Z
|
2021-08-25T04:04:59.000Z
|
dataloaders/combine_dbs.py
|
ZurMaD/DeepGrabCut-PyTorch
|
13d9e81e6e438ad3394fb3a78aca26c2cc63c825
|
[
"MIT"
] | 60
|
2018-07-26T15:47:10.000Z
|
2022-01-02T13:59:56.000Z
|
import torch.utils.data as data
class CombineDBs(data.Dataset):
def __init__(self, dataloaders, excluded=None):
self.dataloaders = dataloaders
self.excluded = excluded
self.im_ids = []
# Combine object lists
for dl in dataloaders:
for elem in dl.im_ids:
if elem not in self.im_ids:
self.im_ids.append(elem)
# Exclude
if excluded:
for dl in excluded:
for elem in dl.im_ids:
if elem in self.im_ids:
self.im_ids.remove(elem)
# Get object pointers
self.obj_list = []
self.im_list = []
new_im_ids = []
obj_counter = 0
num_images = 0
for ii, dl in enumerate(dataloaders):
for jj, curr_im_id in enumerate(dl.im_ids):
if (curr_im_id in self.im_ids) and (curr_im_id not in new_im_ids):
flag = False
new_im_ids.append(curr_im_id)
for kk in range(len(dl.obj_dict[curr_im_id])):
if dl.obj_dict[curr_im_id][kk] != -1:
self.obj_list.append({'db_ii': ii, 'obj_ii': dl.obj_list.index([jj, kk])})
flag = True
obj_counter += 1
self.im_list.append({'db_ii': ii, 'im_ii': jj})
if flag:
num_images += 1
self.im_ids = new_im_ids
print('Combined number of images: {:d}\nCombined number of objects: {:d}'.format(num_images, len(self.obj_list)))
def __getitem__(self, index):
_db_ii = self.obj_list[index]["db_ii"]
_obj_ii = self.obj_list[index]['obj_ii']
sample = self.dataloaders[_db_ii].__getitem__(_obj_ii)
if 'meta' in sample.keys():
sample['meta']['db'] = str(self.dataloaders[_db_ii])
return sample
def __len__(self):
return len(self.obj_list)
def __str__(self):
include_db = [str(db) for db in self.dataloaders]
exclude_db = [str(db) for db in self.excluded]
return 'Included datasets:'+str(include_db)+'\n'+'Excluded datasets:'+str(exclude_db)
if __name__ == "__main__":
import matplotlib.pyplot as plt
from dataloaders import pascal
from dataloaders import sbd
import torch
import numpy as np
import dataset.custom_transforms as tr
from torchvision import transforms
composed_transforms_tr = transforms.Compose([
tr.RandomHorizontalFlip(),
tr.ScaleNRotate(rots=(-15, 15), scales=(.75, 1.25)),
tr.FixedResize(resolutions={'image': (450, 450), 'gt': (450, 450)}),
tr.DistanceMap(v=0.15, elem='gt'),
tr.ConcatInputs(elems=('image', 'distance_map')),
tr.ToTensor()])
composed_transforms_ts = transforms.Compose([
tr.FixedResize(resolutions={'image': (450, 450), 'gt': (450, 450)}),
tr.DistanceMap(v=0.15, elem='gt'),
tr.ConcatInputs(elems=('image', 'distance_map')),
tr.ToTensor()])
pascal_voc_val = pascal.VOCSegmentation(split='val', transform=composed_transforms_ts, retname=True)
sbd = sbd.SBDSegmentation(split=['train', 'val'], transform=composed_transforms_tr, retname=True)
pascal_voc_train = pascal.VOCSegmentation(split='train', transform=composed_transforms_tr, retname=True)
dataset = CombineDBs([pascal_voc_train, sbd], excluded=[pascal_voc_val])
dataloader = torch.utils.data.DataLoader(dataset, batch_size=2, shuffle=True, num_workers=0)
for ii, sample in enumerate(dataloader):
for jj in range(sample["image"].size()[0]):
dismap = sample['distance_map'][jj].numpy()
gt = sample['gt'][jj].numpy()
gt[gt > 0] = 255
gt = np.array(gt[0]).astype(np.uint8)
dismap = np.array(dismap[0]).astype(np.uint8)
display = 0.9 * gt + 0.4 * dismap
display = display.astype(np.uint8)
plt.figure()
plt.title('display')
plt.imshow(display, cmap='gray')
if ii == 1:
break
plt.show(block=True)
| 37.693694
| 121
| 0.580784
|
import torch.utils.data as data
class CombineDBs(data.Dataset):
def __init__(self, dataloaders, excluded=None):
self.dataloaders = dataloaders
self.excluded = excluded
self.im_ids = []
for dl in dataloaders:
for elem in dl.im_ids:
if elem not in self.im_ids:
self.im_ids.append(elem)
if excluded:
for dl in excluded:
for elem in dl.im_ids:
if elem in self.im_ids:
self.im_ids.remove(elem)
self.obj_list = []
self.im_list = []
new_im_ids = []
obj_counter = 0
num_images = 0
for ii, dl in enumerate(dataloaders):
for jj, curr_im_id in enumerate(dl.im_ids):
if (curr_im_id in self.im_ids) and (curr_im_id not in new_im_ids):
flag = False
new_im_ids.append(curr_im_id)
for kk in range(len(dl.obj_dict[curr_im_id])):
if dl.obj_dict[curr_im_id][kk] != -1:
self.obj_list.append({'db_ii': ii, 'obj_ii': dl.obj_list.index([jj, kk])})
flag = True
obj_counter += 1
self.im_list.append({'db_ii': ii, 'im_ii': jj})
if flag:
num_images += 1
self.im_ids = new_im_ids
print('Combined number of images: {:d}\nCombined number of objects: {:d}'.format(num_images, len(self.obj_list)))
def __getitem__(self, index):
_db_ii = self.obj_list[index]["db_ii"]
_obj_ii = self.obj_list[index]['obj_ii']
sample = self.dataloaders[_db_ii].__getitem__(_obj_ii)
if 'meta' in sample.keys():
sample['meta']['db'] = str(self.dataloaders[_db_ii])
return sample
def __len__(self):
return len(self.obj_list)
def __str__(self):
include_db = [str(db) for db in self.dataloaders]
exclude_db = [str(db) for db in self.excluded]
return 'Included datasets:'+str(include_db)+'\n'+'Excluded datasets:'+str(exclude_db)
if __name__ == "__main__":
import matplotlib.pyplot as plt
from dataloaders import pascal
from dataloaders import sbd
import torch
import numpy as np
import dataset.custom_transforms as tr
from torchvision import transforms
composed_transforms_tr = transforms.Compose([
tr.RandomHorizontalFlip(),
tr.ScaleNRotate(rots=(-15, 15), scales=(.75, 1.25)),
tr.FixedResize(resolutions={'image': (450, 450), 'gt': (450, 450)}),
tr.DistanceMap(v=0.15, elem='gt'),
tr.ConcatInputs(elems=('image', 'distance_map')),
tr.ToTensor()])
composed_transforms_ts = transforms.Compose([
tr.FixedResize(resolutions={'image': (450, 450), 'gt': (450, 450)}),
tr.DistanceMap(v=0.15, elem='gt'),
tr.ConcatInputs(elems=('image', 'distance_map')),
tr.ToTensor()])
pascal_voc_val = pascal.VOCSegmentation(split='val', transform=composed_transforms_ts, retname=True)
sbd = sbd.SBDSegmentation(split=['train', 'val'], transform=composed_transforms_tr, retname=True)
pascal_voc_train = pascal.VOCSegmentation(split='train', transform=composed_transforms_tr, retname=True)
dataset = CombineDBs([pascal_voc_train, sbd], excluded=[pascal_voc_val])
dataloader = torch.utils.data.DataLoader(dataset, batch_size=2, shuffle=True, num_workers=0)
for ii, sample in enumerate(dataloader):
for jj in range(sample["image"].size()[0]):
dismap = sample['distance_map'][jj].numpy()
gt = sample['gt'][jj].numpy()
gt[gt > 0] = 255
gt = np.array(gt[0]).astype(np.uint8)
dismap = np.array(dismap[0]).astype(np.uint8)
display = 0.9 * gt + 0.4 * dismap
display = display.astype(np.uint8)
plt.figure()
plt.title('display')
plt.imshow(display, cmap='gray')
if ii == 1:
break
plt.show(block=True)
| true
| true
|
f701d212f8744c7d1aa2560a5c9e1bb4b4483b75
| 3,913
|
py
|
Python
|
teabot-slack/teabot.py
|
Emelieh21/brand-new-kettle-hack
|
36a28b4953801c321ba7d11811bc22cbfe7c9d62
|
[
"Unlicense"
] | 2
|
2017-04-03T22:57:15.000Z
|
2017-04-03T22:59:57.000Z
|
teabot-slack/teabot.py
|
Emelieh21/brand-new-kettle-hack
|
36a28b4953801c321ba7d11811bc22cbfe7c9d62
|
[
"Unlicense"
] | null | null | null |
teabot-slack/teabot.py
|
Emelieh21/brand-new-kettle-hack
|
36a28b4953801c321ba7d11811bc22cbfe7c9d62
|
[
"Unlicense"
] | null | null | null |
import os
import time
from slackclient import SlackClient
import requests
import json
# starterbot's ID (hardcoded here rather than read from an environment variable)
BOT_ID = "<YOUR_BOT_ID>"
# constants
AT_BOT = "<@" + BOT_ID + ">"
MAKE_TEA_COMMAND = "make tea"
STOP_BOILING_COMMAND = "stop boiling"
# instantiate Slack & Twilio clients
slack_client = SlackClient('<YOUR_SLACK_API_TOKEN>')
headers = {'content-type': 'application/json', 'Authorization': '<YOUR_RELAYR_TOKEN>', 'Cache-Control':'no-cache'}
def handle_command(command, channel):
"""
Receives commands directed at the bot and determines if they
are valid commands. If so, then acts on the commands. If not,
returns back what it needs for clarification.
"""
response = "Not sure what you mean. Use the *" + MAKE_TEA_COMMAND + \
"* command with numbers, delimited by spaces."
if command.startswith(MAKE_TEA_COMMAND):
data = {'meaning': 'kettle', 'value': 'true'}
r = requests.post('https://api.relayr.io/devices/<KETTLE_DEVICE_ID>/data', data=json.dumps(data), headers=headers)
response = "Sure... Your water is boiling now!"
if command.startswith(STOP_BOILING_COMMAND):
data = {'meaning': 'kettle', 'value': 'false'}
r = requests.post('https://api.relayr.io/devices/<KETTLE_DEVICE_ID>/data', data=json.dumps(data), headers=headers)
response = "OK - I stopped the kettle!"
if command.startswith("is the kettle boiling?"):
r = requests.get('https://api.relayr.io/devices/<KETTLE_DEVICE_ID>/readings', headers=headers)
resp = json.loads(r.text)
try:
if resp['readings'][0]['value'] == "true":
response = "Yes, the kettle is currently boiling."
if resp['readings'][0]['value'] == "false":
response = "No, the kettle is currently off."
except:
response = "Unfortunately.. I don't know :("
# # Optional: check if the water is hot - only if you have a temperature sensor connected!
# # uncomment the lines below if you want to add this function
# if command.startswith("is the water hot?"):
# r = requests.get('https://api.relayr.io/devices/<KETTLE_TEMPERATURE_DEVICE_ID>/readings', headers=headers)
# resp = json.loads(r.text)
# try:
# if float(resp['readings'][0]['value']) < 25:
# response = "The water is currently cold. You can say \"make tea\" to me and I will heat it up."
# if 25 <= float(resp['readings'][0]['value']) <= 45:
# response = "The water is still quite warm, I can reheat it for you. You can ask me \"make tea\"."
# if float(resp['readings'][0]['value']) > 45:
# response = "The water is still hot. Probably it just boiled."
# except:
# response = "Unfortunately.. I don't know :("
slack_client.api_call("chat.postMessage", channel=channel,
text=response, as_user=True)
def parse_slack_output(slack_rtm_output):
"""
The Slack Real Time Messaging API is an events firehose.
this parsing function returns None unless a message is
directed at the Bot, based on its ID.
"""
output_list = slack_rtm_output
if output_list and len(output_list) > 0:
for output in output_list:
if output and 'text' in output and AT_BOT in output['text']:
# return text after the @ mention, whitespace removed
return output['text'].split(AT_BOT)[1].strip().lower(), \
output['channel']
return None, None
if __name__ == "__main__":
READ_WEBSOCKET_DELAY = 1 # 1 second delay between reading from firehose
if slack_client.rtm_connect():
print("StarterBot connected and running!")
while True:
command, channel = parse_slack_output(slack_client.rtm_read())
if command and channel:
handle_command(command, channel)
time.sleep(READ_WEBSOCKET_DELAY)
else:
print("Connection failed. Invalid Slack token or bot ID?")
| 42.532609
| 116
| 0.661641
|
import os
import time
from slackclient import SlackClient
import requests
import json
BOT_ID = "<YOUR_BOT_ID>"
# constants
AT_BOT = "<@" + BOT_ID + ">"
MAKE_TEA_COMMAND = "make tea"
STOP_BOILING_COMMAND = "stop boiling"
# instantiate Slack & Twilio clients
slack_client = SlackClient('<YOUR_SLACK_API_TOKEN>')
headers = {'content-type': 'application/json', 'Authorization': '<YOUR_RELAYR_TOKEN>', 'Cache-Control':'no-cache'}
def handle_command(command, channel):
"""
Receives commands directed at the bot and determines if they
are valid commands. If so, then acts on the commands. If not,
returns back what it needs for clarification.
"""
response = "Not sure what you mean. Use the *" + MAKE_TEA_COMMAND + \
"* command with numbers, delimited by spaces."
if command.startswith(MAKE_TEA_COMMAND):
data = {'meaning': 'kettle', 'value': 'true'}
r = requests.post('https://api.relayr.io/devices/<KETTLE_DEVICE_ID>/data', data=json.dumps(data), headers=headers)
response = "Sure... Your water is boiling now!"
if command.startswith(STOP_BOILING_COMMAND):
data = {'meaning': 'kettle', 'value': 'false'}
r = requests.post('https://api.relayr.io/devices/<KETTLE_DEVICE_ID>/data', data=json.dumps(data), headers=headers)
response = "OK - I stopped the kettle!"
if command.startswith("is the kettle boiling?"):
r = requests.get('https://api.relayr.io/devices/<KETTLE_DEVICE_ID>/readings', headers=headers)
resp = json.loads(r.text)
try:
if resp['readings'][0]['value'] == "true":
response = "Yes, the kettle is currently boiling."
if resp['readings'][0]['value'] == "false":
response = "No, the kettle is currently off."
except:
response = "Unfortunately.. I don't know :("
slack_client.api_call("chat.postMessage", channel=channel,
text=response, as_user=True)
def parse_slack_output(slack_rtm_output):
"""
The Slack Real Time Messaging API is an events firehose.
this parsing function returns None unless a message is
directed at the Bot, based on its ID.
"""
output_list = slack_rtm_output
if output_list and len(output_list) > 0:
for output in output_list:
if output and 'text' in output and AT_BOT in output['text']:
# return text after the @ mention, whitespace removed
return output['text'].split(AT_BOT)[1].strip().lower(), \
output['channel']
return None, None
if __name__ == "__main__":
READ_WEBSOCKET_DELAY = 1 # 1 second delay between reading from firehose
if slack_client.rtm_connect():
print("StarterBot connected and running!")
while True:
command, channel = parse_slack_output(slack_client.rtm_read())
if command and channel:
handle_command(command, channel)
time.sleep(READ_WEBSOCKET_DELAY)
else:
print("Connection failed. Invalid Slack token or bot ID?")
| false
| true
|
f701d45f0c4e24fdc3dba6acf4d40a703353740b
| 7,254
|
py
|
Python
|
autoload/vital/__vim_gista__/Web/API/github.py
|
lambdalisue/vim-gista
|
c046de1b9d4cf97ff6f80cf3bdbb75b1094f2aaf
|
[
"MIT"
] | 158
|
2015-01-07T14:32:12.000Z
|
2022-03-23T04:17:40.000Z
|
vim/plugins/vim-gista/autoload/vital/__vim_gista__/Web/API/github.py
|
Raymond-yn/dotfiles
|
b1745ff62f4285785877a2c04d93ce8fa2775964
|
[
"MIT"
] | 41
|
2015-01-29T13:50:39.000Z
|
2021-02-22T14:11:15.000Z
|
autoload/vital/_github_auth/Web/API/github.py
|
momo-lab/github_auth.vim
|
8e2e7b61d705368699d50ddff8150bea8dc166c9
|
[
"MIT"
] | 10
|
2015-01-29T12:57:43.000Z
|
2021-06-20T09:39:31.000Z
|
try:
import vim
except ImportError:
raise ImportError(
'"vim" is not available. This module require to be loaded from Vim.'
)
#
# NOTE
# Vim uses a global namespace for python/python3, so define a uniquely named
# function and write the code inside of it to prevent conflicts.
#
def _vim_vital_web_api_github_main():
"""A namespace function for Vital.Web.API.GitHub"""
import re
import sys
import ssl
import collections
from itertools import chain
from threading import Lock, Thread
try:
import json
except ImportError:
import simplejson as json
try:
from urllib.request import urlopen, Request
from urllib.parse import (urlparse, parse_qs, urlencode, urlunparse)
except ImportError:
from urllib2 import urlopen, Request
from urllib import urlencode
from urlparse import (urlparse, parse_qs, urlunparse)
DEFAULT_INDICATOR = (
'Requesting entries and converting into '
'JSON %%(page)d/%(page_count)d ...'
)
def format_exception():
exc_type, exc_obj, tb = sys.exc_info()
f = tb.tb_frame
lineno = tb.tb_lineno
filename = f.f_code.co_filename
return "%s: %s at %s:%d" % (
exc_obj.__class__.__name__,
exc_obj, filename, lineno,
)
def to_vim(obj):
if obj is None:
return ''
elif isinstance(obj, bool):
return int(obj)
elif isinstance(obj, dict):
return dict([to_vim(k), to_vim(v)] for k, v in obj.items())
elif isinstance(obj, (list, tuple)):
return list(to_vim(v) for v in obj)
return obj
def build_headers(token):
return {'Authorization': 'token %s' % token} if token else {}
def build_url(url, **kwargs):
scheme, netloc, path, params, query, fragment = urlparse(url)
p = parse_qs(query)
p.update(kwargs)
return urlunparse([
scheme, netloc, path, params,
urlencode(p, doseq=True), fragment
])
def request(url, headers={}, method=None):
if method:
if sys.version_info.major >= 3:
req = Request(url, headers=headers, method=method)
else:
req = Request(url, headers=headers)
req.get_method = lambda: method
else:
req = Request(url, headers=headers)
context = ssl._create_unverified_context()
res = urlopen(req, context=context)
if not hasattr(res, 'getheader'):
# urllib2 does not have getheader
res.getheader = lambda name, self=res: self.info().getheader(name)
return res
def request_head(url, name, headers={}):
res = request(url, headers=headers, method='HEAD')
return res.getheader(name)
def request_json(url, headers={}, **kwargs):
url = build_url(url, **kwargs)
res = request(url, headers=headers)
obj = json.loads(res.read().decode('utf-8'))
return to_vim(obj)
def _request_entries(lock, queue, entries_per_pages, url,
headers, callback=None):
try:
while True:
page, indicator = queue.popleft()
entries = request_json(url, headers=headers, page=page)
entries_per_pages.append([page, entries])
if callback:
message = indicator % {'page': len(entries_per_pages)}
if hasattr(vim, 'async_call'):
with lock:
vim.async_call(callback, message)
else:
with lock:
callback(message)
except IndexError:
pass
except Exception as e:
# clear queue to stop other threads
queue.clear()
entries_per_pages.append(e)
def request_entries(url, token,
indicator=DEFAULT_INDICATOR,
page_start=1, page_end=0,
nprocess=20, callback=None, **kwargs):
        # the following might be str when specified from Vim.
page_start = int(page_start)
page_end = int(page_end)
nprocess = int(nprocess)
url = build_url(url, **kwargs)
headers = build_headers(token)
lock = Lock()
queue = collections.deque()
entries_per_pages = collections.deque()
# figure out the number of pages from HEAD request
if page_end == 0:
if callback:
callback('Requesting the total number of pages ...')
response_link = request_head(url, 'link', headers=headers)
if response_link:
m = re.search(
'<.*?[?&]page=(\d+)[^>]*>; rel="last"', response_link
)
page_end = int(m.group(1)) if m else 1
else:
page_end = 1
# prepare task queue
for page in range(page_start, page_end + 1):
queue.append([page, indicator % {
'url': url,
'page_count': page_end - page_start + 1
}])
# start workers
kwargs = dict(
target=_request_entries,
args=(lock, queue, entries_per_pages, url, headers, callback),
)
workers = [Thread(**kwargs) for n in range(nprocess)]
for worker in workers:
worker.start()
for worker in workers:
worker.join()
# check if sub-thread throw exceptions or not
exceptions = list(
filter(lambda x: not isinstance(x, list), entries_per_pages)
)
if len(exceptions):
raise exceptions[0]
# merge and flatten entries
return list(chain.from_iterable(map(
lambda x: x[1], sorted(entries_per_pages, key=lambda x: x[0])
)))
def echo_status_vim(indicator):
vim.command('redraw | echo "%s"' % indicator)
if sys.version_info < (3, 0, 0):
def ensure_unicode(s, encoding):
if isinstance(s, unicode):
return s
else:
return s.decode(encoding)
else:
def ensure_unicode(s, encoding):
if not isinstance(s, bytes):
return s
else:
return s.decode(encoding)
# Execute a main code
namespace = {}
try:
# Override 'request' with 'pseudo_requst' if exists
try:
request = _vim_vital_web_api_github_test_pseudo_request
except NameError:
pass
encoding = vim.eval('&encoding')
kwargs = vim.eval('kwargs')
kwargs = { ensure_unicode(k, encoding): ensure_unicode(v, encoding)
for k, v in kwargs.items()}
if kwargs.pop('verbose', 1):
kwargs['callback'] = echo_status_vim
entries = request_entries(**kwargs)
namespace['entries'] = entries
except:
namespace['exception'] = format_exception()
return namespace
# Call a namespace function
_vim_vital_web_api_github_response = _vim_vital_web_api_github_main()
| 34.056338
| 78
| 0.560656
|
try:
import vim
except ImportError:
raise ImportError(
'"vim" is not available. This module require to be loaded from Vim.'
)
def _vim_vital_web_api_github_main():
import re
import sys
import ssl
import collections
from itertools import chain
from threading import Lock, Thread
try:
import json
except ImportError:
import simplejson as json
try:
from urllib.request import urlopen, Request
from urllib.parse import (urlparse, parse_qs, urlencode, urlunparse)
except ImportError:
from urllib2 import urlopen, Request
from urllib import urlencode
from urlparse import (urlparse, parse_qs, urlunparse)
DEFAULT_INDICATOR = (
'Requesting entries and converting into '
'JSON %%(page)d/%(page_count)d ...'
)
def format_exception():
exc_type, exc_obj, tb = sys.exc_info()
f = tb.tb_frame
lineno = tb.tb_lineno
filename = f.f_code.co_filename
return "%s: %s at %s:%d" % (
exc_obj.__class__.__name__,
exc_obj, filename, lineno,
)
def to_vim(obj):
if obj is None:
return ''
elif isinstance(obj, bool):
return int(obj)
elif isinstance(obj, dict):
return dict([to_vim(k), to_vim(v)] for k, v in obj.items())
elif isinstance(obj, (list, tuple)):
return list(to_vim(v) for v in obj)
return obj
def build_headers(token):
return {'Authorization': 'token %s' % token} if token else {}
def build_url(url, **kwargs):
scheme, netloc, path, params, query, fragment = urlparse(url)
p = parse_qs(query)
p.update(kwargs)
return urlunparse([
scheme, netloc, path, params,
urlencode(p, doseq=True), fragment
])
def request(url, headers={}, method=None):
if method:
if sys.version_info.major >= 3:
req = Request(url, headers=headers, method=method)
else:
req = Request(url, headers=headers)
req.get_method = lambda: method
else:
req = Request(url, headers=headers)
context = ssl._create_unverified_context()
res = urlopen(req, context=context)
if not hasattr(res, 'getheader'):
res.getheader = lambda name, self=res: self.info().getheader(name)
return res
def request_head(url, name, headers={}):
res = request(url, headers=headers, method='HEAD')
return res.getheader(name)
def request_json(url, headers={}, **kwargs):
url = build_url(url, **kwargs)
res = request(url, headers=headers)
obj = json.loads(res.read().decode('utf-8'))
return to_vim(obj)
def _request_entries(lock, queue, entries_per_pages, url,
headers, callback=None):
try:
while True:
page, indicator = queue.popleft()
entries = request_json(url, headers=headers, page=page)
entries_per_pages.append([page, entries])
if callback:
message = indicator % {'page': len(entries_per_pages)}
if hasattr(vim, 'async_call'):
with lock:
vim.async_call(callback, message)
else:
with lock:
callback(message)
except IndexError:
pass
except Exception as e:
queue.clear()
entries_per_pages.append(e)
def request_entries(url, token,
indicator=DEFAULT_INDICATOR,
page_start=1, page_end=0,
nprocess=20, callback=None, **kwargs):
page_start = int(page_start)
page_end = int(page_end)
nprocess = int(nprocess)
url = build_url(url, **kwargs)
headers = build_headers(token)
lock = Lock()
queue = collections.deque()
entries_per_pages = collections.deque()
if page_end == 0:
if callback:
callback('Requesting the total number of pages ...')
response_link = request_head(url, 'link', headers=headers)
if response_link:
m = re.search(
'<.*?[?&]page=(\d+)[^>]*>; rel="last"', response_link
)
page_end = int(m.group(1)) if m else 1
else:
page_end = 1
for page in range(page_start, page_end + 1):
queue.append([page, indicator % {
'url': url,
'page_count': page_end - page_start + 1
}])
kwargs = dict(
target=_request_entries,
args=(lock, queue, entries_per_pages, url, headers, callback),
)
workers = [Thread(**kwargs) for n in range(nprocess)]
for worker in workers:
worker.start()
for worker in workers:
worker.join()
exceptions = list(
filter(lambda x: not isinstance(x, list), entries_per_pages)
)
if len(exceptions):
raise exceptions[0]
return list(chain.from_iterable(map(
lambda x: x[1], sorted(entries_per_pages, key=lambda x: x[0])
)))
def echo_status_vim(indicator):
vim.command('redraw | echo "%s"' % indicator)
if sys.version_info < (3, 0, 0):
def ensure_unicode(s, encoding):
if isinstance(s, unicode):
return s
else:
return s.decode(encoding)
else:
def ensure_unicode(s, encoding):
if not isinstance(s, bytes):
return s
else:
return s.decode(encoding)
namespace = {}
try:
try:
request = _vim_vital_web_api_github_test_pseudo_request
except NameError:
pass
encoding = vim.eval('&encoding')
kwargs = vim.eval('kwargs')
kwargs = { ensure_unicode(k, encoding): ensure_unicode(v, encoding)
for k, v in kwargs.items()}
if kwargs.pop('verbose', 1):
kwargs['callback'] = echo_status_vim
entries = request_entries(**kwargs)
namespace['entries'] = entries
except:
namespace['exception'] = format_exception()
return namespace
_vim_vital_web_api_github_response = _vim_vital_web_api_github_main()
| true
| true
|
f701d5cc91a7627c4c17ffe3d7d861dcaa962752
| 900
|
py
|
Python
|
scanner.py
|
xaviercho/port_scanner
|
c83891782c4d37524b9bf1face1165c7ae0b6820
|
[
"MIT"
] | null | null | null |
scanner.py
|
xaviercho/port_scanner
|
c83891782c4d37524b9bf1face1165c7ae0b6820
|
[
"MIT"
] | null | null | null |
scanner.py
|
xaviercho/port_scanner
|
c83891782c4d37524b9bf1face1165c7ae0b6820
|
[
"MIT"
] | null | null | null |
#!/bin/python3
import sys
import socket
from datetime import datetime
#Defining our target
if len (sys.argv) == 2:
target=socket.gethostbyname(sys.argv[1]) #translate hostname to IPv4
else:
    print("invalid amount of arguments.")
    print("Syntax: python3 scanner.py <ip>")
    sys.exit()
#add a pretty banner
print("-" * 50)
print("Scanning target " + target)
print("Time started: " +str(datetime.now()))
print("-"*50)
try:
for port in range(50,85):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.setdefaulttimeout(1)
result = s.connect_ex((target,port))# returns an error indicator
if result ==0:
print("Port {} is open".format(port))
s.close()
except KeyboardInterrupt:
print("\n Exiting program...")
sys.exit()
except socket.gaierror:
print("\n Hostname could not be resolved...")
sys.exit()
except socket.error:
print("\n Could not connect to server...")
sys.exit()
| 21.428571
| 69
| 0.703333
|
import sys
import socket
from datetime import datetime
if len (sys.argv) == 2:
    target=socket.gethostbyname(sys.argv[1])
else:
    print("invalid amount of arguments.")
    print("Syntax: python3 scanner.py <ip>")
    sys.exit()
print("-" * 50)
print("Scanning target " + target)
print("Time started: " +str(datetime.now()))
print("-"*50)
try:
for port in range(50,85):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.setdefaulttimeout(1)
        result = s.connect_ex((target,port))
        if result ==0:
print("Port {} is open".format(port))
s.close()
except KeyboardInterrupt:
print("\n Exiting program...")
sys.exit()
except socket.gaierror:
print("\n Hostname could not be resolved...")
sys.exit()
except socket.error:
print("\n Could not connect to server...")
sys.exit()
| true
| true
|
f701d60a5f3be60144083a87cbb8236ccd34114a
| 5,308
|
py
|
Python
|
lib/pwiki/OsAbstract.py
|
ckolumbus/WikidPad.svn
|
8f03c1105a8144c9a82e392ab7f32e263c533775
|
[
"Apache-2.0"
] | 2
|
2019-02-24T08:53:20.000Z
|
2019-09-25T02:11:17.000Z
|
lib/pwiki/OsAbstract.py
|
jho1965us/WikidPad_fork_from_butscher
|
559a16859ab5e77620f017356caa22fe65554192
|
[
"Apache-2.0"
] | null | null | null |
lib/pwiki/OsAbstract.py
|
jho1965us/WikidPad_fork_from_butscher
|
559a16859ab5e77620f017356caa22fe65554192
|
[
"Apache-2.0"
] | null | null | null |
"""
OS abstraction
"""
import os, shutil, os.path, re, traceback
import wx
from . import SystemInfo
from .StringOps import mbcsEnc, urlQuote, pathnameFromUrl, pathEnc
# import WindowsHacks
try:
import WindowsHacks
except:
if SystemInfo.isWindows():
traceback.print_exc()
WindowsHacks = None
try:
import GtkHacks
except:
import ExceptionLogger
ExceptionLogger.logOptionalComponentException(
"Initialize GTK hacks in OsAbstract.py")
GtkHacks = None
# Define startFile
if SystemInfo.isWindows():
if SystemInfo.isWinNT() and SystemInfo.isUnicode() and WindowsHacks:
startFile = WindowsHacks.startFile
else:
def startFile(mainControl, link):
os.startfile(mbcsEnc(link, "replace")[0])
else:
def startFile(mainControl, link):
# We need mainControl only for this version of startFile()
startPath = mainControl.getConfig().get("main", "fileLauncher_path", u"")
if startPath == u"":
wx.LaunchDefaultBrowser(link)
return
if link.startswith("file:"):
link = pathnameFromUrl(link)
os.spawnlp(os.P_NOWAIT, startPath, startPath, link)
# Define copyFile
if SystemInfo.isWinNT() and WindowsHacks:
copyFile = WindowsHacks.copyFile
moveFile = WindowsHacks.moveFile
deleteFile = WindowsHacks.deleteFile
else:
# TODO Mac version
def copyFile(srcPath, dstPath):
"""
Copy file from srcPath to dstPath. dstPath may be overwritten if
existing already. dstPath must point to a file, not a directory.
If some directories in dstPath do not exist, they are created.
This currently just calls shutil.copy2() TODO!
"""
dstDir = os.path.dirname(dstPath)
if not os.path.exists(pathEnc(dstDir)):
os.makedirs(dstDir)
shutil.copy2(srcPath, dstPath)
def moveFile(srcPath, dstPath):
"""
Move file from srcPath to dstPath. dstPath may be overwritten if
existing already. dstPath must point to a file, not a directory.
If some directories in dstPath do not exist, they are created.
"""
dstDir = os.path.dirname(dstPath)
if not os.path.exists(pathEnc(dstDir)):
os.makedirs(dstDir)
shutil.move(srcPath, dstPath)
def deleteFile(path):
"""
Delete file or directory path.
"""
# TODO: Check for directories
# os.rmdir(path) ?
if os.path.isfile(path) or os.path.islink(path):
os.unlink(path)
elif os.path.isdir(path):
os.rmdir(path)
# Define samefile
if SystemInfo.isWindows():
if WindowsHacks:
def samefile(path1, path2):
# Not fully reliable. Does anybody know something better?
if WindowsHacks.getLongPath(path1).lower() == \
WindowsHacks.getLongPath(path2).lower():
return True
return WindowsHacks.getLongPath(os.path.abspath(path1)).lower() == \
WindowsHacks.getLongPath(os.path.abspath(path2)).lower()
else:
def samefile(path1, path2):
return os.path.abspath(path1) == os.path.abspath(path2)
else:
samefile = os.path.samefile
if WindowsHacks:
def normalizePath(path):
return WindowsHacks.getLongPath(os.path.abspath(path)).lower()
else:
def normalizePath(path):
return os.path.normcase(os.path.abspath(path))
# Define checkForOtherInstances
# If defined properly it returns a list of process identifiers of other WikidPad
# processes. This list should be empty if option "Single process per user"
# is selected. If it is not, there is an error.
if WindowsHacks:
checkForOtherInstances = WindowsHacks.checkForOtherInstances
else:
def checkForOtherInstances():
return []
# Define createInterceptCollection, createClipboardInterceptor (may return None)
# Define supportsClipboardInterceptor
# Fallback def.
def supportsClipboardInterceptor():
return False
def createInterceptCollection(interceptors=None):
return None
def createClipboardInterceptor(callingWindow):
return None
if SystemInfo.isWindows():
if WindowsHacks:
def supportsClipboardInterceptor():
return True
def createInterceptCollection(interceptors=None):
return WindowsHacks.WinProcInterceptCollection(interceptors)
def createClipboardInterceptor(callingWindow):
return WindowsHacks.ClipboardCatchIceptor(callingWindow)
else:
if GtkHacks:
def supportsClipboardInterceptor():
return True
def createInterceptCollection(interceptors=None):
return GtkHacks.FakeInterceptCollection(interceptors)
def createClipboardInterceptor(callingWindow):
return GtkHacks.ClipboardCatchFakeIceptor(callingWindow)
if WindowsHacks:
translateAcceleratorByKbLayout = WindowsHacks.translateAcceleratorByKbLayout
else:
def translateAcceleratorByKbLayout(accStr):
return accStr
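# A minimal usage sketch (illustrative only; the paths are hypothetical and the
# block assumes the module is run as part of the pwiki package, e.g. with
# `python -m pwiki.OsAbstract`, so that the relative imports above resolve).
if __name__ == '__main__':
    with open('/tmp/note.txt', 'w') as fp:              # something to copy
        fp.write('hello')
    copyFile('/tmp/note.txt', '/tmp/backup/note.txt')   # creates missing dirs
    print(samefile('/tmp/note.txt', '/tmp/../tmp/note.txt'))  # True
    deleteFile('/tmp/backup/note.txt')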
| 30.505747
| 82
| 0.646948
|
import os, shutil, os.path, re, traceback
import wx
from . import SystemInfo
from .StringOps import mbcsEnc, urlQuote, pathnameFromUrl, pathEnc
try:
import WindowsHacks
except:
if SystemInfo.isWindows():
traceback.print_exc()
WindowsHacks = None
try:
import GtkHacks
except:
import ExceptionLogger
ExceptionLogger.logOptionalComponentException(
"Initialize GTK hacks in OsAbstract.py")
GtkHacks = None
if SystemInfo.isWindows():
if SystemInfo.isWinNT() and SystemInfo.isUnicode() and WindowsHacks:
startFile = WindowsHacks.startFile
else:
def startFile(mainControl, link):
os.startfile(mbcsEnc(link, "replace")[0])
else:
def startFile(mainControl, link):
startPath = mainControl.getConfig().get("main", "fileLauncher_path", u"")
if startPath == u"":
wx.LaunchDefaultBrowser(link)
return
if link.startswith("file:"):
link = pathnameFromUrl(link)
os.spawnlp(os.P_NOWAIT, startPath, startPath, link)
if SystemInfo.isWinNT() and WindowsHacks:
copyFile = WindowsHacks.copyFile
moveFile = WindowsHacks.moveFile
deleteFile = WindowsHacks.deleteFile
else:
def copyFile(srcPath, dstPath):
"""
Copy file from srcPath to dstPath. dstPath may be overwritten if
existing already. dstPath must point to a file, not a directory.
If some directories in dstPath do not exist, they are created.
This currently just calls shutil.copy2() TODO!
"""
dstDir = os.path.dirname(dstPath)
if not os.path.exists(pathEnc(dstDir)):
os.makedirs(dstDir)
shutil.copy2(srcPath, dstPath)
def moveFile(srcPath, dstPath):
"""
Move file from srcPath to dstPath. dstPath may be overwritten if
existing already. dstPath must point to a file, not a directory.
If some directories in dstPath do not exist, they are created.
"""
dstDir = os.path.dirname(dstPath)
if not os.path.exists(pathEnc(dstDir)):
os.makedirs(dstDir)
shutil.move(srcPath, dstPath)
def deleteFile(path):
"""
Delete file or directory path.
"""
if os.path.isfile(path) or os.path.islink(path):
os.unlink(path)
elif os.path.isdir(path):
os.rmdir(path)
if SystemInfo.isWindows():
if WindowsHacks:
def samefile(path1, path2):
if WindowsHacks.getLongPath(path1).lower() == \
WindowsHacks.getLongPath(path2).lower():
return True
return WindowsHacks.getLongPath(os.path.abspath(path1)).lower() == \
WindowsHacks.getLongPath(os.path.abspath(path2)).lower()
else:
def samefile(path1, path2):
return os.path.abspath(path1) == os.path.abspath(path2)
else:
samefile = os.path.samefile
if WindowsHacks:
def normalizePath(path):
return WindowsHacks.getLongPath(os.path.abspath(path)).lower()
else:
def normalizePath(path):
return os.path.normcase(os.path.abspath(path))
if WindowsHacks:
checkForOtherInstances = WindowsHacks.checkForOtherInstances
else:
def checkForOtherInstances():
return []
def supportsClipboardInterceptor():
return False
def createInterceptCollection(interceptors=None):
return None
def createClipboardInterceptor(callingWindow):
return None
if SystemInfo.isWindows():
if WindowsHacks:
def supportsClipboardInterceptor():
return True
def createInterceptCollection(interceptors=None):
return WindowsHacks.WinProcInterceptCollection(interceptors)
def createClipboardInterceptor(callingWindow):
return WindowsHacks.ClipboardCatchIceptor(callingWindow)
else:
if GtkHacks:
def supportsClipboardInterceptor():
return True
def createInterceptCollection(interceptors=None):
return GtkHacks.FakeInterceptCollection(interceptors)
def createClipboardInterceptor(callingWindow):
return GtkHacks.ClipboardCatchFakeIceptor(callingWindow)
if WindowsHacks:
translateAcceleratorByKbLayout = WindowsHacks.translateAcceleratorByKbLayout
else:
def translateAcceleratorByKbLayout(accStr):
return accStr
| true
| true
|
f701d647f8fe822a2a2729383c44109685c2195f
| 13,630
|
py
|
Python
|
sdk/python/pulumi_azure/apimanagement/policy.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/apimanagement/policy.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/apimanagement/policy.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['PolicyArgs', 'Policy']
@pulumi.input_type
class PolicyArgs:
def __init__(__self__, *,
api_management_id: pulumi.Input[str],
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Policy resource.
:param pulumi.Input[str] api_management_id: The ID of the API Management service. Changing this forces a new API Management service Policy to be created.
:param pulumi.Input[str] xml_content: The XML Content for this Policy as a string.
:param pulumi.Input[str] xml_link: A link to a Policy XML Document, which must be publicly available.
"""
pulumi.set(__self__, "api_management_id", api_management_id)
if xml_content is not None:
pulumi.set(__self__, "xml_content", xml_content)
if xml_link is not None:
pulumi.set(__self__, "xml_link", xml_link)
@property
@pulumi.getter(name="apiManagementId")
def api_management_id(self) -> pulumi.Input[str]:
"""
The ID of the API Management service. Changing this forces a new API Management service Policy to be created.
"""
return pulumi.get(self, "api_management_id")
@api_management_id.setter
def api_management_id(self, value: pulumi.Input[str]):
pulumi.set(self, "api_management_id", value)
@property
@pulumi.getter(name="xmlContent")
def xml_content(self) -> Optional[pulumi.Input[str]]:
"""
The XML Content for this Policy as a string.
"""
return pulumi.get(self, "xml_content")
@xml_content.setter
def xml_content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "xml_content", value)
@property
@pulumi.getter(name="xmlLink")
def xml_link(self) -> Optional[pulumi.Input[str]]:
"""
A link to a Policy XML Document, which must be publicly available.
"""
return pulumi.get(self, "xml_link")
@xml_link.setter
def xml_link(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "xml_link", value)
@pulumi.input_type
class _PolicyState:
def __init__(__self__, *,
api_management_id: Optional[pulumi.Input[str]] = None,
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Policy resources.
:param pulumi.Input[str] api_management_id: The ID of the API Management service. Changing this forces a new API Management service Policy to be created.
:param pulumi.Input[str] xml_content: The XML Content for this Policy as a string.
:param pulumi.Input[str] xml_link: A link to a Policy XML Document, which must be publicly available.
"""
if api_management_id is not None:
pulumi.set(__self__, "api_management_id", api_management_id)
if xml_content is not None:
pulumi.set(__self__, "xml_content", xml_content)
if xml_link is not None:
pulumi.set(__self__, "xml_link", xml_link)
@property
@pulumi.getter(name="apiManagementId")
def api_management_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the API Management service. Changing this forces a new API Management service Policy to be created.
"""
return pulumi.get(self, "api_management_id")
@api_management_id.setter
def api_management_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_management_id", value)
@property
@pulumi.getter(name="xmlContent")
def xml_content(self) -> Optional[pulumi.Input[str]]:
"""
The XML Content for this Policy as a string.
"""
return pulumi.get(self, "xml_content")
@xml_content.setter
def xml_content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "xml_content", value)
@property
@pulumi.getter(name="xmlLink")
def xml_link(self) -> Optional[pulumi.Input[str]]:
"""
A link to a Policy XML Document, which must be publicly available.
"""
return pulumi.get(self, "xml_link")
@xml_link.setter
def xml_link(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "xml_link", value)
class Policy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_management_id: Optional[pulumi.Input[str]] = None,
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
        Manages an API Management service Policy.
> **NOTE:** This resource will, upon creation, **overwrite any existing policy in the API Management service**, as there is no feasible way to test whether the policy has been modified from the default. Similarly, when this resource is destroyed, the API Management service will revert to its default policy.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_service = azure.apimanagement.Service("exampleService",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
publisher_name="pub1",
publisher_email="[email protected]",
sku_name="Developer_1")
example_named_value = azure.apimanagement.NamedValue("exampleNamedValue",
resource_group_name=example_resource_group.name,
api_management_name=example_service.name,
display_name="ExampleProperty",
value="Example Value")
example_policy = azure.apimanagement.Policy("examplePolicy",
api_management_id=example_service.id,
xml_content=(lambda path: open(path).read())("example.xml"))
```
## Import
        API Management service Policies can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:apimanagement/policy:Policy example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.ApiManagement/service/instance1/policies/policy
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] api_management_id: The ID of the API Management service. Changing this forces a new API Management service Policy to be created.
:param pulumi.Input[str] xml_content: The XML Content for this Policy as a string.
:param pulumi.Input[str] xml_link: A link to a Policy XML Document, which must be publicly available.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
        Manages an API Management service Policy.
> **NOTE:** This resource will, upon creation, **overwrite any existing policy in the API Management service**, as there is no feasible way to test whether the policy has been modified from the default. Similarly, when this resource is destroyed, the API Management service will revert to its default policy.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_service = azure.apimanagement.Service("exampleService",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
publisher_name="pub1",
publisher_email="[email protected]",
sku_name="Developer_1")
example_named_value = azure.apimanagement.NamedValue("exampleNamedValue",
resource_group_name=example_resource_group.name,
api_management_name=example_service.name,
display_name="ExampleProperty",
value="Example Value")
example_policy = azure.apimanagement.Policy("examplePolicy",
api_management_id=example_service.id,
xml_content=(lambda path: open(path).read())("example.xml"))
```
## Import
        API Management service Policies can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:apimanagement/policy:Policy example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.ApiManagement/service/instance1/policies/policy
```
:param str resource_name: The name of the resource.
:param PolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_management_id: Optional[pulumi.Input[str]] = None,
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PolicyArgs.__new__(PolicyArgs)
if api_management_id is None and not opts.urn:
raise TypeError("Missing required property 'api_management_id'")
__props__.__dict__["api_management_id"] = api_management_id
__props__.__dict__["xml_content"] = xml_content
__props__.__dict__["xml_link"] = xml_link
super(Policy, __self__).__init__(
'azure:apimanagement/policy:Policy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
api_management_id: Optional[pulumi.Input[str]] = None,
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None) -> 'Policy':
"""
Get an existing Policy resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] api_management_id: The ID of the API Management service. Changing this forces a new API Management service Policy to be created.
:param pulumi.Input[str] xml_content: The XML Content for this Policy as a string.
:param pulumi.Input[str] xml_link: A link to a Policy XML Document, which must be publicly available.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _PolicyState.__new__(_PolicyState)
__props__.__dict__["api_management_id"] = api_management_id
__props__.__dict__["xml_content"] = xml_content
__props__.__dict__["xml_link"] = xml_link
return Policy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="apiManagementId")
def api_management_id(self) -> pulumi.Output[str]:
"""
The ID of the API Management service. Changing this forces a new API Management service Policy to be created.
"""
return pulumi.get(self, "api_management_id")
@property
@pulumi.getter(name="xmlContent")
def xml_content(self) -> pulumi.Output[str]:
"""
The XML Content for this Policy as a string.
"""
return pulumi.get(self, "xml_content")
@property
@pulumi.getter(name="xmlLink")
def xml_link(self) -> pulumi.Output[Optional[str]]:
"""
A link to a Policy XML Document, which must be publicly available.
"""
return pulumi.get(self, "xml_link")
| 44.110032
| 316
| 0.65818
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['PolicyArgs', 'Policy']
@pulumi.input_type
class PolicyArgs:
def __init__(__self__, *,
api_management_id: pulumi.Input[str],
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None):
pulumi.set(__self__, "api_management_id", api_management_id)
if xml_content is not None:
pulumi.set(__self__, "xml_content", xml_content)
if xml_link is not None:
pulumi.set(__self__, "xml_link", xml_link)
@property
@pulumi.getter(name="apiManagementId")
def api_management_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "api_management_id")
@api_management_id.setter
def api_management_id(self, value: pulumi.Input[str]):
pulumi.set(self, "api_management_id", value)
@property
@pulumi.getter(name="xmlContent")
def xml_content(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "xml_content")
@xml_content.setter
def xml_content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "xml_content", value)
@property
@pulumi.getter(name="xmlLink")
def xml_link(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "xml_link")
@xml_link.setter
def xml_link(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "xml_link", value)
@pulumi.input_type
class _PolicyState:
def __init__(__self__, *,
api_management_id: Optional[pulumi.Input[str]] = None,
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None):
if api_management_id is not None:
pulumi.set(__self__, "api_management_id", api_management_id)
if xml_content is not None:
pulumi.set(__self__, "xml_content", xml_content)
if xml_link is not None:
pulumi.set(__self__, "xml_link", xml_link)
@property
@pulumi.getter(name="apiManagementId")
def api_management_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "api_management_id")
@api_management_id.setter
def api_management_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_management_id", value)
@property
@pulumi.getter(name="xmlContent")
def xml_content(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "xml_content")
@xml_content.setter
def xml_content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "xml_content", value)
@property
@pulumi.getter(name="xmlLink")
def xml_link(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "xml_link")
@xml_link.setter
def xml_link(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "xml_link", value)
class Policy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_management_id: Optional[pulumi.Input[str]] = None,
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None,
__props__=None):
...
@overload
def __init__(__self__,
resource_name: str,
args: PolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_management_id: Optional[pulumi.Input[str]] = None,
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PolicyArgs.__new__(PolicyArgs)
if api_management_id is None and not opts.urn:
raise TypeError("Missing required property 'api_management_id'")
__props__.__dict__["api_management_id"] = api_management_id
__props__.__dict__["xml_content"] = xml_content
__props__.__dict__["xml_link"] = xml_link
super(Policy, __self__).__init__(
'azure:apimanagement/policy:Policy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
api_management_id: Optional[pulumi.Input[str]] = None,
xml_content: Optional[pulumi.Input[str]] = None,
xml_link: Optional[pulumi.Input[str]] = None) -> 'Policy':
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _PolicyState.__new__(_PolicyState)
__props__.__dict__["api_management_id"] = api_management_id
__props__.__dict__["xml_content"] = xml_content
__props__.__dict__["xml_link"] = xml_link
return Policy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="apiManagementId")
def api_management_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "api_management_id")
@property
@pulumi.getter(name="xmlContent")
def xml_content(self) -> pulumi.Output[str]:
return pulumi.get(self, "xml_content")
@property
@pulumi.getter(name="xmlLink")
def xml_link(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "xml_link")
| true
| true
|
f701d7618a85844aeb01cd94452ad9a35deb367d
| 3,321
|
py
|
Python
|
backend/backend/settings.py
|
gabrielacham/e-store
|
03118b04a37d2e32c89a576427734bd02a8c46fd
|
[
"MIT"
] | null | null | null |
backend/backend/settings.py
|
gabrielacham/e-store
|
03118b04a37d2e32c89a576427734bd02a8c46fd
|
[
"MIT"
] | null | null | null |
backend/backend/settings.py
|
gabrielacham/e-store
|
03118b04a37d2e32c89a576427734bd02a8c46fd
|
[
"MIT"
] | null | null | null |
"""
Django settings for backend project.
Generated by 'django-admin startproject' using Django 2.2.13.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'q-)uruwrq0lgi+fp=do3(nbh)$o_+fr4qzp5w&_n5$lq0$*ywx'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
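# Hedged sketch (not part of the original settings): in production these values would
# typically come from the environment instead of being hard-coded; the variable names
# below are illustrative assumptions only.
#
#   SECRET_KEY = os.environ["DJANGO_SECRET_KEY"]
#   DEBUG = os.environ.get("DJANGO_DEBUG", "") == "1"
#   ALLOWED_HOSTS = [h for h in os.environ.get("DJANGO_ALLOWED_HOSTS", "").split(",") if h]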
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'rest_framework',
'main.apps.MainConfig'
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'backend.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Caracas'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
CORS_ORIGIN_WHITELIST = [
'http://localhost:3000',
'http://localhost:8000',
'http://localhost:8080',
]
| 25.351145
| 91
| 0.696778
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'q-)uruwrq0lgi+fp=do3(nbh)$o_+fr4qzp5w&_n5$lq0$*ywx'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'rest_framework',
'main.apps.MainConfig'
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'backend.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Caracas'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
CORS_ORIGIN_WHITELIST = [
'http://localhost:3000',
'http://localhost:8000',
'http://localhost:8080',
]
| true
| true
|
f701d79de132b4a6cd7e54e5a411a0f1e7ea9253
| 6,198
|
py
|
Python
|
sdks/python/http_client/v1/polyaxon_sdk/models/v1_list_connections_response.py
|
gregmbi/polyaxon
|
8f24089fa9cb5df28fc7b70aec27d6d23ee81e8d
|
[
"Apache-2.0"
] | null | null | null |
sdks/python/http_client/v1/polyaxon_sdk/models/v1_list_connections_response.py
|
gregmbi/polyaxon
|
8f24089fa9cb5df28fc7b70aec27d6d23ee81e8d
|
[
"Apache-2.0"
] | null | null | null |
sdks/python/http_client/v1/polyaxon_sdk/models/v1_list_connections_response.py
|
gregmbi/polyaxon
|
8f24089fa9cb5df28fc7b70aec27d6d23ee81e8d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.0.79
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1ListConnectionsResponse(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"count": "int",
"results": "list[V1ConnectionResponse]",
"previous": "str",
"next": "str",
}
attribute_map = {
"count": "count",
"results": "results",
"previous": "previous",
"next": "next",
}
def __init__(
self,
count=None,
results=None,
previous=None,
next=None,
local_vars_configuration=None,
): # noqa: E501
"""V1ListConnectionsResponse - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._count = None
self._results = None
self._previous = None
self._next = None
self.discriminator = None
if count is not None:
self.count = count
if results is not None:
self.results = results
if previous is not None:
self.previous = previous
if next is not None:
self.next = next
@property
def count(self):
"""Gets the count of this V1ListConnectionsResponse. # noqa: E501
:return: The count of this V1ListConnectionsResponse. # noqa: E501
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this V1ListConnectionsResponse.
:param count: The count of this V1ListConnectionsResponse. # noqa: E501
:type: int
"""
self._count = count
@property
def results(self):
"""Gets the results of this V1ListConnectionsResponse. # noqa: E501
:return: The results of this V1ListConnectionsResponse. # noqa: E501
:rtype: list[V1ConnectionResponse]
"""
return self._results
@results.setter
def results(self, results):
"""Sets the results of this V1ListConnectionsResponse.
:param results: The results of this V1ListConnectionsResponse. # noqa: E501
:type: list[V1ConnectionResponse]
"""
self._results = results
@property
def previous(self):
"""Gets the previous of this V1ListConnectionsResponse. # noqa: E501
:return: The previous of this V1ListConnectionsResponse. # noqa: E501
:rtype: str
"""
return self._previous
@previous.setter
def previous(self, previous):
"""Sets the previous of this V1ListConnectionsResponse.
:param previous: The previous of this V1ListConnectionsResponse. # noqa: E501
:type: str
"""
self._previous = previous
@property
def next(self):
"""Gets the next of this V1ListConnectionsResponse. # noqa: E501
:return: The next of this V1ListConnectionsResponse. # noqa: E501
:rtype: str
"""
return self._next
@next.setter
def next(self, next):
"""Sets the next of this V1ListConnectionsResponse.
:param next: The next of this V1ListConnectionsResponse. # noqa: E501
:type: str
"""
self._next = next
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1ListConnectionsResponse):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1ListConnectionsResponse):
return True
return self.to_dict() != other.to_dict()
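# Hedged usage sketch (editor's addition, not generator output): the field values below
# are made up and only illustrate how to_dict() mirrors the attribute_map keys.
#
#   resp = V1ListConnectionsResponse(count=1, results=[], previous=None, next="page=2")
#   resp.to_dict()
#   # -> {'count': 1, 'results': [], 'previous': None, 'next': 'page=2'}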
| 27.546667
| 86
| 0.592449
|
import pprint
import re
import six
from polyaxon_sdk.configuration import Configuration
class V1ListConnectionsResponse(object):
openapi_types = {
"count": "int",
"results": "list[V1ConnectionResponse]",
"previous": "str",
"next": "str",
}
attribute_map = {
"count": "count",
"results": "results",
"previous": "previous",
"next": "next",
}
def __init__(
self,
count=None,
results=None,
previous=None,
next=None,
local_vars_configuration=None,
    ):
        if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._count = None
self._results = None
self._previous = None
self._next = None
self.discriminator = None
if count is not None:
self.count = count
if results is not None:
self.results = results
if previous is not None:
self.previous = previous
if next is not None:
self.next = next
@property
def count(self):
return self._count
@count.setter
def count(self, count):
self._count = count
@property
def results(self):
return self._results
@results.setter
def results(self, results):
self._results = results
@property
def previous(self):
return self._previous
@previous.setter
def previous(self, previous):
self._previous = previous
@property
def next(self):
return self._next
@next.setter
def next(self, next):
self._next = next
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, V1ListConnectionsResponse):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
if not isinstance(other, V1ListConnectionsResponse):
return True
return self.to_dict() != other.to_dict()
| true
| true
|
f701d83453f612268763a247855c8cdd1e3ec468
| 1,393
|
py
|
Python
|
test/sca/test_edit.py
|
scrambler-crypto/pyecsca
|
491abfb548455669abd470382a48dcd07b2eda87
|
[
"MIT"
] | null | null | null |
test/sca/test_edit.py
|
scrambler-crypto/pyecsca
|
491abfb548455669abd470382a48dcd07b2eda87
|
[
"MIT"
] | null | null | null |
test/sca/test_edit.py
|
scrambler-crypto/pyecsca
|
491abfb548455669abd470382a48dcd07b2eda87
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
import numpy as np
from pyecsca.sca import Trace, trim, reverse, pad
class EditTests(TestCase):
def setUp(self):
self._trace = Trace(np.array([10, 20, 30, 40, 50], dtype=np.dtype("i1")))
def test_trim(self):
result = trim(self._trace, 2)
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples, np.array([30, 40, 50], dtype=np.dtype("i1")))
result = trim(self._trace, end=3)
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples, np.array([10, 20, 30], dtype=np.dtype("i1")))
with self.assertRaises(ValueError):
trim(self._trace, 5, 1)
def test_reverse(self):
result = reverse(self._trace)
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples,
np.array([50, 40, 30, 20, 10], dtype=np.dtype("i1")))
def test_pad(self):
result = pad(self._trace, 2)
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples,
np.array([0, 0, 10, 20, 30, 40, 50, 0, 0], dtype=np.dtype("i1")))
result = pad(self._trace, (1, 3))
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples,
np.array([0, 10, 20, 30, 40, 50, 0, 0, 0], dtype=np.dtype("i1")))
| 33.97561
| 97
| 0.585068
|
from unittest import TestCase
import numpy as np
from pyecsca.sca import Trace, trim, reverse, pad
class EditTests(TestCase):
def setUp(self):
self._trace = Trace(np.array([10, 20, 30, 40, 50], dtype=np.dtype("i1")))
def test_trim(self):
result = trim(self._trace, 2)
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples, np.array([30, 40, 50], dtype=np.dtype("i1")))
result = trim(self._trace, end=3)
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples, np.array([10, 20, 30], dtype=np.dtype("i1")))
with self.assertRaises(ValueError):
trim(self._trace, 5, 1)
def test_reverse(self):
result = reverse(self._trace)
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples,
np.array([50, 40, 30, 20, 10], dtype=np.dtype("i1")))
def test_pad(self):
result = pad(self._trace, 2)
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples,
np.array([0, 0, 10, 20, 30, 40, 50, 0, 0], dtype=np.dtype("i1")))
result = pad(self._trace, (1, 3))
self.assertIsNotNone(result)
np.testing.assert_equal(result.samples,
np.array([0, 10, 20, 30, 40, 50, 0, 0, 0], dtype=np.dtype("i1")))
| true
| true
|
f701d83c8c0491098011e98966490d3b6a659777
| 21,807
|
py
|
Python
|
venv/Lib/site-packages/shiboken2/files.dir/shibokensupport/signature/mapping.py
|
gabistoian/Hide-Text-in-image
|
88b5ef0bd2bcb0e222cfbc7abf6ac2b869f72ec5
|
[
"X11"
] | null | null | null |
venv/Lib/site-packages/shiboken2/files.dir/shibokensupport/signature/mapping.py
|
gabistoian/Hide-Text-in-image
|
88b5ef0bd2bcb0e222cfbc7abf6ac2b869f72ec5
|
[
"X11"
] | null | null | null |
venv/Lib/site-packages/shiboken2/files.dir/shibokensupport/signature/mapping.py
|
gabistoian/Hide-Text-in-image
|
88b5ef0bd2bcb0e222cfbc7abf6ac2b869f72ec5
|
[
"X11"
] | null | null | null |
#############################################################################
##
## Copyright (C) 2019 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of Qt for Python.
##
## $QT_BEGIN_LICENSE:LGPL$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 3 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL3 included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 3 requirements
## will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 2.0 or (at your option) the GNU General
## Public license version 3 or any later version approved by the KDE Free
## Qt Foundation. The licenses are as published by the Free Software
## Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-2.0.html and
## https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
from __future__ import print_function, absolute_import
"""
mapping.py
This module has the mapping from the pyside C-modules view of signatures
to the Python representation.
The PySide modules are not loaded in advance, but only after they appear
in sys.modules. This minimizes the loading overhead.
"""
import sys
import struct
import os
from shibokensupport.signature import typing
from shibokensupport.signature.typing import TypeVar, Generic
from shibokensupport.signature.lib.tool import with_metaclass
class ellipsis(object):
def __repr__(self):
return "..."
ellipsis = ellipsis()
Point = typing.Tuple[float, float]
Variant = typing.Any
ModelIndexList = typing.List[int]
QImageCleanupFunction = typing.Callable
# unfortunately, typing.Optional[t] expands to typing.Union[t, NoneType]
# Until we can force it to create Optional[t] again, we use this.
NoneType = type(None)
_S = TypeVar("_S")
MultiMap = typing.DefaultDict[str, typing.List[str]]
# ulong_max is only 32 bit on windows.
ulong_max = 2*sys.maxsize+1 if len(struct.pack("L", 1)) != 4 else 0xffffffff
ushort_max = 0xffff
GL_COLOR_BUFFER_BIT = 0x00004000
GL_NEAREST = 0x2600
WId = int
# from 5.9
GL_TEXTURE_2D = 0x0DE1
GL_RGBA = 0x1908
class _NotCalled(str):
"""
Wrap some text with semantics
This class is wrapped around text in order to avoid calling it.
There are three reasons for this:
- some instances cannot be created since they are abstract,
- some can only be created after qApp was created,
- some have an ugly __repr__ with angle brackets in it.
By using derived classes, good looking instances can be created
which can be used to generate source code or .pyi files. When the
real object is needed, the wrapper can simply be called.
"""
def __repr__(self):
return "{}({})".format(type(self).__name__, self)
def __call__(self):
from shibokensupport.signature.mapping import __dict__ as namespace
text = self if self.endswith(")") else self + "()"
return eval(text, namespace)
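# Hedged illustration (editor's addition): a wrapper such as Default("PySide2.QtGui.QPixmap")
# (used further below) prints as Default(PySide2.QtGui.QPixmap) in generated stubs, and
# calling it evaluates "PySide2.QtGui.QPixmap()" in this module's namespace to create the
# real default object once a qApp exists.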
USE_PEP563 = False
# Note: we cannot know if this feature has been imported.
# Otherwise it would be "sys.version_info[:2] >= (3, 7)".
# We *can* eventually inspect sys.modules and look if
# the calling module has this future statement set,
# but should we do that?
# Some types are abstract. They just show their name.
class Virtual(_NotCalled):
pass
# Other types I simply could not find.
class Missing(_NotCalled):
# The string must be quoted, because the object does not exist.
def __repr__(self):
if USE_PEP563:
return _NotCalled.__repr__(self)
return '{}("{}")'.format(type(self).__name__, self)
class Invalid(_NotCalled):
pass
# Helper types
class Default(_NotCalled):
pass
class Instance(_NotCalled):
pass
# Parameterized primitive variables
class _Parameterized(object):
def __init__(self, type):
self.type = type
self.__name__ = self.__class__.__name__
def __repr__(self):
return "{}({})".format(
type(self).__name__, self.type.__name__)
# Mark the primitive variables to be moved into the result.
class ResultVariable(_Parameterized):
pass
# Mark the primitive variables to become Sequence, Iterable or List
# (decided in the parser).
class ArrayLikeVariable(_Parameterized):
pass
StringList = ArrayLikeVariable(str)
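# Editor's note (hedged): an ArrayLikeVariable marker such as ArrayLikeVariable(float) for
# "array float*" below lets the parser render a C array parameter roughly as
# typing.Sequence[float]; whether Sequence, Iterable or List is chosen is decided later
# in the parser.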
class Reloader(object):
"""
    Reloader class
This is a singleton class which provides the update function for the
shiboken and PySide classes.
"""
def __init__(self):
self.sys_module_count = 0
@staticmethod
def module_valid(mod):
if getattr(mod, "__file__", None) and not os.path.isdir(mod.__file__):
ending = os.path.splitext(mod.__file__)[-1]
return ending not in (".py", ".pyc", ".pyo", ".pyi")
return False
def update(self):
"""
'update' imports all binary modules which are already in sys.modules.
The reason is to follow all user imports without introducing new ones.
This function is called by pyside_type_init to adapt imports
when the number of imported modules has changed.
"""
if self.sys_module_count == len(sys.modules):
return
self.sys_module_count = len(sys.modules)
g = globals()
# PYSIDE-1009: Try to recognize unknown modules in errorhandler.py
candidates = list(mod_name for mod_name in sys.modules.copy()
if self.module_valid(sys.modules[mod_name]))
for mod_name in candidates:
# 'top' is PySide2 when we do 'import PySide.QtCore'
# or Shiboken if we do 'import Shiboken'.
# Convince yourself that these two lines below have the same
# global effect as "import Shiboken" or "import PySide2.QtCore".
top = __import__(mod_name)
g[top.__name__] = top
proc_name = "init_" + mod_name.replace(".", "_")
if proc_name in g:
# Modules are in place, we can update the type_map.
g.update(g.pop(proc_name)())
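# Hedged sketch (editor's addition, never executed): the naming convention consumed by
# Reloader.update() pairs a binary module "Fancy" (a hypothetical name) with a global
# callable "init_Fancy" whose returned mapping is merged into this module's globals,
# mirroring the init_* hooks defined further below.
#
#   def init_Fancy():
#       type_map.update({"Fancy.Handle": int})
#       return locals()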
def check_module(mod):
    # During a build, the modules already exist as directories even though
    # the '*.so' has not yet been created. This causes a problem
# in Python 3, because it accepts folders as namespace modules
# without enforcing an '__init__.py'.
if not Reloader.module_valid(mod):
mod_name = mod.__name__
raise ImportError("Module '{mod_name}' is not a binary module!"
.format(**locals()))
update_mapping = Reloader().update
type_map = {}
namespace = globals() # our module's __dict__
type_map.update({
"...": ellipsis,
"bool": bool,
"char": int,
"char*": str,
"char*const": str,
"double": float,
"float": float,
"int": int,
"List": ArrayLikeVariable,
"long": int,
"PyCallable": typing.Callable,
"PyObject": object,
"PySequence": typing.Iterable, # important for numpy
"PyTypeObject": type,
"QChar": str,
"QHash": typing.Dict,
"qint16": int,
"qint32": int,
"qint64": int,
"qint8": int,
"qintptr": int,
"QList": ArrayLikeVariable,
"qlonglong": int,
"QMap": typing.Dict,
"QPair": typing.Tuple,
"qptrdiff": int,
"qreal": float,
"QSet": typing.Set,
"QString": str,
"QStringList": StringList,
"quint16": int,
"quint32": int,
"quint32": int,
"quint64": int,
"quint8": int,
"quintptr": int,
"qulonglong": int,
"QVariant": Variant,
"QVector": typing.List,
"QSharedPointer": typing.Tuple,
"real": float,
"short": int,
"signed char": int,
"signed long": int,
"std.list": typing.List,
"std.map": typing.Dict,
"std.pair": typing.Tuple,
"std.vector": typing.List,
"str": str,
"true": True,
"Tuple": typing.Tuple,
"uchar": int,
"uchar*": str,
"uint": int,
"ulong": int,
"ULONG_MAX": ulong_max,
"unsigned char": int, # 5.9
"unsigned char*": str,
"unsigned int": int,
"unsigned long int": int, # 5.6, RHEL 6.6
"unsigned long long": int,
"unsigned long": int,
"unsigned short int": int, # 5.6, RHEL 6.6
"unsigned short": int,
"Unspecified": None,
"ushort": int,
"void": int, # be more specific?
"WId": WId,
"zero(bytes)": b"",
"zero(Char)": 0,
"zero(float)": 0,
"zero(int)": 0,
"zero(object)": None,
"zero(str)": "",
"zero(typing.Any)": None,
"zero(Any)": None,
})
type_map.update({
# Handling variables declared as array:
"array double*" : ArrayLikeVariable(float),
"array float*" : ArrayLikeVariable(float),
"array GLint*" : ArrayLikeVariable(int),
"array GLuint*" : ArrayLikeVariable(int),
"array int*" : ArrayLikeVariable(int),
"array long long*" : ArrayLikeVariable(int),
"array long*" : ArrayLikeVariable(int),
"array short*" : ArrayLikeVariable(int),
"array signed char*" : bytes,
"array unsigned char*" : bytes,
"array unsigned int*" : ArrayLikeVariable(int),
"array unsigned short*" : ArrayLikeVariable(int),
})
type_map.update({
# Special cases:
"char*" : bytes,
"QChar*" : bytes,
"quint32*" : int, # only for QRandomGenerator
"quint8*" : bytearray, # only for QCborStreamReader and QCborValue
"uchar*" : bytes,
"unsigned char*": bytes,
})
type_map.update({
# Handling variables that are returned, eventually as Tuples:
"bool*" : ResultVariable(bool),
"float*" : ResultVariable(float),
"int*" : ResultVariable(int),
"long long*" : ResultVariable(int),
"long*" : ResultVariable(int),
"PStr*" : ResultVariable(str), # module sample
"qint32*" : ResultVariable(int),
"qint64*" : ResultVariable(int),
"qreal*" : ResultVariable(float),
"QString*" : ResultVariable(str),
"quint16*" : ResultVariable(int),
"uint*" : ResultVariable(int),
"unsigned int*" : ResultVariable(int),
"QStringList*" : ResultVariable(StringList),
})
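# Editor's note (hedged): a ResultVariable marker moves the pointed-to primitive into the
# return value, so a C++ signature such as "bool probe(int *value)" would surface on the
# Python side roughly as "probe() -> Tuple[bool, int]"; the name "probe" is hypothetical.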
# PYSIDE-1328: We need to handle "self" explicitly.
type_map.update({
"self" : "self",
})
# The Shiboken Part
def init_Shiboken():
type_map.update({
"PyType": type,
"shiboken2.bool": bool,
"size_t": int,
})
return locals()
def init_minimal():
type_map.update({
"MinBool": bool,
})
return locals()
def init_sample():
import datetime
type_map.update({
"char": int,
"char**": typing.List[str],
"Complex": complex,
"double": float,
"Foo.HANDLE": int,
"HANDLE": int,
"Null": None,
"nullptr": None,
"ObjectType.Identifier": Missing("sample.ObjectType.Identifier"),
"OddBool": bool,
"PStr": str,
"PyDate": datetime.date,
"sample.bool": bool,
"sample.char": int,
"sample.double": float,
"sample.int": int,
"sample.ObjectType": object,
"sample.OddBool": bool,
"sample.Photon.TemplateBase[Photon.DuplicatorType]": sample.Photon.ValueDuplicator,
"sample.Photon.TemplateBase[Photon.IdentityType]": sample.Photon.ValueIdentity,
"sample.Point": Point,
"sample.PStr": str,
"sample.unsigned char": int,
"std.size_t": int,
"std.string": str,
"ZeroIn": 0,
'Str("<unk")': "<unk",
'Str("<unknown>")': "<unknown>",
'Str("nown>")': "nown>",
})
return locals()
def init_other():
import numbers
type_map.update({
"other.ExtendsNoImplicitConversion": Missing("other.ExtendsNoImplicitConversion"),
"other.Number": numbers.Number,
})
return locals()
def init_smart():
# This missing type should be defined in module smart. We cannot set it to Missing()
# because it is a container type. Therefore, we supply a surrogate:
global SharedPtr
class SharedPtr(Generic[_S]):
__module__ = "smart"
smart.SharedPtr = SharedPtr
type_map.update({
"smart.Smart.Integer2": int,
})
return locals()
# The PySide Part
def init_PySide2_QtCore():
from PySide2.QtCore import Qt, QUrl, QDir
from PySide2.QtCore import QRect, QSize, QPoint, QLocale, QByteArray
from PySide2.QtCore import QMarginsF # 5.9
try:
# seems to be not generated by 5.9 ATM.
from PySide2.QtCore import Connection
except ImportError:
pass
type_map.update({
"' '": " ",
"'%'": "%",
"'g'": "g",
"4294967295UL": 4294967295, # 5.6, RHEL 6.6
"CheckIndexOption.NoOption": Instance(
"PySide2.QtCore.QAbstractItemModel.CheckIndexOptions.NoOption"), # 5.11
"DescriptorType(-1)": int, # Native handle of QSocketDescriptor
"false": False,
"list of QAbstractAnimation": typing.List[PySide2.QtCore.QAbstractAnimation],
"list of QAbstractState": typing.List[PySide2.QtCore.QAbstractState],
"long long": int,
"NULL": None, # 5.6, MSVC
"nullptr": None, # 5.9
"PyByteArray": bytearray,
"PyBytes": bytes,
"QDeadlineTimer(QDeadlineTimer.Forever)": Instance("PySide2.QtCore.QDeadlineTimer"),
"PySide2.QtCore.QUrl.ComponentFormattingOptions":
PySide2.QtCore.QUrl.ComponentFormattingOption, # mismatch option/enum, why???
"PyUnicode": typing.Text,
"Q_NULLPTR": None,
"QDir.Filters(AllEntries | NoDotAndDotDot)": Instance(
"QDir.Filters(QDir.AllEntries | QDir.NoDotAndDotDot)"),
"QDir.SortFlags(Name | IgnoreCase)": Instance(
"QDir.SortFlags(QDir.Name | QDir.IgnoreCase)"),
"QGenericArgument((0))": ellipsis, # 5.6, RHEL 6.6. Is that ok?
"QGenericArgument()": ellipsis,
"QGenericArgument(0)": ellipsis,
"QGenericArgument(NULL)": ellipsis, # 5.6, MSVC
"QGenericArgument(nullptr)": ellipsis, # 5.10
"QGenericArgument(Q_NULLPTR)": ellipsis,
"QJsonObject": typing.Dict[str, PySide2.QtCore.QJsonValue],
"QModelIndex()": Invalid("PySide2.QtCore.QModelIndex"), # repr is btw. very wrong, fix it?!
"QModelIndexList": ModelIndexList,
"QModelIndexList": ModelIndexList,
"QString()": "",
"QStringList()": [],
"QStringRef": str,
"QStringRef": str,
"Qt.HANDLE": int, # be more explicit with some constants?
"QUrl.FormattingOptions(PrettyDecoded)": Instance(
"QUrl.FormattingOptions(QUrl.PrettyDecoded)"),
"QVariant()": Invalid(Variant),
"QVariant.Type": type, # not so sure here...
"QVariantMap": typing.Dict[str, Variant],
"QVariantMap": typing.Dict[str, Variant],
})
try:
type_map.update({
"PySide2.QtCore.QMetaObject.Connection": PySide2.QtCore.Connection, # wrong!
})
except AttributeError:
# this does not exist on 5.9 ATM.
pass
return locals()
def init_PySide2_QtConcurrent():
type_map.update({
"PySide2.QtCore.QFuture[QString]":
PySide2.QtConcurrent.QFutureQString,
"PySide2.QtCore.QFuture[void]":
PySide2.QtConcurrent.QFutureVoid,
})
return locals()
def init_PySide2_QtGui():
from PySide2.QtGui import QPageLayout, QPageSize # 5.12 macOS
type_map.update({
"0.0f": 0.0,
"1.0f": 1.0,
"GL_COLOR_BUFFER_BIT": GL_COLOR_BUFFER_BIT,
"GL_NEAREST": GL_NEAREST,
"int32_t": int,
"QPixmap()": Default("PySide2.QtGui.QPixmap"), # can't create without qApp
"QPlatformSurface*": int, # a handle
"QVector< QTextLayout.FormatRange >()": [], # do we need more structure?
"uint32_t": int,
"uint8_t": int,
"USHRT_MAX": ushort_max,
})
return locals()
def init_PySide2_QtWidgets():
from PySide2.QtWidgets import QWidget, QMessageBox, QStyleOption, QStyleHintReturn, QStyleOptionComplex
from PySide2.QtWidgets import QGraphicsItem, QStyleOptionGraphicsItem # 5.9
type_map.update({
"QMessageBox.StandardButtons(Yes | No)": Instance(
"QMessageBox.StandardButtons(QMessageBox.Yes | QMessageBox.No)"),
"QWidget.RenderFlags(DrawWindowBackground | DrawChildren)": Instance(
"QWidget.RenderFlags(QWidget.DrawWindowBackground | QWidget.DrawChildren)"),
"SH_Default": QStyleHintReturn.SH_Default,
"SO_Complex": QStyleOptionComplex.SO_Complex,
"SO_Default": QStyleOption.SO_Default,
"static_cast<Qt.MatchFlags>(Qt.MatchExactly|Qt.MatchCaseSensitive)": Instance(
"Qt.MatchFlags(Qt.MatchExactly | Qt.MatchCaseSensitive)"),
"Type": PySide2.QtWidgets.QListWidgetItem.Type,
})
return locals()
def init_PySide2_QtSql():
from PySide2.QtSql import QSqlDatabase
type_map.update({
"QLatin1String(defaultConnection)": QSqlDatabase.defaultConnection,
"QVariant.Invalid": Invalid("Variant"), # not sure what I should create, here...
})
return locals()
def init_PySide2_QtNetwork():
from PySide2.QtNetwork import QNetworkRequest
best_structure = typing.OrderedDict if getattr(typing, "OrderedDict", None) else typing.Dict
type_map.update({
"QMultiMap[PySide2.QtNetwork.QSsl.AlternativeNameEntryType, QString]":
best_structure[PySide2.QtNetwork.QSsl.AlternativeNameEntryType, typing.List[str]],
"DefaultTransferTimeoutConstant":
QNetworkRequest.TransferTimeoutConstant,
"QNetworkRequest.DefaultTransferTimeoutConstant":
QNetworkRequest.TransferTimeoutConstant,
})
del best_structure
return locals()
def init_PySide2_QtXmlPatterns():
from PySide2.QtXmlPatterns import QXmlName
type_map.update({
"QXmlName.NamespaceCode": Missing("PySide2.QtXmlPatterns.QXmlName.NamespaceCode"),
"QXmlName.PrefixCode": Missing("PySide2.QtXmlPatterns.QXmlName.PrefixCode"),
})
return locals()
def init_PySide2_QtMultimedia():
import PySide2.QtMultimediaWidgets
# Check if foreign import is valid. See mapping.py in shiboken2.
check_module(PySide2.QtMultimediaWidgets)
type_map.update({
"QGraphicsVideoItem": PySide2.QtMultimediaWidgets.QGraphicsVideoItem,
"qint64": int,
"QVideoWidget": PySide2.QtMultimediaWidgets.QVideoWidget,
})
return locals()
def init_PySide2_QtOpenGL():
type_map.update({
"GLbitfield": int,
"GLenum": int,
"GLfloat": float, # 5.6, MSVC 15
"GLint": int,
"GLuint": int,
})
return locals()
def init_PySide2_QtQml():
type_map.update({
"QJSValueList()": [],
"QVariantHash()": typing.Dict[str, Variant], # from 5.9
})
return locals()
def init_PySide2_QtQuick():
type_map.update({
"PySide2.QtQuick.QSharedPointer[PySide2.QtQuick.QQuickItemGrabResult]":
PySide2.QtQuick.QQuickItemGrabResult,
"UnsignedShortType": int,
})
return locals()
def init_PySide2_QtScript():
type_map.update({
"QScriptValueList()": [],
})
return locals()
def init_PySide2_QtTest():
type_map.update({
"PySide2.QtTest.QTest.PySideQTouchEventSequence": PySide2.QtTest.QTest.QTouchEventSequence,
"PySide2.QtTest.QTouchEventSequence": PySide2.QtTest.QTest.QTouchEventSequence,
})
return locals()
# from 5.6, MSVC
def init_PySide2_QtWinExtras():
type_map.update({
"QList< QWinJumpListItem* >()": [],
})
return locals()
# from 5.12, macOS
def init_PySide2_QtDataVisualization():
from PySide2.QtDataVisualization import QtDataVisualization
QtDataVisualization.QBarDataRow = typing.List[QtDataVisualization.QBarDataItem]
QtDataVisualization.QBarDataArray = typing.List[QtDataVisualization.QBarDataRow]
QtDataVisualization.QSurfaceDataRow = typing.List[QtDataVisualization.QSurfaceDataItem]
QtDataVisualization.QSurfaceDataArray = typing.List[QtDataVisualization.QSurfaceDataRow]
type_map.update({
"100.0f": 100.0,
"QtDataVisualization.QBarDataArray": QtDataVisualization.QBarDataArray,
"QtDataVisualization.QBarDataArray*": QtDataVisualization.QBarDataArray,
"QtDataVisualization.QSurfaceDataArray": QtDataVisualization.QSurfaceDataArray,
"QtDataVisualization.QSurfaceDataArray*": QtDataVisualization.QSurfaceDataArray,
})
return locals()
def init_testbinding():
type_map.update({
"testbinding.PySideCPP2.TestObjectWithoutNamespace": testbinding.TestObjectWithoutNamespace,
})
return locals()
# end of file
| 32.941088
| 107
| 0.647315
|
from __future__ import print_function, absolute_import
import sys
import struct
import os
from shibokensupport.signature import typing
from shibokensupport.signature.typing import TypeVar, Generic
from shibokensupport.signature.lib.tool import with_metaclass
class ellipsis(object):
def __repr__(self):
return "..."
ellipsis = ellipsis()
Point = typing.Tuple[float, float]
Variant = typing.Any
ModelIndexList = typing.List[int]
QImageCleanupFunction = typing.Callable
NoneType = type(None)
_S = TypeVar("_S")
MultiMap = typing.DefaultDict[str, typing.List[str]]
ulong_max = 2*sys.maxsize+1 if len(struct.pack("L", 1)) != 4 else 0xffffffff
ushort_max = 0xffff
GL_COLOR_BUFFER_BIT = 0x00004000
GL_NEAREST = 0x2600
WId = int
GL_TEXTURE_2D = 0x0DE1
GL_RGBA = 0x1908
class _NotCalled(str):
def __repr__(self):
return "{}({})".format(type(self).__name__, self)
def __call__(self):
from shibokensupport.signature.mapping import __dict__ as namespace
text = self if self.endswith(")") else self + "()"
return eval(text, namespace)
USE_PEP563 = False
class Virtual(_NotCalled):
pass
class Missing(_NotCalled):
def __repr__(self):
if USE_PEP563:
return _NotCalled.__repr__(self)
return '{}("{}")'.format(type(self).__name__, self)
class Invalid(_NotCalled):
pass
class Default(_NotCalled):
pass
class Instance(_NotCalled):
pass
class _Parameterized(object):
def __init__(self, type):
self.type = type
self.__name__ = self.__class__.__name__
def __repr__(self):
return "{}({})".format(
type(self).__name__, self.type.__name__)
class ResultVariable(_Parameterized):
pass
class ArrayLikeVariable(_Parameterized):
pass
StringList = ArrayLikeVariable(str)
class Reloader(object):
def __init__(self):
self.sys_module_count = 0
@staticmethod
def module_valid(mod):
if getattr(mod, "__file__", None) and not os.path.isdir(mod.__file__):
ending = os.path.splitext(mod.__file__)[-1]
return ending not in (".py", ".pyc", ".pyo", ".pyi")
return False
def update(self):
if self.sys_module_count == len(sys.modules):
return
self.sys_module_count = len(sys.modules)
g = globals()
candidates = list(mod_name for mod_name in sys.modules.copy()
if self.module_valid(sys.modules[mod_name]))
for mod_name in candidates:
top = __import__(mod_name)
g[top.__name__] = top
proc_name = "init_" + mod_name.replace(".", "_")
if proc_name in g:
g.update(g.pop(proc_name)())
def check_module(mod):
if not Reloader.module_valid(mod):
mod_name = mod.__name__
raise ImportError("Module '{mod_name}' is not a binary module!"
.format(**locals()))
update_mapping = Reloader().update
type_map = {}
namespace = globals()
type_map.update({
"...": ellipsis,
"bool": bool,
"char": int,
"char*": str,
"char*const": str,
"double": float,
"float": float,
"int": int,
"List": ArrayLikeVariable,
"long": int,
"PyCallable": typing.Callable,
"PyObject": object,
"PySequence": typing.Iterable, # important for numpy
"PyTypeObject": type,
"QChar": str,
"QHash": typing.Dict,
"qint16": int,
"qint32": int,
"qint64": int,
"qint8": int,
"qintptr": int,
"QList": ArrayLikeVariable,
"qlonglong": int,
"QMap": typing.Dict,
"QPair": typing.Tuple,
"qptrdiff": int,
"qreal": float,
"QSet": typing.Set,
"QString": str,
"QStringList": StringList,
"quint16": int,
"quint32": int,
"quint32": int,
"quint64": int,
"quint8": int,
"quintptr": int,
"qulonglong": int,
"QVariant": Variant,
"QVector": typing.List,
"QSharedPointer": typing.Tuple,
"real": float,
"short": int,
"signed char": int,
"signed long": int,
"std.list": typing.List,
"std.map": typing.Dict,
"std.pair": typing.Tuple,
"std.vector": typing.List,
"str": str,
"true": True,
"Tuple": typing.Tuple,
"uchar": int,
"uchar*": str,
"uint": int,
"ulong": int,
"ULONG_MAX": ulong_max,
"unsigned char": int, # 5.9
"unsigned char*": str,
"unsigned int": int,
"unsigned long int": int, # 5.6, RHEL 6.6
"unsigned long long": int,
"unsigned long": int,
"unsigned short int": int, # 5.6, RHEL 6.6
"unsigned short": int,
"Unspecified": None,
"ushort": int,
"void": int, # be more specific?
"WId": WId,
"zero(bytes)": b"",
"zero(Char)": 0,
"zero(float)": 0,
"zero(int)": 0,
"zero(object)": None,
"zero(str)": "",
"zero(typing.Any)": None,
"zero(Any)": None,
})
type_map.update({
# Handling variables declared as array:
"array double*" : ArrayLikeVariable(float),
"array float*" : ArrayLikeVariable(float),
"array GLint*" : ArrayLikeVariable(int),
"array GLuint*" : ArrayLikeVariable(int),
"array int*" : ArrayLikeVariable(int),
"array long long*" : ArrayLikeVariable(int),
"array long*" : ArrayLikeVariable(int),
"array short*" : ArrayLikeVariable(int),
"array signed char*" : bytes,
"array unsigned char*" : bytes,
"array unsigned int*" : ArrayLikeVariable(int),
"array unsigned short*" : ArrayLikeVariable(int),
})
type_map.update({
# Special cases:
"char*" : bytes,
"QChar*" : bytes,
"quint32*" : int, # only for QRandomGenerator
"quint8*" : bytearray, # only for QCborStreamReader and QCborValue
"uchar*" : bytes,
"unsigned char*": bytes,
})
type_map.update({
# Handling variables that are returned, eventually as Tuples:
"bool*" : ResultVariable(bool),
"float*" : ResultVariable(float),
"int*" : ResultVariable(int),
"long long*" : ResultVariable(int),
"long*" : ResultVariable(int),
"PStr*" : ResultVariable(str), # module sample
"qint32*" : ResultVariable(int),
"qint64*" : ResultVariable(int),
"qreal*" : ResultVariable(float),
"QString*" : ResultVariable(str),
"quint16*" : ResultVariable(int),
"uint*" : ResultVariable(int),
"unsigned int*" : ResultVariable(int),
"QStringList*" : ResultVariable(StringList),
})
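# Illustrative reading of the blocks above (an editorial note, not original text):
# the "array ...*" entries describe buffer-like in-parameters, so ArrayLikeVariable(int)
# stands for "any sequence of ints", while the ResultVariable entries describe C++
# out-parameters such as "bool*" that end up as additional members of the returned
# value in the rendered hints; repr(ResultVariable(int)) is simply "ResultVariable(int)".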
# PYSIDE-1328: We need to handle "self" explicitly.
type_map.update({
"self" : "self",
})
# The Shiboken Part
def init_Shiboken():
type_map.update({
"PyType": type,
"shiboken2.bool": bool,
"size_t": int,
})
return locals()
def init_minimal():
type_map.update({
"MinBool": bool,
})
return locals()
def init_sample():
import datetime
type_map.update({
"char": int,
"char**": typing.List[str],
"Complex": complex,
"double": float,
"Foo.HANDLE": int,
"HANDLE": int,
"Null": None,
"nullptr": None,
"ObjectType.Identifier": Missing("sample.ObjectType.Identifier"),
"OddBool": bool,
"PStr": str,
"PyDate": datetime.date,
"sample.bool": bool,
"sample.char": int,
"sample.double": float,
"sample.int": int,
"sample.ObjectType": object,
"sample.OddBool": bool,
"sample.Photon.TemplateBase[Photon.DuplicatorType]": sample.Photon.ValueDuplicator,
"sample.Photon.TemplateBase[Photon.IdentityType]": sample.Photon.ValueIdentity,
"sample.Point": Point,
"sample.PStr": str,
"sample.unsigned char": int,
"std.size_t": int,
"std.string": str,
"ZeroIn": 0,
'Str("<unk")': "<unk",
'Str("<unknown>")': "<unknown>",
'Str("nown>")': "nown>",
})
return locals()
def init_other():
import numbers
type_map.update({
"other.ExtendsNoImplicitConversion": Missing("other.ExtendsNoImplicitConversion"),
"other.Number": numbers.Number,
})
return locals()
def init_smart():
# This missing type should be defined in module smart. We cannot set it to Missing()
# because it is a container type. Therefore, we supply a surrogate:
global SharedPtr
class SharedPtr(Generic[_S]):
__module__ = "smart"
smart.SharedPtr = SharedPtr
type_map.update({
"smart.Smart.Integer2": int,
})
return locals()
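# Editorial note (not part of the original source): the surrogate above only needs to
# be subscriptable so that hints like smart.SharedPtr[Integer2] can be spelled out;
# Generic[_S] provides exactly that, e.g. SharedPtr[int] is a valid annotation even
# though no real shared-pointer behaviour is implemented here.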
# The PySide Part
def init_PySide2_QtCore():
from PySide2.QtCore import Qt, QUrl, QDir
from PySide2.QtCore import QRect, QSize, QPoint, QLocale, QByteArray
from PySide2.QtCore import QMarginsF # 5.9
try:
# seems to be not generated by 5.9 ATM.
from PySide2.QtCore import Connection
except ImportError:
pass
type_map.update({
"' '": " ",
"'%'": "%",
"'g'": "g",
"4294967295UL": 4294967295, # 5.6, RHEL 6.6
"CheckIndexOption.NoOption": Instance(
"PySide2.QtCore.QAbstractItemModel.CheckIndexOptions.NoOption"), # 5.11
"DescriptorType(-1)": int, # Native handle of QSocketDescriptor
"false": False,
"list of QAbstractAnimation": typing.List[PySide2.QtCore.QAbstractAnimation],
"list of QAbstractState": typing.List[PySide2.QtCore.QAbstractState],
"long long": int,
"NULL": None, # 5.6, MSVC
"nullptr": None, # 5.9
"PyByteArray": bytearray,
"PyBytes": bytes,
"QDeadlineTimer(QDeadlineTimer.Forever)": Instance("PySide2.QtCore.QDeadlineTimer"),
"PySide2.QtCore.QUrl.ComponentFormattingOptions":
PySide2.QtCore.QUrl.ComponentFormattingOption, # mismatch option/enum, why???
"PyUnicode": typing.Text,
"Q_NULLPTR": None,
"QDir.Filters(AllEntries | NoDotAndDotDot)": Instance(
"QDir.Filters(QDir.AllEntries | QDir.NoDotAndDotDot)"),
"QDir.SortFlags(Name | IgnoreCase)": Instance(
"QDir.SortFlags(QDir.Name | QDir.IgnoreCase)"),
"QGenericArgument((0))": ellipsis, # 5.6, RHEL 6.6. Is that ok?
"QGenericArgument()": ellipsis,
"QGenericArgument(0)": ellipsis,
"QGenericArgument(NULL)": ellipsis, # 5.6, MSVC
"QGenericArgument(nullptr)": ellipsis, # 5.10
"QGenericArgument(Q_NULLPTR)": ellipsis,
"QJsonObject": typing.Dict[str, PySide2.QtCore.QJsonValue],
"QModelIndex()": Invalid("PySide2.QtCore.QModelIndex"), # repr is btw. very wrong, fix it?!
"QModelIndexList": ModelIndexList,
"QModelIndexList": ModelIndexList,
"QString()": "",
"QStringList()": [],
"QStringRef": str,
"QStringRef": str,
"Qt.HANDLE": int, # be more explicit with some constants?
"QUrl.FormattingOptions(PrettyDecoded)": Instance(
"QUrl.FormattingOptions(QUrl.PrettyDecoded)"),
"QVariant()": Invalid(Variant),
"QVariant.Type": type, # not so sure here...
"QVariantMap": typing.Dict[str, Variant],
"QVariantMap": typing.Dict[str, Variant],
})
try:
type_map.update({
"PySide2.QtCore.QMetaObject.Connection": PySide2.QtCore.Connection, # wrong!
})
except AttributeError:
# this does not exist on 5.9 ATM.
pass
return locals()
def init_PySide2_QtConcurrent():
type_map.update({
"PySide2.QtCore.QFuture[QString]":
PySide2.QtConcurrent.QFutureQString,
"PySide2.QtCore.QFuture[void]":
PySide2.QtConcurrent.QFutureVoid,
})
return locals()
def init_PySide2_QtGui():
from PySide2.QtGui import QPageLayout, QPageSize # 5.12 macOS
type_map.update({
"0.0f": 0.0,
"1.0f": 1.0,
"GL_COLOR_BUFFER_BIT": GL_COLOR_BUFFER_BIT,
"GL_NEAREST": GL_NEAREST,
"int32_t": int,
"QPixmap()": Default("PySide2.QtGui.QPixmap"), # can't create without qApp
"QPlatformSurface*": int, "QVector< QTextLayout.FormatRange >()": [], "uint32_t": int,
"uint8_t": int,
"USHRT_MAX": ushort_max,
})
return locals()
def init_PySide2_QtWidgets():
from PySide2.QtWidgets import QWidget, QMessageBox, QStyleOption, QStyleHintReturn, QStyleOptionComplex
    from PySide2.QtWidgets import QGraphicsItem, QStyleOptionGraphicsItem
    type_map.update({
"QMessageBox.StandardButtons(Yes | No)": Instance(
"QMessageBox.StandardButtons(QMessageBox.Yes | QMessageBox.No)"),
"QWidget.RenderFlags(DrawWindowBackground | DrawChildren)": Instance(
"QWidget.RenderFlags(QWidget.DrawWindowBackground | QWidget.DrawChildren)"),
"SH_Default": QStyleHintReturn.SH_Default,
"SO_Complex": QStyleOptionComplex.SO_Complex,
"SO_Default": QStyleOption.SO_Default,
"static_cast<Qt.MatchFlags>(Qt.MatchExactly|Qt.MatchCaseSensitive)": Instance(
"Qt.MatchFlags(Qt.MatchExactly | Qt.MatchCaseSensitive)"),
"Type": PySide2.QtWidgets.QListWidgetItem.Type,
})
return locals()
def init_PySide2_QtSql():
from PySide2.QtSql import QSqlDatabase
type_map.update({
"QLatin1String(defaultConnection)": QSqlDatabase.defaultConnection,
"QVariant.Invalid": Invalid("Variant"), })
return locals()
def init_PySide2_QtNetwork():
from PySide2.QtNetwork import QNetworkRequest
best_structure = typing.OrderedDict if getattr(typing, "OrderedDict", None) else typing.Dict
type_map.update({
"QMultiMap[PySide2.QtNetwork.QSsl.AlternativeNameEntryType, QString]":
best_structure[PySide2.QtNetwork.QSsl.AlternativeNameEntryType, typing.List[str]],
"DefaultTransferTimeoutConstant":
QNetworkRequest.TransferTimeoutConstant,
"QNetworkRequest.DefaultTransferTimeoutConstant":
QNetworkRequest.TransferTimeoutConstant,
})
del best_structure
return locals()
def init_PySide2_QtXmlPatterns():
from PySide2.QtXmlPatterns import QXmlName
type_map.update({
"QXmlName.NamespaceCode": Missing("PySide2.QtXmlPatterns.QXmlName.NamespaceCode"),
"QXmlName.PrefixCode": Missing("PySide2.QtXmlPatterns.QXmlName.PrefixCode"),
})
return locals()
def init_PySide2_QtMultimedia():
import PySide2.QtMultimediaWidgets
check_module(PySide2.QtMultimediaWidgets)
type_map.update({
"QGraphicsVideoItem": PySide2.QtMultimediaWidgets.QGraphicsVideoItem,
"qint64": int,
"QVideoWidget": PySide2.QtMultimediaWidgets.QVideoWidget,
})
return locals()
def init_PySide2_QtOpenGL():
type_map.update({
"GLbitfield": int,
"GLenum": int,
"GLfloat": float, "GLint": int,
"GLuint": int,
})
return locals()
def init_PySide2_QtQml():
type_map.update({
"QJSValueList()": [],
"QVariantHash()": typing.Dict[str, Variant], })
return locals()
def init_PySide2_QtQuick():
type_map.update({
"PySide2.QtQuick.QSharedPointer[PySide2.QtQuick.QQuickItemGrabResult]":
PySide2.QtQuick.QQuickItemGrabResult,
"UnsignedShortType": int,
})
return locals()
def init_PySide2_QtScript():
type_map.update({
"QScriptValueList()": [],
})
return locals()
def init_PySide2_QtTest():
type_map.update({
"PySide2.QtTest.QTest.PySideQTouchEventSequence": PySide2.QtTest.QTest.QTouchEventSequence,
"PySide2.QtTest.QTouchEventSequence": PySide2.QtTest.QTest.QTouchEventSequence,
})
return locals()
def init_PySide2_QtWinExtras():
type_map.update({
"QList< QWinJumpListItem* >()": [],
})
return locals()
def init_PySide2_QtDataVisualization():
from PySide2.QtDataVisualization import QtDataVisualization
QtDataVisualization.QBarDataRow = typing.List[QtDataVisualization.QBarDataItem]
QtDataVisualization.QBarDataArray = typing.List[QtDataVisualization.QBarDataRow]
QtDataVisualization.QSurfaceDataRow = typing.List[QtDataVisualization.QSurfaceDataItem]
QtDataVisualization.QSurfaceDataArray = typing.List[QtDataVisualization.QSurfaceDataRow]
type_map.update({
"100.0f": 100.0,
"QtDataVisualization.QBarDataArray": QtDataVisualization.QBarDataArray,
"QtDataVisualization.QBarDataArray*": QtDataVisualization.QBarDataArray,
"QtDataVisualization.QSurfaceDataArray": QtDataVisualization.QSurfaceDataArray,
"QtDataVisualization.QSurfaceDataArray*": QtDataVisualization.QSurfaceDataArray,
})
return locals()
def init_testbinding():
type_map.update({
"testbinding.PySideCPP2.TestObjectWithoutNamespace": testbinding.TestObjectWithoutNamespace,
})
return locals()
| true
| true
|
f701d937456c45c6727a342282dc55e55803f41c
| 1,747
|
py
|
Python
|
pyqt_ui.py
|
hacker-h/pyusb-keyboard-alike
|
ce892fbd8dac5ad72af628c993a3e12c076485d6
|
[
"MIT"
] | 47
|
2015-02-09T11:39:03.000Z
|
2022-03-29T00:55:21.000Z
|
pyqt_ui.py
|
hacker-h/pyusb-keyboard-alike
|
ce892fbd8dac5ad72af628c993a3e12c076485d6
|
[
"MIT"
] | 12
|
2015-03-03T21:13:28.000Z
|
2021-10-02T18:05:18.000Z
|
pyqt_ui.py
|
hacker-h/pyusb-keyboard-alike
|
ce892fbd8dac5ad72af628c993a3e12c076485d6
|
[
"MIT"
] | 23
|
2015-02-19T13:46:32.000Z
|
2021-07-31T18:57:55.000Z
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pyqt_example.ui'
#
# Created: Sun May 18 03:45:55 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(800, 600)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.dataList = QtGui.QListWidget(self.centralwidget)
self.dataList.setObjectName(_fromUtf8("dataList"))
self.gridLayout.addWidget(self.dataList, 0, 0, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "PyQt4 + PyUSB keyboard-alike example", None))
| 36.395833
| 105
| 0.718947
|
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(800, 600)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.dataList = QtGui.QListWidget(self.centralwidget)
self.dataList.setObjectName(_fromUtf8("dataList"))
self.gridLayout.addWidget(self.dataList, 0, 0, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "PyQt4 + PyUSB keyboard-alike example", None))
| true
| true
|
f701d938aba4d8300cacab21080c0e69d8ac18c8
| 2,597
|
py
|
Python
|
day05.py
|
spgill/AdventOfCode2021
|
58218062d64de12dac9761a30a1f9762d9a9ab6e
|
[
"MIT"
] | null | null | null |
day05.py
|
spgill/AdventOfCode2021
|
58218062d64de12dac9761a30a1f9762d9a9ab6e
|
[
"MIT"
] | null | null | null |
day05.py
|
spgill/AdventOfCode2021
|
58218062d64de12dac9761a30a1f9762d9a9ab6e
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
### stdlib imports
import pathlib
### local imports
import utils
@utils.part1
def part1(puzzleInput: str):
# Parse the coordinate pairs from the puzzle input
coordList = [
[
tuple(int(coord) for coord in pair.split(","))
for pair in line.split(" -> ")
]
for line in puzzleInput.strip().splitlines()
]
# Dictionary containing lookups for coordinate hits
part1Grid: dict[tuple[int, int], int] = {}
part2Grid: dict[tuple[int, int], int] = {}
# Iterate through each line pair and mark each coordinate the line passes through
for (startX, startY), (endX, endY) in coordList:
xMod = -1 if endX < startX else 1
xRange = range(startX, endX + xMod, xMod)
yMod = -1 if endY < startY else 1
yRange = range(startY, endY + yMod, yMod)
# For horizontal and vertical lines, it's sufficient to simply loop through the coordinates
if startX == endX or startY == endY:
for x in xRange:
for y in yRange:
part1Grid[(x, y)] = part1Grid.get((x, y), 0) + 1
part2Grid[(x, y)] = part2Grid.get((x, y), 0) + 1
# For diagonal lines (45 deg only) we can assume the x and y ranges are equal in length
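        # e.g. a segment (0, 0) -> (3, 3) gives xRange = yRange = range(0, 4), so
        # pairing xRange[i] with yRange[i] walks the diagonal one cell at a time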
else:
for i, x in enumerate(xRange):
y = yRange[i]
part2Grid[(x, y)] = part2Grid.get((x, y), 0) + 1
# If the draw option is enabled, create visualization images
if utils.getOption("draw"):
from PIL import Image
maxX, maxY = 0, 0
for (startX, startY), (endX, endY) in coordList:
maxX = max(startX, endX, maxX)
maxY = max(startY, endY, maxY)
for i, grid in enumerate([part1Grid, part2Grid]):
canvas = Image.new("RGB", (maxX + 1, maxY + 1))
for coord, count in grid.items():
canvas.putpixel(
coord, (255, 0, 0) if count > 1 else (255, 255, 255)
)
canvas.save(pathlib.Path.cwd() / f"day05.part{i + 1}.png")
# The answer is the number of grid coordinates with more than one line
utils.printAnswer(len([item for item in part1Grid.items() if item[1] > 1]))
# Pass the part 2 answer to its solution function
return len([item for item in part2Grid.items() if item[1] > 1])
@utils.part2
def part2(_, answer: int):
# Part 1 counted the overlapping points for diagonal lines as well,
# so we can just print the answer
utils.printAnswer(answer)
utils.start()
| 32.4625
| 99
| 0.58298
|
import pathlib
import utils
@utils.part1
def part1(puzzleInput: str):
coordList = [
[
tuple(int(coord) for coord in pair.split(","))
for pair in line.split(" -> ")
]
for line in puzzleInput.strip().splitlines()
]
part1Grid: dict[tuple[int, int], int] = {}
part2Grid: dict[tuple[int, int], int] = {}
for (startX, startY), (endX, endY) in coordList:
xMod = -1 if endX < startX else 1
xRange = range(startX, endX + xMod, xMod)
yMod = -1 if endY < startY else 1
yRange = range(startY, endY + yMod, yMod)
if startX == endX or startY == endY:
for x in xRange:
for y in yRange:
part1Grid[(x, y)] = part1Grid.get((x, y), 0) + 1
part2Grid[(x, y)] = part2Grid.get((x, y), 0) + 1
# For diagonal lines (45 deg only) we can assume the x and y ranges are equal in length
else:
for i, x in enumerate(xRange):
y = yRange[i]
part2Grid[(x, y)] = part2Grid.get((x, y), 0) + 1
# If the draw option is enabled, create visualization images
if utils.getOption("draw"):
from PIL import Image
maxX, maxY = 0, 0
for (startX, startY), (endX, endY) in coordList:
maxX = max(startX, endX, maxX)
maxY = max(startY, endY, maxY)
for i, grid in enumerate([part1Grid, part2Grid]):
canvas = Image.new("RGB", (maxX + 1, maxY + 1))
for coord, count in grid.items():
canvas.putpixel(
coord, (255, 0, 0) if count > 1 else (255, 255, 255)
)
canvas.save(pathlib.Path.cwd() / f"day05.part{i + 1}.png")
# The answer is the number of grid coordinates with more than one line
utils.printAnswer(len([item for item in part1Grid.items() if item[1] > 1]))
# Pass the part 2 answer to its solution function
return len([item for item in part2Grid.items() if item[1] > 1])
@utils.part2
def part2(_, answer: int):
# Part 1 counted the overlapping points for diagonal lines as well,
# so we can just print the answer
utils.printAnswer(answer)
utils.start()
| true
| true
|
f701d957a50072dc59b5c2758115c26fdfc2caf8
| 2,245
|
py
|
Python
|
database/api/ticket.py
|
aprilsanchez/ictf-framework
|
2d37b5632b8ca8a4d90a9d84d689a023d19033cf
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
database/api/ticket.py
|
aprilsanchez/ictf-framework
|
2d37b5632b8ca8a4d90a9d84d689a023d19033cf
|
[
"BSD-2-Clause-FreeBSD"
] | 2
|
2022-01-13T03:58:43.000Z
|
2022-03-12T01:01:29.000Z
|
database/api/ticket.py
|
aprilsanchez/ictf-framework
|
2d37b5632b8ca8a4d90a9d84d689a023d19033cf
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from flask import request, abort, jsonify
from . import app, mysql
from utils import requires_auth
@requires_auth
@app.route("/tickets/add", methods=['POST'])
def submit_ticket():
team_id = request.form.get("team_id")
subject = request.form.get("subject")
msg = request.form.get("message")
ts = request.form.get("ts")
cursor = mysql.cursor()
cursor.execute("""INSERT INTO tickets
(team_id, ts, subject, msg, response)
VALUES (%s, %s, %s, %s, %s)""",
(team_id, ts, subject, msg, "No Response Yet"))
ticket_id = cursor.lastrowid
mysql.database.commit()
if cursor.rowcount == 0:
return json.dumps({"result": "fail"})
else:
return json.dumps({"result": "success", "ticket_id": ticket_id})
@app.route("/tickets/get")
@app.route("/tickets/get/<int:team_id>")
@requires_auth
def get_all_tickets(team_id = None):
cursor = mysql.cursor()
if not team_id:
cursor.execute("""SELECT * FROM tickets""")
else:
cursor.execute("""SELECT * FROM tickets where team_id = %d;""", team_id)
tks = cursor.fetchall()
for t in tks:
t['msg'] = t['msg'].decode('utf-8')
t['response'] = t['response'].decode('utf-8')
return jsonify({"tickets": tks})
@app.route("/tickets/get/open")
@requires_auth
def get_open_tickets():
cursor = mysql.cursor()
cursor.execute("""SELECT * FROM tickets WHERE done = 0;""")
return jsonify({"tickets": cursor.fetchall()})
@app.route("/tickets/respond/<int:ticket_id>")
@requires_auth
def respond_to_ticket(ticket_id):
response = request.form.get("response")
cursor = mysql.cursor()
cursor.execute("""UPDATE tickets SET response = %s WHERE id = %s;""", (response, ticket_id))
mysql.database.commit()
return jsonify({"result": 'success'})
@app.route("/tickets/close/<int:ticket_id>", methods=['POST'])
@requires_auth
def close_ticket(ticket_id):
ticket_id = int(ticket_id)
cursor = mysql.cursor()
cursor.execute("""UPDATE tickets SET done = 1 WHERE id = %s;""", ticket_id)
mysql.database.commit()
return json.dumps({"result": 'success'})
| 28.0625
| 96
| 0.628508
|
import json
from flask import request, abort, jsonify
from . import app, mysql
from utils import requires_auth
@requires_auth
@app.route("/tickets/add", methods=['POST'])
def submit_ticket():
team_id = request.form.get("team_id")
subject = request.form.get("subject")
msg = request.form.get("message")
ts = request.form.get("ts")
cursor = mysql.cursor()
cursor.execute("""INSERT INTO tickets
(team_id, ts, subject, msg, response)
VALUES (%s, %s, %s, %s, %s)""",
(team_id, ts, subject, msg, "No Response Yet"))
ticket_id = cursor.lastrowid
mysql.database.commit()
if cursor.rowcount == 0:
return json.dumps({"result": "fail"})
else:
return json.dumps({"result": "success", "ticket_id": ticket_id})
@app.route("/tickets/get")
@app.route("/tickets/get/<int:team_id>")
@requires_auth
def get_all_tickets(team_id = None):
cursor = mysql.cursor()
if not team_id:
cursor.execute("""SELECT * FROM tickets""")
else:
cursor.execute("""SELECT * FROM tickets where team_id = %d;""", team_id)
tks = cursor.fetchall()
for t in tks:
t['msg'] = t['msg'].decode('utf-8')
t['response'] = t['response'].decode('utf-8')
return jsonify({"tickets": tks})
@app.route("/tickets/get/open")
@requires_auth
def get_open_tickets():
cursor = mysql.cursor()
cursor.execute("""SELECT * FROM tickets WHERE done = 0;""")
return jsonify({"tickets": cursor.fetchall()})
@app.route("/tickets/respond/<int:ticket_id>")
@requires_auth
def respond_to_ticket(ticket_id):
response = request.form.get("response")
cursor = mysql.cursor()
cursor.execute("""UPDATE tickets SET response = %s WHERE id = %s;""", (response, ticket_id))
mysql.database.commit()
return jsonify({"result": 'success'})
@app.route("/tickets/close/<int:ticket_id>", methods=['POST'])
@requires_auth
def close_ticket(ticket_id):
ticket_id = int(ticket_id)
cursor = mysql.cursor()
cursor.execute("""UPDATE tickets SET done = 1 WHERE id = %s;""", ticket_id)
mysql.database.commit()
return json.dumps({"result": 'success'})
| true
| true
|
f701da916352cb9069d9c0e76375bee26e2ffece
| 3,959
|
py
|
Python
|
quark/plugin_modules/routes.py
|
Cerberus98/quark
|
53848e357a1b4d5d23f565963f22115d8997e38f
|
[
"Apache-2.0"
] | null | null | null |
quark/plugin_modules/routes.py
|
Cerberus98/quark
|
53848e357a1b4d5d23f565963f22115d8997e38f
|
[
"Apache-2.0"
] | null | null | null |
quark/plugin_modules/routes.py
|
Cerberus98/quark
|
53848e357a1b4d5d23f565963f22115d8997e38f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 Openstack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from neutron.common import exceptions
from neutron import quota
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from quark import allocation_pool
from quark.db import api as db_api
from quark.db import models as db_models
from quark import exceptions as quark_exceptions
from quark import plugin_views as v
CONF = cfg.CONF
DEFAULT_ROUTE = netaddr.IPNetwork("0.0.0.0/0")
LOG = logging.getLogger(__name__)
ipam_driver = (importutils.import_class(CONF.QUARK.ipam_driver))()
def get_route(context, id):
LOG.info("get_route %s for tenant %s" % (id, context.tenant_id))
route = db_api.route_find(context, id=id, scope=db_api.ONE)
if not route:
raise quark_exceptions.RouteNotFound(route_id=id)
return v._make_route_dict(route)
def get_routes(context):
LOG.info("get_routes for tenant %s" % context.tenant_id)
routes = db_api.route_find(context)
return [v._make_route_dict(r) for r in routes]
def create_route(context, route):
LOG.info("create_route for tenant %s" % context.tenant_id)
route = route["route"]
for key in ["gateway", "cidr", "subnet_id"]:
if key not in route:
raise exceptions.BadRequest(resource="routes",
msg="%s is required" % key)
subnet_id = route["subnet_id"]
with context.session.begin():
subnet = db_api.subnet_find(context, id=subnet_id, scope=db_api.ONE)
if not subnet:
raise exceptions.SubnetNotFound(subnet_id=subnet_id)
policies = db_models.IPPolicy.get_ip_policy_cidrs(subnet)
alloc_pools = allocation_pool.AllocationPools(subnet["cidr"],
policies=policies)
alloc_pools.validate_gateway_excluded(route["gateway"])
# TODO(anyone): May want to denormalize the cidr values into columns
# to achieve single db lookup on conflict check
route_cidr = netaddr.IPNetwork(route["cidr"])
subnet_routes = db_api.route_find(context, subnet_id=subnet_id,
scope=db_api.ALL)
quota.QUOTAS.limit_check(context, context.tenant_id,
routes_per_subnet=len(subnet_routes) + 1)
for sub_route in subnet_routes:
sub_route_cidr = netaddr.IPNetwork(sub_route["cidr"])
if sub_route_cidr.value == DEFAULT_ROUTE.value:
continue
if route_cidr in sub_route_cidr or sub_route_cidr in route_cidr:
raise quark_exceptions.RouteConflict(
route_id=sub_route["id"], cidr=str(route_cidr))
new_route = db_api.route_create(context, **route)
return v._make_route_dict(new_route)
def delete_route(context, id):
# TODO(mdietz): This is probably where we check to see that someone is
# admin and only filter on tenant if they aren't. Correct
# for all the above later
LOG.info("delete_route %s for tenant %s" % (id, context.tenant_id))
with context.session.begin():
route = db_api.route_find(context, id=id, scope=db_api.ONE)
if not route:
raise quark_exceptions.RouteNotFound(route_id=id)
db_api.route_delete(context, route)
| 40.397959
| 78
| 0.674665
|
import netaddr
from neutron.common import exceptions
from neutron import quota
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from quark import allocation_pool
from quark.db import api as db_api
from quark.db import models as db_models
from quark import exceptions as quark_exceptions
from quark import plugin_views as v
CONF = cfg.CONF
DEFAULT_ROUTE = netaddr.IPNetwork("0.0.0.0/0")
LOG = logging.getLogger(__name__)
ipam_driver = (importutils.import_class(CONF.QUARK.ipam_driver))()
def get_route(context, id):
LOG.info("get_route %s for tenant %s" % (id, context.tenant_id))
route = db_api.route_find(context, id=id, scope=db_api.ONE)
if not route:
raise quark_exceptions.RouteNotFound(route_id=id)
return v._make_route_dict(route)
def get_routes(context):
LOG.info("get_routes for tenant %s" % context.tenant_id)
routes = db_api.route_find(context)
return [v._make_route_dict(r) for r in routes]
def create_route(context, route):
LOG.info("create_route for tenant %s" % context.tenant_id)
route = route["route"]
for key in ["gateway", "cidr", "subnet_id"]:
if key not in route:
raise exceptions.BadRequest(resource="routes",
msg="%s is required" % key)
subnet_id = route["subnet_id"]
with context.session.begin():
subnet = db_api.subnet_find(context, id=subnet_id, scope=db_api.ONE)
if not subnet:
raise exceptions.SubnetNotFound(subnet_id=subnet_id)
policies = db_models.IPPolicy.get_ip_policy_cidrs(subnet)
alloc_pools = allocation_pool.AllocationPools(subnet["cidr"],
policies=policies)
alloc_pools.validate_gateway_excluded(route["gateway"])
route_cidr = netaddr.IPNetwork(route["cidr"])
subnet_routes = db_api.route_find(context, subnet_id=subnet_id,
scope=db_api.ALL)
quota.QUOTAS.limit_check(context, context.tenant_id,
routes_per_subnet=len(subnet_routes) + 1)
for sub_route in subnet_routes:
sub_route_cidr = netaddr.IPNetwork(sub_route["cidr"])
if sub_route_cidr.value == DEFAULT_ROUTE.value:
continue
if route_cidr in sub_route_cidr or sub_route_cidr in route_cidr:
raise quark_exceptions.RouteConflict(
route_id=sub_route["id"], cidr=str(route_cidr))
new_route = db_api.route_create(context, **route)
return v._make_route_dict(new_route)
def delete_route(context, id):
# for all the above later
LOG.info("delete_route %s for tenant %s" % (id, context.tenant_id))
with context.session.begin():
route = db_api.route_find(context, id=id, scope=db_api.ONE)
if not route:
raise quark_exceptions.RouteNotFound(route_id=id)
db_api.route_delete(context, route)
| true
| true
|
f701da983fde9db5d64162d83f52c871e82c9ed0
| 17,783
|
py
|
Python
|
rootfs/usr/lib/python3/dist-packages/numpy/polynomial/tests/test_legendre.py
|
kappaIO-Dev/kappaIO-sdk-armhf-crosscompile
|
66fc5fc21e6235f7a3be72a7ccac68e2224b7fb2
|
[
"MIT"
] | null | null | null |
rootfs/usr/lib/python3/dist-packages/numpy/polynomial/tests/test_legendre.py
|
kappaIO-Dev/kappaIO-sdk-armhf-crosscompile
|
66fc5fc21e6235f7a3be72a7ccac68e2224b7fb2
|
[
"MIT"
] | null | null | null |
rootfs/usr/lib/python3/dist-packages/numpy/polynomial/tests/test_legendre.py
|
kappaIO-Dev/kappaIO-sdk-armhf-crosscompile
|
66fc5fc21e6235f7a3be72a7ccac68e2224b7fb2
|
[
"MIT"
] | null | null | null |
"""Tests for legendre module.
"""
import numpy as np
import numpy.polynomial.legendre as leg
import numpy.polynomial.polynomial as poly
from numpy.testing import *
P0 = np.array([ 1])
P1 = np.array([ 0, 1])
P2 = np.array([-1, 0, 3])/2
P3 = np.array([ 0, -3, 0, 5])/2
P4 = np.array([ 3, 0, -30, 0, 35])/8
P5 = np.array([ 0, 15, 0, -70, 0, 63])/8
P6 = np.array([-5, 0, 105, 0,-315, 0, 231])/16
P7 = np.array([ 0,-35, 0, 315, 0, -693, 0, 429])/16
P8 = np.array([35, 0,-1260, 0,6930, 0,-12012, 0,6435])/128
P9 = np.array([ 0,315, 0,-4620, 0,18018, 0,-25740, 0,12155])/128
Plist = [P0, P1, P2, P3, P4, P5, P6, P7, P8, P9]
def trim(x) :
return leg.legtrim(x, tol=1e-6)
class TestConstants(TestCase) :
def test_legdomain(self) :
assert_equal(leg.legdomain, [-1, 1])
def test_legzero(self) :
assert_equal(leg.legzero, [0])
def test_legone(self) :
assert_equal(leg.legone, [1])
def test_legx(self) :
assert_equal(leg.legx, [0, 1])
class TestArithmetic(TestCase) :
x = np.linspace(-1, 1, 100)
y0 = poly.polyval(x, P0)
y1 = poly.polyval(x, P1)
y2 = poly.polyval(x, P2)
y3 = poly.polyval(x, P3)
y4 = poly.polyval(x, P4)
y5 = poly.polyval(x, P5)
y6 = poly.polyval(x, P6)
y7 = poly.polyval(x, P7)
y8 = poly.polyval(x, P8)
y9 = poly.polyval(x, P9)
y = [y0, y1, y2, y3, y4, y5, y6, y7, y8, y9]
def test_legval(self) :
def f(x) :
return x*(x**2 - 1)
#check empty input
assert_equal(leg.legval([], [1]).size, 0)
        #check normal input
for i in range(10) :
msg = "At i=%d" % i
ser = np.zeros
tgt = self.y[i]
res = leg.legval(self.x, [0]*i + [1])
assert_almost_equal(res, tgt, err_msg=msg)
#check that shape is preserved
for i in range(3) :
dims = [2]*i
x = np.zeros(dims)
assert_equal(leg.legval(x, [1]).shape, dims)
assert_equal(leg.legval(x, [1,0]).shape, dims)
assert_equal(leg.legval(x, [1,0,0]).shape, dims)
def test_legadd(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
tgt = np.zeros(max(i,j) + 1)
tgt[i] += 1
tgt[j] += 1
res = leg.legadd([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_legsub(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
tgt = np.zeros(max(i,j) + 1)
tgt[i] += 1
tgt[j] -= 1
res = leg.legsub([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_legmulx(self):
assert_equal(leg.legmulx([0]), [0])
assert_equal(leg.legmulx([1]), [0,1])
for i in range(1, 5):
tmp = 2*i + 1
ser = [0]*i + [1]
tgt = [0]*(i - 1) + [i/tmp, 0, (i + 1)/tmp]
assert_equal(leg.legmulx(ser), tgt)
def test_legmul(self) :
# check values of result
for i in range(5) :
pol1 = [0]*i + [1]
val1 = leg.legval(self.x, pol1)
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
pol2 = [0]*j + [1]
val2 = leg.legval(self.x, pol2)
pol3 = leg.legmul(pol1, pol2)
val3 = leg.legval(self.x, pol3)
assert_(len(pol3) == i + j + 1, msg)
assert_almost_equal(val3, val1*val2, err_msg=msg)
def test_legdiv(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
ci = [0]*i + [1]
cj = [0]*j + [1]
tgt = leg.legadd(ci, cj)
quo, rem = leg.legdiv(tgt, ci)
res = leg.legadd(leg.legmul(quo, ci), rem)
assert_equal(trim(res), trim(tgt), err_msg=msg)
class TestCalculus(TestCase) :
def test_legint(self) :
# check exceptions
assert_raises(ValueError, leg.legint, [0], .5)
assert_raises(ValueError, leg.legint, [0], -1)
assert_raises(ValueError, leg.legint, [0], 1, [0,0])
# test integration of zero polynomial
for i in range(2, 5):
k = [0]*(i - 2) + [1]
res = leg.legint([0], m=i, k=k)
assert_almost_equal(res, [0, 1])
# check single integration with integration constant
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [1/scl]
legpol = leg.poly2leg(pol)
legint = leg.legint(legpol, m=1, k=[i])
res = leg.leg2poly(legint)
assert_almost_equal(trim(res), trim(tgt))
# check single integration with integration constant and lbnd
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
legpol = leg.poly2leg(pol)
legint = leg.legint(legpol, m=1, k=[i], lbnd=-1)
assert_almost_equal(leg.legval(-1, legint), i)
# check single integration with integration constant and scaling
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [2/scl]
legpol = leg.poly2leg(pol)
legint = leg.legint(legpol, m=1, k=[i], scl=2)
res = leg.leg2poly(legint)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with default k
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = leg.legint(tgt, m=1)
res = leg.legint(pol, m=j)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with defined k
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = leg.legint(tgt, m=1, k=[k])
res = leg.legint(pol, m=j, k=list(range(j)))
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with lbnd
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = leg.legint(tgt, m=1, k=[k], lbnd=-1)
res = leg.legint(pol, m=j, k=list(range(j)), lbnd=-1)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with scaling
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = leg.legint(tgt, m=1, k=[k], scl=2)
res = leg.legint(pol, m=j, k=list(range(j)), scl=2)
assert_almost_equal(trim(res), trim(tgt))
def test_legder(self) :
# check exceptions
assert_raises(ValueError, leg.legder, [0], .5)
assert_raises(ValueError, leg.legder, [0], -1)
# check that zeroth deriviative does nothing
for i in range(5) :
tgt = [1] + [0]*i
res = leg.legder(tgt, m=0)
assert_equal(trim(res), trim(tgt))
# check that derivation is the inverse of integration
for i in range(5) :
for j in range(2,5) :
tgt = [1] + [0]*i
res = leg.legder(leg.legint(tgt, m=j), m=j)
assert_almost_equal(trim(res), trim(tgt))
# check derivation with scaling
for i in range(5) :
for j in range(2,5) :
tgt = [1] + [0]*i
res = leg.legder(leg.legint(tgt, m=j, scl=2), m=j, scl=.5)
assert_almost_equal(trim(res), trim(tgt))
class TestMisc(TestCase) :
def test_legfromroots(self) :
res = leg.legfromroots([])
assert_almost_equal(trim(res), [1])
for i in range(1,5) :
roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
pol = leg.legfromroots(roots)
res = leg.legval(roots, pol)
tgt = 0
assert_(len(pol) == i + 1)
assert_almost_equal(leg.leg2poly(pol)[-1], 1)
assert_almost_equal(res, tgt)
def test_legroots(self) :
assert_almost_equal(leg.legroots([1]), [])
assert_almost_equal(leg.legroots([1, 2]), [-.5])
for i in range(2,5) :
tgt = np.linspace(-1, 1, i)
res = leg.legroots(leg.legfromroots(tgt))
assert_almost_equal(trim(res), trim(tgt))
def test_legvander(self) :
# check for 1d x
x = np.arange(3)
v = leg.legvander(x, 3)
assert_(v.shape == (3,4))
for i in range(4) :
coef = [0]*i + [1]
assert_almost_equal(v[...,i], leg.legval(x, coef))
# check for 2d x
x = np.array([[1,2],[3,4],[5,6]])
v = leg.legvander(x, 3)
assert_(v.shape == (3,2,4))
for i in range(4) :
coef = [0]*i + [1]
assert_almost_equal(v[...,i], leg.legval(x, coef))
def test_legfit(self) :
def f(x) :
return x*(x - 1)*(x - 2)
# Test exceptions
assert_raises(ValueError, leg.legfit, [1], [1], -1)
assert_raises(TypeError, leg.legfit, [[1]], [1], 0)
assert_raises(TypeError, leg.legfit, [], [1], 0)
assert_raises(TypeError, leg.legfit, [1], [[[1]]], 0)
assert_raises(TypeError, leg.legfit, [1, 2], [1], 0)
assert_raises(TypeError, leg.legfit, [1], [1, 2], 0)
assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[[1]])
assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[1,1])
# Test fit
x = np.linspace(0,2)
y = f(x)
#
coef3 = leg.legfit(x, y, 3)
assert_equal(len(coef3), 4)
assert_almost_equal(leg.legval(x, coef3), y)
#
coef4 = leg.legfit(x, y, 4)
assert_equal(len(coef4), 5)
assert_almost_equal(leg.legval(x, coef4), y)
#
coef2d = leg.legfit(x, np.array([y,y]).T, 3)
assert_almost_equal(coef2d, np.array([coef3,coef3]).T)
# test weighting
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
y[0::2] = 0
wcoef3 = leg.legfit(x, yw, 3, w=w)
assert_almost_equal(wcoef3, coef3)
#
wcoef2d = leg.legfit(x, np.array([yw,yw]).T, 3, w=w)
assert_almost_equal(wcoef2d, np.array([coef3,coef3]).T)
def test_legtrim(self) :
coef = [2, -1, 1, 0]
# Test exceptions
assert_raises(ValueError, leg.legtrim, coef, -1)
# Test results
assert_equal(leg.legtrim(coef), coef[:-1])
assert_equal(leg.legtrim(coef, 1), coef[:-3])
assert_equal(leg.legtrim(coef, 2), [0])
def test_legline(self) :
assert_equal(leg.legline(3,4), [3, 4])
def test_leg2poly(self) :
for i in range(10) :
assert_almost_equal(leg.leg2poly([0]*i + [1]), Plist[i])
def test_poly2leg(self) :
for i in range(10) :
assert_almost_equal(leg.poly2leg(Plist[i]), [0]*i + [1])
def assert_poly_almost_equal(p1, p2):
assert_almost_equal(p1.coef, p2.coef)
assert_equal(p1.domain, p2.domain)
class TestLegendreClass(TestCase) :
p1 = leg.Legendre([1,2,3])
p2 = leg.Legendre([1,2,3], [0,1])
p3 = leg.Legendre([1,2])
p4 = leg.Legendre([2,2,3])
p5 = leg.Legendre([3,2,3])
def test_equal(self) :
assert_(self.p1 == self.p1)
assert_(self.p2 == self.p2)
assert_(not self.p1 == self.p2)
assert_(not self.p1 == self.p3)
assert_(not self.p1 == [1,2,3])
def test_not_equal(self) :
assert_(not self.p1 != self.p1)
assert_(not self.p2 != self.p2)
assert_(self.p1 != self.p2)
assert_(self.p1 != self.p3)
assert_(self.p1 != [1,2,3])
def test_add(self) :
tgt = leg.Legendre([2,4,6])
assert_(self.p1 + self.p1 == tgt)
assert_(self.p1 + [1,2,3] == tgt)
assert_([1,2,3] + self.p1 == tgt)
def test_sub(self) :
tgt = leg.Legendre([1])
assert_(self.p4 - self.p1 == tgt)
assert_(self.p4 - [1,2,3] == tgt)
assert_([2,2,3] - self.p1 == tgt)
def test_mul(self) :
tgt = leg.Legendre([4.13333333, 8.8, 11.23809524, 7.2, 4.62857143])
assert_poly_almost_equal(self.p1 * self.p1, tgt)
assert_poly_almost_equal(self.p1 * [1,2,3], tgt)
assert_poly_almost_equal([1,2,3] * self.p1, tgt)
def test_floordiv(self) :
tgt = leg.Legendre([1])
assert_(self.p4 // self.p1 == tgt)
assert_(self.p4 // [1,2,3] == tgt)
assert_([2,2,3] // self.p1 == tgt)
def test_mod(self) :
tgt = leg.Legendre([1])
assert_((self.p4 % self.p1) == tgt)
assert_((self.p4 % [1,2,3]) == tgt)
assert_(([2,2,3] % self.p1) == tgt)
def test_divmod(self) :
tquo = leg.Legendre([1])
trem = leg.Legendre([2])
quo, rem = divmod(self.p5, self.p1)
assert_(quo == tquo and rem == trem)
quo, rem = divmod(self.p5, [1,2,3])
assert_(quo == tquo and rem == trem)
quo, rem = divmod([3,2,3], self.p1)
assert_(quo == tquo and rem == trem)
def test_pow(self) :
tgt = leg.Legendre([1])
for i in range(5) :
res = self.p1**i
assert_(res == tgt)
tgt = tgt*self.p1
def test_call(self) :
# domain = [-1, 1]
x = np.linspace(-1, 1)
tgt = 3*(1.5*x**2 - .5) + 2*x + 1
assert_almost_equal(self.p1(x), tgt)
# domain = [0, 1]
x = np.linspace(0, 1)
xx = 2*x - 1
assert_almost_equal(self.p2(x), self.p1(xx))
def test_degree(self) :
assert_equal(self.p1.degree(), 2)
def test_cutdeg(self) :
assert_raises(ValueError, self.p1.cutdeg, .5)
assert_raises(ValueError, self.p1.cutdeg, -1)
assert_equal(len(self.p1.cutdeg(3)), 3)
assert_equal(len(self.p1.cutdeg(2)), 3)
assert_equal(len(self.p1.cutdeg(1)), 2)
assert_equal(len(self.p1.cutdeg(0)), 1)
def test_convert(self) :
x = np.linspace(-1,1)
p = self.p1.convert(domain=[0,1])
assert_almost_equal(p(x), self.p1(x))
def test_mapparms(self) :
parms = self.p2.mapparms()
assert_almost_equal(parms, [-1, 2])
def test_trim(self) :
coef = [1, 1e-6, 1e-12, 0]
p = leg.Legendre(coef)
assert_equal(p.trim().coef, coef[:3])
assert_equal(p.trim(1e-10).coef, coef[:2])
assert_equal(p.trim(1e-5).coef, coef[:1])
def test_truncate(self) :
assert_raises(ValueError, self.p1.truncate, .5)
assert_raises(ValueError, self.p1.truncate, 0)
assert_equal(len(self.p1.truncate(4)), 3)
assert_equal(len(self.p1.truncate(3)), 3)
assert_equal(len(self.p1.truncate(2)), 2)
assert_equal(len(self.p1.truncate(1)), 1)
def test_copy(self) :
p = self.p1.copy()
assert_(self.p1 == p)
def test_integ(self) :
p = self.p2.integ()
assert_almost_equal(p.coef, leg.legint([1,2,3], 1, 0, scl=.5))
p = self.p2.integ(lbnd=0)
assert_almost_equal(p(0), 0)
p = self.p2.integ(1, 1)
assert_almost_equal(p.coef, leg.legint([1,2,3], 1, 1, scl=.5))
p = self.p2.integ(2, [1, 2])
assert_almost_equal(p.coef, leg.legint([1,2,3], 2, [1,2], scl=.5))
def test_deriv(self) :
p = self.p2.integ(2, [1, 2])
assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
assert_almost_equal(p.deriv(2).coef, self.p2.coef)
def test_roots(self) :
p = leg.Legendre(leg.poly2leg([0, -1, 0, 1]), [0, 1])
res = p.roots()
tgt = [0, .5, 1]
assert_almost_equal(res, tgt)
def test_linspace(self):
xdes = np.linspace(0, 1, 20)
ydes = self.p2(xdes)
xres, yres = self.p2.linspace(20)
assert_almost_equal(xres, xdes)
assert_almost_equal(yres, ydes)
def test_fromroots(self) :
roots = [0, .5, 1]
p = leg.Legendre.fromroots(roots, domain=[0, 1])
res = p.coef
tgt = leg.poly2leg([0, -1, 0, 1])
assert_almost_equal(res, tgt)
def test_fit(self) :
def f(x) :
return x*(x - 1)*(x - 2)
x = np.linspace(0,3)
y = f(x)
# test default value of domain
p = leg.Legendre.fit(x, y, 3)
assert_almost_equal(p.domain, [0,3])
# test that fit works in given domains
p = leg.Legendre.fit(x, y, 3, None)
assert_almost_equal(p(x), y)
assert_almost_equal(p.domain, [0,3])
p = leg.Legendre.fit(x, y, 3, [])
assert_almost_equal(p(x), y)
assert_almost_equal(p.domain, [-1, 1])
# test that fit accepts weights.
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
yw[0::2] = 0
p = leg.Legendre.fit(x, yw, 3, w=w)
assert_almost_equal(p(x), y)
def test_identity(self) :
x = np.linspace(0,3)
p = leg.Legendre.identity()
assert_almost_equal(p(x), x)
p = leg.Legendre.identity([1,3])
assert_almost_equal(p(x), x)
#
if __name__ == "__main__":
run_module_suite()
| 33.053903
| 75
| 0.506495
|
import numpy as np
import numpy.polynomial.legendre as leg
import numpy.polynomial.polynomial as poly
from numpy.testing import *
P0 = np.array([ 1])
P1 = np.array([ 0, 1])
P2 = np.array([-1, 0, 3])/2
P3 = np.array([ 0, -3, 0, 5])/2
P4 = np.array([ 3, 0, -30, 0, 35])/8
P5 = np.array([ 0, 15, 0, -70, 0, 63])/8
P6 = np.array([-5, 0, 105, 0,-315, 0, 231])/16
P7 = np.array([ 0,-35, 0, 315, 0, -693, 0, 429])/16
P8 = np.array([35, 0,-1260, 0,6930, 0,-12012, 0,6435])/128
P9 = np.array([ 0,315, 0,-4620, 0,18018, 0,-25740, 0,12155])/128
Plist = [P0, P1, P2, P3, P4, P5, P6, P7, P8, P9]
def trim(x) :
return leg.legtrim(x, tol=1e-6)
class TestConstants(TestCase) :
def test_legdomain(self) :
assert_equal(leg.legdomain, [-1, 1])
def test_legzero(self) :
assert_equal(leg.legzero, [0])
def test_legone(self) :
assert_equal(leg.legone, [1])
def test_legx(self) :
assert_equal(leg.legx, [0, 1])
class TestArithmetic(TestCase) :
x = np.linspace(-1, 1, 100)
y0 = poly.polyval(x, P0)
y1 = poly.polyval(x, P1)
y2 = poly.polyval(x, P2)
y3 = poly.polyval(x, P3)
y4 = poly.polyval(x, P4)
y5 = poly.polyval(x, P5)
y6 = poly.polyval(x, P6)
y7 = poly.polyval(x, P7)
y8 = poly.polyval(x, P8)
y9 = poly.polyval(x, P9)
y = [y0, y1, y2, y3, y4, y5, y6, y7, y8, y9]
def test_legval(self) :
def f(x) :
return x*(x**2 - 1)
assert_equal(leg.legval([], [1]).size, 0)
for i in range(10) :
msg = "At i=%d" % i
ser = np.zeros
tgt = self.y[i]
res = leg.legval(self.x, [0]*i + [1])
assert_almost_equal(res, tgt, err_msg=msg)
for i in range(3) :
dims = [2]*i
x = np.zeros(dims)
assert_equal(leg.legval(x, [1]).shape, dims)
assert_equal(leg.legval(x, [1,0]).shape, dims)
assert_equal(leg.legval(x, [1,0,0]).shape, dims)
def test_legadd(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
tgt = np.zeros(max(i,j) + 1)
tgt[i] += 1
tgt[j] += 1
res = leg.legadd([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_legsub(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
tgt = np.zeros(max(i,j) + 1)
tgt[i] += 1
tgt[j] -= 1
res = leg.legsub([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_legmulx(self):
assert_equal(leg.legmulx([0]), [0])
assert_equal(leg.legmulx([1]), [0,1])
for i in range(1, 5):
tmp = 2*i + 1
ser = [0]*i + [1]
tgt = [0]*(i - 1) + [i/tmp, 0, (i + 1)/tmp]
assert_equal(leg.legmulx(ser), tgt)
def test_legmul(self) :
for i in range(5) :
pol1 = [0]*i + [1]
val1 = leg.legval(self.x, pol1)
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
pol2 = [0]*j + [1]
val2 = leg.legval(self.x, pol2)
pol3 = leg.legmul(pol1, pol2)
val3 = leg.legval(self.x, pol3)
assert_(len(pol3) == i + j + 1, msg)
assert_almost_equal(val3, val1*val2, err_msg=msg)
def test_legdiv(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
ci = [0]*i + [1]
cj = [0]*j + [1]
tgt = leg.legadd(ci, cj)
quo, rem = leg.legdiv(tgt, ci)
res = leg.legadd(leg.legmul(quo, ci), rem)
assert_equal(trim(res), trim(tgt), err_msg=msg)
class TestCalculus(TestCase) :
def test_legint(self) :
assert_raises(ValueError, leg.legint, [0], .5)
assert_raises(ValueError, leg.legint, [0], -1)
assert_raises(ValueError, leg.legint, [0], 1, [0,0])
for i in range(2, 5):
k = [0]*(i - 2) + [1]
res = leg.legint([0], m=i, k=k)
assert_almost_equal(res, [0, 1])
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [1/scl]
legpol = leg.poly2leg(pol)
legint = leg.legint(legpol, m=1, k=[i])
res = leg.leg2poly(legint)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
legpol = leg.poly2leg(pol)
legint = leg.legint(legpol, m=1, k=[i], lbnd=-1)
assert_almost_equal(leg.legval(-1, legint), i)
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [2/scl]
legpol = leg.poly2leg(pol)
legint = leg.legint(legpol, m=1, k=[i], scl=2)
res = leg.leg2poly(legint)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = leg.legint(tgt, m=1)
res = leg.legint(pol, m=j)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = leg.legint(tgt, m=1, k=[k])
res = leg.legint(pol, m=j, k=list(range(j)))
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = leg.legint(tgt, m=1, k=[k], lbnd=-1)
res = leg.legint(pol, m=j, k=list(range(j)), lbnd=-1)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = leg.legint(tgt, m=1, k=[k], scl=2)
res = leg.legint(pol, m=j, k=list(range(j)), scl=2)
assert_almost_equal(trim(res), trim(tgt))
def test_legder(self) :
assert_raises(ValueError, leg.legder, [0], .5)
assert_raises(ValueError, leg.legder, [0], -1)
for i in range(5) :
tgt = [1] + [0]*i
res = leg.legder(tgt, m=0)
assert_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
tgt = [1] + [0]*i
res = leg.legder(leg.legint(tgt, m=j), m=j)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
tgt = [1] + [0]*i
res = leg.legder(leg.legint(tgt, m=j, scl=2), m=j, scl=.5)
assert_almost_equal(trim(res), trim(tgt))
class TestMisc(TestCase) :
def test_legfromroots(self) :
res = leg.legfromroots([])
assert_almost_equal(trim(res), [1])
for i in range(1,5) :
roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
pol = leg.legfromroots(roots)
res = leg.legval(roots, pol)
tgt = 0
assert_(len(pol) == i + 1)
assert_almost_equal(leg.leg2poly(pol)[-1], 1)
assert_almost_equal(res, tgt)
def test_legroots(self) :
assert_almost_equal(leg.legroots([1]), [])
assert_almost_equal(leg.legroots([1, 2]), [-.5])
for i in range(2,5) :
tgt = np.linspace(-1, 1, i)
res = leg.legroots(leg.legfromroots(tgt))
assert_almost_equal(trim(res), trim(tgt))
def test_legvander(self) :
x = np.arange(3)
v = leg.legvander(x, 3)
assert_(v.shape == (3,4))
for i in range(4) :
coef = [0]*i + [1]
assert_almost_equal(v[...,i], leg.legval(x, coef))
x = np.array([[1,2],[3,4],[5,6]])
v = leg.legvander(x, 3)
assert_(v.shape == (3,2,4))
for i in range(4) :
coef = [0]*i + [1]
assert_almost_equal(v[...,i], leg.legval(x, coef))
def test_legfit(self) :
def f(x) :
return x*(x - 1)*(x - 2)
assert_raises(ValueError, leg.legfit, [1], [1], -1)
assert_raises(TypeError, leg.legfit, [[1]], [1], 0)
assert_raises(TypeError, leg.legfit, [], [1], 0)
assert_raises(TypeError, leg.legfit, [1], [[[1]]], 0)
assert_raises(TypeError, leg.legfit, [1, 2], [1], 0)
assert_raises(TypeError, leg.legfit, [1], [1, 2], 0)
assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[[1]])
assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[1,1])
x = np.linspace(0,2)
y = f(x)
coef3 = leg.legfit(x, y, 3)
assert_equal(len(coef3), 4)
assert_almost_equal(leg.legval(x, coef3), y)
coef4 = leg.legfit(x, y, 4)
assert_equal(len(coef4), 5)
assert_almost_equal(leg.legval(x, coef4), y)
coef2d = leg.legfit(x, np.array([y,y]).T, 3)
assert_almost_equal(coef2d, np.array([coef3,coef3]).T)
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
y[0::2] = 0
wcoef3 = leg.legfit(x, yw, 3, w=w)
assert_almost_equal(wcoef3, coef3)
wcoef2d = leg.legfit(x, np.array([yw,yw]).T, 3, w=w)
assert_almost_equal(wcoef2d, np.array([coef3,coef3]).T)
def test_legtrim(self) :
coef = [2, -1, 1, 0]
assert_raises(ValueError, leg.legtrim, coef, -1)
assert_equal(leg.legtrim(coef), coef[:-1])
assert_equal(leg.legtrim(coef, 1), coef[:-3])
assert_equal(leg.legtrim(coef, 2), [0])
def test_legline(self) :
assert_equal(leg.legline(3,4), [3, 4])
def test_leg2poly(self) :
for i in range(10) :
assert_almost_equal(leg.leg2poly([0]*i + [1]), Plist[i])
def test_poly2leg(self) :
for i in range(10) :
assert_almost_equal(leg.poly2leg(Plist[i]), [0]*i + [1])
def assert_poly_almost_equal(p1, p2):
assert_almost_equal(p1.coef, p2.coef)
assert_equal(p1.domain, p2.domain)
class TestLegendreClass(TestCase) :
p1 = leg.Legendre([1,2,3])
p2 = leg.Legendre([1,2,3], [0,1])
p3 = leg.Legendre([1,2])
p4 = leg.Legendre([2,2,3])
p5 = leg.Legendre([3,2,3])
def test_equal(self) :
assert_(self.p1 == self.p1)
assert_(self.p2 == self.p2)
assert_(not self.p1 == self.p2)
assert_(not self.p1 == self.p3)
assert_(not self.p1 == [1,2,3])
def test_not_equal(self) :
assert_(not self.p1 != self.p1)
assert_(not self.p2 != self.p2)
assert_(self.p1 != self.p2)
assert_(self.p1 != self.p3)
assert_(self.p1 != [1,2,3])
def test_add(self) :
tgt = leg.Legendre([2,4,6])
assert_(self.p1 + self.p1 == tgt)
assert_(self.p1 + [1,2,3] == tgt)
assert_([1,2,3] + self.p1 == tgt)
def test_sub(self) :
tgt = leg.Legendre([1])
assert_(self.p4 - self.p1 == tgt)
assert_(self.p4 - [1,2,3] == tgt)
assert_([2,2,3] - self.p1 == tgt)
def test_mul(self) :
tgt = leg.Legendre([4.13333333, 8.8, 11.23809524, 7.2, 4.62857143])
assert_poly_almost_equal(self.p1 * self.p1, tgt)
assert_poly_almost_equal(self.p1 * [1,2,3], tgt)
assert_poly_almost_equal([1,2,3] * self.p1, tgt)
def test_floordiv(self) :
tgt = leg.Legendre([1])
assert_(self.p4 // self.p1 == tgt)
assert_(self.p4 // [1,2,3] == tgt)
assert_([2,2,3] // self.p1 == tgt)
def test_mod(self) :
tgt = leg.Legendre([1])
assert_((self.p4 % self.p1) == tgt)
assert_((self.p4 % [1,2,3]) == tgt)
assert_(([2,2,3] % self.p1) == tgt)
def test_divmod(self) :
tquo = leg.Legendre([1])
trem = leg.Legendre([2])
quo, rem = divmod(self.p5, self.p1)
assert_(quo == tquo and rem == trem)
quo, rem = divmod(self.p5, [1,2,3])
assert_(quo == tquo and rem == trem)
quo, rem = divmod([3,2,3], self.p1)
assert_(quo == tquo and rem == trem)
def test_pow(self) :
tgt = leg.Legendre([1])
for i in range(5) :
res = self.p1**i
assert_(res == tgt)
tgt = tgt*self.p1
def test_call(self) :
x = np.linspace(-1, 1)
tgt = 3*(1.5*x**2 - .5) + 2*x + 1
assert_almost_equal(self.p1(x), tgt)
x = np.linspace(0, 1)
xx = 2*x - 1
assert_almost_equal(self.p2(x), self.p1(xx))
def test_degree(self) :
assert_equal(self.p1.degree(), 2)
def test_cutdeg(self) :
assert_raises(ValueError, self.p1.cutdeg, .5)
assert_raises(ValueError, self.p1.cutdeg, -1)
assert_equal(len(self.p1.cutdeg(3)), 3)
assert_equal(len(self.p1.cutdeg(2)), 3)
assert_equal(len(self.p1.cutdeg(1)), 2)
assert_equal(len(self.p1.cutdeg(0)), 1)
def test_convert(self) :
x = np.linspace(-1,1)
p = self.p1.convert(domain=[0,1])
assert_almost_equal(p(x), self.p1(x))
def test_mapparms(self) :
parms = self.p2.mapparms()
assert_almost_equal(parms, [-1, 2])
def test_trim(self) :
coef = [1, 1e-6, 1e-12, 0]
p = leg.Legendre(coef)
assert_equal(p.trim().coef, coef[:3])
assert_equal(p.trim(1e-10).coef, coef[:2])
assert_equal(p.trim(1e-5).coef, coef[:1])
def test_truncate(self) :
assert_raises(ValueError, self.p1.truncate, .5)
assert_raises(ValueError, self.p1.truncate, 0)
assert_equal(len(self.p1.truncate(4)), 3)
assert_equal(len(self.p1.truncate(3)), 3)
assert_equal(len(self.p1.truncate(2)), 2)
assert_equal(len(self.p1.truncate(1)), 1)
def test_copy(self) :
p = self.p1.copy()
assert_(self.p1 == p)
def test_integ(self) :
p = self.p2.integ()
assert_almost_equal(p.coef, leg.legint([1,2,3], 1, 0, scl=.5))
p = self.p2.integ(lbnd=0)
assert_almost_equal(p(0), 0)
p = self.p2.integ(1, 1)
assert_almost_equal(p.coef, leg.legint([1,2,3], 1, 1, scl=.5))
p = self.p2.integ(2, [1, 2])
assert_almost_equal(p.coef, leg.legint([1,2,3], 2, [1,2], scl=.5))
def test_deriv(self) :
p = self.p2.integ(2, [1, 2])
assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
assert_almost_equal(p.deriv(2).coef, self.p2.coef)
def test_roots(self) :
p = leg.Legendre(leg.poly2leg([0, -1, 0, 1]), [0, 1])
res = p.roots()
tgt = [0, .5, 1]
assert_almost_equal(res, tgt)
def test_linspace(self):
xdes = np.linspace(0, 1, 20)
ydes = self.p2(xdes)
xres, yres = self.p2.linspace(20)
assert_almost_equal(xres, xdes)
assert_almost_equal(yres, ydes)
def test_fromroots(self) :
roots = [0, .5, 1]
p = leg.Legendre.fromroots(roots, domain=[0, 1])
res = p.coef
tgt = leg.poly2leg([0, -1, 0, 1])
assert_almost_equal(res, tgt)
def test_fit(self) :
def f(x) :
return x*(x - 1)*(x - 2)
x = np.linspace(0,3)
y = f(x)
p = leg.Legendre.fit(x, y, 3)
assert_almost_equal(p.domain, [0,3])
p = leg.Legendre.fit(x, y, 3, None)
assert_almost_equal(p(x), y)
assert_almost_equal(p.domain, [0,3])
p = leg.Legendre.fit(x, y, 3, [])
assert_almost_equal(p(x), y)
assert_almost_equal(p.domain, [-1, 1])
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
yw[0::2] = 0
p = leg.Legendre.fit(x, yw, 3, w=w)
assert_almost_equal(p(x), y)
def test_identity(self) :
x = np.linspace(0,3)
p = leg.Legendre.identity()
assert_almost_equal(p(x), x)
p = leg.Legendre.identity([1,3])
assert_almost_equal(p(x), x)
if __name__ == "__main__":
run_module_suite()
| true
| true
|
f701dacd676f4efb3f3647cf1a9e39a5a8d89d55
| 3,508
|
py
|
Python
|
google/ads/googleads/v6/services/services/google_ads_field_service/pagers.py
|
wxxlouisa/google-ads-python
|
f24137966f6bfcb765a9b1fae79f2d23041825fe
|
[
"Apache-2.0"
] | null | null | null |
google/ads/googleads/v6/services/services/google_ads_field_service/pagers.py
|
wxxlouisa/google-ads-python
|
f24137966f6bfcb765a9b1fae79f2d23041825fe
|
[
"Apache-2.0"
] | null | null | null |
google/ads/googleads/v6/services/services/google_ads_field_service/pagers.py
|
wxxlouisa/google-ads-python
|
f24137966f6bfcb765a9b1fae79f2d23041825fe
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, Callable, Iterable, Sequence, Tuple
from google.ads.googleads.v6.resources.types import google_ads_field
from google.ads.googleads.v6.services.types import google_ads_field_service
class SearchGoogleAdsFieldsPager:
"""A pager for iterating through ``search_google_ads_fields`` requests.
This class thinly wraps an initial
:class:`google.ads.googleads.v6.services.types.SearchGoogleAdsFieldsResponse` object, and
provides an ``__iter__`` method to iterate through its
``results`` field.
If there are more pages, the ``__iter__`` method will make additional
``SearchGoogleAdsFields`` requests and continue to iterate
through the ``results`` field on the
corresponding responses.
All the usual :class:`google.ads.googleads.v6.services.types.SearchGoogleAdsFieldsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[
..., google_ads_field_service.SearchGoogleAdsFieldsResponse
],
request: google_ads_field_service.SearchGoogleAdsFieldsRequest,
response: google_ads_field_service.SearchGoogleAdsFieldsResponse,
metadata: Sequence[Tuple[str, str]] = (),
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (:class:`google.ads.googleads.v6.services.types.SearchGoogleAdsFieldsRequest`):
The initial request object.
response (:class:`google.ads.googleads.v6.services.types.SearchGoogleAdsFieldsResponse`):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = google_ads_field_service.SearchGoogleAdsFieldsRequest(
request
)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(
self,
) -> Iterable[google_ads_field_service.SearchGoogleAdsFieldsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(
self._request, metadata=self._metadata
)
yield self._response
def __iter__(self) -> Iterable[google_ads_field.GoogleAdsField]:
for page in self.pages:
yield from page.results
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
| 38.549451
| 101
| 0.691847
|
from typing import Any, Callable, Iterable, Sequence, Tuple
from google.ads.googleads.v6.resources.types import google_ads_field
from google.ads.googleads.v6.services.types import google_ads_field_service
class SearchGoogleAdsFieldsPager:
def __init__(
self,
method: Callable[
..., google_ads_field_service.SearchGoogleAdsFieldsResponse
],
request: google_ads_field_service.SearchGoogleAdsFieldsRequest,
response: google_ads_field_service.SearchGoogleAdsFieldsResponse,
metadata: Sequence[Tuple[str, str]] = (),
):
self._method = method
self._request = google_ads_field_service.SearchGoogleAdsFieldsRequest(
request
)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(
self,
) -> Iterable[google_ads_field_service.SearchGoogleAdsFieldsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(
self._request, metadata=self._metadata
)
yield self._response
def __iter__(self) -> Iterable[google_ads_field.GoogleAdsField]:
for page in self.pages:
yield from page.results
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
| true
| true
|
f701dad2f113506e9b75c3b55da16c8042f59e05
| 869
|
py
|
Python
|
torchmetrics/functional/audio/__init__.py
|
gagan3012/metrics
|
5a2388ccaa97cc3608b1fa28879f77436434a6d6
|
[
"Apache-2.0"
] | 1
|
2021-09-14T23:34:48.000Z
|
2021-09-14T23:34:48.000Z
|
torchmetrics/functional/audio/__init__.py
|
gagan3012/metrics
|
5a2388ccaa97cc3608b1fa28879f77436434a6d6
|
[
"Apache-2.0"
] | 1
|
2021-10-16T05:02:56.000Z
|
2021-12-15T07:02:17.000Z
|
torchmetrics/functional/audio/__init__.py
|
gagan3012/metrics
|
5a2388ccaa97cc3608b1fa28879f77436434a6d6
|
[
"Apache-2.0"
] | 2
|
2021-10-16T05:02:43.000Z
|
2022-02-10T16:01:52.000Z
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from torchmetrics.functional.audio.pit import pit, pit_permutate # noqa: F401
from torchmetrics.functional.audio.si_sdr import si_sdr # noqa: F401
from torchmetrics.functional.audio.si_snr import si_snr # noqa: F401
from torchmetrics.functional.audio.snr import snr # noqa: F401
| 48.277778
| 78
| 0.779056
|
from torchmetrics.functional.audio.pit import pit, pit_permutate
from torchmetrics.functional.audio.si_sdr import si_sdr
from torchmetrics.functional.audio.si_snr import si_snr
from torchmetrics.functional.audio.snr import snr
| true
| true
|
f701dbb60581a894fa82d654ad38824ba276b7a5
| 4,113
|
py
|
Python
|
model/seg_models/pspnet.py
|
AceCoooool/segmentation
|
2f4d5ac193cab580eb8ba789e79db6dadcfecfd0
|
[
"MIT"
] | 2
|
2019-06-08T13:09:08.000Z
|
2020-09-21T04:03:09.000Z
|
model/seg_models/pspnet.py
|
AceCoooool/segmentation
|
2f4d5ac193cab580eb8ba789e79db6dadcfecfd0
|
[
"MIT"
] | 2
|
2019-05-20T11:56:02.000Z
|
2019-06-02T13:22:55.000Z
|
model/seg_models/pspnet.py
|
AceCoooool/segmentation
|
2f4d5ac193cab580eb8ba789e79db6dadcfecfd0
|
[
"MIT"
] | 1
|
2020-09-22T03:55:39.000Z
|
2020-09-22T03:55:39.000Z
|
"""Pyramid Scene Parsing Network"""
import os
import torch
from torch import nn
import torch.nn.functional as F
from model.seg_models.segbase import SegBaseModel
from model.module.basic import _FCNHead
__all__ = ['PSPNet', 'get_psp',
'get_psp_resnet101_voc',
'get_psp_resnet101_citys']
# head
def _PSP1x1Conv(in_channels, out_channels):
return nn.Sequential(nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False),
nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True))
class _PyramidPooling(nn.Module):
def __init__(self, in_channels):
super(_PyramidPooling, self).__init__()
out_channels = in_channels // 4
self.conv1 = _PSP1x1Conv(in_channels, out_channels)
self.conv2 = _PSP1x1Conv(in_channels, out_channels)
self.conv3 = _PSP1x1Conv(in_channels, out_channels)
self.conv4 = _PSP1x1Conv(in_channels, out_channels)
@staticmethod
def pool(x, size):
return F.adaptive_avg_pool2d(x, output_size=size)
@staticmethod
def upsample(x, h, w):
return F.interpolate(x, (h, w), mode='bilinear', align_corners=True)
def forward(self, x):
_, _, h, w = x.shape
feat1 = self.upsample(self.conv1(self.pool(x, 1)), h, w)
feat2 = self.upsample(self.conv2(self.pool(x, 2)), h, w)
feat3 = self.upsample(self.conv3(self.pool(x, 3)), h, w)
feat4 = self.upsample(self.conv4(self.pool(x, 4)), h, w)
return torch.cat([x, feat1, feat2, feat3, feat4], dim=1)
class _PSPHead(nn.Module):
def __init__(self, nclass, **kwargs):
super(_PSPHead, self).__init__(**kwargs)
self.psp = _PyramidPooling(2048)
self.block = list()
self.block.append(nn.Conv2d(4096, 512, kernel_size=3, padding=1, bias=False))
self.block.append(nn.BatchNorm2d(512))
self.block.append(nn.ReLU(inplace=True))
self.block.append(nn.Dropout(0.1))
self.block.append(nn.Conv2d(512, nclass, kernel_size=1))
self.block = nn.Sequential(*self.block)
def forward(self, x):
x = self.psp(x)
return self.block(x)
class PSPNet(SegBaseModel):
def __init__(self, nclass, backbone='resnet50', aux=True, dilated=True, jpu=False,
pretrained_base=True, base_size=520, crop_size=480, **kwargs):
super(PSPNet, self).__init__(nclass, aux, backbone, base_size=base_size, dilated=dilated, jpu=jpu,
crop_size=crop_size, pretrained_base=pretrained_base, **kwargs)
self.head = _PSPHead(nclass, **kwargs)
if self.aux:
self.auxlayer = _FCNHead(1024, nclass, **kwargs)
self.__setattr__('others', ['head', 'auxlayer'] if self.aux else ['head'])
def forward(self, x):
c3, c4 = self.base_forward(x)
outputs = []
x = self.head(c4)
x = F.interpolate(x, self._up_kwargs, mode='bilinear', align_corners=True)
outputs.append(x)
if self.aux:
auxout = self.auxlayer(c3)
auxout = F.interpolate(auxout, self._up_kwargs, mode='bilinear', align_corners=True)
outputs.append(auxout)
return tuple(outputs)
def get_psp(dataset='pascal_voc', backbone='resnet101', pretrained=False, pretrained_base=True,
jpu=False, root=os.path.expanduser('~/.torch/models'), **kwargs):
acronyms = {
'pascal_voc': 'voc',
'citys': 'citys',
}
from data import datasets
# infer number of classes
model = PSPNet(datasets[dataset].NUM_CLASS, backbone=backbone,
pretrained_base=pretrained_base, jpu=jpu, **kwargs)
if pretrained:
from model.model_store import get_model_file
name = 'psp_%s_%s' % (backbone, acronyms[dataset])
name = name + '_jpu' if jpu else name
model.load_state_dict(torch.load(get_model_file(name, root=root)))
return model
def get_psp_resnet101_voc(**kwargs):
return get_psp('pascal_voc', 'resnet101', **kwargs)
def get_psp_resnet101_citys(**kwargs):
return get_psp('citys', 'resnet101', **kwargs)
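# ---------------------------------------------------------------------------
# Editor's note: hedged usage sketch, not part of the original module.  It
# shows how the factory functions above are meant to be called and what the
# forward pass returns; it assumes this repository's `data.datasets` registry
# is importable (get_psp() itself requires it), and skips pretrained weights
# so nothing has to be downloaded.
def _example_pspnet_forward():
    net = get_psp_resnet101_voc(pretrained=False, pretrained_base=False)
    net.eval()
    dummy = torch.randn(1, 3, 480, 480)  # one RGB image at the default crop size
    with torch.no_grad():
        outputs = net(dummy)
    # outputs[0] comes from the PSP head; outputs[1] from the auxiliary FCN
    # head (present because aux=True by default).
    return [o.shape for o in outputs]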
| 36.39823
| 106
| 0.644299
|
import os
import torch
from torch import nn
import torch.nn.functional as F
from model.seg_models.segbase import SegBaseModel
from model.module.basic import _FCNHead
__all__ = ['PSPNet', 'get_psp',
'get_psp_resnet101_voc',
'get_psp_resnet101_citys']
def _PSP1x1Conv(in_channels, out_channels):
return nn.Sequential(nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False),
nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True))
class _PyramidPooling(nn.Module):
def __init__(self, in_channels):
super(_PyramidPooling, self).__init__()
out_channels = in_channels // 4
self.conv1 = _PSP1x1Conv(in_channels, out_channels)
self.conv2 = _PSP1x1Conv(in_channels, out_channels)
self.conv3 = _PSP1x1Conv(in_channels, out_channels)
self.conv4 = _PSP1x1Conv(in_channels, out_channels)
@staticmethod
def pool(x, size):
return F.adaptive_avg_pool2d(x, output_size=size)
@staticmethod
def upsample(x, h, w):
return F.interpolate(x, (h, w), mode='bilinear', align_corners=True)
def forward(self, x):
_, _, h, w = x.shape
feat1 = self.upsample(self.conv1(self.pool(x, 1)), h, w)
feat2 = self.upsample(self.conv2(self.pool(x, 2)), h, w)
feat3 = self.upsample(self.conv3(self.pool(x, 3)), h, w)
feat4 = self.upsample(self.conv4(self.pool(x, 4)), h, w)
return torch.cat([x, feat1, feat2, feat3, feat4], dim=1)
class _PSPHead(nn.Module):
def __init__(self, nclass, **kwargs):
super(_PSPHead, self).__init__(**kwargs)
self.psp = _PyramidPooling(2048)
self.block = list()
self.block.append(nn.Conv2d(4096, 512, kernel_size=3, padding=1, bias=False))
self.block.append(nn.BatchNorm2d(512))
self.block.append(nn.ReLU(inplace=True))
self.block.append(nn.Dropout(0.1))
self.block.append(nn.Conv2d(512, nclass, kernel_size=1))
self.block = nn.Sequential(*self.block)
def forward(self, x):
x = self.psp(x)
return self.block(x)
class PSPNet(SegBaseModel):
def __init__(self, nclass, backbone='resnet50', aux=True, dilated=True, jpu=False,
pretrained_base=True, base_size=520, crop_size=480, **kwargs):
super(PSPNet, self).__init__(nclass, aux, backbone, base_size=base_size, dilated=dilated, jpu=jpu,
crop_size=crop_size, pretrained_base=pretrained_base, **kwargs)
self.head = _PSPHead(nclass, **kwargs)
if self.aux:
self.auxlayer = _FCNHead(1024, nclass, **kwargs)
self.__setattr__('others', ['head', 'auxlayer'] if self.aux else ['head'])
def forward(self, x):
c3, c4 = self.base_forward(x)
outputs = []
x = self.head(c4)
x = F.interpolate(x, self._up_kwargs, mode='bilinear', align_corners=True)
outputs.append(x)
if self.aux:
auxout = self.auxlayer(c3)
auxout = F.interpolate(auxout, self._up_kwargs, mode='bilinear', align_corners=True)
outputs.append(auxout)
return tuple(outputs)
def get_psp(dataset='pascal_voc', backbone='resnet101', pretrained=False, pretrained_base=True,
jpu=False, root=os.path.expanduser('~/.torch/models'), **kwargs):
acronyms = {
'pascal_voc': 'voc',
'citys': 'citys',
}
from data import datasets
model = PSPNet(datasets[dataset].NUM_CLASS, backbone=backbone,
pretrained_base=pretrained_base, jpu=jpu, **kwargs)
if pretrained:
from model.model_store import get_model_file
name = 'psp_%s_%s' % (backbone, acronyms[dataset])
name = name + '_jpu' if jpu else name
model.load_state_dict(torch.load(get_model_file(name, root=root)))
return model
def get_psp_resnet101_voc(**kwargs):
return get_psp('pascal_voc', 'resnet101', **kwargs)
def get_psp_resnet101_citys(**kwargs):
return get_psp('citys', 'resnet101', **kwargs)
| true
| true
|
f701dc4aceb91f10edb6c0bfa86f3db68e475953
| 14,857
|
py
|
Python
|
mechanicalsoup/stateful_browser.py
|
timgates42/MechanicalSoup
|
0b04e8b9aed3a3e5160ac89081e2c270598c2f09
|
[
"MIT"
] | null | null | null |
mechanicalsoup/stateful_browser.py
|
timgates42/MechanicalSoup
|
0b04e8b9aed3a3e5160ac89081e2c270598c2f09
|
[
"MIT"
] | 16
|
2021-03-19T09:44:52.000Z
|
2022-03-12T00:22:14.000Z
|
mechanicalsoup/stateful_browser.py
|
timgates42/MechanicalSoup
|
0b04e8b9aed3a3e5160ac89081e2c270598c2f09
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from six.moves import urllib
from .browser import Browser
from .utils import LinkNotFoundError
from .form import Form
import sys
import re
import bs4
class _BrowserState:
def __init__(self, page=None, url=None, form=None, request=None):
self.page = page
self.url = url
self.form = form
self.request = request
class StatefulBrowser(Browser):
"""An extension of :class:`Browser` that stores the browser's state
and provides many convenient functions for interacting with HTML elements.
It is the primary tool in MechanicalSoup for interfacing with websites.
:param session: Attach a pre-existing requests Session instead of
constructing a new one.
:param soup_config: Configuration passed to BeautifulSoup to affect
the way HTML is parsed. Defaults to ``{'features': 'lxml'}``.
        If overridden, it is highly recommended to `specify a parser
<https://www.crummy.com/software/BeautifulSoup/bs4/doc/#specifying-the-parser-to-use>`__.
Otherwise, BeautifulSoup will issue a warning and pick one for
you, but the parser it chooses may be different on different
machines.
:param requests_adapters: Configuration passed to requests, to affect
the way HTTP requests are performed.
:param raise_on_404: If True, raise :class:`LinkNotFoundError`
when visiting a page triggers a 404 Not Found error.
:param user_agent: Set the user agent header to this value.
All arguments are forwarded to :func:`Browser`.
Examples ::
browser = mechanicalsoup.StatefulBrowser(
soup_config={'features': 'lxml'}, # Use the lxml HTML parser
raise_on_404=True,
user_agent='MyBot/0.1: mysite.example.com/bot_info',
)
browser.open(url)
# ...
browser.close()
Once not used anymore, the browser can be closed
using :func:`~Browser.close`.
"""
def __init__(self, *args, **kwargs):
super(StatefulBrowser, self).__init__(*args, **kwargs)
self.__debug = False
self.__verbose = 0
self.__state = _BrowserState()
def set_debug(self, debug):
"""Set the debug mode (off by default).
Set to True to enable debug mode. When active, some actions
will launch a browser on the current page on failure to let
you inspect the page content.
"""
self.__debug = debug
def get_debug(self):
"""Get the debug mode (off by default)."""
return self.__debug
def set_verbose(self, verbose):
"""Set the verbosity level (an integer).
* 0 means no verbose output.
* 1 shows one dot per visited page (looks like a progress bar)
* >= 2 shows each visited URL.
"""
self.__verbose = verbose
def get_verbose(self):
"""Get the verbosity level. See :func:`set_verbose()`."""
return self.__verbose
def get_url(self):
"""Get the URL of the currently visited page."""
return self.__state.url
def get_current_form(self):
"""Get the currently selected form as a :class:`Form` object.
See :func:`select_form`.
"""
return self.__state.form
def __setitem__(self, name, value):
"""Call item assignment on the currently selected form.
See :func:`Form.__setitem__`.
"""
self.get_current_form()[name] = value
def new_control(self, type, name, value, **kwargs):
"""Call :func:`Form.new_control` on the currently selected form."""
return self.get_current_form().new_control(type, name, value, **kwargs)
def get_current_page(self):
"""Get the current page as a soup object."""
return self.__state.page
def absolute_url(self, url):
"""Return the absolute URL made from the current URL and ``url``.
The current URL is only used to provide any missing components of
``url``, as in the `.urljoin() method of urllib.parse
<https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urljoin>`__.
"""
return urllib.parse.urljoin(self.get_url(), url)
def open(self, url, *args, **kwargs):
"""Open the URL and store the Browser's state in this object.
All arguments are forwarded to :func:`Browser.get`.
:return: Forwarded from :func:`Browser.get`.
"""
if self.__verbose == 1:
sys.stdout.write('.')
sys.stdout.flush()
elif self.__verbose >= 2:
print(url)
resp = self.get(url, *args, **kwargs)
self.__state = _BrowserState(page=resp.soup, url=resp.url,
request=resp.request)
return resp
def open_fake_page(self, page_text, url=None, soup_config=None):
"""Mock version of :func:`open`.
Behave as if opening a page whose text is ``page_text``, but do not
perform any network access. If ``url`` is set, pretend it is the page's
URL. Useful mainly for testing.
"""
soup_config = soup_config or self.soup_config
self.__state = _BrowserState(
page=bs4.BeautifulSoup(page_text, **soup_config),
url=url)
def open_relative(self, url, *args, **kwargs):
"""Like :func:`open`, but ``url`` can be relative to the currently
visited page.
"""
return self.open(self.absolute_url(url), *args, **kwargs)
def refresh(self):
"""Reload the current page with the same request as originally done.
Any change (`select_form`, or any value filled-in in the form) made to
the current page before refresh is discarded.
:raise ValueError: Raised if no refreshable page is loaded, e.g., when
using the shallow ``Browser`` wrapper functions.
:return: Response of the request."""
old_request = self.__state.request
if old_request is None:
raise ValueError('The current page is not refreshable. Either no '
'page is opened or low-level browser methods '
'were used to do so')
resp = self.session.send(old_request)
Browser.add_soup(resp, self.soup_config)
self.__state = _BrowserState(page=resp.soup, url=resp.url,
request=resp.request)
return resp
def select_form(self, selector="form", nr=0):
"""Select a form in the current page.
:param selector: CSS selector or a bs4.element.Tag object to identify
the form to select.
If not specified, ``selector`` defaults to "form", which is
useful if, e.g., there is only one form on the page.
For ``selector`` syntax, see the `.select() method in BeautifulSoup
<https://www.crummy.com/software/BeautifulSoup/bs4/doc/#css-selectors>`__.
:param nr: A zero-based index specifying which form among those that
match ``selector`` will be selected. Useful when one or more forms
have the same attributes as the form you want to select, and its
position on the page is the only way to uniquely identify it.
Default is the first matching form (``nr=0``).
:return: The selected form as a soup object. It can also be
retrieved later with :func:`get_current_form`.
"""
if isinstance(selector, bs4.element.Tag):
if selector.name != "form":
raise LinkNotFoundError
self.__state.form = Form(selector)
else:
# nr is a 0-based index for consistency with mechanize
found_forms = self.get_current_page().select(selector,
limit=nr + 1)
if len(found_forms) != nr + 1:
if self.__debug:
print('select_form failed for', selector)
self.launch_browser()
raise LinkNotFoundError()
self.__state.form = Form(found_forms[-1])
return self.get_current_form()
def submit_selected(self, btnName=None, update_state=True,
*args, **kwargs):
"""Submit the form that was selected with :func:`select_form`.
:return: Forwarded from :func:`Browser.submit`.
If there are multiple submit input/button elements, passes ``btnName``
to :func:`Form.choose_submit` on the current form to choose between
        them. If `update_state` is False, the form will be submitted but the browser
state will remain unchanged. This is useful for forms that result in
a download of a file. All other arguments are forwarded to
:func:`Browser.submit`.
"""
self.get_current_form().choose_submit(btnName)
referer = self.get_url()
if referer is not None:
if 'headers' in kwargs:
kwargs['headers']['Referer'] = referer
else:
kwargs['headers'] = {'Referer': referer}
resp = self.submit(self.__state.form, url=self.__state.url,
*args, **kwargs)
if update_state:
self.__state = _BrowserState(page=resp.soup, url=resp.url,
request=resp.request)
return resp
def list_links(self, *args, **kwargs):
"""Display the list of links in the current page. Arguments are
forwarded to :func:`links`.
"""
print("Links in the current page:")
for l in self.links(*args, **kwargs):
print(" ", l)
def links(self, url_regex=None, link_text=None, *args, **kwargs):
"""Return links in the page, as a list of bs4.element.Tag objects.
To return links matching specific criteria, specify ``url_regex``
to match the *href*-attribute, or ``link_text`` to match the
*text*-attribute of the Tag. All other arguments are forwarded to
the `.find_all() method in BeautifulSoup
<https://www.crummy.com/software/BeautifulSoup/bs4/doc/#find-all>`__.
"""
all_links = self.get_current_page().find_all(
'a', href=True, *args, **kwargs)
if url_regex is not None:
all_links = [a for a in all_links
if re.search(url_regex, a['href'])]
if link_text is not None:
all_links = [a for a in all_links
if a.text == link_text]
return all_links
def find_link(self, *args, **kwargs):
"""Find and return a link, as a bs4.element.Tag object.
The search can be refined by specifying any argument that is accepted
by :func:`links`. If several links match, return the first one found.
If no link is found, raise :class:`LinkNotFoundError`.
"""
links = self.links(*args, **kwargs)
if len(links) == 0:
raise LinkNotFoundError()
else:
return links[0]
def _find_link_internal(self, link, args, kwargs):
"""Wrapper around find_link that deals with convenience special-cases:
* If ``link`` has an *href*-attribute, then return it. If not,
consider it as a ``url_regex`` argument.
* If searching for the link fails and debug is active, launch
a browser.
"""
if hasattr(link, 'attrs') and 'href' in link.attrs:
return link
# Check if "link" parameter should be treated as "url_regex"
# but reject obtaining it from both places.
if link and 'url_regex' in kwargs:
raise ValueError('link parameter cannot be treated as '
'url_regex because url_regex is already '
'present in keyword arguments')
elif link:
kwargs['url_regex'] = link
try:
return self.find_link(*args, **kwargs)
except LinkNotFoundError:
if self.get_debug():
print('find_link failed for', kwargs)
self.list_links()
self.launch_browser()
raise
def follow_link(self, link=None, *args, **kwargs):
"""Follow a link.
If ``link`` is a bs4.element.Tag (i.e. from a previous call to
:func:`links` or :func:`find_link`), then follow the link.
If ``link`` doesn't have a *href*-attribute or is None, treat
``link`` as a url_regex and look it up with :func:`find_link`.
Any additional arguments specified are forwarded to this function.
If the link is not found, raise :class:`LinkNotFoundError`.
Before raising, if debug is activated, list available links in the
page and launch a browser.
:return: Forwarded from :func:`open_relative`.
"""
link = self._find_link_internal(link, args, kwargs)
referer = self.get_url()
headers = {'Referer': referer} if referer else None
return self.open_relative(link['href'], headers=headers)
def download_link(self, link=None, file=None, *args, **kwargs):
"""Downloads the contents of a link to a file. This function behaves
similarly to :func:`follow_link`, but the browser state will
not change when calling this function.
:param file: Filesystem path where the page contents will be
downloaded. If the file already exists, it will be overwritten.
Other arguments are the same as :func:`follow_link` (``link``
can either be a bs4.element.Tag or a URL regex, other
arguments are forwarded to :func:`find_link`).
:return: `requests.Response
<http://docs.python-requests.org/en/master/api/#requests.Response>`__
object.
"""
link = self._find_link_internal(link, args, kwargs)
url = self.absolute_url(link['href'])
referer = self.get_url()
headers = {'Referer': referer} if referer else None
response = self.session.get(url, headers=headers)
if self.raise_on_404 and response.status_code == 404:
raise LinkNotFoundError()
# Save the response content to file
if file is not None:
with open(file, 'wb') as f:
f.write(response.content)
return response
def launch_browser(self, soup=None):
"""Launch a browser to display a page, for debugging purposes.
        :param soup: Page contents to display, supplied as a bs4 soup object.
Defaults to the current page of the ``StatefulBrowser`` instance.
"""
if soup is None:
soup = self.get_current_page()
super(StatefulBrowser, self).launch_browser(soup)
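# ---------------------------------------------------------------------------
# Editor's note: hedged usage sketch, not part of the original module.  It
# strings the documented calls together into one typical session; the URL and
# form field names are placeholders for illustration only.
def _example_browser_session():
    browser = StatefulBrowser()
    browser.open("http://example.com/login")   # placeholder URL
    browser.select_form("form")                # first <form> on the page
    browser["username"] = "demo"               # __setitem__ fills the current form
    browser["password"] = "secret"
    browser.submit_selected()
    # links()/follow_link() operate on the page now stored in the browser state.
    for link in browser.links(url_regex="profile"):
        print(link["href"])
    browser.close()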
| 39.408488
| 97
| 0.609006
|
from __future__ import print_function
from six.moves import urllib
from .browser import Browser
from .utils import LinkNotFoundError
from .form import Form
import sys
import re
import bs4
class _BrowserState:
def __init__(self, page=None, url=None, form=None, request=None):
self.page = page
self.url = url
self.form = form
self.request = request
class StatefulBrowser(Browser):
def __init__(self, *args, **kwargs):
super(StatefulBrowser, self).__init__(*args, **kwargs)
self.__debug = False
self.__verbose = 0
self.__state = _BrowserState()
def set_debug(self, debug):
self.__debug = debug
def get_debug(self):
return self.__debug
def set_verbose(self, verbose):
self.__verbose = verbose
def get_verbose(self):
return self.__verbose
def get_url(self):
return self.__state.url
def get_current_form(self):
return self.__state.form
def __setitem__(self, name, value):
self.get_current_form()[name] = value
def new_control(self, type, name, value, **kwargs):
return self.get_current_form().new_control(type, name, value, **kwargs)
def get_current_page(self):
return self.__state.page
def absolute_url(self, url):
return urllib.parse.urljoin(self.get_url(), url)
def open(self, url, *args, **kwargs):
if self.__verbose == 1:
sys.stdout.write('.')
sys.stdout.flush()
elif self.__verbose >= 2:
print(url)
resp = self.get(url, *args, **kwargs)
self.__state = _BrowserState(page=resp.soup, url=resp.url,
request=resp.request)
return resp
def open_fake_page(self, page_text, url=None, soup_config=None):
soup_config = soup_config or self.soup_config
self.__state = _BrowserState(
page=bs4.BeautifulSoup(page_text, **soup_config),
url=url)
def open_relative(self, url, *args, **kwargs):
return self.open(self.absolute_url(url), *args, **kwargs)
def refresh(self):
old_request = self.__state.request
if old_request is None:
raise ValueError('The current page is not refreshable. Either no '
'page is opened or low-level browser methods '
'were used to do so')
resp = self.session.send(old_request)
Browser.add_soup(resp, self.soup_config)
self.__state = _BrowserState(page=resp.soup, url=resp.url,
request=resp.request)
return resp
def select_form(self, selector="form", nr=0):
if isinstance(selector, bs4.element.Tag):
if selector.name != "form":
raise LinkNotFoundError
self.__state.form = Form(selector)
else:
found_forms = self.get_current_page().select(selector,
limit=nr + 1)
if len(found_forms) != nr + 1:
if self.__debug:
print('select_form failed for', selector)
self.launch_browser()
raise LinkNotFoundError()
self.__state.form = Form(found_forms[-1])
return self.get_current_form()
def submit_selected(self, btnName=None, update_state=True,
*args, **kwargs):
self.get_current_form().choose_submit(btnName)
referer = self.get_url()
if referer is not None:
if 'headers' in kwargs:
kwargs['headers']['Referer'] = referer
else:
kwargs['headers'] = {'Referer': referer}
resp = self.submit(self.__state.form, url=self.__state.url,
*args, **kwargs)
if update_state:
self.__state = _BrowserState(page=resp.soup, url=resp.url,
request=resp.request)
return resp
def list_links(self, *args, **kwargs):
print("Links in the current page:")
for l in self.links(*args, **kwargs):
print(" ", l)
def links(self, url_regex=None, link_text=None, *args, **kwargs):
all_links = self.get_current_page().find_all(
'a', href=True, *args, **kwargs)
if url_regex is not None:
all_links = [a for a in all_links
if re.search(url_regex, a['href'])]
if link_text is not None:
all_links = [a for a in all_links
if a.text == link_text]
return all_links
def find_link(self, *args, **kwargs):
links = self.links(*args, **kwargs)
if len(links) == 0:
raise LinkNotFoundError()
else:
return links[0]
def _find_link_internal(self, link, args, kwargs):
if hasattr(link, 'attrs') and 'href' in link.attrs:
return link
if link and 'url_regex' in kwargs:
raise ValueError('link parameter cannot be treated as '
'url_regex because url_regex is already '
'present in keyword arguments')
elif link:
kwargs['url_regex'] = link
try:
return self.find_link(*args, **kwargs)
except LinkNotFoundError:
if self.get_debug():
print('find_link failed for', kwargs)
self.list_links()
self.launch_browser()
raise
def follow_link(self, link=None, *args, **kwargs):
link = self._find_link_internal(link, args, kwargs)
referer = self.get_url()
headers = {'Referer': referer} if referer else None
return self.open_relative(link['href'], headers=headers)
def download_link(self, link=None, file=None, *args, **kwargs):
link = self._find_link_internal(link, args, kwargs)
url = self.absolute_url(link['href'])
referer = self.get_url()
headers = {'Referer': referer} if referer else None
response = self.session.get(url, headers=headers)
if self.raise_on_404 and response.status_code == 404:
raise LinkNotFoundError()
if file is not None:
with open(file, 'wb') as f:
f.write(response.content)
return response
def launch_browser(self, soup=None):
if soup is None:
soup = self.get_current_page()
super(StatefulBrowser, self).launch_browser(soup)
| true
| true
|
f701dd00d32a4f319dc0dfd776cf299128e400bf
| 781
|
py
|
Python
|
authDemo/python/mongo_sandbox.py
|
borro0/Remote-BatterlylessSensorTestbed
|
caea8bd9490d55f86de31761b1cc8712b3ebf06d
|
[
"MIT"
] | null | null | null |
authDemo/python/mongo_sandbox.py
|
borro0/Remote-BatterlylessSensorTestbed
|
caea8bd9490d55f86de31761b1cc8712b3ebf06d
|
[
"MIT"
] | null | null | null |
authDemo/python/mongo_sandbox.py
|
borro0/Remote-BatterlylessSensorTestbed
|
caea8bd9490d55f86de31761b1cc8712b3ebf06d
|
[
"MIT"
] | null | null | null |
from pymongo import MongoClient
from bson.objectid import ObjectId
import pprint
client = MongoClient()
db = client.auth_demo
collection = db.users
#myquery = {"local": {"testRuns": {"$elemMatch": {"_id": ObjectId("5c6c119e5724c9272ca7266d")}}}}
#myquery = {"local": {"testRuns": {"date": "20190219"}}}
#myquery = {"local": {"testRuns": { "$elemMatch": {"date": "20190219"}}}}
#myquery = {"local.testRuns.date" : "20190219"}
#5c6d70ce5e0ee62337b47db3,
#myquery = {"local.email" : "[email protected]"}
myquery = {"testRuns._id" : ObjectId('5c6d70ce5e0ee62337b47db3')}
newvalues = { "$set": { "local.testRuns.$.status": "done" } }
collection.update_one(myquery, newvalues)
document = collection.find_one(myquery)
print(document)
#print(document["local"]["testRuns"][0])
| 30.038462
| 97
| 0.699104
|
from pymongo import MongoClient
from bson.objectid import ObjectId
import pprint
client = MongoClient()
db = client.auth_demo
collection = db.users
myquery = {"testRuns._id" : ObjectId('5c6d70ce5e0ee62337b47db3')}
newvalues = { "$set": { "local.testRuns.$.status": "done" } }
collection.update_one(myquery, newvalues)
document = collection.find_one(myquery)
print(document)
| true
| true
|
f701dd084a73bcb11656e05d04be88875e0a57a1
| 184
|
py
|
Python
|
ps2/test_Lect7.py
|
John-L-Jones-IV/6.0001
|
e58620b7fd59aa369c6d75071311d2930b669d8e
|
[
"MIT"
] | null | null | null |
ps2/test_Lect7.py
|
John-L-Jones-IV/6.0001
|
e58620b7fd59aa369c6d75071311d2930b669d8e
|
[
"MIT"
] | null | null | null |
ps2/test_Lect7.py
|
John-L-Jones-IV/6.0001
|
e58620b7fd59aa369c6d75071311d2930b669d8e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from Lect7 import *
def test_abs():
""" Unit test for abs() """
failure = False
if not failure:
        print('SUCCESS')
print('Testing abs()...')
test_abs()
| 15.333333
| 29
| 0.619565
|
from Lect7 import *
def test_abs():
failure = False
if not failure:
        print('SUCCESS')
print('Testing abs()...')
test_abs()
| true
| true
|
f701dd49ac9a00d4f3244b00c3f11b44a7310995
| 7,545
|
py
|
Python
|
nipy/io/imageformats/header_ufuncs.py
|
yarikoptic/NiPy-OLD
|
8759b598ac72d3b9df7414642c7a662ad9c55ece
|
[
"BSD-3-Clause"
] | 1
|
2015-08-22T16:14:45.000Z
|
2015-08-22T16:14:45.000Z
|
nipy/io/imageformats/header_ufuncs.py
|
yarikoptic/NiPy-OLD
|
8759b598ac72d3b9df7414642c7a662ad9c55ece
|
[
"BSD-3-Clause"
] | null | null | null |
nipy/io/imageformats/header_ufuncs.py
|
yarikoptic/NiPy-OLD
|
8759b598ac72d3b9df7414642c7a662ad9c55ece
|
[
"BSD-3-Clause"
] | null | null | null |
''' Ufunc-like functions operating on Analyze headers '''
import numpy as np
from volumeutils import array_from_file, array_to_file, \
HeaderDataError, HeaderTypeError, \
calculate_scale, can_cast
def read_unscaled_data(hdr, fileobj):
''' Read raw (unscaled) data from ``fileobj``
Parameters
----------
hdr : header
analyze-like header implementing ``get_data_dtype``,
``get_data_shape`` and ``get_data_offset``.
fileobj : file-like
Must be open, and implement ``read`` and ``seek`` methods
Returns
-------
arr : array-like
an array like object (that might be an ndarray),
implementing at least slicing.
'''
dtype = hdr.get_data_dtype()
shape = hdr.get_data_shape()
offset = hdr.get_data_offset()
return array_from_file(shape, dtype, fileobj, offset)
def read_data(hdr, fileobj):
''' Read data from ``fileobj`` given ``hdr``
Parameters
----------
hdr : header
analyze-like header implementing ``get_slope_inter`` and
requirements for ``read_unscaled_data``
fileobj : file-like
Must be open, and implement ``read`` and ``seek`` methods
Returns
-------
arr : array-like
an array like object (that might be an ndarray),
implementing at least slicing.
'''
slope, inter = hdr.get_slope_inter()
data = read_unscaled_data(hdr, fileobj)
if slope is None:
return data
# The data may be from a memmap, and not writeable
if slope:
if slope !=1.0:
try:
data *= slope
except ValueError:
data = data * slope
if inter:
try:
data += inter
except ValueError:
data = data + inter
return data
def write_data(hdr, data, fileobj,
intercept=0.0,
divslope=1.0,
mn=None,
mx=None):
''' Write ``data`` to ``fileobj`` coercing to header dtype
Parameters
----------
hdr : header
header object implementing ``get_data_dtype``, ``get_data_shape``
and ``get_data_offset``.
data : array-like
data to write; should match header defined shape. Data is
coerced to dtype matching header by simple ``astype``.
fileobj : file-like object
Object with file interface, implementing ``write`` and ``seek``
intercept : scalar, optional
scalar to subtract from data, before dividing by ``divslope``.
Default is 0.0
divslope : None or scalar, optional
scalefactor to *divide* data by before writing. Default
is 1.0. If None, image has no valid data, zeros are written
mn : scalar, optional
minimum threshold in (unscaled) data, such that all data below
this value are set to this value. Default is None (no threshold)
mx : scalar, optional
maximum threshold in (unscaled) data, such that all data above
this value are set to this value. Default is None (no threshold)
Examples
--------
>>> from nipy.io.imageformats.analyze import AnalyzeHeader
>>> hdr = AnalyzeHeader()
>>> hdr.set_data_shape((1, 2, 3))
>>> hdr.set_data_dtype(np.float64)
>>> from StringIO import StringIO
>>> str_io = StringIO()
>>> data = np.arange(6).reshape(1,2,3)
>>> write_data(hdr, data, str_io)
>>> data.astype(np.float64).tostring('F') == str_io.getvalue()
True
We check the data shape
>>> write_data(hdr, data.reshape(3,2,1), str_io)
Traceback (most recent call last):
...
HeaderDataError: Data should be shape (1, 2, 3)
'''
data = np.asarray(data)
shape = hdr.get_data_shape()
if data.shape != shape:
raise HeaderDataError('Data should be shape (%s)' %
', '.join(str(s) for s in shape))
out_dtype = hdr.get_data_dtype()
offset = hdr.get_data_offset()
try:
fileobj.seek(offset)
except IOError, msg:
if fileobj.tell() != offset:
raise IOError(msg)
if divslope is None: # No valid data
fileobj.write('\x00' * (data.size*out_dtype.itemsize))
return
array_to_file(data, out_dtype, fileobj, intercept, divslope,
mn, mx)
def adapt_header(hdr, data):
''' Calculate scaling for data, set into header, return scaling
Check that the data can be sensibly adapted to this header data
    dtype. If the header type does not support useful scaling to allow
this, raise a HeaderTypeError.
Parameters
----------
hdr : header
header to match data to. The header may be adapted in-place
data : array-like
array of data for which to calculate scaling etc
Returns
-------
divslope : None or scalar
divisor for data, after subtracting intercept. If None, then
there are no valid data
intercept : None or scalar
number to subtract from data before writing.
mn : None or scalar
data minimum to write, None means use data minimum
mx : None or scalar
data maximum to write, None means use data maximum
Examples
--------
>>> from nipy.io.imageformats.analyze import AnalyzeHeader
>>> hdr = AnalyzeHeader()
>>> hdr.set_data_dtype(np.float32)
>>> data = np.arange(6, dtype=np.float32).reshape(1,2,3)
>>> adapt_header(hdr, data)
(1.0, 0.0, None, None)
>>> hdr.set_data_dtype(np.int16)
>>> adapt_header(hdr, data) # The Analyze header cannot scale
Traceback (most recent call last):
...
HeaderTypeError: Cannot cast data to header dtype without large potential loss in precision
'''
data = np.asarray(data)
out_dtype = hdr.get_data_dtype()
if not can_cast(data.dtype.type,
out_dtype.type,
hdr.has_data_intercept,
hdr.has_data_slope):
raise HeaderTypeError('Cannot cast data to header dtype without'
' large potential loss in precision')
if not hdr.has_data_slope:
return 1.0, 0.0, None, None
slope, inter, mn, mx = calculate_scale(
data,
out_dtype,
hdr.has_data_intercept)
if slope is None:
hdr.set_slope_inter(1.0, 0.0)
else:
hdr.set_slope_inter(slope, inter)
return slope, inter, mn, mx
def write_scaled_data(hdr, data, fileobj):
''' Write data to ``fileobj`` with best data match to ``hdr`` dtype
This is a convenience function that modifies the header as well as
writing the data to file. Because it modifies the header, it is not
very useful for general image writing, where you often need to first
write the header, then the image.
Parameters
----------
data : array-like
data to write; should match header defined shape
fileobj : file-like object
Object with file interface, implementing ``write`` and ``seek``
Returns
-------
None
Examples
--------
>>> from nipy.io.imageformats.analyze import AnalyzeHeader
>>> hdr = AnalyzeHeader()
>>> hdr.set_data_shape((1, 2, 3))
>>> hdr.set_data_dtype(np.float64)
>>> from StringIO import StringIO
>>> str_io = StringIO()
>>> data = np.arange(6).reshape(1,2,3)
>>> write_scaled_data(hdr, data, str_io)
>>> data.astype(np.float64).tostring('F') == str_io.getvalue()
True
'''
slope, inter, mn, mx = adapt_header(hdr, data)
write_data(hdr, data, fileobj, inter, slope, mn, mx)
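# ---------------------------------------------------------------------------
# Editor's note: hedged round-trip sketch, not part of the original module.
# It chains the doctests above: adapt a header to the data, write the scaled
# data, then read it back.  StringIO is used (rather than io.BytesIO) because
# this module targets Python 2, as its doctests do.
def _example_roundtrip():
    from nipy.io.imageformats.analyze import AnalyzeHeader
    from StringIO import StringIO
    hdr = AnalyzeHeader()
    hdr.set_data_shape((1, 2, 3))
    hdr.set_data_dtype(np.float64)
    data = np.arange(6).reshape(1, 2, 3)
    fobj = StringIO()
    write_scaled_data(hdr, data, fobj)   # sets slope/intercept, then writes
    fobj.seek(0)
    back = read_data(hdr, fobj)          # re-applies any slope/intercept
    return np.allclose(back, data)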
| 32.521552
| 95
| 0.616302
|
''' Ufunc-like functions operating on Analyze headers '''
import numpy as np
from volumeutils import array_from_file, array_to_file, \
HeaderDataError, HeaderTypeError, \
calculate_scale, can_cast
def read_unscaled_data(hdr, fileobj):
''' Read raw (unscaled) data from ``fileobj``
Parameters
----------
hdr : header
analyze-like header implementing ``get_data_dtype``,
``get_data_shape`` and ``get_data_offset``.
fileobj : file-like
Must be open, and implement ``read`` and ``seek`` methods
Returns
-------
arr : array-like
an array like object (that might be an ndarray),
implementing at least slicing.
'''
dtype = hdr.get_data_dtype()
shape = hdr.get_data_shape()
offset = hdr.get_data_offset()
return array_from_file(shape, dtype, fileobj, offset)
def read_data(hdr, fileobj):
''' Read data from ``fileobj`` given ``hdr``
Parameters
----------
hdr : header
analyze-like header implementing ``get_slope_inter`` and
requirements for ``read_unscaled_data``
fileobj : file-like
Must be open, and implement ``read`` and ``seek`` methods
Returns
-------
arr : array-like
an array like object (that might be an ndarray),
implementing at least slicing.
'''
slope, inter = hdr.get_slope_inter()
data = read_unscaled_data(hdr, fileobj)
if slope is None:
return data
if slope:
if slope !=1.0:
try:
data *= slope
except ValueError:
data = data * slope
if inter:
try:
data += inter
except ValueError:
data = data + inter
return data
def write_data(hdr, data, fileobj,
intercept=0.0,
divslope=1.0,
mn=None,
mx=None):
''' Write ``data`` to ``fileobj`` coercing to header dtype
Parameters
----------
hdr : header
header object implementing ``get_data_dtype``, ``get_data_shape``
and ``get_data_offset``.
data : array-like
data to write; should match header defined shape. Data is
coerced to dtype matching header by simple ``astype``.
fileobj : file-like object
Object with file interface, implementing ``write`` and ``seek``
intercept : scalar, optional
scalar to subtract from data, before dividing by ``divslope``.
Default is 0.0
divslope : None or scalar, optional
scalefactor to *divide* data by before writing. Default
is 1.0. If None, image has no valid data, zeros are written
mn : scalar, optional
minimum threshold in (unscaled) data, such that all data below
this value are set to this value. Default is None (no threshold)
mx : scalar, optional
maximum threshold in (unscaled) data, such that all data above
this value are set to this value. Default is None (no threshold)
Examples
--------
>>> from nipy.io.imageformats.analyze import AnalyzeHeader
>>> hdr = AnalyzeHeader()
>>> hdr.set_data_shape((1, 2, 3))
>>> hdr.set_data_dtype(np.float64)
>>> from StringIO import StringIO
>>> str_io = StringIO()
>>> data = np.arange(6).reshape(1,2,3)
>>> write_data(hdr, data, str_io)
>>> data.astype(np.float64).tostring('F') == str_io.getvalue()
True
We check the data shape
>>> write_data(hdr, data.reshape(3,2,1), str_io)
Traceback (most recent call last):
...
HeaderDataError: Data should be shape (1, 2, 3)
'''
data = np.asarray(data)
shape = hdr.get_data_shape()
if data.shape != shape:
raise HeaderDataError('Data should be shape (%s)' %
', '.join(str(s) for s in shape))
out_dtype = hdr.get_data_dtype()
offset = hdr.get_data_offset()
try:
fileobj.seek(offset)
except IOError, msg:
if fileobj.tell() != offset:
raise IOError(msg)
    if divslope is None:
        fileobj.write('\x00' * (data.size*out_dtype.itemsize))
return
array_to_file(data, out_dtype, fileobj, intercept, divslope,
mn, mx)
def adapt_header(hdr, data):
''' Calculate scaling for data, set into header, return scaling
Check that the data can be sensibly adapted to this header data
    dtype. If the header type does not support useful scaling to allow
this, raise a HeaderTypeError.
Parameters
----------
hdr : header
header to match data to. The header may be adapted in-place
data : array-like
array of data for which to calculate scaling etc
Returns
-------
divslope : None or scalar
divisor for data, after subtracting intercept. If None, then
there are no valid data
intercept : None or scalar
number to subtract from data before writing.
mn : None or scalar
data minimum to write, None means use data minimum
mx : None or scalar
data maximum to write, None means use data maximum
Examples
--------
>>> from nipy.io.imageformats.analyze import AnalyzeHeader
>>> hdr = AnalyzeHeader()
>>> hdr.set_data_dtype(np.float32)
>>> data = np.arange(6, dtype=np.float32).reshape(1,2,3)
>>> adapt_header(hdr, data)
(1.0, 0.0, None, None)
>>> hdr.set_data_dtype(np.int16)
>>> adapt_header(hdr, data) # The Analyze header cannot scale
Traceback (most recent call last):
...
HeaderTypeError: Cannot cast data to header dtype without large potential loss in precision
'''
data = np.asarray(data)
out_dtype = hdr.get_data_dtype()
if not can_cast(data.dtype.type,
out_dtype.type,
hdr.has_data_intercept,
hdr.has_data_slope):
raise HeaderTypeError('Cannot cast data to header dtype without'
' large potential loss in precision')
if not hdr.has_data_slope:
return 1.0, 0.0, None, None
slope, inter, mn, mx = calculate_scale(
data,
out_dtype,
hdr.has_data_intercept)
if slope is None:
hdr.set_slope_inter(1.0, 0.0)
else:
hdr.set_slope_inter(slope, inter)
return slope, inter, mn, mx
def write_scaled_data(hdr, data, fileobj):
''' Write data to ``fileobj`` with best data match to ``hdr`` dtype
This is a convenience function that modifies the header as well as
writing the data to file. Because it modifies the header, it is not
very useful for general image writing, where you often need to first
write the header, then the image.
Parameters
----------
data : array-like
data to write; should match header defined shape
fileobj : file-like object
Object with file interface, implementing ``write`` and ``seek``
Returns
-------
None
Examples
--------
>>> from nipy.io.imageformats.analyze import AnalyzeHeader
>>> hdr = AnalyzeHeader()
>>> hdr.set_data_shape((1, 2, 3))
>>> hdr.set_data_dtype(np.float64)
>>> from StringIO import StringIO
>>> str_io = StringIO()
>>> data = np.arange(6).reshape(1,2,3)
>>> write_scaled_data(hdr, data, str_io)
>>> data.astype(np.float64).tostring('F') == str_io.getvalue()
True
'''
slope, inter, mn, mx = adapt_header(hdr, data)
write_data(hdr, data, fileobj, inter, slope, mn, mx)
| false
| true
|
f701de2c1d23a64551fa8d5ea8b116d1515678bb
| 1,407
|
py
|
Python
|
generator.py
|
akshay482/Discord-Token-Bruteforcer-Helper
|
b51845b145f8de867eb9e5ee152829803d104807
|
[
"MIT"
] | 7
|
2021-05-17T22:36:39.000Z
|
2021-11-29T13:00:28.000Z
|
generator.py
|
akshay482/Discord-Token-Bruteforcer-Helper
|
b51845b145f8de867eb9e5ee152829803d104807
|
[
"MIT"
] | 4
|
2021-03-11T22:52:47.000Z
|
2022-03-18T20:10:57.000Z
|
generator.py
|
akshay482/Discord-Token-Bruteforcer-Helper
|
b51845b145f8de867eb9e5ee152829803d104807
|
[
"MIT"
] | 21
|
2021-03-21T13:07:12.000Z
|
2022-03-27T14:50:33.000Z
|
import ctypes, os, threading, strgen, base64
tokenid = "4030200023"
class Discord:
def __init__(self):
self.regularExpression = ".([a-zA-Z0-9]{6})\.([a-zA-Z0-9]{27})" # This is the regular expression for discord.
self.generated = 0
def generate(self):
discordToken = strgen.StringGenerator(self.regularExpression).render()
discordToken = discordToken.replace("..", ".")
discordToken = str(id) + discordToken
print(discordToken)
self.generated += 1
self.write(discordToken)
self.title()
def new_method(self):
return self.regularExpression
def write(self, discordToken):
if os.path.isfile("./tokens.txt"):
writeToken = open("./tokens.txt", "a")
writeToken.write(f"{discordToken}\n")
else:
open("./tokens.txt", "w").close() # Simply create the file.
def title(self):
ctypes.windll.kernel32.SetConsoleTitleW(f"Discord Token Bruteforcer - Calastrophe#5752: {self.generated}")
open("./tokens.txt", "w").close() # Create and clear our token file each time
token = Discord()
amountToGen = int(input("Enter amount of tokens to generate: "))
id = base64.b64encode((input("Enter ID: ")).encode("ascii"))
id = str(id)[2:-1]
for _ in range(amountToGen):
threading.Thread(target=token.generate).start()
| 34.317073
| 118
| 0.618337
|
import ctypes, os, threading, strgen, base64
tokenid = "4030200023"
class Discord:
def __init__(self):
self.regularExpression = ".([a-zA-Z0-9]{6})\.([a-zA-Z0-9]{27})" self.generated = 0
def generate(self):
discordToken = strgen.StringGenerator(self.regularExpression).render()
discordToken = discordToken.replace("..", ".")
discordToken = str(id) + discordToken
print(discordToken)
self.generated += 1
self.write(discordToken)
self.title()
def new_method(self):
return self.regularExpression
def write(self, discordToken):
if os.path.isfile("./tokens.txt"):
writeToken = open("./tokens.txt", "a")
writeToken.write(f"{discordToken}\n")
else:
open("./tokens.txt", "w").close()
def title(self):
ctypes.windll.kernel32.SetConsoleTitleW(f"Discord Token Bruteforcer - Calastrophe#5752: {self.generated}")
open("./tokens.txt", "w").close() token = Discord()
amountToGen = int(input("Enter amount of tokens to generate: "))
id = base64.b64encode((input("Enter ID: ")).encode("ascii"))
id = str(id)[2:-1]
for _ in range(amountToGen):
threading.Thread(target=token.generate).start()
| true
| true
|
f701de88c351c8b4d1ced5a28d63e4d979a67525
| 164
|
py
|
Python
|
library/forms.py
|
Elcaveman/Universias
|
252a4bdaabbaa723d5afa9a070efd33c5d37ec87
|
[
"MIT"
] | null | null | null |
library/forms.py
|
Elcaveman/Universias
|
252a4bdaabbaa723d5afa9a070efd33c5d37ec87
|
[
"MIT"
] | 8
|
2021-03-19T03:06:36.000Z
|
2022-01-13T02:41:19.000Z
|
library/forms.py
|
Elcaveman/Universias
|
252a4bdaabbaa723d5afa9a070efd33c5d37ec87
|
[
"MIT"
] | null | null | null |
from django import forms
from .models import Post
class PostForm(forms.ModelForm):
class Meta:
model = Post
exclude = ('timestamp' ,'owner')
| 16.4
| 40
| 0.652439
|
from django import forms
from .models import Post
class PostForm(forms.ModelForm):
class Meta:
model = Post
exclude = ('timestamp' ,'owner')
| true
| true
|
f701df068b2f136d27f5c1dacf5a101cbdf4bc52
| 2,388
|
py
|
Python
|
search_tool.py
|
PabloEmidio/Findout-in-Comment
|
d57fab1e9333290366619f3fb0542708af53234c
|
[
"MIT"
] | 7
|
2021-05-28T11:23:10.000Z
|
2021-11-16T11:09:18.000Z
|
search_tool.py
|
PabloEmidio/Findout-in-Comment
|
d57fab1e9333290366619f3fb0542708af53234c
|
[
"MIT"
] | null | null | null |
search_tool.py
|
PabloEmidio/Findout-in-Comment
|
d57fab1e9333290366619f3fb0542708af53234c
|
[
"MIT"
] | 1
|
2021-08-02T02:45:08.000Z
|
2021-08-02T02:45:08.000Z
|
import parsel, requests, asyncio, re
from typing import List
class InComment:
def __init__(self, optional_words: List[str]=[], remove_words: List[str]=[]) -> None:
self.might_sensitive_words = [
'user',
'password',
'import',
'login',
'.php',
'file',
'release',
'version',
'make',
'replace',
'called',
'test',
'debug',
'see',
'by',
'tag'
]
[self.might_sensitive_words.append(f'O: {word}') for word in optional_words]
[self.might_sensitive_words.remove(word) for word in remove_words if word in self.might_sensitive_words]
@staticmethod
async def _search(url: str)->str:
return requests.get(url, headers={'User-Agent': 'Mozilla'}).text
@staticmethod
def _check_sensitive_level(comment: str, by_optional_word: bool=False)->dict:
high = ['password', 'user', 'login', 'import', 'make']
medium = ['replace', '.php', 'file', 'by', 'release', 'version']
if by_optional_word:
return {'optional': comment}
elif any(string in comment for string in high):
return {'high': comment}
elif any(string in comment for string in medium):
return {'medium': comment}
else:
return {'low': comment}
@classmethod
async def _get_comments(cls, url: str, is_local: bool)->List[str]:
html_struct = await cls._search(url) if not is_local else open(url, 'r').read()
element = parsel.Selector(html_struct)
return element.xpath('//comment()').getall()
def return_might_sensitive_comments(self, url: str, is_local: bool, return_tags: bool=False)->List[dict]:
comments: List[str] = asyncio.run(self._get_comments(url, is_local))
for comment in comments:
if not re.match('<[^>]*>', comment.replace('<!--', '').replace('-->', '')) or return_tags:
for might_sensitive_word in self.might_sensitive_words:
if might_sensitive_word.replace('O: ', '').lower() in comment.lower() and 'input' not in comment.lower():
yield self._check_sensitive_level(comment, by_optional_word='O: ' in might_sensitive_word)
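# ---------------------------------------------------------------------------
# Editor's note: hedged usage sketch, not part of the original module.  It
# audits a locally saved copy of one of your own pages ('page.html' is a
# placeholder path); is_local=True keeps the run offline, so no HTTP request
# is made.
def _example_scan_local_page():
    scanner = InComment(optional_words=['todo'], remove_words=['see'])
    findings = list(
        scanner.return_might_sensitive_comments('page.html', is_local=True)
    )
    for finding in findings:
        # Each finding is a one-item dict: {'high'|'medium'|'low'|'optional': comment}
        level, comment = next(iter(finding.items()))
        print(level, comment.strip())
    return findings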
| 37.904762
| 125
| 0.567839
|
import parsel, requests, asyncio, re
from typing import List
class InComment:
def __init__(self, optional_words: List[str]=[], remove_words: List[str]=[]) -> None:
self.might_sensitive_words = [
'user',
'password',
'import',
'login',
'.php',
'file',
'release',
'version',
'make',
'replace',
'called',
'test',
'debug',
'see',
'by',
'tag'
]
[self.might_sensitive_words.append(f'O: {word}') for word in optional_words]
[self.might_sensitive_words.remove(word) for word in remove_words if word in self.might_sensitive_words]
@staticmethod
async def _search(url: str)->str:
return requests.get(url, headers={'User-Agent': 'Mozilla'}).text
@staticmethod
def _check_sensitive_level(comment: str, by_optional_word: bool=False)->dict:
high = ['password', 'user', 'login', 'import', 'make']
medium = ['replace', '.php', 'file', 'by', 'release', 'version']
if by_optional_word:
return {'optional': comment}
elif any(string in comment for string in high):
return {'high': comment}
elif any(string in comment for string in medium):
return {'medium': comment}
else:
return {'low': comment}
@classmethod
async def _get_comments(cls, url: str, is_local: bool)->List[str]:
html_struct = await cls._search(url) if not is_local else open(url, 'r').read()
element = parsel.Selector(html_struct)
return element.xpath('//comment()').getall()
def return_might_sensitive_comments(self, url: str, is_local: bool, return_tags: bool=False)->List[dict]:
comments: List[str] = asyncio.run(self._get_comments(url, is_local))
for comment in comments:
if not re.match('<[^>]*>', comment.replace('<!--', '').replace('-->', '')) or return_tags:
for might_sensitive_word in self.might_sensitive_words:
if might_sensitive_word.replace('O: ', '').lower() in comment.lower() and 'input' not in comment.lower():
yield self._check_sensitive_level(comment, by_optional_word='O: ' in might_sensitive_word)
| true
| true
|
f701df2d6c3bb3e37f1bcdb97765f18418e63e7b
| 2,302
|
py
|
Python
|
homeassistant/components/supervisord/sensor.py
|
mikan-megane/core
|
837220cce40890e296920d33a623adbc11bd15a6
|
[
"Apache-2.0"
] | 11
|
2018-02-16T15:35:47.000Z
|
2020-01-14T15:20:00.000Z
|
homeassistant/components/supervisord/sensor.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 79
|
2020-07-23T07:13:37.000Z
|
2022-03-22T06:02:37.000Z
|
homeassistant/components/supervisord/sensor.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 6
|
2018-02-04T03:48:55.000Z
|
2022-01-24T20:37:04.000Z
|
"""Sensor for Supervisord process status."""
import logging
import xmlrpc.client
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_URL
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_DESCRIPTION = "description"
ATTR_GROUP = "group"
DEFAULT_URL = "http://localhost:9001/RPC2"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_URL, default=DEFAULT_URL): cv.url}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Supervisord platform."""
url = config.get(CONF_URL)
try:
supervisor_server = xmlrpc.client.ServerProxy(url)
processes = supervisor_server.supervisor.getAllProcessInfo()
except ConnectionRefusedError:
_LOGGER.error("Could not connect to Supervisord")
return False
add_entities(
[SupervisorProcessSensor(info, supervisor_server) for info in processes], True
)
class SupervisorProcessSensor(SensorEntity):
"""Representation of a supervisor-monitored process."""
def __init__(self, info, server):
"""Initialize the sensor."""
self._info = info
self._server = server
self._available = True
@property
def name(self):
"""Return the name of the sensor."""
return self._info.get("name")
@property
def state(self):
"""Return the state of the sensor."""
return self._info.get("statename")
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._available
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_DESCRIPTION: self._info.get("description"),
ATTR_GROUP: self._info.get("group"),
}
def update(self):
"""Update device state."""
try:
self._info = self._server.supervisor.getProcessInfo(
self._info.get("group") + ":" + self._info.get("name")
)
self._available = True
except ConnectionRefusedError:
_LOGGER.warning("Supervisord not available")
self._available = False
| 28.775
| 86
| 0.662467
|
import logging
import xmlrpc.client
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_URL
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_DESCRIPTION = "description"
ATTR_GROUP = "group"
DEFAULT_URL = "http://localhost:9001/RPC2"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_URL, default=DEFAULT_URL): cv.url}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
url = config.get(CONF_URL)
try:
supervisor_server = xmlrpc.client.ServerProxy(url)
processes = supervisor_server.supervisor.getAllProcessInfo()
except ConnectionRefusedError:
_LOGGER.error("Could not connect to Supervisord")
return False
add_entities(
[SupervisorProcessSensor(info, supervisor_server) for info in processes], True
)
class SupervisorProcessSensor(SensorEntity):
def __init__(self, info, server):
self._info = info
self._server = server
self._available = True
@property
def name(self):
return self._info.get("name")
@property
def state(self):
return self._info.get("statename")
@property
def available(self):
return self._available
@property
def extra_state_attributes(self):
return {
ATTR_DESCRIPTION: self._info.get("description"),
ATTR_GROUP: self._info.get("group"),
}
def update(self):
try:
self._info = self._server.supervisor.getProcessInfo(
self._info.get("group") + ":" + self._info.get("name")
)
self._available = True
except ConnectionRefusedError:
_LOGGER.warning("Supervisord not available")
self._available = False
| true
| true
|
f701df6b7161b3abc24aeed5c7a185b8b3c69e8c
| 2,342
|
py
|
Python
|
app/api/routes/transfer.py
|
cPoolChia/ChiaAutoplotter-Backend
|
8d875c1f846df395ddc76e3d84b36da45ad4d557
|
[
"MIT"
] | 7
|
2021-06-01T09:20:34.000Z
|
2021-10-12T07:24:04.000Z
|
app/api/routes/transfer.py
|
cPoolChia/ChiaFarmerManager-Backend
|
8d875c1f846df395ddc76e3d84b36da45ad4d557
|
[
"MIT"
] | null | null | null |
app/api/routes/transfer.py
|
cPoolChia/ChiaFarmerManager-Backend
|
8d875c1f846df395ddc76e3d84b36da45ad4d557
|
[
"MIT"
] | 1
|
2021-05-31T13:08:14.000Z
|
2021-05-31T13:08:14.000Z
|
from app.schemas import filtration
from typing import Any
from uuid import UUID, uuid4
import celery
from datetime import datetime, timedelta
from app import crud, models, schemas
from app.celery import celery as celery_app
from app.api import deps
from app.core.config import settings
from app.utils import auth
from app.core import tasks
from fastapi import APIRouter, Depends, HTTPException, Body, Query
from sqlalchemy.orm import Session
from fastapi_utils.cbv import cbv
from fastapi_utils.inferring_router import InferringRouter
from fastapi_utils.tasks import repeat_every
from app.api.routes.base import BaseAuthCBV
from app.db.session import DatabaseSession
router = InferringRouter()
# @router.on_event("startup")
# @repeat_every(seconds=60, raise_exceptions=True)
# def scan_queues_on_servers() -> None:
# db = DatabaseSession()
# for plot_queue in crud.plot_queue.get_multi(db)[1]:
# tasks.scan_plotting.delay(plot_queue.id)
# db.close()
@cbv(router)
class TransferCBV(BaseAuthCBV):
@router.post("/")
def create_transfer(self, data: schemas.TransferCreate) -> schemas.TransferReturn:
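        # Validate first: the plot must exist and must not be plotting or pending,
        # and the destination directory must exist, before the transfer is recorded.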
plot = crud.plot.get(self.db, id=data.plot_id)
if plot is None:
raise HTTPException(404, "Plot with such id is not found")
if plot.status in [schemas.PlotStatus.PLOTING, schemas.PlotStatus.PENDING]:
raise HTTPException(403, "Can not transfer plotting and pending plots")
start_dir = plot.located_directory
dest_dir = crud.directory.get(self.db, id=data.destination_directory_id)
if dest_dir is None:
raise HTTPException(404, "Directory with such id is not found")
data_extended = schemas.TransferCreateExtended(
**data.dict(), starting_directory_id=start_dir.id
)
transfer = crud.transfer.create(self.db, obj_in=data_extended)
return schemas.TransferReturn.from_orm(transfer)
@router.get("/")
def get_transfers_table(
self,
filtration: schemas.FilterData[models.Transfer] = Depends(
deps.get_filtration_data(models.Transfer)
),
) -> schemas.Table[schemas.TransferReturn]:
amount, items = crud.transfer.get_multi(self.db, filtration=filtration)
return schemas.Table[schemas.TransferReturn](amount=amount, items=items)
| 37.174603
| 86
| 0.727583
|
from app.schemas import filtration
from typing import Any
from uuid import UUID, uuid4
import celery
from datetime import datetime, timedelta
from app import crud, models, schemas
from app.celery import celery as celery_app
from app.api import deps
from app.core.config import settings
from app.utils import auth
from app.core import tasks
from fastapi import APIRouter, Depends, HTTPException, Body, Query
from sqlalchemy.orm import Session
from fastapi_utils.cbv import cbv
from fastapi_utils.inferring_router import InferringRouter
from fastapi_utils.tasks import repeat_every
from app.api.routes.base import BaseAuthCBV
from app.db.session import DatabaseSession
router = InferringRouter()
@cbv(router)
class TransferCBV(BaseAuthCBV):
@router.post("/")
def create_transfer(self, data: schemas.TransferCreate) -> schemas.TransferReturn:
plot = crud.plot.get(self.db, id=data.plot_id)
if plot is None:
raise HTTPException(404, "Plot with such id is not found")
if plot.status in [schemas.PlotStatus.PLOTING, schemas.PlotStatus.PENDING]:
raise HTTPException(403, "Can not transfer plotting and pending plots")
start_dir = plot.located_directory
dest_dir = crud.directory.get(self.db, id=data.destination_directory_id)
if dest_dir is None:
raise HTTPException(404, "Directory with such id is not found")
data_extended = schemas.TransferCreateExtended(
**data.dict(), starting_directory_id=start_dir.id
)
transfer = crud.transfer.create(self.db, obj_in=data_extended)
return schemas.TransferReturn.from_orm(transfer)
@router.get("/")
def get_transfers_table(
self,
filtration: schemas.FilterData[models.Transfer] = Depends(
deps.get_filtration_data(models.Transfer)
),
) -> schemas.Table[schemas.TransferReturn]:
amount, items = crud.transfer.get_multi(self.db, filtration=filtration)
return schemas.Table[schemas.TransferReturn](amount=amount, items=items)
| true
| true
|
f701dfb05b4e2b1468d4835338476d9029415eab
| 1,686
|
py
|
Python
|
src/get_5_five_letter_words.py
|
Masa-Shin/make-4-five-letter-words-without-repetition
|
102de6a0eaaaa59086e351b7af44861ad63dcce6
|
[
"MIT"
] | 3
|
2022-02-02T22:15:27.000Z
|
2022-02-04T13:01:00.000Z
|
src/get_5_five_letter_words.py
|
Masa-Shin/make-4-five-letter-words-without-repetition-
|
102de6a0eaaaa59086e351b7af44861ad63dcce6
|
[
"MIT"
] | null | null | null |
src/get_5_five_letter_words.py
|
Masa-Shin/make-4-five-letter-words-without-repetition-
|
102de6a0eaaaa59086e351b7af44861ad63dcce6
|
[
"MIT"
] | 1
|
2022-02-17T20:46:41.000Z
|
2022-02-17T20:46:41.000Z
|
import re
import time
import json
import numpy as np
from collections import Counter
from utilities.utilities import VOWELS, LETTERS, get_vowel_count, get_available_words, log_list
start = time.time()
# Open the answer word list
with open('data/answer-word-list.txt', mode='r') as f:
answer_word_list = f.read().split('\n')
# Open the valid (guessable) word list
with open('data/valid-word-list.txt', mode='r') as f:
valid_word_list = f.read().split('\n')
valid_word_list += answer_word_list
# can only use each letter once
word_list = [word for word in valid_word_list if len(set(word)) == 5]
log_list(word_list, "word_list")
result = []
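# Nest five loops, narrowing the candidate list with get_available_words at each level
# so every resulting quintet consists of five five-letter words with no shared letters.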
for word_1 in word_list:
word_list_for_word_2 = get_available_words(
word_list, list(word_1))
for i_2, word_2 in enumerate(word_list_for_word_2):
word_list_for_word_3 = get_available_words(
word_list_for_word_2[i_2+1:], list(word_2))
for i_3, word_3 in enumerate(word_list_for_word_3):
word_list_for_word_4 = get_available_words(
word_list_for_word_3[i_3+1:], list(word_3))
for i_4, word_4 in enumerate(word_list_for_word_4):
word_list_for_word_5 = get_available_words(
word_list_for_word_4[i_4+1:], list(word_4))
print([word_1, word_2, word_3, word_4])
                for word_5 in word_list_for_word_5:
words = [word_1, word_2, word_3, word_4, word_5]
result.append(sorted(words))
log_list(result, "results are")
elapsed_time = time.time() - start
print("elapsed_time: {0}".format(elapsed_time))
with open('power_quintet.txt', 'w') as f:
f.write(json.dumps(result))
| 31.222222
| 95
| 0.675563
|
import re
import time
import json
import numpy as np
from collections import Counter
from utilities.utilities import VOWELS, LETTERS, get_vowel_count, get_available_words, log_list
start = time.time()
with open('data/answer-word-list.txt', mode='r') as f:
answer_word_list = f.read().split('\n')
with open('data/valid-word-list.txt', mode='r') as f:
valid_word_list = f.read().split('\n')
valid_word_list += answer_word_list
word_list = [word for word in valid_word_list if len(set(word)) == 5]
log_list(word_list, "word_list")
result = []
for word_1 in word_list:
word_list_for_word_2 = get_available_words(
word_list, list(word_1))
for i_2, word_2 in enumerate(word_list_for_word_2):
word_list_for_word_3 = get_available_words(
word_list_for_word_2[i_2+1:], list(word_2))
for i_3, word_3 in enumerate(word_list_for_word_3):
word_list_for_word_4 = get_available_words(
word_list_for_word_3[i_3+1:], list(word_3))
for i_4, word_4 in enumerate(word_list_for_word_4):
word_list_for_word_5 = get_available_words(
word_list_for_word_4[i_4+1:], list(word_4))
print([word_1, word_2, word_3, word_4])
                for word_5 in word_list_for_word_5:
words = [word_1, word_2, word_3, word_4, word_5]
result.append(sorted(words))
log_list(result, "results are")
elapsed_time = time.time() - start
print("elapsed_time: {0}".format(elapsed_time))
with open('power_quintet.txt', 'w') as f:
f.write(json.dumps(result))
| true
| true
|
f701dfdb741a6485ded2f00f3d7adc9bb8d37b17
| 8,773
|
py
|
Python
|
homeassistant/components/glances/sensor.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 7
|
2019-02-07T14:14:12.000Z
|
2019-07-28T06:56:10.000Z
|
homeassistant/components/glances/sensor.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 47
|
2020-07-23T07:14:33.000Z
|
2022-03-31T06:01:46.000Z
|
homeassistant/components/glances/sensor.py
|
klauern/home-assistant-core
|
c18ba6aec0627e6afb6442c678edb5ff2bb17db6
|
[
"Apache-2.0"
] | 5
|
2020-03-29T00:29:13.000Z
|
2021-09-06T20:58:40.000Z
|
"""Support gathering system information of hosts which are running glances."""
import logging
from homeassistant.const import CONF_NAME, STATE_UNAVAILABLE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DATA_UPDATED, DOMAIN, SENSOR_TYPES
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Glances sensors."""
client = hass.data[DOMAIN][config_entry.entry_id]
name = config_entry.data[CONF_NAME]
dev = []
for sensor_type, sensor_details in SENSOR_TYPES.items():
if not sensor_details[0] in client.api.data:
continue
if sensor_details[0] in client.api.data:
if sensor_details[0] == "fs":
# fs will provide a list of disks attached
for disk in client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
disk["mnt_point"],
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
elif sensor_details[0] == "sensors":
# sensors will provide temp for different devices
for sensor in client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
sensor["label"],
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
elif client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
"",
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
async_add_entities(dev, True)
class GlancesSensor(Entity):
"""Implementation of a Glances sensor."""
def __init__(
self,
glances_data,
name,
sensor_name_prefix,
sensor_name_suffix,
sensor_type,
sensor_details,
):
"""Initialize the sensor."""
self.glances_data = glances_data
self._sensor_name_prefix = sensor_name_prefix
self._sensor_name_suffix = sensor_name_suffix
self._name = name
self.type = sensor_type
self._state = None
self.sensor_details = sensor_details
self.unsub_update = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self._sensor_name_prefix} {self._sensor_name_suffix}"
@property
def unique_id(self):
"""Set unique_id for sensor."""
return f"{self.glances_data.host}-{self.name}"
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self.sensor_details[3]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self.sensor_details[2]
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self.glances_data.available
@property
def state(self):
"""Return the state of the resources."""
return self._state
@property
def should_poll(self):
"""Return the polling requirement for this sensor."""
return False
async def async_added_to_hass(self):
"""Handle entity which will be added."""
self.unsub_update = async_dispatcher_connect(
self.hass, DATA_UPDATED, self._schedule_immediate_update
)
@callback
def _schedule_immediate_update(self):
self.async_schedule_update_ha_state(True)
async def will_remove_from_hass(self):
"""Unsubscribe from update dispatcher."""
if self.unsub_update:
self.unsub_update()
self.unsub_update = None
async def async_update(self):
"""Get the latest data from REST API."""
value = self.glances_data.api.data
if value is None:
return
if value is not None:
if self.sensor_details[0] == "fs":
for var in value["fs"]:
if var["mnt_point"] == self._sensor_name_prefix:
disk = var
break
if self.type == "disk_use_percent":
self._state = disk["percent"]
elif self.type == "disk_use":
self._state = round(disk["used"] / 1024 ** 3, 1)
elif self.type == "disk_free":
try:
self._state = round(disk["free"] / 1024 ** 3, 1)
except KeyError:
self._state = round(
(disk["size"] - disk["used"]) / 1024 ** 3, 1,
)
elif self.type == "sensor_temp":
for sensor in value["sensors"]:
if sensor["label"] == self._sensor_name_prefix:
self._state = sensor["value"]
break
elif self.type == "memory_use_percent":
self._state = value["mem"]["percent"]
elif self.type == "memory_use":
self._state = round(value["mem"]["used"] / 1024 ** 2, 1)
elif self.type == "memory_free":
self._state = round(value["mem"]["free"] / 1024 ** 2, 1)
elif self.type == "swap_use_percent":
self._state = value["memswap"]["percent"]
elif self.type == "swap_use":
self._state = round(value["memswap"]["used"] / 1024 ** 3, 1)
elif self.type == "swap_free":
self._state = round(value["memswap"]["free"] / 1024 ** 3, 1)
elif self.type == "processor_load":
# Windows systems don't provide load details
try:
self._state = value["load"]["min15"]
except KeyError:
self._state = value["cpu"]["total"]
elif self.type == "process_running":
self._state = value["processcount"]["running"]
elif self.type == "process_total":
self._state = value["processcount"]["total"]
elif self.type == "process_thread":
self._state = value["processcount"]["thread"]
elif self.type == "process_sleeping":
self._state = value["processcount"]["sleeping"]
elif self.type == "cpu_use_percent":
self._state = value["quicklook"]["cpu"]
elif self.type == "docker_active":
count = 0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
count += 1
self._state = count
except KeyError:
self._state = count
elif self.type == "docker_cpu_use":
cpu_use = 0.0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
cpu_use += container["cpu"]["total"]
self._state = round(cpu_use, 1)
except KeyError:
self._state = STATE_UNAVAILABLE
elif self.type == "docker_memory_use":
mem_use = 0.0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
mem_use += container["memory"]["usage"]
self._state = round(mem_use / 1024 ** 2, 1)
except KeyError:
self._state = STATE_UNAVAILABLE
| 37.978355
| 84
| 0.493788
|
import logging
from homeassistant.const import CONF_NAME, STATE_UNAVAILABLE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DATA_UPDATED, DOMAIN, SENSOR_TYPES
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
client = hass.data[DOMAIN][config_entry.entry_id]
name = config_entry.data[CONF_NAME]
dev = []
for sensor_type, sensor_details in SENSOR_TYPES.items():
if not sensor_details[0] in client.api.data:
continue
if sensor_details[0] in client.api.data:
if sensor_details[0] == "fs":
for disk in client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
disk["mnt_point"],
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
elif sensor_details[0] == "sensors":
for sensor in client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
sensor["label"],
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
elif client.api.data[sensor_details[0]]:
dev.append(
GlancesSensor(
client,
name,
"",
SENSOR_TYPES[sensor_type][1],
sensor_type,
SENSOR_TYPES[sensor_type],
)
)
async_add_entities(dev, True)
class GlancesSensor(Entity):
def __init__(
self,
glances_data,
name,
sensor_name_prefix,
sensor_name_suffix,
sensor_type,
sensor_details,
):
self.glances_data = glances_data
self._sensor_name_prefix = sensor_name_prefix
self._sensor_name_suffix = sensor_name_suffix
self._name = name
self.type = sensor_type
self._state = None
self.sensor_details = sensor_details
self.unsub_update = None
@property
def name(self):
return f"{self._name} {self._sensor_name_prefix} {self._sensor_name_suffix}"
@property
def unique_id(self):
return f"{self.glances_data.host}-{self.name}"
@property
def icon(self):
return self.sensor_details[3]
@property
def unit_of_measurement(self):
return self.sensor_details[2]
@property
def available(self):
return self.glances_data.available
@property
def state(self):
return self._state
@property
def should_poll(self):
return False
async def async_added_to_hass(self):
self.unsub_update = async_dispatcher_connect(
self.hass, DATA_UPDATED, self._schedule_immediate_update
)
@callback
def _schedule_immediate_update(self):
self.async_schedule_update_ha_state(True)
async def will_remove_from_hass(self):
if self.unsub_update:
self.unsub_update()
self.unsub_update = None
async def async_update(self):
value = self.glances_data.api.data
if value is None:
return
if value is not None:
if self.sensor_details[0] == "fs":
for var in value["fs"]:
if var["mnt_point"] == self._sensor_name_prefix:
disk = var
break
if self.type == "disk_use_percent":
self._state = disk["percent"]
elif self.type == "disk_use":
self._state = round(disk["used"] / 1024 ** 3, 1)
elif self.type == "disk_free":
try:
self._state = round(disk["free"] / 1024 ** 3, 1)
except KeyError:
self._state = round(
(disk["size"] - disk["used"]) / 1024 ** 3, 1,
)
elif self.type == "sensor_temp":
for sensor in value["sensors"]:
if sensor["label"] == self._sensor_name_prefix:
self._state = sensor["value"]
break
elif self.type == "memory_use_percent":
self._state = value["mem"]["percent"]
elif self.type == "memory_use":
self._state = round(value["mem"]["used"] / 1024 ** 2, 1)
elif self.type == "memory_free":
self._state = round(value["mem"]["free"] / 1024 ** 2, 1)
elif self.type == "swap_use_percent":
self._state = value["memswap"]["percent"]
elif self.type == "swap_use":
self._state = round(value["memswap"]["used"] / 1024 ** 3, 1)
elif self.type == "swap_free":
self._state = round(value["memswap"]["free"] / 1024 ** 3, 1)
elif self.type == "processor_load":
try:
self._state = value["load"]["min15"]
except KeyError:
self._state = value["cpu"]["total"]
elif self.type == "process_running":
self._state = value["processcount"]["running"]
elif self.type == "process_total":
self._state = value["processcount"]["total"]
elif self.type == "process_thread":
self._state = value["processcount"]["thread"]
elif self.type == "process_sleeping":
self._state = value["processcount"]["sleeping"]
elif self.type == "cpu_use_percent":
self._state = value["quicklook"]["cpu"]
elif self.type == "docker_active":
count = 0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
count += 1
self._state = count
except KeyError:
self._state = count
elif self.type == "docker_cpu_use":
cpu_use = 0.0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
cpu_use += container["cpu"]["total"]
self._state = round(cpu_use, 1)
except KeyError:
self._state = STATE_UNAVAILABLE
elif self.type == "docker_memory_use":
mem_use = 0.0
try:
for container in value["docker"]["containers"]:
if (
container["Status"] == "running"
or "Up" in container["Status"]
):
mem_use += container["memory"]["usage"]
self._state = round(mem_use / 1024 ** 2, 1)
except KeyError:
self._state = STATE_UNAVAILABLE
| true
| true
|
f701e02276ef30939ff185d5969b6d129bfad693
| 13,265
|
py
|
Python
|
bot.py
|
gsheppar/codec-dashboard
|
232b1a9cae289f22aea52db423932846f6b11d95
|
[
"MIT"
] | 13
|
2018-07-03T07:14:24.000Z
|
2022-02-22T06:39:16.000Z
|
bot.py
|
gsheppar/codec-dashboard
|
232b1a9cae289f22aea52db423932846f6b11d95
|
[
"MIT"
] | null | null | null |
bot.py
|
gsheppar/codec-dashboard
|
232b1a9cae289f22aea52db423932846f6b11d95
|
[
"MIT"
] | 3
|
2017-08-30T14:20:40.000Z
|
2019-11-21T19:58:06.000Z
|
import csv
import datetime
import json
import os
import smtplib
import threading
from flask import request, make_response, Flask, render_template
import config
from codec.actions import get_status, send_survey, send_register, get_last, get_sip, get_people, get_loss, get_diag, send_dial
###############################
# Retrieved from config.py
email_user= config.email_user
email_pwd= config.email_pwd
email_dest = config.email_dest
email_server = config.email_server
path=os.path.abspath(os.curdir)
log = path + "/message_log.txt"
###############################
bot = Flask(__name__)
###############################
#Functions for displaying webpages
def get_rooms():
with open('codec/codec.json') as data_file:
data = json.load(data_file)
return data
def get_surveys():
now = datetime.datetime.now()
now_str = now.strftime("%Y-%m")
surveycsv = 'survey/Feedback-{}.csv'.format(now_str)
surveyjson = 'survey/Feedback-{}.json'.format(now_str)
with open(surveycsv) as f:
reader = csv.DictReader(f, skipinitialspace=True)
rows = list(reader)
with open(surveyjson, 'w') as data_file:
data_file.write(json.dumps(rows, sort_keys=True, indent=4, separators=(',', ': ')))
with open(surveyjson) as data_file:
data = json.load(data_file)
return data
@bot.route('/')
def hello():
return dashboard()
@bot.route('/rooms', methods=['GET','POST'])
def rooms():
if request.method == 'GET':
return render_template('rooms.html')
elif request.method == 'POST':
rooms = get_rooms()
return render_template('rooms.html', rooms=rooms)
@bot.route('/surveys', methods=['GET', 'POST'])
def surveys():
if request.method == 'GET':
return render_template('surveys.html')
elif request.method == 'POST':
        # Check if the survey file for this month exists; create it if missing
check_surveyfile()
surveys = get_surveys()
return render_template('surveys.html', surveys=surveys)
@bot.route('/dashboard', methods=['GET', 'POST'])
def dashboard():
sytemsdown = 0
activecalls = 0
occupiedrooms = 0
diagerrors = "No"
roomnum = 0
videopacketloss = "No"
audiopacketloss = "No"
with open('codec/codec.json') as data_file:
data = json.load(data_file)
for codec in data:
roomnum += 1
if (codec['NetworkAlert'] == "Yes" or codec['SIPAlert'] == "Yes"):
sytemsdown += 1
if (codec['Occupied'] == "Yes"):
occupiedrooms += 1
if (codec['Call'] == "Yes"):
activecalls += 1
if (codec['Diag'] == "Errors"):
diagerrors = "Yes"
if (codec['VideoPacketloss'] == "Yes"):
videopacketloss = "Yes"
if (codec['AudioPacketloss'] == "Yes"):
audiopacketloss = "Yes"
return render_template('dashboard.html', systemsdown=sytemsdown, activecalls=activecalls, occupiedrooms=occupiedrooms, diagerrors=diagerrors, videopacketloss=videopacketloss, audiopacketloss=audiopacketloss, roomnum=roomnum)
@bot.route('/surveygraph', methods=['GET', 'POST'])
def surveygraph():
    #Check if the survey file for this month exists; create it if missing
check_surveyfile()
numexcellent = 0
numgood = 0
numpoor = 0
numnone = 0
now = datetime.datetime.now()
now_str = now.strftime("%Y-%m")
surveyjson = 'survey/Feedback-{}.json'.format(now_str)
with open(surveyjson) as data_file:
data = json.load(data_file)
for codec in data:
if (codec['Quality'] == "Excellent"):
numexcellent += 1
if (codec['Quality'] == "Good"):
numgood += 1
if (codec['Quality'] == "Poor"):
numpoor += 1
if (codec['Quality'] == "No response"):
numnone += 1
return render_template('surveygraph.html', numexcellent=numexcellent, numgood=numgood, numpoor=numpoor, numnone=numnone)
###############################
#Functions for codec feedback
@bot.route('/codec', methods=['POST'])
def receivepostfromcodec():
    #Log all incoming requests to the message log
f = open(log, "a")
f.write("\n")
f.write(request.data)
    #Check if the survey file for this month exists; create it if missing
check_surveyfile()
now = datetime.datetime.now()
now_str = now.strftime("%Y-%m")
surveycsv = 'survey/Feedback-{}.csv'.format(now_str)
# Call Connection and codec check
try:
data = json.loads(request.data)
action = data['Status']['Call'][0]['Status']['Value']
newunit = "Yes"
print("Received status call: {}".format(action))
if action == "Connected":
host = data['Status']['Identification']['IPAddress']['Value']
name = data['Status']['Identification']['SystemName']['Value']
with open('codec/codec.json', 'r') as data_file:
data = json.load(data_file)
for codec in data:
if (codec['SystemName'] == name):
codec["Call"] = "Yes"
codec["IP"] = host
newunit = "No"
if (codec['IP'] == host):
codec["Call"] = "Yes"
codec["SystemName"] = name
newunit = "No"
#Create new codec in codec.json file
if newunit == "Yes":
print "New unit found"
entry = {"Booked": "N/A", "Call": "Yes", "Diag": "None", "DiagAlert": "No", "IP": host,
"NetworkAlert": "No", "Occupied": "No", "AudioPacketloss": "No", "VideoPacketloss": "No", "People": "N/A",
"SIP": "Registered", "SIPAlert": "No", "Status": "Standby",
"SystemName": name}
data.append(entry)
print json.dumps(data)
with open('codec/codec.json', 'w') as data_file:
data_file.write(json.dumps(data, sort_keys=True, indent=4, separators=(',', ': ')))
except Exception as e:
print "Request did not contain any action type: Call Connected"
# Send survey if call disconnects
try:
data = json.loads(request.data)
action = data['Event']['CallDisconnect']['CauseType']['Value']
print("Received status call: {}".format(action))
if (action == "LocalDisconnect" or action == "RemoteDisconnect"):
host = data['Event']['Identification']['IPAddress']['Value']
print send_survey(host)
except Exception as e:
print "Request did not contain any action type: Send Survey"
# Widget clicked
try:
data = json.loads(request.data)
widget = data['Event']['UserInterface']['Extensions']['Widget']['Action']['WidgetId']['Value']
action = data['Event']['UserInterface']['Extensions']['Widget']['Action']['Type']['Value']
host = data['Event']['Identification']['IPAddress']['Value']
if (widget == "widget_1" and action == "clicked"):
send_dial(host)
except Exception as e:
print "Request did not contain any action type: Widget Clicked"
# Survey feedback response
try:
data = json.loads(request.data)
action = data['Event']['UserInterface']['Message']['Prompt']['Response']['FeedbackId']['Value']
ip = data['Event']['Identification']['IPAddress']['Value']
host = data['Event']['Identification']['SystemName']['Value']
booked = "N/A, "
if (action == "1"):
response = data['Event']['UserInterface']['Message']['Prompt']['Response']['OptionId']['Value']
callinfo = get_last(ip)
outFilecsv = open(surveycsv, 'a')
if (response == "1"):
calldetail = host + ", " + "Excellent, " + booked + callinfo
outFilecsv.write("\n")
outFilecsv.write(calldetail)
elif (response == "2"):
calldetail = host + ", " + "Good, " + booked + callinfo
outFilecsv.write("\n")
outFilecsv.write(calldetail)
elif (response == "3"):
calldetail = host + ", " + "Poor, " + booked + callinfo
outFilecsv.write("\n")
outFilecsv.write(calldetail)
else:
calldetail = host + ", " + "No Response, " + booked + callinfo
outFilecsv.write("\n")
outFilecsv.write(calldetail)
outFilecsv.close()
with open('codec/codec.json', 'r') as data_file:
data = json.load(data_file)
for codec in data:
if (codec['IP'] == ip):
codec["Call"] = "No"
with open('codec/codec.json', 'w') as data_file:
data_file.write(json.dumps(data, sort_keys=True, indent=4, separators=(',', ': ')))
return make_response("ok")
except Exception as e:
print "Request did not contain any action type: Receive Survey Feedback"
return make_response("ok")
###############################
#Check if the survey file exists
def check_surveyfile():
now = datetime.datetime.now()
now_str = now.strftime("%Y-%m")
surveycsv = 'survey/Feedback-{}.csv'.format(now_str)
if not os.path.exists(surveycsv):
outFile = open(surveycsv, 'w')
outFile.write(
"SystemName, Quality, Booked, Call Number, Start Time, Duration, In/Out Video Loss, In/Out Audio Loss")
outFile.close()
print "Create new csv survey file"
# Check status of codecs
def check_status():
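    # Re-schedule this poll every 30 seconds, then refresh status, SIP registration,
    # people count, packet loss and diagnostics for every codec in codec.json.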
threading.Timer(30.0, check_status).start()
with open('codec/codec.json', 'r') as data_file:
data = json.load(data_file)
for codec in data:
status = get_status(codec['IP'])
sip = get_sip(codec['IP'])
people = get_people(codec['IP'])
video, audio = get_loss(codec['IP'])
diagstatus = get_diag(codec['IP'])
codec['SIP'] = sip
codec['Status'] = status
codec['People'] = people
codec['VideoPacketloss'] = video
codec['AudioPacketloss'] = audio
# Update call status
if (video == "N/A" and audio == "N/A"):
codec['Call'] = "No"
else:
codec['Call'] = "Yes"
#Update diagstatus
if (diagstatus != "None"):
codec['Diag'] = "Errors"
else:
codec['Diag'] = diagstatus
# Update if occupied
if codec['Status'] == "Off":
codec['Occupied'] = "Yes"
else:
codec['Occupied'] = "No"
# Network and SIP alerts
if (codec['Status'] == "Down" and codec['NetworkAlert'] == "No"):
print "Send email now system down"
codec['NetworkAlert'] = "Yes"
codec['Occupied'] = "Down"
print codec['Occupied']
sub = codec['SystemName'] + " Down"
bod = "System is not reachable at: https://" + codec['IP']
send_email(sub, bod)
elif (codec['Status'] != "Down" and codec['NetworkAlert'] == "Yes"):
print "Send email now system up"
codec['NetworkAlert'] = "No"
codec['Occupied'] = "No"
sub = codec['SystemName'] + " Up"
bod = "System is now reachable at: https://" + codec['IP']
send_email(sub, bod)
elif (codec['SIP'] != "Registered" and codec['SIPAlert'] == "No"):
if codec['NetworkAlert'] == "No":
print "Send email now system is not registered"
codec['SIPAlert'] = "Yes"
sub = codec['SystemName'] + " not registered"
bod = "System is not registered"
send_email(sub, bod)
elif (codec['SIP'] == "Registered" and codec['SIPAlert'] == "Yes"):
print "Send email now system is registered"
codec['SIPAlert'] = "No"
sub = codec['SystemName'] + " is registered"
bod = "System is now registered"
send_email(sub, bod)
with open('codec/codec.json', 'w') as data_file:
data_file.write(json.dumps(data, sort_keys=True, indent=4, separators=(',', ': ')))
# Register all codecs
def codec_register():
with open('codec/codec.json', 'r') as data_file:
data = json.load(data_file)
for codec in data:
print send_register(codec['IP'])
# Email alerts
def send_email(subject, body):
FROM = email_user
TO = email_dest
SUBJECT = subject
TEXT = body
SERVER = email_server
# Prepare actual message
message = """From: %s\nTo: %s\nSubject: %s\n\n%s
""" % (FROM, ", ".join(TO), SUBJECT, TEXT)
try:
# SMTP_SSL Example
server_ssl = smtplib.SMTP_SSL(SERVER, 465)
server_ssl.ehlo() # optional, called by login()
server_ssl.login(email_user, email_pwd)
# ssl server doesn't support or need tls, so don't call server_ssl.starttls()
server_ssl.sendmail(FROM, TO, message)
# server_ssl.quit()
server_ssl.close()
print 'successfully sent the mail'
except:
print "failed to send mail"
check_status()
codec_register()
bot.run(host='0.0.0.0', port=5000)
#if __name__ == "__main__":
# bot.run(ssl_context=('cert.pem', 'key.pem'))
| 38.33815
| 228
| 0.567132
|
import csv
import datetime
import json
import os
import smtplib
import threading
from flask import request, make_response, Flask, render_template
import config
from codec.actions import get_status, send_survey, send_register, get_last, get_sip, get_people, get_loss, get_diag, send_dial
email_user= config.email_user
email_pwd= config.email_pwd
email_dest = config.email_dest
email_server = config.email_server
path=os.path.abspath(os.curdir)
log = path + "/message_log.txt"
bot = Flask(__name__)
def get_rooms():
with open('codec/codec.json') as data_file:
data = json.load(data_file)
return data
def get_surveys():
now = datetime.datetime.now()
now_str = now.strftime("%Y-%m")
surveycsv = 'survey/Feedback-{}.csv'.format(now_str)
surveyjson = 'survey/Feedback-{}.json'.format(now_str)
with open(surveycsv) as f:
reader = csv.DictReader(f, skipinitialspace=True)
rows = list(reader)
with open(surveyjson, 'w') as data_file:
data_file.write(json.dumps(rows, sort_keys=True, indent=4, separators=(',', ': ')))
with open(surveyjson) as data_file:
data = json.load(data_file)
return data
@bot.route('/')
def hello():
return dashboard()
@bot.route('/rooms', methods=['GET','POST'])
def rooms():
if request.method == 'GET':
return render_template('rooms.html')
elif request.method == 'POST':
rooms = get_rooms()
return render_template('rooms.html', rooms=rooms)
@bot.route('/surveys', methods=['GET', 'POST'])
def surveys():
if request.method == 'GET':
return render_template('surveys.html')
elif request.method == 'POST':
check_surveyfile()
surveys = get_surveys()
return render_template('surveys.html', surveys=surveys)
@bot.route('/dashboard', methods=['GET', 'POST'])
def dashboard():
sytemsdown = 0
activecalls = 0
occupiedrooms = 0
diagerrors = "No"
roomnum = 0
videopacketloss = "No"
audiopacketloss = "No"
with open('codec/codec.json') as data_file:
data = json.load(data_file)
for codec in data:
roomnum += 1
if (codec['NetworkAlert'] == "Yes" or codec['SIPAlert'] == "Yes"):
sytemsdown += 1
if (codec['Occupied'] == "Yes"):
occupiedrooms += 1
if (codec['Call'] == "Yes"):
activecalls += 1
if (codec['Diag'] == "Errors"):
diagerrors = "Yes"
if (codec['VideoPacketloss'] == "Yes"):
videopacketloss = "Yes"
if (codec['AudioPacketloss'] == "Yes"):
audiopacketloss = "Yes"
return render_template('dashboard.html', systemsdown=sytemsdown, activecalls=activecalls, occupiedrooms=occupiedrooms, diagerrors=diagerrors, videopacketloss=videopacketloss, audiopacketloss=audiopacketloss, roomnum=roomnum)
@bot.route('/surveygraph', methods=['GET', 'POST'])
def surveygraph():
check_surveyfile()
numexcellent = 0
numgood = 0
numpoor = 0
numnone = 0
now = datetime.datetime.now()
now_str = now.strftime("%Y-%m")
surveyjson = 'survey/Feedback-{}.json'.format(now_str)
with open(surveyjson) as data_file:
data = json.load(data_file)
for codec in data:
if (codec['Quality'] == "Excellent"):
numexcellent += 1
if (codec['Quality'] == "Good"):
numgood += 1
if (codec['Quality'] == "Poor"):
numpoor += 1
if (codec['Quality'] == "No response"):
numnone += 1
return render_template('surveygraph.html', numexcellent=numexcellent, numgood=numgood, numpoor=numpoor, numnone=numnone)
@bot.route('/codec', methods=['POST'])
def receivepostfromcodec():
f = open(log, "a")
f.write("\n")
f.write(request.data)
check_surveyfile()
now = datetime.datetime.now()
now_str = now.strftime("%Y-%m")
surveycsv = 'survey/Feedback-{}.csv'.format(now_str)
try:
data = json.loads(request.data)
action = data['Status']['Call'][0]['Status']['Value']
newunit = "Yes"
print("Received status call: {}".format(action))
if action == "Connected":
host = data['Status']['Identification']['IPAddress']['Value']
name = data['Status']['Identification']['SystemName']['Value']
with open('codec/codec.json', 'r') as data_file:
data = json.load(data_file)
for codec in data:
if (codec['SystemName'] == name):
codec["Call"] = "Yes"
codec["IP"] = host
newunit = "No"
if (codec['IP'] == host):
codec["Call"] = "Yes"
codec["SystemName"] = name
newunit = "No"
if newunit == "Yes":
print "New unit found"
entry = {"Booked": "N/A", "Call": "Yes", "Diag": "None", "DiagAlert": "No", "IP": host,
"NetworkAlert": "No", "Occupied": "No", "AudioPacketloss": "No", "VideoPacketloss": "No", "People": "N/A",
"SIP": "Registered", "SIPAlert": "No", "Status": "Standby",
"SystemName": name}
data.append(entry)
print json.dumps(data)
with open('codec/codec.json', 'w') as data_file:
data_file.write(json.dumps(data, sort_keys=True, indent=4, separators=(',', ': ')))
except Exception as e:
print "Request did not contain any action type: Call Connected"
try:
data = json.loads(request.data)
action = data['Event']['CallDisconnect']['CauseType']['Value']
print("Received status call: {}".format(action))
if (action == "LocalDisconnect" or action == "RemoteDisconnect"):
host = data['Event']['Identification']['IPAddress']['Value']
print send_survey(host)
except Exception as e:
print "Request did not contain any action type: Send Survey"
try:
data = json.loads(request.data)
widget = data['Event']['UserInterface']['Extensions']['Widget']['Action']['WidgetId']['Value']
action = data['Event']['UserInterface']['Extensions']['Widget']['Action']['Type']['Value']
host = data['Event']['Identification']['IPAddress']['Value']
if (widget == "widget_1" and action == "clicked"):
send_dial(host)
except Exception as e:
print "Request did not contain any action type: Widget Clicked"
try:
data = json.loads(request.data)
action = data['Event']['UserInterface']['Message']['Prompt']['Response']['FeedbackId']['Value']
ip = data['Event']['Identification']['IPAddress']['Value']
host = data['Event']['Identification']['SystemName']['Value']
booked = "N/A, "
if (action == "1"):
response = data['Event']['UserInterface']['Message']['Prompt']['Response']['OptionId']['Value']
callinfo = get_last(ip)
outFilecsv = open(surveycsv, 'a')
if (response == "1"):
calldetail = host + ", " + "Excellent, " + booked + callinfo
outFilecsv.write("\n")
outFilecsv.write(calldetail)
elif (response == "2"):
calldetail = host + ", " + "Good, " + booked + callinfo
outFilecsv.write("\n")
outFilecsv.write(calldetail)
elif (response == "3"):
calldetail = host + ", " + "Poor, " + booked + callinfo
outFilecsv.write("\n")
outFilecsv.write(calldetail)
else:
calldetail = host + ", " + "No Response, " + booked + callinfo
outFilecsv.write("\n")
outFilecsv.write(calldetail)
outFilecsv.close()
with open('codec/codec.json', 'r') as data_file:
data = json.load(data_file)
for codec in data:
if (codec['IP'] == ip):
codec["Call"] = "No"
with open('codec/codec.json', 'w') as data_file:
data_file.write(json.dumps(data, sort_keys=True, indent=4, separators=(',', ': ')))
return make_response("ok")
except Exception as e:
print "Request did not contain any action type: Receive Survey Feedback"
return make_response("ok")
def check_surveyfile():
now = datetime.datetime.now()
now_str = now.strftime("%Y-%m")
surveycsv = 'survey/Feedback-{}.csv'.format(now_str)
if not os.path.exists(surveycsv):
outFile = open(surveycsv, 'w')
outFile.write(
"SystemName, Quality, Booked, Call Number, Start Time, Duration, In/Out Video Loss, In/Out Audio Loss")
outFile.close()
print "Create new csv survey file"
def check_status():
threading.Timer(30.0, check_status).start()
with open('codec/codec.json', 'r') as data_file:
data = json.load(data_file)
for codec in data:
status = get_status(codec['IP'])
sip = get_sip(codec['IP'])
people = get_people(codec['IP'])
video, audio = get_loss(codec['IP'])
diagstatus = get_diag(codec['IP'])
codec['SIP'] = sip
codec['Status'] = status
codec['People'] = people
codec['VideoPacketloss'] = video
codec['AudioPacketloss'] = audio
if (video == "N/A" and audio == "N/A"):
codec['Call'] = "No"
else:
codec['Call'] = "Yes"
if (diagstatus != "None"):
codec['Diag'] = "Errors"
else:
codec['Diag'] = diagstatus
if codec['Status'] == "Off":
codec['Occupied'] = "Yes"
else:
codec['Occupied'] = "No"
if (codec['Status'] == "Down" and codec['NetworkAlert'] == "No"):
print "Send email now system down"
codec['NetworkAlert'] = "Yes"
codec['Occupied'] = "Down"
print codec['Occupied']
sub = codec['SystemName'] + " Down"
bod = "System is not reachable at: https://" + codec['IP']
send_email(sub, bod)
elif (codec['Status'] != "Down" and codec['NetworkAlert'] == "Yes"):
print "Send email now system up"
codec['NetworkAlert'] = "No"
codec['Occupied'] = "No"
sub = codec['SystemName'] + " Up"
bod = "System is now reachable at: https://" + codec['IP']
send_email(sub, bod)
elif (codec['SIP'] != "Registered" and codec['SIPAlert'] == "No"):
if codec['NetworkAlert'] == "No":
print "Send email now system is not registered"
codec['SIPAlert'] = "Yes"
sub = codec['SystemName'] + " not registered"
bod = "System is not registered"
send_email(sub, bod)
elif (codec['SIP'] == "Registered" and codec['SIPAlert'] == "Yes"):
print "Send email now system is registered"
codec['SIPAlert'] = "No"
sub = codec['SystemName'] + " is registered"
bod = "System is now registered"
send_email(sub, bod)
with open('codec/codec.json', 'w') as data_file:
data_file.write(json.dumps(data, sort_keys=True, indent=4, separators=(',', ': ')))
def codec_register():
with open('codec/codec.json', 'r') as data_file:
data = json.load(data_file)
for codec in data:
print send_register(codec['IP'])
def send_email(subject, body):
FROM = email_user
TO = email_dest
SUBJECT = subject
TEXT = body
SERVER = email_server
message = """From: %s\nTo: %s\nSubject: %s\n\n%s
""" % (FROM, ", ".join(TO), SUBJECT, TEXT)
try:
server_ssl = smtplib.SMTP_SSL(SERVER, 465)
        server_ssl.ehlo()
        server_ssl.login(email_user, email_pwd)
server_ssl.sendmail(FROM, TO, message)
server_ssl.close()
print 'successfully sent the mail'
except:
print "failed to send mail"
check_status()
codec_register()
bot.run(host='0.0.0.0', port=5000)
| false
| true
|
f701e15de95b18608b398cedc0243f079d79d203
| 8,082
|
py
|
Python
|
dvc/external_repo.py
|
asford/dvc
|
4ed55d00511ea3d9115b76c463e1a466408b11ef
|
[
"Apache-2.0"
] | null | null | null |
dvc/external_repo.py
|
asford/dvc
|
4ed55d00511ea3d9115b76c463e1a466408b11ef
|
[
"Apache-2.0"
] | 81
|
2021-04-13T08:02:09.000Z
|
2022-03-30T16:10:17.000Z
|
dvc/external_repo.py
|
asford/dvc
|
4ed55d00511ea3d9115b76c463e1a466408b11ef
|
[
"Apache-2.0"
] | 2
|
2021-06-14T19:12:25.000Z
|
2021-06-14T19:12:29.000Z
|
import logging
import os
import tempfile
import threading
from contextlib import contextmanager
from typing import Dict
from funcy import retry, wrap_with
from dvc.exceptions import (
FileMissingError,
NoOutputInExternalRepoError,
NoRemoteInExternalRepoError,
NotDvcRepoError,
OutputNotFoundError,
PathMissingError,
)
from dvc.repo import Repo
from dvc.utils import relpath
logger = logging.getLogger(__name__)
@contextmanager
def external_repo(
url, rev=None, for_write=False, cache_dir=None, cache_types=None, **kwargs
):
from dvc.config import NoRemoteError
from dvc.scm.git import Git
logger.debug("Creating external repo %s@%s", url, rev)
path = _cached_clone(url, rev, for_write=for_write)
# Local HEAD points to the tip of whatever branch we first cloned from
# (which may not be the default branch), use origin/HEAD here to get
# the tip of the default branch
rev = rev or "refs/remotes/origin/HEAD"
cache_config = {
"cache": {
"dir": cache_dir or _get_cache_dir(url),
"type": cache_types,
}
}
config = _get_remote_config(url) if os.path.isdir(url) else {}
config.update(cache_config)
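    # Subrepo factory: reuse the shared cache config and, when the URL is a local
    # directory, also pick up the matching subrepo's remote configuration.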
def make_repo(path, **_kwargs):
_config = cache_config.copy()
if os.path.isdir(url):
rel = os.path.relpath(path, _kwargs["scm"].root_dir)
repo_path = os.path.join(url, rel)
_config.update(_get_remote_config(repo_path))
return Repo(path, config=_config, **_kwargs)
root_dir = path if for_write else os.path.realpath(path)
repo_kwargs = dict(
root_dir=root_dir,
url=url,
scm=None if for_write else Git(root_dir),
rev=None if for_write else rev,
config=config,
repo_factory=make_repo,
**kwargs,
)
if "subrepos" not in repo_kwargs:
repo_kwargs["subrepos"] = True
if "uninitialized" not in repo_kwargs:
repo_kwargs["uninitialized"] = True
repo = Repo(**repo_kwargs)
try:
yield repo
except NoRemoteError as exc:
raise NoRemoteInExternalRepoError(url) from exc
except OutputNotFoundError as exc:
if exc.repo is repo:
raise NoOutputInExternalRepoError(
exc.output, repo.root_dir, url
) from exc
raise
except FileMissingError as exc:
raise PathMissingError(exc.path, url) from exc
finally:
repo.close()
if for_write:
_remove(path)
CLONES: Dict[str, str] = {}
CACHE_DIRS: Dict[str, str] = {}
@wrap_with(threading.Lock())
def _get_cache_dir(url):
try:
cache_dir = CACHE_DIRS[url]
except KeyError:
cache_dir = CACHE_DIRS[url] = tempfile.mkdtemp("dvc-cache")
return cache_dir
def clean_repos():
# Outside code should not see cache while we are removing
paths = [path for path, _ in CLONES.values()] + list(CACHE_DIRS.values())
CLONES.clear()
CACHE_DIRS.clear()
for path in paths:
_remove(path)
def _get_remote_config(url):
try:
repo = Repo(url)
except NotDvcRepoError:
return {}
try:
name = repo.config["core"].get("remote")
if not name:
# Fill the empty upstream entry with a new remote pointing to the
# original repo's cache location.
name = "auto-generated-upstream"
return {
"core": {"remote": name},
"remote": {name: {"url": repo.odb.local.cache_dir}},
}
# Use original remote to make sure that we are using correct url,
# credential paths, etc if they are relative to the config location.
return {"remote": {name: repo.config["remote"][name]}}
finally:
repo.close()
def _cached_clone(url, rev, for_write=False):
"""Clone an external git repo to a temporary directory.
Returns the path to a local temporary directory with the specified
    revision checked out. If for_write is set, this directory is not reused
    via the clone cache.
"""
from distutils.dir_util import copy_tree
# even if we have already cloned this repo, we may need to
# fetch/fast-forward to get specified rev
clone_path, shallow = _clone_default_branch(url, rev, for_write=for_write)
if not for_write and (url) in CLONES:
return CLONES[url][0]
# Copy to a new dir to keep the clone clean
repo_path = tempfile.mkdtemp("dvc-erepo")
logger.debug("erepo: making a copy of %s clone", url)
copy_tree(clone_path, repo_path)
# Check out the specified revision
if for_write:
_git_checkout(repo_path, rev)
else:
CLONES[url] = (repo_path, shallow)
return repo_path
@wrap_with(threading.Lock())
def _clone_default_branch(url, rev, for_write=False):
"""Get or create a clean clone of the url.
    The clone is refreshed with git pull unless rev is a known SHA.
"""
from dvc.scm.git import Git
clone_path, shallow = CLONES.get(url, (None, False))
git = None
try:
if clone_path:
git = Git(clone_path)
# Do not pull for known shas, branches and tags might move
if not Git.is_sha(rev) or not git.has_rev(rev):
if shallow:
# If we are missing a rev in a shallow clone, fallback to
# a full (unshallowed) clone. Since fetching specific rev
# SHAs is only available in certain git versions, if we
# have need to reference multiple specific revs for a
# given repo URL it is easier/safer for us to work with
# full clones in this case.
logger.debug("erepo: unshallowing clone for '%s'", url)
_unshallow(git)
shallow = False
CLONES[url] = (clone_path, shallow)
else:
logger.debug("erepo: git pull '%s'", url)
git.pull()
else:
logger.debug("erepo: git clone '%s' to a temporary dir", url)
clone_path = tempfile.mkdtemp("dvc-clone")
if not for_write and rev and not Git.is_sha(rev):
# If rev is a tag or branch name try shallow clone first
from dvc.scm.base import CloneError
try:
git = Git.clone(url, clone_path, shallow_branch=rev)
shallow = True
logger.debug(
"erepo: using shallow clone for branch '%s'", rev
)
except CloneError:
pass
if not git:
git = Git.clone(url, clone_path)
shallow = False
CLONES[url] = (clone_path, shallow)
finally:
if git:
git.close()
return clone_path, shallow
def _unshallow(git):
if git.gitpython.repo.head.is_detached:
# If this is a detached head (i.e. we shallow cloned a tag) switch to
# the default branch
origin_refs = git.gitpython.repo.remotes["origin"].refs
ref = origin_refs["HEAD"].reference
branch_name = ref.name.split("/")[-1]
branch = git.gitpython.repo.create_head(branch_name, ref)
branch.set_tracking_branch(ref)
branch.checkout()
git.pull(unshallow=True)
def _git_checkout(repo_path, rev):
from dvc.scm.git import Git
logger.debug("erepo: git checkout %s@%s", repo_path, rev)
git = Git(repo_path)
try:
git.checkout(rev)
finally:
git.close()
def _remove(path):
from dvc.utils.fs import remove
if os.name == "nt":
# git.exe may hang for a while not permitting to remove temp dir
os_retry = retry(5, errors=OSError, timeout=0.1)
try:
os_retry(remove)(path)
except PermissionError:
logger.warning(
"Failed to remove '%s'", relpath(path), exc_info=True
)
else:
remove(path)
| 30.730038
| 78
| 0.607152
|
import logging
import os
import tempfile
import threading
from contextlib import contextmanager
from typing import Dict
from funcy import retry, wrap_with
from dvc.exceptions import (
FileMissingError,
NoOutputInExternalRepoError,
NoRemoteInExternalRepoError,
NotDvcRepoError,
OutputNotFoundError,
PathMissingError,
)
from dvc.repo import Repo
from dvc.utils import relpath
logger = logging.getLogger(__name__)
@contextmanager
def external_repo(
url, rev=None, for_write=False, cache_dir=None, cache_types=None, **kwargs
):
from dvc.config import NoRemoteError
from dvc.scm.git import Git
logger.debug("Creating external repo %s@%s", url, rev)
path = _cached_clone(url, rev, for_write=for_write)
rev = rev or "refs/remotes/origin/HEAD"
cache_config = {
"cache": {
"dir": cache_dir or _get_cache_dir(url),
"type": cache_types,
}
}
config = _get_remote_config(url) if os.path.isdir(url) else {}
config.update(cache_config)
def make_repo(path, **_kwargs):
_config = cache_config.copy()
if os.path.isdir(url):
rel = os.path.relpath(path, _kwargs["scm"].root_dir)
repo_path = os.path.join(url, rel)
_config.update(_get_remote_config(repo_path))
return Repo(path, config=_config, **_kwargs)
root_dir = path if for_write else os.path.realpath(path)
repo_kwargs = dict(
root_dir=root_dir,
url=url,
scm=None if for_write else Git(root_dir),
rev=None if for_write else rev,
config=config,
repo_factory=make_repo,
**kwargs,
)
if "subrepos" not in repo_kwargs:
repo_kwargs["subrepos"] = True
if "uninitialized" not in repo_kwargs:
repo_kwargs["uninitialized"] = True
repo = Repo(**repo_kwargs)
try:
yield repo
except NoRemoteError as exc:
raise NoRemoteInExternalRepoError(url) from exc
except OutputNotFoundError as exc:
if exc.repo is repo:
raise NoOutputInExternalRepoError(
exc.output, repo.root_dir, url
) from exc
raise
except FileMissingError as exc:
raise PathMissingError(exc.path, url) from exc
finally:
repo.close()
if for_write:
_remove(path)
CLONES: Dict[str, str] = {}
CACHE_DIRS: Dict[str, str] = {}
@wrap_with(threading.Lock())
def _get_cache_dir(url):
try:
cache_dir = CACHE_DIRS[url]
except KeyError:
cache_dir = CACHE_DIRS[url] = tempfile.mkdtemp("dvc-cache")
return cache_dir
def clean_repos():
paths = [path for path, _ in CLONES.values()] + list(CACHE_DIRS.values())
CLONES.clear()
CACHE_DIRS.clear()
for path in paths:
_remove(path)
def _get_remote_config(url):
try:
repo = Repo(url)
except NotDvcRepoError:
return {}
try:
name = repo.config["core"].get("remote")
if not name:
name = "auto-generated-upstream"
return {
"core": {"remote": name},
"remote": {name: {"url": repo.odb.local.cache_dir}},
}
# Use original remote to make sure that we are using correct url,
# credential paths, etc if they are relative to the config location.
return {"remote": {name: repo.config["remote"][name]}}
finally:
repo.close()
def _cached_clone(url, rev, for_write=False):
from distutils.dir_util import copy_tree
# even if we have already cloned this repo, we may need to
# fetch/fast-forward to get specified rev
clone_path, shallow = _clone_default_branch(url, rev, for_write=for_write)
if not for_write and (url) in CLONES:
return CLONES[url][0]
# Copy to a new dir to keep the clone clean
repo_path = tempfile.mkdtemp("dvc-erepo")
logger.debug("erepo: making a copy of %s clone", url)
copy_tree(clone_path, repo_path)
# Check out the specified revision
if for_write:
_git_checkout(repo_path, rev)
else:
CLONES[url] = (repo_path, shallow)
return repo_path
@wrap_with(threading.Lock())
def _clone_default_branch(url, rev, for_write=False):
from dvc.scm.git import Git
clone_path, shallow = CLONES.get(url, (None, False))
git = None
try:
if clone_path:
git = Git(clone_path)
# Do not pull for known shas, branches and tags might move
if not Git.is_sha(rev) or not git.has_rev(rev):
if shallow:
# If we are missing a rev in a shallow clone, fallback to
# a full (unshallowed) clone. Since fetching specific rev
# SHAs is only available in certain git versions, if we
# have need to reference multiple specific revs for a
# given repo URL it is easier/safer for us to work with
# full clones in this case.
logger.debug("erepo: unshallowing clone for '%s'", url)
_unshallow(git)
shallow = False
CLONES[url] = (clone_path, shallow)
else:
logger.debug("erepo: git pull '%s'", url)
git.pull()
else:
logger.debug("erepo: git clone '%s' to a temporary dir", url)
clone_path = tempfile.mkdtemp("dvc-clone")
if not for_write and rev and not Git.is_sha(rev):
# If rev is a tag or branch name try shallow clone first
from dvc.scm.base import CloneError
try:
git = Git.clone(url, clone_path, shallow_branch=rev)
shallow = True
logger.debug(
"erepo: using shallow clone for branch '%s'", rev
)
except CloneError:
pass
if not git:
git = Git.clone(url, clone_path)
shallow = False
CLONES[url] = (clone_path, shallow)
finally:
if git:
git.close()
return clone_path, shallow
def _unshallow(git):
if git.gitpython.repo.head.is_detached:
# If this is a detached head (i.e. we shallow cloned a tag) switch to
# the default branch
origin_refs = git.gitpython.repo.remotes["origin"].refs
ref = origin_refs["HEAD"].reference
branch_name = ref.name.split("/")[-1]
branch = git.gitpython.repo.create_head(branch_name, ref)
branch.set_tracking_branch(ref)
branch.checkout()
git.pull(unshallow=True)
def _git_checkout(repo_path, rev):
from dvc.scm.git import Git
logger.debug("erepo: git checkout %s@%s", repo_path, rev)
git = Git(repo_path)
try:
git.checkout(rev)
finally:
git.close()
def _remove(path):
from dvc.utils.fs import remove
if os.name == "nt":
# git.exe may hang for a while not permitting to remove temp dir
os_retry = retry(5, errors=OSError, timeout=0.1)
try:
os_retry(remove)(path)
except PermissionError:
logger.warning(
"Failed to remove '%s'", relpath(path), exc_info=True
)
else:
remove(path)
| true
| true
|
f701e16a8bd60c349e5ada9ef3d576de5a949a72
| 366
|
py
|
Python
|
app/account/urls.py
|
GeoffMahugu/django-blog-backend
|
f77b8ebda4267f30757ebedd531c485a16cbf9c3
|
[
"MIT"
] | null | null | null |
app/account/urls.py
|
GeoffMahugu/django-blog-backend
|
f77b8ebda4267f30757ebedd531c485a16cbf9c3
|
[
"MIT"
] | null | null | null |
app/account/urls.py
|
GeoffMahugu/django-blog-backend
|
f77b8ebda4267f30757ebedd531c485a16cbf9c3
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from .views import AuthorSignupView, AuthorList, AuthorDetailView
urlpatterns = [
url(r'^$', AuthorList.as_view(), name='author-list'),
url(r'^(?P<pk>\d+)/$', AuthorDetailView, name='author-rud'),
url(r'^signup/$', AuthorSignupView, name='author-signup'),
]
| 36.6
| 65
| 0.726776
|
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from .views import AuthorSignupView, AuthorList, AuthorDetailView
urlpatterns = [
url(r'^$', AuthorList.as_view(), name='author-list'),
url(r'^(?P<pk>\d+)/$', AuthorDetailView, name='author-rud'),
url(r'^signup/$', AuthorSignupView, name='author-signup'),
]
| true
| true
|
f701e246974e360051eafba0655fea735ddd49d1
| 4,012
|
py
|
Python
|
tests/test_modeler.py
|
joanvaquer/SDV
|
83e4fdf0ff72e6c5b72cfc8c6ec9584dbd34de28
|
[
"MIT"
] | null | null | null |
tests/test_modeler.py
|
joanvaquer/SDV
|
83e4fdf0ff72e6c5b72cfc8c6ec9584dbd34de28
|
[
"MIT"
] | null | null | null |
tests/test_modeler.py
|
joanvaquer/SDV
|
83e4fdf0ff72e6c5b72cfc8c6ec9584dbd34de28
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from unittest.mock import Mock, call
import pandas as pd
from sdv.metadata import Metadata
from sdv.modeler import Modeler
from sdv.models.base import SDVModel
from sdv.models.copulas import GaussianCopula
class TestModeler(TestCase):
def test___init__default(self):
"""Test create new Modeler instance with default values"""
# Run
modeler = Modeler('test')
# Asserts
assert modeler.models == dict()
assert modeler.metadata == 'test'
assert modeler.model == GaussianCopula
assert modeler.model_kwargs == dict()
def test___init__with_arguments(self):
# Run
model = Mock()
modeler = Modeler({'some': 'metadata'}, model=model, model_kwargs={'some': 'kwargs'})
# Asserts
assert modeler.models == dict()
assert modeler.metadata == {'some': 'metadata'}
assert modeler.model == model
assert modeler.model_kwargs == {'some': 'kwargs'}
def test__get_extensions(self):
"""Test get list of extensions from childs"""
# Setup
model = Mock(spec=SDVModel)
model.return_value = model
model.get_parameters.side_effect = [
{'model': 'data 1'},
{'model': 'data 2'},
{'model': 'data 3'}
]
modeler = Mock(spec=Modeler)
modeler.model = model
modeler.model_kwargs = dict()
modeler.metadata = Mock(spec=Metadata)
# Run
child_table = pd.DataFrame({'foo': ['aaa', 'bbb', 'ccc']})
result = Modeler._get_extension(modeler, 'some_name', child_table, 'foo')
# Asserts
expected = pd.DataFrame({
'__some_name__model': ['data 1', 'data 2', 'data 3'],
'__some_name__child_rows': [1, 1, 1]
}, index=['aaa', 'bbb', 'ccc'])
pd.testing.assert_frame_equal(result, expected)
assert model.get_parameters.call_count == 3
def test_cpa_with_tables_no_primary_key(self):
"""Test CPA with tables and no primary key."""
# Setup
modeler = Mock(spec=Modeler)
modeler.metadata = Mock(spec=Metadata)
modeler.model = Mock(spec=SDVModel)
modeler.model_kwargs = dict()
modeler.models = dict()
modeler.table_sizes = {'data': 5}
modeler.metadata.transform.return_value = pd.DataFrame({'data': [1, 2, 3]})
modeler.metadata.get_primary_key.return_value = None
# Run
tables = {'test': pd.DataFrame({'data': ['a', 'b', 'c']})}
result = Modeler.cpa(modeler, 'test', tables)
# Asserts
expected = pd.DataFrame({'data': [1, 2, 3]})
expected_transform_call = pd.DataFrame({'data': ['a', 'b', 'c']})
assert modeler.metadata.load_table.call_count == 0
assert modeler.metadata.transform.call_args[0][0] == 'test'
pd.testing.assert_frame_equal(
modeler.metadata.transform.call_args[0][1],
expected_transform_call
)
pd.testing.assert_frame_equal(result, expected)
def test_model_database(self):
"""Test model using RCPA"""
# Setup
def rcpa_side_effect(table_name, tables):
tables[table_name] = table_name
metadata_table_names = ['foo', 'bar', 'tar']
metadata_parents = [None, 'bar_parent', None]
modeler = Mock()
modeler.metadata.get_tables.return_value = metadata_table_names
modeler.metadata.get_parents.side_effect = metadata_parents
modeler.rcpa.side_effect = rcpa_side_effect
modeler.models = dict()
# Run
Modeler.model_database(modeler)
# Asserts
expected_metadata_parents_call_count = 3
expected_metadata_parents_call = [call('foo'), call('bar'), call('tar')]
assert modeler.metadata.get_parents.call_count == expected_metadata_parents_call_count
assert modeler.metadata.get_parents.call_args_list == expected_metadata_parents_call
| 34.886957
| 94
| 0.620887
|
from unittest import TestCase
from unittest.mock import Mock, call
import pandas as pd
from sdv.metadata import Metadata
from sdv.modeler import Modeler
from sdv.models.base import SDVModel
from sdv.models.copulas import GaussianCopula
class TestModeler(TestCase):
def test___init__default(self):
modeler = Modeler('test')
assert modeler.models == dict()
assert modeler.metadata == 'test'
assert modeler.model == GaussianCopula
assert modeler.model_kwargs == dict()
def test___init__with_arguments(self):
model = Mock()
modeler = Modeler({'some': 'metadata'}, model=model, model_kwargs={'some': 'kwargs'})
assert modeler.models == dict()
assert modeler.metadata == {'some': 'metadata'}
assert modeler.model == model
assert modeler.model_kwargs == {'some': 'kwargs'}
def test__get_extensions(self):
model = Mock(spec=SDVModel)
model.return_value = model
model.get_parameters.side_effect = [
{'model': 'data 1'},
{'model': 'data 2'},
{'model': 'data 3'}
]
modeler = Mock(spec=Modeler)
modeler.model = model
modeler.model_kwargs = dict()
modeler.metadata = Mock(spec=Metadata)
child_table = pd.DataFrame({'foo': ['aaa', 'bbb', 'ccc']})
result = Modeler._get_extension(modeler, 'some_name', child_table, 'foo')
expected = pd.DataFrame({
'__some_name__model': ['data 1', 'data 2', 'data 3'],
'__some_name__child_rows': [1, 1, 1]
}, index=['aaa', 'bbb', 'ccc'])
pd.testing.assert_frame_equal(result, expected)
assert model.get_parameters.call_count == 3
def test_cpa_with_tables_no_primary_key(self):
modeler = Mock(spec=Modeler)
modeler.metadata = Mock(spec=Metadata)
modeler.model = Mock(spec=SDVModel)
modeler.model_kwargs = dict()
modeler.models = dict()
modeler.table_sizes = {'data': 5}
modeler.metadata.transform.return_value = pd.DataFrame({'data': [1, 2, 3]})
modeler.metadata.get_primary_key.return_value = None
tables = {'test': pd.DataFrame({'data': ['a', 'b', 'c']})}
result = Modeler.cpa(modeler, 'test', tables)
expected = pd.DataFrame({'data': [1, 2, 3]})
expected_transform_call = pd.DataFrame({'data': ['a', 'b', 'c']})
assert modeler.metadata.load_table.call_count == 0
assert modeler.metadata.transform.call_args[0][0] == 'test'
pd.testing.assert_frame_equal(
modeler.metadata.transform.call_args[0][1],
expected_transform_call
)
pd.testing.assert_frame_equal(result, expected)
def test_model_database(self):
def rcpa_side_effect(table_name, tables):
tables[table_name] = table_name
metadata_table_names = ['foo', 'bar', 'tar']
metadata_parents = [None, 'bar_parent', None]
modeler = Mock()
modeler.metadata.get_tables.return_value = metadata_table_names
modeler.metadata.get_parents.side_effect = metadata_parents
modeler.rcpa.side_effect = rcpa_side_effect
modeler.models = dict()
Modeler.model_database(modeler)
expected_metadata_parents_call_count = 3
expected_metadata_parents_call = [call('foo'), call('bar'), call('tar')]
assert modeler.metadata.get_parents.call_count == expected_metadata_parents_call_count
assert modeler.metadata.get_parents.call_args_list == expected_metadata_parents_call
| true
| true
|
f701e2724db3063e715cbb18755d4cc1f682a147
| 404
|
py
|
Python
|
project2/tests/q2_1_3.py
|
DrRossTaylor/intro-DS-Assignments
|
88f0747b89869cae4e4227e6f3a936f0f1583937
|
[
"CC0-1.0"
] | null | null | null |
project2/tests/q2_1_3.py
|
DrRossTaylor/intro-DS-Assignments
|
88f0747b89869cae4e4227e6f3a936f0f1583937
|
[
"CC0-1.0"
] | null | null | null |
project2/tests/q2_1_3.py
|
DrRossTaylor/intro-DS-Assignments
|
88f0747b89869cae4e4227e6f3a936f0f1583937
|
[
"CC0-1.0"
] | null | null | null |
test = {
'name': 'q2_1_3',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> np.isclose(distance_from_batman_returns('titanic'), 0.0023550202650824965)
True
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
| 17.565217
| 88
| 0.398515
|
test = {
'name': 'q2_1_3',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> np.isclose(distance_from_batman_returns('titanic'), 0.0023550202650824965)
True
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
| true
| true
|
f701e5714469931c76e895573715c567623da2a7
| 13,446
|
py
|
Python
|
tests.py
|
vishalsodani/deal
|
da3b06da2f7c463431fd5d674b9762133e9296a3
|
[
"MIT"
] | null | null | null |
tests.py
|
vishalsodani/deal
|
da3b06da2f7c463431fd5d674b9762133e9296a3
|
[
"MIT"
] | null | null | null |
tests.py
|
vishalsodani/deal
|
da3b06da2f7c463431fd5d674b9762133e9296a3
|
[
"MIT"
] | null | null | null |
import unittest
from typing import NoReturn
import marshmallow
import urllib3
import vaa
import deal
import pytest
class TestPreDeal:
@pytest.mark.parametrize('correct,incorrect', [(1, -1), (2, -2), (3, -3), (5, -5), (7, -7), (11, -11)])
def test_pre_contract_fulfilled(self, correct, incorrect):
func = deal.pre(lambda x: x > 0)(lambda x: x)
assert func(correct) == correct
with pytest.raises(deal.PreContractError):
func(incorrect)
@pytest.mark.parametrize('correct,incorrect_min,incorrect_max',
[(1, -1, 20), (2, -2, 21), (3, -3, 22), (5, -5, 23), (7, -7, 24), (9, -11, 25)])
def test_chain_all_contracts_fulfilled(self, correct, incorrect_min, incorrect_max):
func = deal.pre(lambda x: x < 10)(lambda x: x)
func = deal.pre(lambda x: x > 0)(func)
assert func(correct) == correct
with pytest.raises(deal.PreContractError):
func(incorrect_min)
with pytest.raises(deal.PreContractError):
func(incorrect_max)
def test_correct_exceptions_raised_on_contract_fail(self):
func = deal.pre(lambda x: x > 0)(lambda x: x)
with pytest.raises(deal.PreContractError):
func(-2)
func = deal.pre(lambda x: x > 0, message='TEST')(lambda x: x)
try:
func(-2)
except AssertionError as e:
assert e.args[0] == 'TEST'
func = deal.pre(lambda x: x > 0, exception=NameError)(lambda x: x)
with pytest.raises(NameError):
func(-2)
func = deal.pre(lambda x: x > 0, exception=NameError('TEST'))(lambda x: x)
with pytest.raises(NameError):
func(-2)
try:
func(-2)
except NameError as e:
assert e.args[0] == 'TEST'
func = deal.pre(lambda x: x > 0, message='TEST', exception=NameError)(lambda x: x)
with pytest.raises(NameError):
func(-2)
try:
func(-2)
except NameError as e:
assert e.args[0] == 'TEST'
def test_raise_error_with_param_on_contract_failure(self):
func = deal.pre(lambda x: x > 0 or 'TEST')(lambda x: x)
assert func(4) == 4
with pytest.raises(deal.PreContractError):
func(-2)
try:
func(-2)
except deal.PreContractError as e:
assert e.args[0] == 'TEST'
def test_method_decoration_name_is_correct(self):
@deal.pre(lambda x: x > 0)
def some_function(x):
return x
assert some_function.__name__ == 'some_function'
def test_class_method_decorator_raises_error_on_contract_fail(self):
class Class:
y = 7
@deal.pre(lambda self, x: x > 0)
def method(self, x):
return x * 2
@deal.pre(lambda self, x: x > 0)
def method2(self, y):
return self.y
assert Class().method(2) == 4
assert Class().method2(2) == 7
with pytest.raises(deal.PreContractError):
Class().method(-2)
with pytest.raises(deal.PreContractError):
Class().method2(-2)
# ignored test
def _test_validator(self, validator):
func = deal.pre(validator)(lambda x: x)
assert func(4) == 4
with pytest.raises(deal.PreContractError):
func(-2)
try:
func(-2)
except deal.PreContractError as e:
assert e.args[0] == 'TEST'
class TestPostDeal:
def test_return_value_fulfils_contract(self):
func = deal.post(lambda x: x > 0)(lambda x: -x)
assert func(-4) == 4
with pytest.raises(deal.PostContractError):
func(4)
class TestInvDeal:
def test_setting_object_attribute_fulfills_contract(self):
@deal.inv(lambda obj: obj.x > 0)
class A:
x = 2
a = A()
a.x = 4
with pytest.raises(deal.InvContractError):
a.x = -2
def test_setting_wrong_args_by_method_raises_error(self):
@deal.inv(lambda obj: obj.x > 0)
class A:
x = 2
def f(self, x):
self.x = x
a = A()
a.f(4)
with pytest.raises(deal.InvContractError):
a.f(-2)
def test_chain_contracts_both_fulfill(self):
@deal.inv(lambda obj: obj.x > 0)
@deal.inv(lambda obj: obj.x < 10)
class A:
x = 2
a = A()
a.x = 4
with pytest.raises(deal.InvContractError):
a.x = -2
with pytest.raises(deal.InvContractError):
a.x = 20
def test_patched_invariants_instance(self):
class A:
x = 2
PatchedA = deal.inv(lambda obj: obj.x > 0)(A) # noQA
a = PatchedA()
assert isinstance(a, PatchedA)
assert isinstance(a, A)
PatchedA2 = deal.inv(lambda obj: obj.x > 0)(PatchedA) # noQA
a = PatchedA2()
assert isinstance(a, PatchedA)
assert isinstance(a, PatchedA2)
assert isinstance(a, A)
assert a.__class__.__name__.count('Invarianted') == 1
class MarshmallowSchemeTests(unittest.TestCase):
def setUp(self):
class _Scheme(marshmallow.Schema):
name = marshmallow.fields.Str()
self.Scheme = vaa.marshmallow(_Scheme)
def test_scheme_string_validation_args_correct(self):
@deal.pre(self.Scheme)
def func(name):
return name * 2
assert func('Chris') == 'ChrisChris'
with pytest.raises(deal.PreContractError):
func(123)
try:
func(123)
except deal.PreContractError as e:
assert e.args[0] == {'name': ['Not a valid string.']}
def test_method_chain_decorator_with_scheme_is_fulfilled(self):
@deal.pre(self.Scheme)
@deal.pre(lambda name: name != 'Oleg')
def func(name):
return name * 2
assert func('Chris') == 'ChrisChris'
with pytest.raises(deal.PreContractError):
func(123)
with pytest.raises(deal.PreContractError):
func('Oleg')
def test_scheme_contract_is_satisfied_when_setting_arg(self):
@deal.inv(self.Scheme)
class User:
name = ''
user = User()
user.name = 'Chris'
with pytest.raises(deal.InvContractError):
user.name = 123
try:
user.name = 123
except deal.InvContractError as e:
assert e.args[0] == {'name': ['Not a valid string.']}
def test_scheme_contract_is_satisfied_within_chain(self):
@deal.inv(lambda user: user.name != 'Oleg')
@deal.inv(self.Scheme)
@deal.inv(lambda user: user.name != 'Chris')
class User:
name = ''
user = User()
user.name = 'Gram'
user = User()
with pytest.raises(deal.InvContractError):
user.name = 'Oleg'
user = User()
with pytest.raises(deal.InvContractError):
user.name = 123
user = User()
with pytest.raises(deal.InvContractError):
user.name = 'Chris'
def test_scheme_contract_is_satisfied_when_passing_args(self):
@deal.pre(self.Scheme)
def func(name):
return name * 2
assert func('Chris') == 'ChrisChris'
assert func(name='Chris') == 'ChrisChris'
@deal.pre(self.Scheme)
def func(**kwargs):
return kwargs['name'] * 3
assert func(name='Chris') == 'ChrisChrisChris'
@deal.pre(self.Scheme)
def func(name='Max'):
return name * 2
assert func() == 'MaxMax'
class TestDefaultScheme(MarshmallowSchemeTests):
def setUp(self):
class MyScheme(deal.Scheme):
def is_valid(self):
if not isinstance(self.data['name'], str):
self.errors = {'name': ['Not a valid string.']}
return False
return True
self.Scheme = MyScheme
class TestRaises:
def test_raises_expects_function_to_raise_error(self):
func = deal.raises(ZeroDivisionError)(lambda x: 1 / x)
with pytest.raises(ZeroDivisionError):
func(0)
func(2)
func = deal.raises(KeyError)(lambda x: 1 / x)
with pytest.raises(deal.RaisesContractError):
func(0)
    def test_raises_doesnt_override_another_contract(self):
@deal.raises(ZeroDivisionError)
@deal.offline
def func(do, number):
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
1 / number
func(False, 1)
with pytest.raises(deal.OfflineContractError):
func(True, 1)
with pytest.raises(ZeroDivisionError):
func(False, 0)
class TestOffline:
def test_network_request_in_offline_raises_exception(self):
@deal.offline
def func(do):
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
func(False)
with pytest.raises(deal.OfflineContractError):
func(True)
def test_network_request_in_offline_and_raises_specified_exception(self):
@deal.offline(exception=KeyError)
def func(do):
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
func(False)
with pytest.raises(KeyError):
func(True)
class TestSilent:
def test_silent_contract_not_allow_print(self):
@deal.silent
def func(msg):
if msg:
print(msg)
func(None)
with pytest.raises(deal.SilentContractError):
func('bad')
class TestChain:
def test_chained_contract_decorator(self):
@deal.chain(deal.silent, deal.offline)
def func(msg, do):
if msg:
print(msg)
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
func(False, False)
with pytest.raises(deal.SilentContractError):
func(True, False)
with pytest.raises(deal.OfflineContractError):
func(False, True)
class TestState:
def setUp(self):
deal.reset()
def tearDown(self):
deal.reset()
def test_contract_state_switch_custom_param(self):
func = deal.pre(lambda x: x > 0, debug=True)(lambda x: x * 2)
deal.switch(debug=False)
func(-2)
deal.switch(debug=True)
with pytest.raises(deal.PreContractError):
func(-2)
def test_contract_state_switch_default_param(self):
func = deal.pre(lambda x: x > 0)(lambda x: x * 2)
deal.switch(main=False)
func(-2)
deal.switch(main=True)
with pytest.raises(deal.PreContractError):
func(-2)
class TestEnsure:
    def test_parameters_and_result_fulfill_contract(self):
@deal.ensure(lambda a, b, result: a > 0 and b > 0 and result != 'same number')
def func(a, b):
if a == b:
return 'same number'
else:
return 'different numbers'
assert func(1, 2) == 'different numbers'
with pytest.raises(deal.PostContractError):
func(0, 1)
with pytest.raises(deal.PostContractError):
func(1, 0)
with pytest.raises(deal.PostContractError):
func(1, 1)
class CaseTest(unittest.TestCase):
def setUp(self):
@deal.raises(ZeroDivisionError)
@deal.pre(lambda a, b: a > 0 and b > 0)
def div(a: int, b: int) -> float:
assert isinstance(a, int)
assert isinstance(b, int)
assert a > 0
assert b > 0
return a / b
self.func = div
def test_count(self):
for count in (1, 10, 20, 50):
cases = deal.cases(self.func, count=count)
assert len(list(cases)) == count
def test_params_detected(self):
for case in deal.cases(self.func, count=10):
assert set(case.kwargs) == {'a', 'b'}
def test_params_type(self):
for case in deal.cases(self.func, count=10):
assert type(case.kwargs['a']) is int
assert type(case.kwargs['b']) is int
def test_params_ok_with_excs(self):
results = []
for case in deal.cases(self.func, count=20):
result = case()
results.append(result)
        assert any(r is not NoReturn for r in results), 'exception occurred on every run'
        assert any(r is NoReturn for r in results), 'no exception occurred'
def test_return_type_checks(self):
def div(a: int, b: int):
return 1
for case in deal.cases(div, count=20):
case()
def div(a: int, b: int) -> str:
return 1
with pytest.raises(TypeError):
case = next(iter(deal.cases(div, count=20)))
case()
def test_explicit_kwargs(self):
def div(a: int, b: int):
assert b == 4
for case in deal.cases(div, kwargs=dict(b=4), count=20):
case()
if __name__ == '__main__':
pytest.main(['tests.py'])
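# Minimal sketch of the contract style exercised above (comments only, not an
# additional test; `double` is a made-up example function):
#
#   @deal.pre(lambda x: x > 0)                   # validates arguments before the call
#   @deal.post(lambda result: result % 2 == 0)   # validates the returned value
#   def double(x):
#       return x * 2
#
#   double(3)    # -> 6
#   double(-1)   # raises deal.PreContractError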
| 28.487288
| 109
| 0.562175
|
import unittest
from typing import NoReturn
import marshmallow
import urllib3
import vaa
import deal
import pytest
class TestPreDeal:
@pytest.mark.parametrize('correct,incorrect', [(1, -1), (2, -2), (3, -3), (5, -5), (7, -7), (11, -11)])
def test_pre_contract_fulfilled(self, correct, incorrect):
func = deal.pre(lambda x: x > 0)(lambda x: x)
assert func(correct) == correct
with pytest.raises(deal.PreContractError):
func(incorrect)
@pytest.mark.parametrize('correct,incorrect_min,incorrect_max',
[(1, -1, 20), (2, -2, 21), (3, -3, 22), (5, -5, 23), (7, -7, 24), (9, -11, 25)])
def test_chain_all_contracts_fulfilled(self, correct, incorrect_min, incorrect_max):
func = deal.pre(lambda x: x < 10)(lambda x: x)
func = deal.pre(lambda x: x > 0)(func)
assert func(correct) == correct
with pytest.raises(deal.PreContractError):
func(incorrect_min)
with pytest.raises(deal.PreContractError):
func(incorrect_max)
def test_correct_exceptions_raised_on_contract_fail(self):
func = deal.pre(lambda x: x > 0)(lambda x: x)
with pytest.raises(deal.PreContractError):
func(-2)
func = deal.pre(lambda x: x > 0, message='TEST')(lambda x: x)
try:
func(-2)
except AssertionError as e:
assert e.args[0] == 'TEST'
func = deal.pre(lambda x: x > 0, exception=NameError)(lambda x: x)
with pytest.raises(NameError):
func(-2)
func = deal.pre(lambda x: x > 0, exception=NameError('TEST'))(lambda x: x)
with pytest.raises(NameError):
func(-2)
try:
func(-2)
except NameError as e:
assert e.args[0] == 'TEST'
func = deal.pre(lambda x: x > 0, message='TEST', exception=NameError)(lambda x: x)
with pytest.raises(NameError):
func(-2)
try:
func(-2)
except NameError as e:
assert e.args[0] == 'TEST'
def test_raise_error_with_param_on_contract_failure(self):
func = deal.pre(lambda x: x > 0 or 'TEST')(lambda x: x)
assert func(4) == 4
with pytest.raises(deal.PreContractError):
func(-2)
try:
func(-2)
except deal.PreContractError as e:
assert e.args[0] == 'TEST'
def test_method_decoration_name_is_correct(self):
@deal.pre(lambda x: x > 0)
def some_function(x):
return x
assert some_function.__name__ == 'some_function'
def test_class_method_decorator_raises_error_on_contract_fail(self):
class Class:
y = 7
@deal.pre(lambda self, x: x > 0)
def method(self, x):
return x * 2
@deal.pre(lambda self, x: x > 0)
def method2(self, y):
return self.y
assert Class().method(2) == 4
assert Class().method2(2) == 7
with pytest.raises(deal.PreContractError):
Class().method(-2)
with pytest.raises(deal.PreContractError):
Class().method2(-2)
def _test_validator(self, validator):
func = deal.pre(validator)(lambda x: x)
assert func(4) == 4
with pytest.raises(deal.PreContractError):
func(-2)
try:
func(-2)
except deal.PreContractError as e:
assert e.args[0] == 'TEST'
class TestPostDeal:
def test_return_value_fulfils_contract(self):
func = deal.post(lambda x: x > 0)(lambda x: -x)
assert func(-4) == 4
with pytest.raises(deal.PostContractError):
func(4)
class TestInvDeal:
def test_setting_object_attribute_fulfills_contract(self):
@deal.inv(lambda obj: obj.x > 0)
class A:
x = 2
a = A()
a.x = 4
with pytest.raises(deal.InvContractError):
a.x = -2
def test_setting_wrong_args_by_method_raises_error(self):
@deal.inv(lambda obj: obj.x > 0)
class A:
x = 2
def f(self, x):
self.x = x
a = A()
a.f(4)
with pytest.raises(deal.InvContractError):
a.f(-2)
def test_chain_contracts_both_fulfill(self):
@deal.inv(lambda obj: obj.x > 0)
@deal.inv(lambda obj: obj.x < 10)
class A:
x = 2
a = A()
a.x = 4
with pytest.raises(deal.InvContractError):
a.x = -2
with pytest.raises(deal.InvContractError):
a.x = 20
def test_patched_invariants_instance(self):
class A:
x = 2
        PatchedA = deal.inv(lambda obj: obj.x > 0)(A)
        a = PatchedA()
assert isinstance(a, PatchedA)
assert isinstance(a, A)
        PatchedA2 = deal.inv(lambda obj: obj.x > 0)(PatchedA)
        a = PatchedA2()
assert isinstance(a, PatchedA)
assert isinstance(a, PatchedA2)
assert isinstance(a, A)
assert a.__class__.__name__.count('Invarianted') == 1
class MarshmallowSchemeTests(unittest.TestCase):
def setUp(self):
class _Scheme(marshmallow.Schema):
name = marshmallow.fields.Str()
self.Scheme = vaa.marshmallow(_Scheme)
def test_scheme_string_validation_args_correct(self):
@deal.pre(self.Scheme)
def func(name):
return name * 2
assert func('Chris') == 'ChrisChris'
with pytest.raises(deal.PreContractError):
func(123)
try:
func(123)
except deal.PreContractError as e:
assert e.args[0] == {'name': ['Not a valid string.']}
def test_method_chain_decorator_with_scheme_is_fulfilled(self):
@deal.pre(self.Scheme)
@deal.pre(lambda name: name != 'Oleg')
def func(name):
return name * 2
assert func('Chris') == 'ChrisChris'
with pytest.raises(deal.PreContractError):
func(123)
with pytest.raises(deal.PreContractError):
func('Oleg')
def test_scheme_contract_is_satisfied_when_setting_arg(self):
@deal.inv(self.Scheme)
class User:
name = ''
user = User()
user.name = 'Chris'
with pytest.raises(deal.InvContractError):
user.name = 123
try:
user.name = 123
except deal.InvContractError as e:
assert e.args[0] == {'name': ['Not a valid string.']}
def test_scheme_contract_is_satisfied_within_chain(self):
@deal.inv(lambda user: user.name != 'Oleg')
@deal.inv(self.Scheme)
@deal.inv(lambda user: user.name != 'Chris')
class User:
name = ''
user = User()
user.name = 'Gram'
user = User()
with pytest.raises(deal.InvContractError):
user.name = 'Oleg'
user = User()
with pytest.raises(deal.InvContractError):
user.name = 123
user = User()
with pytest.raises(deal.InvContractError):
user.name = 'Chris'
def test_scheme_contract_is_satisfied_when_passing_args(self):
@deal.pre(self.Scheme)
def func(name):
return name * 2
assert func('Chris') == 'ChrisChris'
assert func(name='Chris') == 'ChrisChris'
@deal.pre(self.Scheme)
def func(**kwargs):
return kwargs['name'] * 3
assert func(name='Chris') == 'ChrisChrisChris'
@deal.pre(self.Scheme)
def func(name='Max'):
return name * 2
assert func() == 'MaxMax'
class TestDefaultScheme(MarshmallowSchemeTests):
def setUp(self):
class MyScheme(deal.Scheme):
def is_valid(self):
if not isinstance(self.data['name'], str):
self.errors = {'name': ['Not a valid string.']}
return False
return True
self.Scheme = MyScheme
class TestRaises:
def test_raises_expects_function_to_raise_error(self):
func = deal.raises(ZeroDivisionError)(lambda x: 1 / x)
with pytest.raises(ZeroDivisionError):
func(0)
func(2)
func = deal.raises(KeyError)(lambda x: 1 / x)
with pytest.raises(deal.RaisesContractError):
func(0)
    def test_raises_doesnt_override_another_contract(self):
@deal.raises(ZeroDivisionError)
@deal.offline
def func(do, number):
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
1 / number
func(False, 1)
with pytest.raises(deal.OfflineContractError):
func(True, 1)
with pytest.raises(ZeroDivisionError):
func(False, 0)
class TestOffline:
def test_network_request_in_offline_raises_exception(self):
@deal.offline
def func(do):
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
func(False)
with pytest.raises(deal.OfflineContractError):
func(True)
def test_network_request_in_offline_and_raises_specified_exception(self):
@deal.offline(exception=KeyError)
def func(do):
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
func(False)
with pytest.raises(KeyError):
func(True)
class TestSilent:
def test_silent_contract_not_allow_print(self):
@deal.silent
def func(msg):
if msg:
print(msg)
func(None)
with pytest.raises(deal.SilentContractError):
func('bad')
class TestChain:
def test_chained_contract_decorator(self):
@deal.chain(deal.silent, deal.offline)
def func(msg, do):
if msg:
print(msg)
if do:
http = urllib3.PoolManager()
http.request('GET', 'http://httpbin.org/robots.txt')
func(False, False)
with pytest.raises(deal.SilentContractError):
func(True, False)
with pytest.raises(deal.OfflineContractError):
func(False, True)
class TestState:
def setUp(self):
deal.reset()
def tearDown(self):
deal.reset()
def test_contract_state_switch_custom_param(self):
func = deal.pre(lambda x: x > 0, debug=True)(lambda x: x * 2)
deal.switch(debug=False)
func(-2)
deal.switch(debug=True)
with pytest.raises(deal.PreContractError):
func(-2)
def test_contract_state_switch_default_param(self):
func = deal.pre(lambda x: x > 0)(lambda x: x * 2)
deal.switch(main=False)
func(-2)
deal.switch(main=True)
with pytest.raises(deal.PreContractError):
func(-2)
class TestEnsure:
    def test_parameters_and_result_fulfill_contract(self):
@deal.ensure(lambda a, b, result: a > 0 and b > 0 and result != 'same number')
def func(a, b):
if a == b:
return 'same number'
else:
return 'different numbers'
assert func(1, 2) == 'different numbers'
with pytest.raises(deal.PostContractError):
func(0, 1)
with pytest.raises(deal.PostContractError):
func(1, 0)
with pytest.raises(deal.PostContractError):
func(1, 1)
class CaseTest(unittest.TestCase):
def setUp(self):
@deal.raises(ZeroDivisionError)
@deal.pre(lambda a, b: a > 0 and b > 0)
def div(a: int, b: int) -> float:
assert isinstance(a, int)
assert isinstance(b, int)
assert a > 0
assert b > 0
return a / b
self.func = div
def test_count(self):
for count in (1, 10, 20, 50):
cases = deal.cases(self.func, count=count)
assert len(list(cases)) == count
def test_params_detected(self):
for case in deal.cases(self.func, count=10):
assert set(case.kwargs) == {'a', 'b'}
def test_params_type(self):
for case in deal.cases(self.func, count=10):
assert type(case.kwargs['a']) is int
assert type(case.kwargs['b']) is int
def test_params_ok_with_excs(self):
results = []
for case in deal.cases(self.func, count=20):
result = case()
results.append(result)
        assert any(r is not NoReturn for r in results), 'exception occurred on every run'
        assert any(r is NoReturn for r in results), 'no exception occurred'
def test_return_type_checks(self):
def div(a: int, b: int):
return 1
for case in deal.cases(div, count=20):
case()
def div(a: int, b: int) -> str:
return 1
with pytest.raises(TypeError):
case = next(iter(deal.cases(div, count=20)))
case()
def test_explicit_kwargs(self):
def div(a: int, b: int):
assert b == 4
for case in deal.cases(div, kwargs=dict(b=4), count=20):
case()
if __name__ == '__main__':
pytest.main(['tests.py'])
| true
| true
|
f701e5ba1cf40d87257e9a32a37af0b6e506bd69
| 5,565
|
py
|
Python
|
pykeyatome/client.py
|
jugla/pyKeyAtome
|
2ea833f54f95c15279254565e912e4036869b139
|
[
"MIT"
] | 1
|
2021-12-21T23:08:58.000Z
|
2021-12-21T23:08:58.000Z
|
pykeyatome/client.py
|
jugla/pyKeyAtome
|
2ea833f54f95c15279254565e912e4036869b139
|
[
"MIT"
] | 1
|
2022-01-26T21:40:34.000Z
|
2022-01-27T01:10:43.000Z
|
pykeyatome/client.py
|
jugla/pyKeyAtome
|
2ea833f54f95c15279254565e912e4036869b139
|
[
"MIT"
] | null | null | null |
"""Class client for atome protocol."""
import json
import logging
import requests
import simplejson
from fake_useragent import UserAgent
# export const
DAILY_PERIOD_TYPE = "day"
WEEKLY_PERIOD_TYPE = "week"
MONTHLY_PERIOD_TYPE = "month"
YEARLY_PERIOD_TYPE = "year"
# internal const
COOKIE_NAME = "PHPSESSID"
API_BASE_URI = "https://esoftlink.esoftthings.com"
API_ENDPOINT_LOGIN = "/api/user/login.json"
API_ENDPOINT_LIVE = "/measure/live.json"
API_ENDPOINT_CONSUMPTION = "/consumption.json"
LOGIN_URL = API_BASE_URI + API_ENDPOINT_LOGIN
DEFAULT_TIMEOUT = 10
MAX_RETRIES = 3
_LOGGER = logging.getLogger(__name__)
class PyAtomeError(Exception):
"""Exception class."""
pass
class AtomeClient(object):
"""The client class."""
def __init__(
self, username, password, atome_linky_number=1, session=None, timeout=None
):
"""Initialize the client object."""
self.username = username
self.password = password
self._user_id = None
self._user_reference = None
self._session = session
self._data = {}
self._timeout = timeout
# internal array start from 0 and not 1. Shift by 1.
self._atome_linky_number = int(atome_linky_number) - 1
def login(self):
"""Set http session."""
if self._session is None:
self._session = requests.session()
# adding fake user-agent header
self._session.headers.update({"User-agent": str(UserAgent().random)})
return self._login()
def _login(self):
"""Login to Atome's API."""
error_flag = False
payload = {"email": self.username, "plainPassword": self.password}
try:
req = self._session.post(
LOGIN_URL,
json=payload,
headers={"content-type": "application/json"},
timeout=self._timeout,
)
except OSError:
_LOGGER.debug("Can not login to API")
error_flag = True
if error_flag:
return None
try:
response_json = req.json()
user_id = str(response_json["id"])
user_reference = response_json["subscriptions"][self._atome_linky_number][
"reference"
]
self._user_id = user_id
self._user_reference = user_reference
except (
KeyError,
OSError,
json.decoder.JSONDecodeError,
simplejson.errors.JSONDecodeError,
) as e:
_LOGGER.debug(
"Impossible to decode response: \nResponse was: [%s] %s",
str(e),
str(req.status_code),
str(req.text),
)
error_flag = True
if error_flag:
return None
return response_json
def get_user_reference(self):
"""Get user reference respect to linky number."""
return self._user_reference
def _get_info_from_server(self, url, max_retries=0):
error_flag = False
if max_retries > MAX_RETRIES:
_LOGGER.debug("Can't gather proper data. Max retries exceeded.")
error_flag = True
return None
try:
req = self._session.get(url, timeout=self._timeout)
except OSError as e:
_LOGGER.debug("Could not access Atome's API: " + str(e))
error_flag = True
if error_flag:
return None
if req.status_code == 403:
# session is wrong, need to relogin
self.login()
logging.info("Got error %s, relogging (max retries: %s)", str(req.status_code), str(max_retries))
return self._get_info_from_server(url, max_retries + 1)
if req.text == "":
_LOGGER.debug("No data")
error_flag = True
return None
try:
json_output = req.json()
except (
OSError,
json.decoder.JSONDecodeError,
simplejson.errors.JSONDecodeError,
) as e:
_LOGGER.debug(
"Impossible to decode response: "
+ str(e)
+ "\nResponse was: "
+ str(req.text)
)
error_flag = True
if error_flag:
return None
return json_output
def get_live(self):
"""Get current data."""
live_url = (
API_BASE_URI
+ "/api/subscription/"
+ self._user_id
+ "/"
+ self._user_reference
+ API_ENDPOINT_LIVE
)
return self._get_info_from_server(live_url)
def get_consumption(self, period):
"""Get current data."""
if period not in [
DAILY_PERIOD_TYPE,
WEEKLY_PERIOD_TYPE,
MONTHLY_PERIOD_TYPE,
YEARLY_PERIOD_TYPE,
]:
            raise ValueError(
                "Period %s out of range. Shall be either 'day', 'week', 'month' or 'year'."
                % str(period)
            )
consumption_url = (
API_BASE_URI
+ "/api/subscription/"
+ self._user_id
+ "/"
+ self._user_reference
+ API_ENDPOINT_CONSUMPTION
+ "?period=so"
+ period[:1]
)
return self._get_info_from_server(consumption_url)
def close_session(self):
"""Close current session."""
self._session.close()
self._session = None
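# Illustrative usage (comments only, not part of the library; the credentials
# are placeholders):
#
#   client = AtomeClient("user@example.com", "secret", atome_linky_number=1)
#   if client.login() is not None:
#       live = client.get_live()
#       daily = client.get_consumption(DAILY_PERIOD_TYPE)
#       client.close_session()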
| 27.825
| 109
| 0.554537
|
import json
import logging
import requests
import simplejson
from fake_useragent import UserAgent
DAILY_PERIOD_TYPE = "day"
WEEKLY_PERIOD_TYPE = "week"
MONTHLY_PERIOD_TYPE = "month"
YEARLY_PERIOD_TYPE = "year"
COOKIE_NAME = "PHPSESSID"
API_BASE_URI = "https://esoftlink.esoftthings.com"
API_ENDPOINT_LOGIN = "/api/user/login.json"
API_ENDPOINT_LIVE = "/measure/live.json"
API_ENDPOINT_CONSUMPTION = "/consumption.json"
LOGIN_URL = API_BASE_URI + API_ENDPOINT_LOGIN
DEFAULT_TIMEOUT = 10
MAX_RETRIES = 3
_LOGGER = logging.getLogger(__name__)
class PyAtomeError(Exception):
pass
class AtomeClient(object):
def __init__(
self, username, password, atome_linky_number=1, session=None, timeout=None
):
self.username = username
self.password = password
self._user_id = None
self._user_reference = None
self._session = session
self._data = {}
self._timeout = timeout
self._atome_linky_number = int(atome_linky_number) - 1
def login(self):
if self._session is None:
self._session = requests.session()
self._session.headers.update({"User-agent": str(UserAgent().random)})
return self._login()
def _login(self):
error_flag = False
payload = {"email": self.username, "plainPassword": self.password}
try:
req = self._session.post(
LOGIN_URL,
json=payload,
headers={"content-type": "application/json"},
timeout=self._timeout,
)
except OSError:
_LOGGER.debug("Can not login to API")
error_flag = True
if error_flag:
return None
try:
response_json = req.json()
user_id = str(response_json["id"])
user_reference = response_json["subscriptions"][self._atome_linky_number][
"reference"
]
self._user_id = user_id
self._user_reference = user_reference
except (
KeyError,
OSError,
json.decoder.JSONDecodeError,
simplejson.errors.JSONDecodeError,
) as e:
_LOGGER.debug(
"Impossible to decode response: \nResponse was: [%s] %s",
str(e),
str(req.status_code),
str(req.text),
)
error_flag = True
if error_flag:
return None
return response_json
def get_user_reference(self):
return self._user_reference
def _get_info_from_server(self, url, max_retries=0):
error_flag = False
if max_retries > MAX_RETRIES:
_LOGGER.debug("Can't gather proper data. Max retries exceeded.")
error_flag = True
return None
try:
req = self._session.get(url, timeout=self._timeout)
except OSError as e:
_LOGGER.debug("Could not access Atome's API: " + str(e))
error_flag = True
if error_flag:
return None
if req.status_code == 403:
self.login()
logging.info("Got error %s, relogging (max retries: %s)", str(req.status_code), str(max_retries))
return self._get_info_from_server(url, max_retries + 1)
if req.text == "":
_LOGGER.debug("No data")
error_flag = True
return None
try:
json_output = req.json()
except (
OSError,
json.decoder.JSONDecodeError,
simplejson.errors.JSONDecodeError,
) as e:
_LOGGER.debug(
"Impossible to decode response: "
+ str(e)
+ "\nResponse was: "
+ str(req.text)
)
error_flag = True
if error_flag:
return None
return json_output
def get_live(self):
live_url = (
API_BASE_URI
+ "/api/subscription/"
+ self._user_id
+ "/"
+ self._user_reference
+ API_ENDPOINT_LIVE
)
return self._get_info_from_server(live_url)
def get_consumption(self, period):
if period not in [
DAILY_PERIOD_TYPE,
WEEKLY_PERIOD_TYPE,
MONTHLY_PERIOD_TYPE,
YEARLY_PERIOD_TYPE,
]:
            raise ValueError(
                "Period %s out of range. Shall be either 'day', 'week', 'month' or 'year'."
                % str(period)
            )
consumption_url = (
API_BASE_URI
+ "/api/subscription/"
+ self._user_id
+ "/"
+ self._user_reference
+ API_ENDPOINT_CONSUMPTION
+ "?period=so"
+ period[:1]
)
return self._get_info_from_server(consumption_url)
def close_session(self):
self._session.close()
self._session = None
| true
| true
|
f701e753de84fad7ebd01a4470ae510c0c88bfb2
| 870
|
py
|
Python
|
var/spack/repos/builtin.mock/packages/when-directives-false/package.py
|
xiki-tempula/spack
|
9d66c05e93ab8a933fc59915040c0e0c86a4aac4
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 9
|
2018-04-18T07:51:40.000Z
|
2021-09-10T03:56:57.000Z
|
var/spack/repos/builtin.mock/packages/when-directives-false/package.py
|
xiki-tempula/spack
|
9d66c05e93ab8a933fc59915040c0e0c86a4aac4
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 907
|
2018-04-18T11:17:57.000Z
|
2022-03-31T13:20:25.000Z
|
var/spack/repos/builtin.mock/packages/when-directives-false/package.py
|
xiki-tempula/spack
|
9d66c05e93ab8a933fc59915040c0e0c86a4aac4
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 29
|
2018-11-05T16:14:23.000Z
|
2022-02-03T16:07:09.000Z
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class WhenDirectivesFalse(Package):
"""Package that tests False when specs on directives."""
homepage = "http://www.example.com"
url = "http://www.example.com/example-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
patch('https://example.com/foo.patch',
sha256='abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
when=False)
extends('extendee', when=False)
depends_on('b', when=False)
conflicts('@1.0', when=False)
resource(url="http://www.example.com/example-1.0-resource.tar.gz",
md5='0123456789abcdef0123456789abcdef',
when=False)
| 33.461538
| 84
| 0.697701
|
from spack import *
class WhenDirectivesFalse(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/example-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
patch('https://example.com/foo.patch',
sha256='abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
when=False)
extends('extendee', when=False)
depends_on('b', when=False)
conflicts('@1.0', when=False)
resource(url="http://www.example.com/example-1.0-resource.tar.gz",
md5='0123456789abcdef0123456789abcdef',
when=False)
| true
| true
|
f701e9169b162fe7563cb382ae08c1a558adaba4
| 1,100
|
py
|
Python
|
cache.py
|
greerviau/HackUMass
|
25ef2ea9fecbe4bbfa91f0a9f32bd9f2703a176a
|
[
"MIT"
] | 2
|
2019-10-31T15:18:06.000Z
|
2021-02-13T00:14:07.000Z
|
cache.py
|
greerviau/HackUMass
|
25ef2ea9fecbe4bbfa91f0a9f32bd9f2703a176a
|
[
"MIT"
] | 1
|
2019-10-21T21:23:02.000Z
|
2019-10-21T21:23:02.000Z
|
cache.py
|
greerviau/HackUMass
|
25ef2ea9fecbe4bbfa91f0a9f32bd9f2703a176a
|
[
"MIT"
] | 3
|
2020-01-20T21:46:10.000Z
|
2021-05-27T09:49:54.000Z
|
import numpy as np
import math
class Cache():
def __init__(self, max_size=10):
self.cache = []
self.size = 0
self.max_size=max_size
def add(self, element):
self.cache.append(element)
self.size+=1
if self.size > self.max_size:
del self.cache[0]
self.size = self.max_size
def mean(self):
return np.mean(np.array(self.cache), axis=0)
def empty(self):
return self.size == 0
def get_size(self):
return self.size
def get_last(self):
return self.cache[self.size-1]
def print_cache(self):
for e in self.cache:
print(e)
if __name__ == '__main__':
print('===Test Cache===')
cache = Cache(max_size=5)
cache.add([5,4])
print(cache.get_size())
print(cache.print_cache())
cache.add([8,1])
cache.add([3,2])
cache.add([4,5])
cache.add([6,2])
print(cache.get_size())
print(cache.print_cache())
cache.add([1,4])
print(cache.get_size())
print(cache.print_cache())
print(cache.mean())
| 21.153846
| 52
| 0.564545
|
import numpy as np
import math
class Cache():
def __init__(self, max_size=10):
self.cache = []
self.size = 0
self.max_size=max_size
def add(self, element):
self.cache.append(element)
self.size+=1
if self.size > self.max_size:
del self.cache[0]
self.size = self.max_size
def mean(self):
return np.mean(np.array(self.cache), axis=0)
def empty(self):
return self.size == 0
def get_size(self):
return self.size
def get_last(self):
return self.cache[self.size-1]
def print_cache(self):
for e in self.cache:
print(e)
if __name__ == '__main__':
print('===Test Cache===')
cache = Cache(max_size=5)
cache.add([5,4])
print(cache.get_size())
print(cache.print_cache())
cache.add([8,1])
cache.add([3,2])
cache.add([4,5])
cache.add([6,2])
print(cache.get_size())
print(cache.print_cache())
cache.add([1,4])
print(cache.get_size())
print(cache.print_cache())
print(cache.mean())
| true
| true
|
f701e93ad73088654fd6b538457d54c52ecec092
| 22,885
|
py
|
Python
|
Src/Scripts/generate_exceptions.py
|
0xFireball/exascript2
|
f6c69ac41f8fa1e5e9e42a677717e85b9ff9d0c3
|
[
"Apache-2.0"
] | null | null | null |
Src/Scripts/generate_exceptions.py
|
0xFireball/exascript2
|
f6c69ac41f8fa1e5e9e42a677717e85b9ff9d0c3
|
[
"Apache-2.0"
] | null | null | null |
Src/Scripts/generate_exceptions.py
|
0xFireball/exascript2
|
f6c69ac41f8fa1e5e9e42a677717e85b9ff9d0c3
|
[
"Apache-2.0"
] | 1
|
2019-09-18T05:37:46.000Z
|
2019-09-18T05:37:46.000Z
|
#####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# [email protected]. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
from generate import generate
import System
import clr
import exceptions
def collect_excs():
ret = []
for e in exceptions.__dict__.values():
if not hasattr(e, '__bases__'): continue
if e.__name__ == "exceptions": continue
if e.__name__ == "__builtin__": continue
assert len(e.__bases__) <= 1, e
if len(e.__bases__) == 0:
continue
#supername = None
else:
supername = e.__bases__[0].__name__
ret.append( (e, supername) )
return ret
excs = collect_excs()
pythonExcs = ['ImportError', 'RuntimeError', 'UnicodeTranslateError', 'PendingDeprecationWarning', 'EnvironmentError',
'LookupError', 'OSError', 'DeprecationWarning', 'UnicodeError', 'FloatingPointError', 'ReferenceError',
'FutureWarning', 'AssertionError', 'RuntimeWarning', 'ImportWarning', 'UserWarning', 'SyntaxWarning',
'UnicodeWarning', 'StopIteration', 'BytesWarning', 'BufferError']
class ExceptionInfo(object):
def __init__(self, name, clrException, args, fields, subclasses, silverlightSupported = True, baseMapping = None):
self.name = name
self.clrException = clrException
self.args = args
self.fields = fields
self.subclasses = subclasses
self.silverlightSupported = silverlightSupported
self.parent = None
self.baseMapping = baseMapping
for child in subclasses:
child.parent = self
@property
def ConcreteParent(self):
while not self.parent.fields:
self = self.parent
if self.parent == None: return exceptionHierarchy
return self.parent
@property
def PythonType(self):
if not self.parent:
return 'DynamicHelpers.GetPythonTypeFromType(typeof(%s))' % self.name
else:
return self.name
@property
def ClrType(self):
if not self.parent:
return 'BaseException'
elif self.fields:
return '_' + self.name
else:
return self.name
@property
def ExceptionMappingName(self):
if self.baseMapping:
return self.baseMapping[self.baseMapping.rfind('.')+1:]
return self.DotNetExceptionName
@property
def DotNetExceptionName(self):
return self.clrException[self.clrException.rfind('.')+1:]
@property
def InternalPythonType(self):
if not self.parent:
return 'PythonExceptions._' + self.name
else:
return 'PythonExceptions.' + self.name
def BeginSilverlight(self, cw):
if not self.silverlightSupported:
cw.writeline('')
cw.writeline('#if !SILVERLIGHT');
def EndSilverlight(self, cw):
if not self.silverlightSupported:
cw.writeline('#endif // !SILVERLIGHT')
cw.writeline('');
def MakeNewException(self):
if self.fields or self.name == 'BaseException':
return 'new PythonExceptions._%s()' % (self.name)
else:
return 'new PythonExceptions.%s(PythonExceptions.%s)' % (self.ConcreteParent.ClrType, self.name)
# format is name, args, (fields, ...), (subclasses, ...)
exceptionHierarchy = ExceptionInfo('BaseException', 'IronPython.Runtime.Exceptions.PythonException', None, None, (
ExceptionInfo('GeneratorExit', 'IronPython.Runtime.Exceptions.GeneratorExitException', None, (), ()),
ExceptionInfo('SystemExit', 'IronPython.Runtime.Exceptions.SystemExitException', None, ('code',), ()),
ExceptionInfo('KeyboardInterrupt', 'Microsoft.Scripting.KeyboardInterruptException', None, (), ()),
ExceptionInfo('Exception', 'IronPython.Runtime.Exceptions.PythonException', None, (), (
ExceptionInfo('StopIteration', 'IronPython.Runtime.Exceptions.StopIterationException', None, (), ()),
ExceptionInfo('StandardError', 'System.ApplicationException', None, (), (
ExceptionInfo('BufferError', 'IronPython.Runtime.Exceptions.BufferException', None, (), ()),
ExceptionInfo('ArithmeticError', 'System.ArithmeticException', None, (), (
ExceptionInfo('FloatingPointError', 'IronPython.Runtime.Exceptions.FloatingPointException', None, (), ()),
ExceptionInfo('OverflowError', 'System.OverflowException', None, (), ()),
ExceptionInfo('ZeroDivisionError', 'System.DivideByZeroException', None, (), ()),
),
),
ExceptionInfo('AssertionError', 'IronPython.Runtime.Exceptions.AssertionException', None, (), ()),
ExceptionInfo('AttributeError', 'IronPython.Runtime.Exceptions.AttributeErrorException', None, (), (), baseMapping = 'System.MissingMemberException'),
ExceptionInfo('EnvironmentError', 'System.Runtime.InteropServices.ExternalException', None, ('errno', 'strerror', 'filename'), (
ExceptionInfo('IOError', 'System.IO.IOException', None, (), ()),
ExceptionInfo('OSError', 'IronPython.Runtime.Exceptions.OSException', None, (), (
ExceptionInfo('WindowsError', 'System.ComponentModel.Win32Exception', None, ('winerror',), ()),
),
),
),
),
ExceptionInfo('EOFError', 'System.IO.EndOfStreamException', None, (), ()),
ExceptionInfo('ImportError', 'IronPython.Runtime.Exceptions.ImportException', None, (), ()),
ExceptionInfo('LookupError', 'IronPython.Runtime.Exceptions.LookupException', None, (), (
ExceptionInfo('IndexError', 'System.IndexOutOfRangeException', None, (), ()),
ExceptionInfo('KeyError', 'System.Collections.Generic.KeyNotFoundException', None, (), ()),
),
),
ExceptionInfo('MemoryError', 'System.OutOfMemoryException', None, (), ()),
ExceptionInfo('NameError', 'IronPython.Runtime.UnboundNameException', None, (), (
ExceptionInfo('UnboundLocalError', 'IronPython.Runtime.UnboundLocalException', None, (), ()),
),
),
ExceptionInfo('ReferenceError', 'IronPython.Runtime.Exceptions.ReferenceException', None, (), ()),
ExceptionInfo('RuntimeError', 'IronPython.Runtime.Exceptions.RuntimeException', None, (), (
ExceptionInfo('NotImplementedError', 'System.NotImplementedException', None, (), ()),
),
),
ExceptionInfo('SyntaxError', 'Microsoft.Scripting.SyntaxErrorException', None, ('text', 'print_file_and_line', 'filename', 'lineno', 'offset', 'msg'), (
ExceptionInfo('IndentationError', 'IronPython.Runtime.Exceptions.IndentationException', None, (), (
ExceptionInfo('TabError', 'IronPython.Runtime.Exceptions.TabException', None, (), ()),
),
),
),
),
ExceptionInfo('SystemError', 'System.SystemException', None, (), ()),
ExceptionInfo('TypeError', 'IronPython.Runtime.Exceptions.TypeErrorException', None, (), (), baseMapping = 'Microsoft.Scripting.ArgumentTypeException'),
ExceptionInfo('ValueError', 'IronPython.Runtime.Exceptions.ValueErrorException', None, (), (
ExceptionInfo('UnicodeError', 'IronPython.Runtime.Exceptions.UnicodeException', None, (),
(
ExceptionInfo('UnicodeDecodeError', 'System.Text.DecoderFallbackException', ('encoding', 'object', 'start', 'end', 'reason'), ('start', 'reason', 'object', 'end', 'encoding'), ()),
ExceptionInfo('UnicodeEncodeError', 'System.Text.EncoderFallbackException', ('encoding', 'object', 'start', 'end', 'reason'), ('start', 'reason', 'object', 'end', 'encoding'), ()),
ExceptionInfo('UnicodeTranslateError', 'IronPython.Runtime.Exceptions.UnicodeTranslateException', None, ('start', 'reason', 'object', 'end', 'encoding'), ()),
),
),
),
baseMapping = 'System.ArgumentException'
),
),
),
ExceptionInfo('Warning', 'System.ComponentModel.WarningException', None, (), (
ExceptionInfo('DeprecationWarning', 'IronPython.Runtime.Exceptions.DeprecationWarningException', None, (), ()),
ExceptionInfo('PendingDeprecationWarning', 'IronPython.Runtime.Exceptions.PendingDeprecationWarningException', None, (), ()),
ExceptionInfo('RuntimeWarning', 'IronPython.Runtime.Exceptions.RuntimeWarningException', None, (), ()),
ExceptionInfo('SyntaxWarning', 'IronPython.Runtime.Exceptions.SyntaxWarningException', None, (), ()),
ExceptionInfo('UserWarning', 'IronPython.Runtime.Exceptions.UserWarningException', None, (), ()),
ExceptionInfo('FutureWarning', 'IronPython.Runtime.Exceptions.FutureWarningException', None, (), ()),
ExceptionInfo('ImportWarning', 'IronPython.Runtime.Exceptions.ImportWarningException', None, (), ()),
ExceptionInfo('UnicodeWarning', 'IronPython.Runtime.Exceptions.UnicodeWarningException', None, (), ()),
ExceptionInfo('BytesWarning', 'IronPython.Runtime.Exceptions.BytesWarningException', None, (), ()),
),
),
),
),
),
)
def get_exception_info(pythonName, curHierarchy):
for exception in curHierarchy.subclasses:
if exception.name == pythonName:
return exception
for exception in curHierarchy.subclasses:
res = get_exception_info(pythonName, exception)
if res is not None:
return res
def get_all_exceps(l, curHierarchy):
# if we have duplicate CLR exceptions (e.g. VMSError and Exception)
# only generate the one highest in the Python hierarchy
for exception in curHierarchy.subclasses:
found = False
for e in l:
if e.clrException == exception.clrException:
found = True
break
if not found:
l.append(exception)
for exception in curHierarchy.subclasses:
get_all_exceps(l, exception)
return l
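# For example (values taken from the hierarchy defined above):
#
#   info = get_exception_info('IOError', exceptionHierarchy)
#   info.clrException         # 'System.IO.IOException'
#   info.ConcreteParent.name  # 'EnvironmentError' (nearest ancestor defining fields)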
ip = clr.LoadAssemblyByPartialName('ironpython')
ms = clr.LoadAssemblyByPartialName('Microsoft.Scripting')
md = clr.LoadAssemblyByPartialName('Microsoft.Dynamic')
sysdll = clr.LoadAssemblyByPartialName('System')
def get_type(name):
if name.startswith('IronPython'): return ip.GetType(name)
if name.startswith('Microsoft.Scripting'):
res = ms.GetType(name)
return res if res is not None else md.GetType(name)
if name.startswith('System.ComponentModel'): return sysdll.GetType(name)
return System.Type.GetType(name)
def exception_distance(a):
distance = 0
while a.FullName != "System.Exception":
a = a.BaseType
distance += 1
return distance
def get_compare_name(ex_info):
return ex_info.baseMapping or ex_info.clrException
def compare_exceptions(a, b):
a, b = get_compare_name(a), get_compare_name(b)
ta = get_type(a)
tb = get_type(b)
if ta == None:
raise Exception("Exception class not found %s " % a)
if tb == None:
raise Exception("Exception class not found %s " % b)
if ta.IsSubclassOf(tb): return -1
if tb.IsSubclassOf(ta): return 1
da = exception_distance(ta)
db = exception_distance(tb)
    # put exceptions further from System.Exception first, those closer to it later...
if da != db: return db - da
return cmp(ta.Name, tb.Name)
def gen_topython_helper(cw):
cw.enter_block("private static BaseException/*!*/ ToPythonHelper(System.Exception clrException)")
allExceps = get_all_exceps([], exceptionHierarchy)
allExceps.sort(cmp=compare_exceptions)
for x in allExceps[:-1]: # skip System.Exception which is last...
if not x.silverlightSupported: cw.writeline('#if !SILVERLIGHT')
cw.writeline('if (clrException is %s) return %s;' % (x.ExceptionMappingName, x.MakeNewException()))
if not x.silverlightSupported: cw.writeline('#endif')
cw.writeline('return new BaseException(Exception);')
cw.exit_block()
def get_clr_name(e):
return e.replace('Error', '') + 'Exception'
FACTORY = """
public static Exception %(name)s(string format, params object[] args) {
return new %(clrname)s(string.Format(format, args));
}"""
def factory_gen(cw):
for e in pythonExcs:
cw.write(FACTORY, name=e, clrname=get_clr_name(e))
CLASS1 = """
[Serializable]
public class %(name)s : %(supername)s, IPythonAwareException {
private object _pyExceptionObject;
private List<DynamicStackFrame> _frames;
private TraceBack _traceback;
public %(name)s() : base() { }
public %(name)s(string msg) : base(msg) { }
public %(name)s(string message, Exception innerException)
: base(message, innerException) {
}
#if FEATURE_SERIALIZATION
protected %(name)s(SerializationInfo info, StreamingContext context) : base(info, context) { }
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2123:OverrideLinkDemandsShouldBeIdenticalToBase")]
public override void GetObjectData(SerializationInfo info, StreamingContext context) {
info.AddValue("frames", _frames);
info.AddValue("traceback", _traceback);
base.GetObjectData(info, context);
}
#endif
object IPythonAwareException.PythonException {
get {
if (_pyExceptionObject == null) {
var newEx = %(make_new_exception)s;
newEx.InitializeFromClr(this);
_pyExceptionObject = newEx;
}
return _pyExceptionObject;
}
set { _pyExceptionObject = value; }
}
List<DynamicStackFrame> IPythonAwareException.Frames {
get { return _frames; }
set { _frames = value; }
}
TraceBack IPythonAwareException.TraceBack {
get { return _traceback; }
set { _traceback = value; }
}
}
"""
def gen_one_exception(cw, e):
supername = getattr(exceptions, e).__bases__[0].__name__
if not supername in pythonExcs and supername != 'Warning':
supername = ''
cw.write(CLASS1, name=get_clr_name(e), supername=get_clr_name(supername), make_new_exception = get_exception_info(e, exceptionHierarchy).MakeNewException())
def gen_one_exception_maker(e):
def gen_one_exception_specialized(x):
return gen_one_exception(x, e)
return gen_one_exception_specialized
def fix_object(name):
if name == "object": return "@object"
return name
def gen_one_new_exception(cw, exception, parent):
if exception.fields:
exception.BeginSilverlight(cw)
cw.writeline('[MultiRuntimeAware]')
cw.writeline('private static PythonType %sStorage;' % (exception.name, ))
cw.enter_block('public static PythonType %s' % (exception.name, ))
cw.enter_block('get')
cw.enter_block('if (%sStorage == null)' % (exception.name, ))
cw.enter_block('lock (_pythonExceptionsLock)')
cw.writeline('%sStorage = CreateSubType(%s, typeof(_%s), msg => new %s(msg));' % (exception.name, exception.parent.PythonType, exception.name, exception.DotNetExceptionName))
cw.exit_block() # lock
cw.exit_block() # if
cw.writeline('return %sStorage;' % (exception.name, ))
cw.exit_block()
cw.exit_block()
cw.writeline()
cw.writeline('[PythonType("%s"), PythonHidden, DynamicBaseTypeAttribute, Serializable]' % exception.name)
if exception.ConcreteParent.fields:
cw.enter_block('public partial class _%s : _%s' % (exception.name, exception.ConcreteParent.name))
else:
cw.enter_block('public partial class _%s : %s' % (exception.name, exception.ConcreteParent.name))
for field in exception.fields:
cw.writeline('private object _%s;' % field)
if exception.fields:
cw.writeline('')
cw.writeline('public _%s() : base(%s) { }' % (exception.name, exception.name))
cw.writeline('public _%s(PythonType type) : base(type) { }' % (exception.name, ))
cw.writeline('')
cw.enter_block('public new static object __new__(PythonType cls, [ParamDictionary]IDictionary<object, object> kwArgs, params object[] args)')
cw.writeline('return Activator.CreateInstance(cls.UnderlyingSystemType, cls);')
cw.exit_block()
cw.writeline('')
if exception.args:
argstr = ', '.join(['object ' + fix_object(x) for x in exception.args])
cw.enter_block('public void __init__(%s)' % (argstr))
for arg in exception.args:
cw.writeline('_%s = %s;' % (arg, fix_object(arg)))
cw.writeline('args = PythonTuple.MakeTuple(' + ', '.join([fix_object(x) for x in exception.args]) + ');')
cw.exit_block()
cw.writeline('')
cw.enter_block('public override void __init__(params object[] args)')
cw.enter_block('if (args == null || args.Length != %d)' % (len(exception.args), ))
cw.writeline('throw PythonOps.TypeError("__init__ takes exactly %d arguments ({0} given)", args.Length);' % len(exception.args))
cw.exit_block()
cw.writeline('__init__(' + ', '.join([fix_object(x) for x in exception.args]) + ');')
cw.exit_block()
cw.writeline('')
for field in exception.fields:
cw.enter_block('public object %s' % fix_object(field))
cw.writeline('get { return _%s; }' % field)
cw.writeline('set { _%s = value; }' % field)
cw.exit_block()
cw.writeline('')
cw.exit_block()
cw.writeline('')
exception.EndSilverlight(cw)
else:
cw.writeline('[MultiRuntimeAware]')
cw.writeline('private static PythonType %sStorage;' % (exception.name, ))
cw.enter_block('public static PythonType %s' % (exception.name, ))
cw.enter_block('get')
cw.enter_block('if (%sStorage == null)' % (exception.name, ))
cw.enter_block('lock (_pythonExceptionsLock)')
cw.writeline('%sStorage = CreateSubType(%s, "%s", msg => new %s(msg));' % (exception.name, exception.parent.PythonType, exception.name, exception.DotNetExceptionName))
cw.exit_block() # lock
cw.exit_block() # if
cw.writeline('return %sStorage;' % (exception.name, ))
cw.exit_block()
cw.exit_block()
cw.writeline()
for child in exception.subclasses:
gen_one_new_exception(cw, child, exception)
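# Rough sketch of what the no-fields branch above emits for ImportError (whose
# parent in exceptionHierarchy is StandardError); brace placement assumes the
# code writer's enter_block/exit_block wrap each block in '{' / '}':
#
#   [MultiRuntimeAware]
#   private static PythonType ImportErrorStorage;
#   public static PythonType ImportError {
#       get {
#           if (ImportErrorStorage == null) {
#               lock (_pythonExceptionsLock) {
#                   ImportErrorStorage = CreateSubType(StandardError, "ImportError", msg => new ImportException(msg));
#               }
#           }
#           return ImportErrorStorage;
#       }
#   }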
def newstyle_gen(cw):
for child in exceptionHierarchy.subclasses:
gen_one_new_exception(cw, child, exceptionHierarchy)
def gen_one_exception_module_entry(cw, exception, parent):
exception.BeginSilverlight(cw)
cw.write("public static PythonType %s = %s;" % (exception.name, exception.InternalPythonType))
exception.EndSilverlight(cw)
for child in exception.subclasses:
gen_one_exception_module_entry(cw, child, exception)
def module_gen(cw):
cw.write("public static object BaseException = DynamicHelpers.GetPythonTypeFromType(typeof(PythonExceptions.BaseException));")
for child in exceptionHierarchy.subclasses:
gen_one_exception_module_entry(cw, child, exceptionHierarchy)
def gen_one_exception_builtin_entry(cw, exception, parent):
exception.BeginSilverlight(cw)
cw.enter_block("public static PythonType %s" % (exception.name, ))
if exception.fields:
cw.write('get { return %s; }' % (exception.InternalPythonType, ))
else:
cw.write('get { return %s; }' % (exception.InternalPythonType, ))
cw.exit_block()
exception.EndSilverlight(cw)
for child in exception.subclasses:
gen_one_exception_builtin_entry(cw, child, exception)
def builtin_gen(cw):
for child in exceptionHierarchy.subclasses:
gen_one_exception_builtin_entry(cw, child, exceptionHierarchy)
def main():
gens = [
("ToPython Exception Helper", gen_topython_helper),
("Exception Factories", factory_gen),
("Python New-Style Exceptions", newstyle_gen),
("builtin exceptions", builtin_gen),
]
for e in pythonExcs:
gens.append((get_clr_name(e), gen_one_exception_maker(e)))
return generate(*gens)
if __name__ == "__main__":
main()

# --- dask/array/tests/test_slicing.py (abhinavralhan/dask @ e840ba38, BSD-3-Clause) ---

import itertools
from operator import getitem
import pytest
from toolz import merge
np = pytest.importorskip('numpy')
import dask
import dask.array as da
from dask.array.slicing import (_sanitize_index_element, _slice_1d,
new_blockdim, sanitize_index, slice_array,
take, normalize_index, slicing_plan)
from dask.array.utils import assert_eq, same_keys
def test_slice_1d():
expected = {0: slice(10, 25, 1), 1: slice(None, None, None), 2: slice(0, 1, 1)}
result = _slice_1d(100, [25] * 4, slice(10, 51, None))
assert expected == result
# x[100:12:-3]
expected = {0: slice(-2, -8, -3),
1: slice(-1, -21, -3),
2: slice(-3, -21, -3),
3: slice(-2, -21, -3),
4: slice(-1, -21, -3)}
result = _slice_1d(100, [20] * 5, slice(100, 12, -3))
assert expected == result
# x[102::-3]
expected = {0: slice(-2, -21, -3),
1: slice(-1, -21, -3),
2: slice(-3, -21, -3),
3: slice(-2, -21, -3),
4: slice(-1, -21, -3)}
result = _slice_1d(100, [20] * 5, slice(102, None, -3))
assert expected == result
# x[::-4]
expected = {0: slice(-1, -21, -4),
1: slice(-1, -21, -4),
2: slice(-1, -21, -4),
3: slice(-1, -21, -4),
4: slice(-1, -21, -4)}
result = _slice_1d(100, [20] * 5, slice(None, None, -4))
assert expected == result
# x[::-7]
expected = {0: slice(-5, -21, -7),
1: slice(-4, -21, -7),
2: slice(-3, -21, -7),
3: slice(-2, -21, -7),
4: slice(-1, -21, -7)}
result = _slice_1d(100, [20] * 5, slice(None, None, -7))
assert expected == result
# x=range(115)
# x[::-7]
expected = {0: slice(-7, -24, -7),
1: slice(-2, -24, -7),
2: slice(-4, -24, -7),
3: slice(-6, -24, -7),
4: slice(-1, -24, -7)}
result = _slice_1d(115, [23] * 5, slice(None, None, -7))
assert expected == result
# x[79::-3]
expected = {0: slice(-1, -21, -3),
1: slice(-3, -21, -3),
2: slice(-2, -21, -3),
3: slice(-1, -21, -3)}
result = _slice_1d(100, [20] * 5, slice(79, None, -3))
assert expected == result
# x[-1:-8:-1]
expected = {4: slice(-1, -8, -1)}
result = _slice_1d(100, [20, 20, 20, 20, 20], slice(-1, 92, -1))
assert expected == result
# x[20:0:-1]
expected = {0: slice(-1, -20, -1),
1: slice(-20, -21, -1)}
result = _slice_1d(100, [20, 20, 20, 20, 20], slice(20, 0, -1))
assert expected == result
# x[:0]
expected = {}
result = _slice_1d(100, [20, 20, 20, 20, 20], slice(0))
assert result
# x=range(99)
expected = {0: slice(-3, -21, -3),
1: slice(-2, -21, -3),
2: slice(-1, -21, -3),
3: slice(-2, -20, -3),
4: slice(-1, -21, -3)}
# This array has non-uniformly sized blocks
result = _slice_1d(99, [20, 20, 20, 19, 20], slice(100, None, -3))
assert expected == result
# x=range(104)
# x[::-3]
expected = {0: slice(-1, -21, -3),
1: slice(-3, -24, -3),
2: slice(-3, -28, -3),
3: slice(-1, -14, -3),
4: slice(-1, -22, -3)}
# This array has non-uniformly sized blocks
result = _slice_1d(104, [20, 23, 27, 13, 21], slice(None, None, -3))
assert expected == result
# x=range(104)
# x[:27:-3]
expected = {1: slice(-3, -16, -3),
2: slice(-3, -28, -3),
3: slice(-1, -14, -3),
4: slice(-1, -22, -3)}
# This array has non-uniformly sized blocks
result = _slice_1d(104, [20, 23, 27, 13, 21], slice(None, 27, -3))
assert expected == result
# x=range(104)
# x[100:27:-3]
expected = {1: slice(-3, -16, -3),
2: slice(-3, -28, -3),
3: slice(-1, -14, -3),
4: slice(-4, -22, -3)}
# This array has non-uniformly sized blocks
result = _slice_1d(104, [20, 23, 27, 13, 21], slice(100, 27, -3))
assert expected == result
# x=range(1000000000000)
# x[1000:]
expected = {0: slice(1000, 1000000000, 1)}
expected.update({ii: slice(None, None, None) for ii in range(1, 1000)})
# This array is large
result = _slice_1d(1000000000000,
[1000000000] * 1000,
slice(1000, None, None))
assert expected == result
def test_slice_singleton_value_on_boundary():
assert _slice_1d(15, [5, 5, 5], 10) == {2: 0}
assert _slice_1d(30, (5, 5, 5, 5, 5, 5), 10) == {2: 0}
def test_slice_array_1d():
#x[24::2]
expected = {('y', 0): (getitem, ('x', 0), (slice(24, 25, 2),)),
('y', 1): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 2): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 3): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [[25] * 4], [slice(24, None, 2)])
assert expected == result
#x[26::2]
expected = {('y', 0): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 1): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 2): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [[25] * 4], [slice(26, None, 2)])
assert expected == result
#x[24::2]
expected = {('y', 0): (getitem, ('x', 0), (slice(24, 25, 2),)),
('y', 1): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 2): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 3): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [(25, ) * 4], (slice(24, None, 2), ))
assert expected == result
#x[26::2]
expected = {('y', 0): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 1): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 2): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [(25, ) * 4], (slice(26, None, 2), ))
assert expected == result
def test_slice_array_2d():
#2d slices: x[13::2,10::1]
expected = {('y', 0, 0): (getitem, ('x', 0, 0),
(slice(13, 20, 2), slice(10, 20, 1))),
('y', 0, 1): (getitem, ('x', 0, 1),
(slice(13, 20, 2), slice(None, None, None))),
('y', 0, 2): (getitem, ('x', 0, 2),
(slice(13, 20, 2), slice(None, None, None)))}
result, chunks = slice_array('y', 'x', [[20], [20, 20, 5]],
[slice(13, None, 2), slice(10, None, 1)])
assert expected == result
#2d slices with one dimension: x[5,10::1]
expected = {('y', 0): (getitem, ('x', 0, 0),
(5, slice(10, 20, 1))),
('y', 1): (getitem, ('x', 0, 1),
(5, slice(None, None, None))),
('y', 2): (getitem, ('x', 0, 2),
(5, slice(None, None, None)))}
result, chunks = slice_array('y', 'x', ([20], [20, 20, 5]),
[5, slice(10, None, 1)])
assert expected == result
def test_slice_optimizations():
#bar[:]
expected = {('foo', 0): ('bar', 0)}
result, chunks = slice_array('foo', 'bar', [[100]], (slice(None, None, None),))
assert expected == result
#bar[:,:,:]
expected = {('foo', 0): ('bar', 0),
('foo', 1): ('bar', 1),
('foo', 2): ('bar', 2)}
result, chunks = slice_array('foo', 'bar', [(100, 1000, 10000)],
(slice(None, None, None),
slice(None, None, None),
slice(None, None, None)))
assert expected == result
def test_slicing_with_singleton_indices():
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]), (slice(0, 5), 8))
expected = {('y', 0): (getitem, ('x', 0, 1), (slice(None, None, None), 3))}
assert expected == result
def test_slicing_with_newaxis():
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(slice(0, 3), None, slice(None, None, None)))
expected = {
('y', 0, 0, 0): (getitem, ('x', 0, 0),
(slice(0, 3, 1), None, slice(None, None, None))),
('y', 0, 0, 1): (getitem, ('x', 0, 1),
(slice(0, 3, 1), None, slice(None, None, None)))}
assert expected == result
assert chunks == ((3,), (1,), (5, 5))
def test_take():
chunks, dsk = take('y', 'x', [(20, 20, 20, 20)], [5, 1, 47, 3], axis=0)
expected = {('y', 0): (getitem, ('x', 0), (np.array([5, 1]),)),
('y', 1): (getitem, ('x', 2), (np.array([7]),)),
('y', 2): (getitem, ('x', 0), (np.array([3]),))}
np.testing.assert_equal(sorted(dsk.items()), sorted(expected.items()))
assert chunks == ((2, 1, 1),)
chunks, dsk = take('y', 'x', [(20, 20, 20, 20), (20, 20)], [
5, 1, 47, 3], axis=0)
expected = {('y', 0, 0): (getitem, ('x', 0, 0), (np.array([5, 1]), slice(None, None, None))),
('y', 0, 1): (getitem, ('x', 0, 1), (np.array([5, 1]), slice(None, None, None))),
('y', 1, 0): (getitem, ('x', 2, 0), (np.array([7]), slice(None, None, None))),
('y', 1, 1): (getitem, ('x', 2, 1), (np.array([7]), slice(None, None, None))),
('y', 2, 0): (getitem, ('x', 0, 0), (np.array([3]), slice(None, None, None))),
('y', 2, 1): (getitem, ('x', 0, 1), (np.array([3]), slice(None, None, None)))}
np.testing.assert_equal(sorted(dsk.items()), sorted(expected.items()))
assert chunks == ((2, 1, 1), (20, 20))
def test_take_sorted():
chunks, dsk = take('y', 'x', [(20, 20, 20, 20)], [1, 3, 5, 47], axis=0)
expected = {('y', 0): (getitem, ('x', 0), ([1, 3, 5],)),
('y', 1): (getitem, ('x', 2), ([7],))}
np.testing.assert_equal(dsk, expected)
assert chunks == ((3, 1),)
chunks, dsk = take('y', 'x', [(20, 20, 20, 20), (20, 20)], [1, 3, 5, 37], axis=1)
expected = merge(dict((('y', i, 0), (getitem, ('x', i, 0),
(slice(None, None, None), [1, 3, 5])))
for i in range(4)),
dict((('y', i, 1), (getitem, ('x', i, 1),
(slice(None, None, None), [17])))
for i in range(4)))
np.testing.assert_equal(dsk, expected)
assert chunks == ((20, 20, 20, 20), (3, 1))
def test_slicing_chunks():
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(1, np.array([2, 0, 3])))
assert chunks == ((3,), )
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(slice(0, 7), np.array([2, 0, 3])))
assert chunks == ((5, 2), (3, ))
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(slice(0, 7), 1))
assert chunks == ((5, 2), )
def test_slicing_with_numpy_arrays():
a, bd1 = slice_array('y', 'x', ((3, 3, 3, 1), (3, 3, 3, 1)),
(np.array([1, 2, 9]), slice(None, None, None)))
b, bd2 = slice_array('y', 'x', ((3, 3, 3, 1), (3, 3, 3, 1)),
(np.array([1, 2, 9]), slice(None, None, None)))
assert bd1 == bd2
np.testing.assert_equal(a, b)
i = [False, True, True, False, False,
False, False, False, False, True]
index = (i, slice(None, None, None))
index = normalize_index(index, (10, 10))
c, bd3 = slice_array('y', 'x', ((3, 3, 3, 1), (3, 3, 3, 1)), index)
assert bd1 == bd3
np.testing.assert_equal(a, c)
def test_slicing_and_chunks():
o = da.ones((24, 16), chunks=((4, 8, 8, 4), (2, 6, 6, 2)))
t = o[4:-4, 2:-2]
assert t.chunks == ((8, 8), (6, 6))
def test_slicing_identities():
a = da.ones((24, 16), chunks=((4, 8, 8, 4), (2, 6, 6, 2)))
assert a is a[slice(None)]
assert a is a[:]
assert a is a[::]
assert a is a[...]
assert a is a[0:]
assert a is a[0::]
assert a is a[::1]
assert a is a[0:len(a)]
assert a is a[0::1]
assert a is a[0:len(a):1]
def test_slice_stop_0():
# from gh-125
a = da.ones(10, chunks=(10,))[:0].compute()
b = np.ones(10)[:0]
assert_eq(a, b)
def test_slice_list_then_None():
x = da.zeros(shape=(5, 5), chunks=(3, 3))
y = x[[2, 1]][None]
assert_eq(y, np.zeros((1, 2, 5)))
class ReturnItem(object):
def __getitem__(self, key):
return key
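# Minimal illustration (assumed values, matching plain Python indexing semantics):
#   I = ReturnItem()
#   I[:5]      -> slice(None, 5, None)
#   I[::-1]    -> slice(None, None, -1)
#   I[..., 0]  -> (Ellipsis, 0)
# which lets the exhaustive tests below build indexer objects programmatically.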
@pytest.mark.skip(reason='really long test')
def test_slicing_exhaustively():
x = np.random.rand(6, 7, 8)
a = da.from_array(x, chunks=(3, 3, 3))
I = ReturnItem()
# independent indexing along different axes
indexers = [0, -2, I[:], I[:5], [0, 1], [0, 1, 2], [4, 2], I[::-1], None, I[:0], []]
for i in indexers:
assert_eq(x[i], a[i]), i
for j in indexers:
assert_eq(x[i][:, j], a[i][:, j]), (i, j)
assert_eq(x[:, i][j], a[:, i][j]), (i, j)
for k in indexers:
assert_eq(x[..., i][:, j][k], a[..., i][:, j][k]), (i, j, k)
# repeated indexing along the first axis
first_indexers = [I[:], I[:5], np.arange(5), [3, 1, 4, 5, 0], np.arange(6) < 6]
second_indexers = [0, -1, 3, I[:], I[:3], I[2:-1], [2, 4], [], I[:0]]
for i in first_indexers:
for j in second_indexers:
assert_eq(x[i][j], a[i][j]), (i, j)
def test_slicing_with_negative_step_flops_keys():
x = da.arange(10, chunks=5)
y = x[:1:-1]
assert (x.name, 1) in y.dask[(y.name, 0)]
assert (x.name, 0) in y.dask[(y.name, 1)]
assert_eq(y, np.arange(10)[:1:-1])
assert y.chunks == ((5, 3),)
assert y.dask[(y.name, 0)] == (getitem, (x.name, 1),
(slice(-1, -6, -1),))
assert y.dask[(y.name, 1)] == (getitem, (x.name, 0),
(slice(-1, -4, -1),))
def test_empty_slice():
x = da.ones((5, 5), chunks=(2, 2), dtype='i4')
y = x[:0]
assert_eq(y, np.ones((5, 5), dtype='i4')[:0])
def test_multiple_list_slicing():
x = np.random.rand(6, 7, 8)
a = da.from_array(x, chunks=(3, 3, 3))
assert_eq(x[:, [0, 1, 2]][[0, 1]], a[:, [0, 1, 2]][[0, 1]])
@pytest.mark.skipif(np.__version__ < '1.13.0',
reason='boolean lists are not treated as boolean indexes')
def test_boolean_list_slicing():
with pytest.raises(IndexError):
da.asarray(range(2))[[True]]
with pytest.raises(IndexError):
da.asarray(range(2))[[False, False, False]]
x = np.arange(5)
ind = [True, False, False, False, True]
assert_eq(da.asarray(x)[ind], x[ind])
# https://github.com/dask/dask/issues/3706
ind = [True]
assert_eq(da.asarray([0])[ind], np.arange(1)[ind])
def test_boolean_numpy_array_slicing():
with pytest.raises(IndexError):
da.asarray(range(2))[np.array([True])]
with pytest.raises(IndexError):
da.asarray(range(2))[np.array([False, False, False])]
x = np.arange(5)
ind = np.array([True, False, False, False, True])
assert_eq(da.asarray(x)[ind], x[ind])
# https://github.com/dask/dask/issues/3706
ind = np.array([True])
assert_eq(da.asarray([0])[ind], np.arange(1)[ind])
def test_empty_list():
x = np.ones((5, 5, 5), dtype='i4')
dx = da.from_array(x, chunks=2)
assert_eq(dx[[], :3, :2], x[[], :3, :2])
assert_eq(dx[:3, [], :2], x[:3, [], :2])
assert_eq(dx[:3, :2, []], x[:3, :2, []])
def test_uneven_chunks():
assert da.ones(20, chunks=5)[::2].chunks == ((3, 2, 3, 2),)
def test_new_blockdim():
assert new_blockdim(20, [5, 5, 5, 5], slice(0, None, 2)) == [3, 2, 3, 2]
def test_slicing_consistent_names():
x = np.arange(100).reshape((10, 10))
a = da.from_array(x, chunks=(5, 5))
assert same_keys(a[0], a[0])
assert same_keys(a[:, [1, 2, 3]], a[:, [1, 2, 3]])
assert same_keys(a[:, 5:2:-1], a[:, 5:2:-1])
assert same_keys(a[0, ...], a[0, ...])
assert same_keys(a[...], a[...])
assert same_keys(a[[1, 3, 5]], a[[1, 3, 5]])
assert same_keys(a[-11:11], a[:])
assert same_keys(a[-11:-9], a[:1])
assert same_keys(a[-1], a[9])
assert same_keys(a[0::-1], a[0:-11:-1])
def test_slicing_consistent_names_after_normalization():
x = da.zeros(10, chunks=(5,))
assert same_keys(x[0:], x[:10])
assert same_keys(x[0:], x[0:10])
assert same_keys(x[0:], x[0:10:1])
assert same_keys(x[:], x[0:10:1])
def test_sanitize_index_element():
with pytest.raises(TypeError):
_sanitize_index_element('Hello!')
def test_sanitize_index():
pd = pytest.importorskip('pandas')
with pytest.raises(TypeError):
sanitize_index('Hello!')
np.testing.assert_equal(sanitize_index(pd.Series([1, 2, 3])), [1, 2, 3])
np.testing.assert_equal(sanitize_index((1, 2, 3)), [1, 2, 3])
def test_uneven_blockdims():
blockdims = ((31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30), (100,))
index = (slice(240, 270), slice(None))
dsk_out, bd_out = slice_array('in', 'out', blockdims, index)
sol = {('in', 0, 0): (getitem, ('out', 7, 0), (slice(28, 31, 1), slice(None))),
('in', 1, 0): (getitem, ('out', 8, 0), (slice(0, 27, 1), slice(None)))}
assert dsk_out == sol
assert bd_out == ((3, 27), (100,))
blockdims = ((31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30),) * 2
index = (slice(240, 270), slice(180, 230))
dsk_out, bd_out = slice_array('in', 'out', blockdims, index)
sol = {('in', 0, 0): (getitem, ('out', 7, 5), (slice(28, 31, 1), slice(29, 30, 1))),
('in', 0, 1): (getitem, ('out', 7, 6), (slice(28, 31, 1), slice(None))),
('in', 0, 2): (getitem, ('out', 7, 7), (slice(28, 31, 1), slice(0, 18, 1))),
('in', 1, 0): (getitem, ('out', 8, 5), (slice(0, 27, 1), slice(29, 30, 1))),
('in', 1, 1): (getitem, ('out', 8, 6), (slice(0, 27, 1), slice(None))),
('in', 1, 2): (getitem, ('out', 8, 7), (slice(0, 27, 1), slice(0, 18, 1)))}
assert dsk_out == sol
assert bd_out == ((3, 27), (1, 31, 18))
def test_oob_check():
x = da.ones(5, chunks=(2,))
with pytest.raises(IndexError):
x[6]
with pytest.raises(IndexError):
x[[6]]
with pytest.raises(IndexError):
x[-10]
with pytest.raises(IndexError):
x[[-10]]
with pytest.raises(IndexError):
x[0, 0]
@pytest.mark.parametrize('idx_chunks', [None, 3, 2, 1])
@pytest.mark.parametrize('x_chunks', [None, (3, 5), (2, 3), (1, 2), (1, 1)])
def test_index_with_int_dask_array(x_chunks, idx_chunks):
# test data is crafted to stress use cases:
# - pick from different chunks of x out of order
# - a chunk of x contains no matches
# - only one chunk of x
x = np.array([[10, 20, 30, 40, 50],
[60, 70, 80, 90, 100],
[110, 120, 130, 140, 150]])
idx = np.array([3, 0, 1])
expect = np.array([[40, 10, 20],
[90, 60, 70],
[140, 110, 120]])
if x_chunks is not None:
x = da.from_array(x, chunks=x_chunks)
if idx_chunks is not None:
idx = da.from_array(idx, chunks=idx_chunks)
assert_eq(x[:, idx], expect)
assert_eq(x.T[idx, :], expect.T)
@pytest.mark.parametrize('chunks', [1, 2, 3])
def test_index_with_int_dask_array_0d(chunks):
# Slice by 0-dimensional array
x = da.from_array([[10, 20, 30],
[40, 50, 60]], chunks=chunks)
idx0 = da.from_array(1, chunks=1)
assert_eq(x[idx0, :], x[1, :])
assert_eq(x[:, idx0], x[:, 1])
@pytest.mark.parametrize('chunks', [1, 2, 3, 4, 5])
def test_index_with_int_dask_array_nanchunks(chunks):
# Slice by array with nan-sized chunks
a = da.arange(-2, 3, chunks=chunks)
assert_eq(a[a.nonzero()], np.array([-2, -1, 1, 2]))
# Edge case: the nan-sized chunks resolve to size 0
a = da.zeros(5, chunks=chunks)
assert_eq(a[a.nonzero()], np.array([]))
@pytest.mark.parametrize('chunks', [2, 4])
def test_index_with_int_dask_array_negindex(chunks):
a = da.arange(4, chunks=chunks)
idx = da.from_array([-1, -4], chunks=1)
assert_eq(a[idx], np.array([3, 0]))
@pytest.mark.parametrize('chunks', [2, 4])
def test_index_with_int_dask_array_indexerror(chunks):
a = da.arange(4, chunks=chunks)
idx = da.from_array([4], chunks=1)
with pytest.raises(IndexError):
a[idx].compute()
idx = da.from_array([-5], chunks=1)
with pytest.raises(IndexError):
a[idx].compute()
@pytest.mark.parametrize('dtype', ['int8', 'int16', 'int32', 'int64',
'uint8', 'uint16', 'uint32', 'uint64'])
def test_index_with_int_dask_array_dtypes(dtype):
a = da.from_array([10, 20, 30, 40], chunks=-1)
idx = da.from_array(np.array([1, 2]).astype(dtype), chunks=1)
assert_eq(a[idx], np.array([20, 30]))
def test_index_with_int_dask_array_nocompute():
""" Test that when the indices are a dask array
they are not accidentally computed
"""
def crash():
raise NotImplementedError()
x = da.arange(5, chunks=-1)
idx = da.Array({('x', 0): (crash,)}, name='x',
chunks=((2,), ), dtype=np.int64)
result = x[idx]
with pytest.raises(NotImplementedError):
result.compute()
def test_index_with_bool_dask_array():
x = np.arange(36).reshape((6, 6))
d = da.from_array(x, chunks=(3, 3))
ind = np.asarray([True, True, False, True, False, False], dtype=bool)
ind = da.from_array(ind, chunks=2)
for index in [ind, (slice(1, 9, 2), ind), (ind, slice(2, 8, 1))]:
x_index = dask.compute(index)[0]
assert_eq(x[x_index], d[index])
def test_index_with_bool_dask_array_2():
x = np.random.random((10, 10, 10))
ind = np.random.random(10) > 0.5
d = da.from_array(x, chunks=(3, 4, 5))
dind = da.from_array(ind, chunks=4)
index = [slice(1, 9, 1), slice(None)]
for i in range(x.ndim):
index2 = index[:]
index2.insert(i, dind)
index3 = index[:]
index3.insert(i, ind)
assert_eq(x[tuple(index3)], d[tuple(index2)])
@pytest.mark.xfail
def test_cull():
x = da.ones(1000, chunks=(10,))
for slc in [1, slice(0, 30), slice(0, None, 100)]:
y = x[slc]
assert len(y.dask) < len(x.dask)
@pytest.mark.parametrize('shape', [(2,), (2, 3), (2, 3, 5)])
@pytest.mark.parametrize('index', [(Ellipsis,),
(None, Ellipsis),
(Ellipsis, None),
(None, Ellipsis, None)])
def test_slicing_with_Nones(shape, index):
x = np.random.random(shape)
d = da.from_array(x, chunks=shape)
assert_eq(x[index], d[index])
indexers = [Ellipsis, slice(2), 0, 1, -2, -1, slice(-2, None), None]
"""
# We comment this out because it is 4096 tests
@pytest.mark.parametrize('a', indexers)
@pytest.mark.parametrize('b', indexers)
@pytest.mark.parametrize('c', indexers)
@pytest.mark.parametrize('d', indexers)
def test_slicing_none_int_ellipses(a, b, c, d):
if (a, b, c, d).count(Ellipsis) > 1:
return
shape = (2,3,5,7,11)
x = np.arange(np.prod(shape)).reshape(shape)
y = da.core.asarray(x)
xx = x[a, b, c, d]
yy = y[a, b, c, d]
assert_eq(xx, yy)
"""
def test_slicing_integer_no_warnings():
# https://github.com/dask/dask/pull/2457/
X = da.random.random((100, 2), (2, 2))
idx = np.array([0, 0, 1, 1])
with pytest.warns(None) as rec:
X[idx].compute()
assert len(rec) == 0
@pytest.mark.slow
def test_slicing_none_int_ellipses():
shape = (2, 3, 5, 7, 11)
x = np.arange(np.prod(shape)).reshape(shape)
y = da.core.asarray(x)
for ind in itertools.product(indexers, indexers, indexers, indexers):
if ind.count(Ellipsis) > 1:
continue
assert_eq(x[ind], y[ind])
def test_None_overlap_int():
a, b, c, d = (0, slice(None, 2, None), None, Ellipsis)
shape = (2, 3, 5, 7, 11)
x = np.arange(np.prod(shape)).reshape(shape)
y = da.core.asarray(x)
xx = x[a, b, c, d]
yy = y[a, b, c, d]
assert_eq(xx, yy)
def test_negative_n_slicing():
assert_eq(da.ones(2, chunks=2)[-2], np.ones(2)[-2])
def test_negative_list_slicing():
x = np.arange(5)
dx = da.from_array(x, chunks=2)
assert_eq(dx[[0, -5]], x[[0, -5]])
assert_eq(dx[[4, -1]], x[[4, -1]])
def test_permit_oob_slices():
x = np.arange(5)
dx = da.from_array(x, chunks=2)
assert_eq(x[-102:], dx[-102:])
assert_eq(x[102:], dx[102:])
assert_eq(x[:102], dx[:102])
assert_eq(x[:-102], dx[:-102])
def test_normalize_index():
assert normalize_index((Ellipsis, None), (10,)) == (slice(None), None)
assert normalize_index(5, (np.nan,)) == (5,)
assert normalize_index(-5, (np.nan,)) == (-5,)
(result,) = normalize_index([-5, -2, 1], (np.nan,))
assert result.tolist() == [-5, -2, 1]
assert normalize_index(slice(-5, -2), (np.nan,)) == (slice(-5, -2),)
def test_take_semi_sorted():
x = da.ones(10, chunks=(5,))
index = np.arange(15) % 10
y = x[index]
assert y.chunks == ((5, 5, 5),)
@pytest.mark.parametrize('chunks,index,expected', [
(
(5, 5, 5),
np.arange(5, 15) % 10,
[(1, np.arange(5)),
(0, np.arange(5))]
),
(
(5, 5, 5, 5),
np.arange(20) // 2,
[(0, np.arange(10) // 2),
(1, np.arange(10) // 2)]
),
(
(10, 10),
[15, 2, 3, 15],
[(1, [5]),
(0, [2, 3]),
(1, [5])]
),
])
def test_slicing_plan(chunks, index, expected):
plan = slicing_plan(chunks, index)
assert len(plan) == len(expected)
for (i, x), (j, y) in zip(plan, expected):
assert i == j
assert len(x) == len(y)
assert (x == y).all()
def test_pathological_unsorted_slicing():
x = da.ones(100, chunks=10)
# [0, 10, 20, ... 90, 1, 11, 21, ... 91, ...]
index = np.arange(100).reshape(10, 10).ravel(order='F')
with pytest.warns(da.PerformanceWarning) as info:
x[index]
assert '10' in str(info.list[0])
assert 'out-of-order' in str(info.list[0])
@pytest.mark.parametrize('params', [(2, 2, 1), (5, 3, 2)])
def test_setitem_with_different_chunks_preserves_shape(params):
""" Reproducer for https://github.com/dask/dask/issues/3730.
Mutating based on an array with different chunks can cause new chunks to be
used. We need to ensure those new chunk sizes are applied to the mutated
array, otherwise the array won't generate the correct keys.
"""
array_size, chunk_size1, chunk_size2 = params
x = da.zeros(array_size, chunks=chunk_size1)
mask = da.zeros(array_size, chunks=chunk_size2)
x[mask] = 1
result = x.compute()
assert x.shape == result.shape
def test_gh3579():
assert_eq(np.arange(10)[0::-1], da.arange(10, chunks=3)[0::-1])
assert_eq(np.arange(10)[::-1], da.arange(10, chunks=3)[::-1])
@pytest.mark.parametrize('lock', [True, False])
@pytest.mark.parametrize('asarray', [True, False])
@pytest.mark.parametrize('fancy', [True, False])
def test_gh4043(lock, asarray, fancy):
a1 = da.from_array(np.zeros(3,), chunks=1, asarray=asarray, lock=lock, fancy=fancy)
a2 = da.from_array(np.ones(3,), chunks=1, asarray=asarray, lock=lock, fancy=fancy)
al = da.stack([a1, a2])
assert_eq(al, al)
import itertools
from operator import getitem
import pytest
from toolz import merge
np = pytest.importorskip('numpy')
import dask
import dask.array as da
from dask.array.slicing import (_sanitize_index_element, _slice_1d,
new_blockdim, sanitize_index, slice_array,
take, normalize_index, slicing_plan)
from dask.array.utils import assert_eq, same_keys
def test_slice_1d():
expected = {0: slice(10, 25, 1), 1: slice(None, None, None), 2: slice(0, 1, 1)}
result = _slice_1d(100, [25] * 4, slice(10, 51, None))
assert expected == result
expected = {0: slice(-2, -8, -3),
1: slice(-1, -21, -3),
2: slice(-3, -21, -3),
3: slice(-2, -21, -3),
4: slice(-1, -21, -3)}
result = _slice_1d(100, [20] * 5, slice(100, 12, -3))
assert expected == result
expected = {0: slice(-2, -21, -3),
1: slice(-1, -21, -3),
2: slice(-3, -21, -3),
3: slice(-2, -21, -3),
4: slice(-1, -21, -3)}
result = _slice_1d(100, [20] * 5, slice(102, None, -3))
assert expected == result
expected = {0: slice(-1, -21, -4),
1: slice(-1, -21, -4),
2: slice(-1, -21, -4),
3: slice(-1, -21, -4),
4: slice(-1, -21, -4)}
result = _slice_1d(100, [20] * 5, slice(None, None, -4))
assert expected == result
expected = {0: slice(-5, -21, -7),
1: slice(-4, -21, -7),
2: slice(-3, -21, -7),
3: slice(-2, -21, -7),
4: slice(-1, -21, -7)}
result = _slice_1d(100, [20] * 5, slice(None, None, -7))
assert expected == result
expected = {0: slice(-7, -24, -7),
1: slice(-2, -24, -7),
2: slice(-4, -24, -7),
3: slice(-6, -24, -7),
4: slice(-1, -24, -7)}
result = _slice_1d(115, [23] * 5, slice(None, None, -7))
assert expected == result
expected = {0: slice(-1, -21, -3),
1: slice(-3, -21, -3),
2: slice(-2, -21, -3),
3: slice(-1, -21, -3)}
result = _slice_1d(100, [20] * 5, slice(79, None, -3))
assert expected == result
expected = {4: slice(-1, -8, -1)}
result = _slice_1d(100, [20, 20, 20, 20, 20], slice(-1, 92, -1))
assert expected == result
expected = {0: slice(-1, -20, -1),
1: slice(-20, -21, -1)}
result = _slice_1d(100, [20, 20, 20, 20, 20], slice(20, 0, -1))
assert expected == result
expected = {}
result = _slice_1d(100, [20, 20, 20, 20, 20], slice(0))
assert result
expected = {0: slice(-3, -21, -3),
1: slice(-2, -21, -3),
2: slice(-1, -21, -3),
3: slice(-2, -20, -3),
4: slice(-1, -21, -3)}
result = _slice_1d(99, [20, 20, 20, 19, 20], slice(100, None, -3))
assert expected == result
expected = {0: slice(-1, -21, -3),
1: slice(-3, -24, -3),
2: slice(-3, -28, -3),
3: slice(-1, -14, -3),
4: slice(-1, -22, -3)}
result = _slice_1d(104, [20, 23, 27, 13, 21], slice(None, None, -3))
assert expected == result
expected = {1: slice(-3, -16, -3),
2: slice(-3, -28, -3),
3: slice(-1, -14, -3),
4: slice(-1, -22, -3)}
result = _slice_1d(104, [20, 23, 27, 13, 21], slice(None, 27, -3))
assert expected == result
expected = {1: slice(-3, -16, -3),
2: slice(-3, -28, -3),
3: slice(-1, -14, -3),
4: slice(-4, -22, -3)}
result = _slice_1d(104, [20, 23, 27, 13, 21], slice(100, 27, -3))
assert expected == result
expected = {0: slice(1000, 1000000000, 1)}
expected.update({ii: slice(None, None, None) for ii in range(1, 1000)})
result = _slice_1d(1000000000000,
[1000000000] * 1000,
slice(1000, None, None))
assert expected == result
def test_slice_singleton_value_on_boundary():
assert _slice_1d(15, [5, 5, 5], 10) == {2: 0}
assert _slice_1d(30, (5, 5, 5, 5, 5, 5), 10) == {2: 0}
def test_slice_array_1d():
expected = {('y', 0): (getitem, ('x', 0), (slice(24, 25, 2),)),
('y', 1): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 2): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 3): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [[25] * 4], [slice(24, None, 2)])
assert expected == result
expected = {('y', 0): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 1): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 2): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [[25] * 4], [slice(26, None, 2)])
assert expected == result
expected = {('y', 0): (getitem, ('x', 0), (slice(24, 25, 2),)),
('y', 1): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 2): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 3): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [(25, ) * 4], (slice(24, None, 2), ))
assert expected == result
expected = {('y', 0): (getitem, ('x', 1), (slice(1, 25, 2),)),
('y', 1): (getitem, ('x', 2), (slice(0, 25, 2),)),
('y', 2): (getitem, ('x', 3), (slice(1, 25, 2),))}
result, chunks = slice_array('y', 'x', [(25, ) * 4], (slice(26, None, 2), ))
assert expected == result
def test_slice_array_2d():
expected = {('y', 0, 0): (getitem, ('x', 0, 0),
(slice(13, 20, 2), slice(10, 20, 1))),
('y', 0, 1): (getitem, ('x', 0, 1),
(slice(13, 20, 2), slice(None, None, None))),
('y', 0, 2): (getitem, ('x', 0, 2),
(slice(13, 20, 2), slice(None, None, None)))}
result, chunks = slice_array('y', 'x', [[20], [20, 20, 5]],
[slice(13, None, 2), slice(10, None, 1)])
assert expected == result
expected = {('y', 0): (getitem, ('x', 0, 0),
(5, slice(10, 20, 1))),
('y', 1): (getitem, ('x', 0, 1),
(5, slice(None, None, None))),
('y', 2): (getitem, ('x', 0, 2),
(5, slice(None, None, None)))}
result, chunks = slice_array('y', 'x', ([20], [20, 20, 5]),
[5, slice(10, None, 1)])
assert expected == result
def test_slice_optimizations():
expected = {('foo', 0): ('bar', 0)}
result, chunks = slice_array('foo', 'bar', [[100]], (slice(None, None, None),))
assert expected == result
expected = {('foo', 0): ('bar', 0),
('foo', 1): ('bar', 1),
('foo', 2): ('bar', 2)}
result, chunks = slice_array('foo', 'bar', [(100, 1000, 10000)],
(slice(None, None, None),
slice(None, None, None),
slice(None, None, None)))
assert expected == result
def test_slicing_with_singleton_indices():
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]), (slice(0, 5), 8))
expected = {('y', 0): (getitem, ('x', 0, 1), (slice(None, None, None), 3))}
assert expected == result
def test_slicing_with_newaxis():
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(slice(0, 3), None, slice(None, None, None)))
expected = {
('y', 0, 0, 0): (getitem, ('x', 0, 0),
(slice(0, 3, 1), None, slice(None, None, None))),
('y', 0, 0, 1): (getitem, ('x', 0, 1),
(slice(0, 3, 1), None, slice(None, None, None)))}
assert expected == result
assert chunks == ((3,), (1,), (5, 5))
def test_take():
chunks, dsk = take('y', 'x', [(20, 20, 20, 20)], [5, 1, 47, 3], axis=0)
expected = {('y', 0): (getitem, ('x', 0), (np.array([5, 1]),)),
('y', 1): (getitem, ('x', 2), (np.array([7]),)),
('y', 2): (getitem, ('x', 0), (np.array([3]),))}
np.testing.assert_equal(sorted(dsk.items()), sorted(expected.items()))
assert chunks == ((2, 1, 1),)
chunks, dsk = take('y', 'x', [(20, 20, 20, 20), (20, 20)], [
5, 1, 47, 3], axis=0)
expected = {('y', 0, 0): (getitem, ('x', 0, 0), (np.array([5, 1]), slice(None, None, None))),
('y', 0, 1): (getitem, ('x', 0, 1), (np.array([5, 1]), slice(None, None, None))),
('y', 1, 0): (getitem, ('x', 2, 0), (np.array([7]), slice(None, None, None))),
('y', 1, 1): (getitem, ('x', 2, 1), (np.array([7]), slice(None, None, None))),
('y', 2, 0): (getitem, ('x', 0, 0), (np.array([3]), slice(None, None, None))),
('y', 2, 1): (getitem, ('x', 0, 1), (np.array([3]), slice(None, None, None)))}
np.testing.assert_equal(sorted(dsk.items()), sorted(expected.items()))
assert chunks == ((2, 1, 1), (20, 20))
def test_take_sorted():
chunks, dsk = take('y', 'x', [(20, 20, 20, 20)], [1, 3, 5, 47], axis=0)
expected = {('y', 0): (getitem, ('x', 0), ([1, 3, 5],)),
('y', 1): (getitem, ('x', 2), ([7],))}
np.testing.assert_equal(dsk, expected)
assert chunks == ((3, 1),)
chunks, dsk = take('y', 'x', [(20, 20, 20, 20), (20, 20)], [1, 3, 5, 37], axis=1)
expected = merge(dict((('y', i, 0), (getitem, ('x', i, 0),
(slice(None, None, None), [1, 3, 5])))
for i in range(4)),
dict((('y', i, 1), (getitem, ('x', i, 1),
(slice(None, None, None), [17])))
for i in range(4)))
np.testing.assert_equal(dsk, expected)
assert chunks == ((20, 20, 20, 20), (3, 1))
def test_slicing_chunks():
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(1, np.array([2, 0, 3])))
assert chunks == ((3,), )
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(slice(0, 7), np.array([2, 0, 3])))
assert chunks == ((5, 2), (3, ))
result, chunks = slice_array('y', 'x', ([5, 5], [5, 5]),
(slice(0, 7), 1))
assert chunks == ((5, 2), )
def test_slicing_with_numpy_arrays():
a, bd1 = slice_array('y', 'x', ((3, 3, 3, 1), (3, 3, 3, 1)),
(np.array([1, 2, 9]), slice(None, None, None)))
b, bd2 = slice_array('y', 'x', ((3, 3, 3, 1), (3, 3, 3, 1)),
(np.array([1, 2, 9]), slice(None, None, None)))
assert bd1 == bd2
np.testing.assert_equal(a, b)
i = [False, True, True, False, False,
False, False, False, False, True]
index = (i, slice(None, None, None))
index = normalize_index(index, (10, 10))
c, bd3 = slice_array('y', 'x', ((3, 3, 3, 1), (3, 3, 3, 1)), index)
assert bd1 == bd3
np.testing.assert_equal(a, c)
def test_slicing_and_chunks():
o = da.ones((24, 16), chunks=((4, 8, 8, 4), (2, 6, 6, 2)))
t = o[4:-4, 2:-2]
assert t.chunks == ((8, 8), (6, 6))
def test_slicing_identities():
a = da.ones((24, 16), chunks=((4, 8, 8, 4), (2, 6, 6, 2)))
assert a is a[slice(None)]
assert a is a[:]
assert a is a[::]
assert a is a[...]
assert a is a[0:]
assert a is a[0::]
assert a is a[::1]
assert a is a[0:len(a)]
assert a is a[0::1]
assert a is a[0:len(a):1]
def test_slice_stop_0():
a = da.ones(10, chunks=(10,))[:0].compute()
b = np.ones(10)[:0]
assert_eq(a, b)
def test_slice_list_then_None():
x = da.zeros(shape=(5, 5), chunks=(3, 3))
y = x[[2, 1]][None]
assert_eq(y, np.zeros((1, 2, 5)))
class ReturnItem(object):
def __getitem__(self, key):
return key
@pytest.mark.skip(reason='really long test')
def test_slicing_exhaustively():
x = np.random.rand(6, 7, 8)
a = da.from_array(x, chunks=(3, 3, 3))
I = ReturnItem()
indexers = [0, -2, I[:], I[:5], [0, 1], [0, 1, 2], [4, 2], I[::-1], None, I[:0], []]
for i in indexers:
assert_eq(x[i], a[i]), i
for j in indexers:
assert_eq(x[i][:, j], a[i][:, j]), (i, j)
assert_eq(x[:, i][j], a[:, i][j]), (i, j)
for k in indexers:
assert_eq(x[..., i][:, j][k], a[..., i][:, j][k]), (i, j, k)
first_indexers = [I[:], I[:5], np.arange(5), [3, 1, 4, 5, 0], np.arange(6) < 6]
second_indexers = [0, -1, 3, I[:], I[:3], I[2:-1], [2, 4], [], I[:0]]
for i in first_indexers:
for j in second_indexers:
assert_eq(x[i][j], a[i][j]), (i, j)
def test_slicing_with_negative_step_flops_keys():
x = da.arange(10, chunks=5)
y = x[:1:-1]
assert (x.name, 1) in y.dask[(y.name, 0)]
assert (x.name, 0) in y.dask[(y.name, 1)]
assert_eq(y, np.arange(10)[:1:-1])
assert y.chunks == ((5, 3),)
assert y.dask[(y.name, 0)] == (getitem, (x.name, 1),
(slice(-1, -6, -1),))
assert y.dask[(y.name, 1)] == (getitem, (x.name, 0),
(slice(-1, -4, -1),))
def test_empty_slice():
x = da.ones((5, 5), chunks=(2, 2), dtype='i4')
y = x[:0]
assert_eq(y, np.ones((5, 5), dtype='i4')[:0])
def test_multiple_list_slicing():
x = np.random.rand(6, 7, 8)
a = da.from_array(x, chunks=(3, 3, 3))
assert_eq(x[:, [0, 1, 2]][[0, 1]], a[:, [0, 1, 2]][[0, 1]])
@pytest.mark.skipif(np.__version__ < '1.13.0',
reason='boolean lists are not treated as boolean indexes')
def test_boolean_list_slicing():
with pytest.raises(IndexError):
da.asarray(range(2))[[True]]
with pytest.raises(IndexError):
da.asarray(range(2))[[False, False, False]]
x = np.arange(5)
ind = [True, False, False, False, True]
assert_eq(da.asarray(x)[ind], x[ind])
ind = [True]
assert_eq(da.asarray([0])[ind], np.arange(1)[ind])
def test_boolean_numpy_array_slicing():
with pytest.raises(IndexError):
da.asarray(range(2))[np.array([True])]
with pytest.raises(IndexError):
da.asarray(range(2))[np.array([False, False, False])]
x = np.arange(5)
ind = np.array([True, False, False, False, True])
assert_eq(da.asarray(x)[ind], x[ind])
ind = np.array([True])
assert_eq(da.asarray([0])[ind], np.arange(1)[ind])
def test_empty_list():
x = np.ones((5, 5, 5), dtype='i4')
dx = da.from_array(x, chunks=2)
assert_eq(dx[[], :3, :2], x[[], :3, :2])
assert_eq(dx[:3, [], :2], x[:3, [], :2])
assert_eq(dx[:3, :2, []], x[:3, :2, []])
def test_uneven_chunks():
assert da.ones(20, chunks=5)[::2].chunks == ((3, 2, 3, 2),)
def test_new_blockdim():
assert new_blockdim(20, [5, 5, 5, 5], slice(0, None, 2)) == [3, 2, 3, 2]
def test_slicing_consistent_names():
x = np.arange(100).reshape((10, 10))
a = da.from_array(x, chunks=(5, 5))
assert same_keys(a[0], a[0])
assert same_keys(a[:, [1, 2, 3]], a[:, [1, 2, 3]])
assert same_keys(a[:, 5:2:-1], a[:, 5:2:-1])
assert same_keys(a[0, ...], a[0, ...])
assert same_keys(a[...], a[...])
assert same_keys(a[[1, 3, 5]], a[[1, 3, 5]])
assert same_keys(a[-11:11], a[:])
assert same_keys(a[-11:-9], a[:1])
assert same_keys(a[-1], a[9])
assert same_keys(a[0::-1], a[0:-11:-1])
def test_slicing_consistent_names_after_normalization():
x = da.zeros(10, chunks=(5,))
assert same_keys(x[0:], x[:10])
assert same_keys(x[0:], x[0:10])
assert same_keys(x[0:], x[0:10:1])
assert same_keys(x[:], x[0:10:1])
def test_sanitize_index_element():
with pytest.raises(TypeError):
_sanitize_index_element('Hello!')
def test_sanitize_index():
pd = pytest.importorskip('pandas')
with pytest.raises(TypeError):
sanitize_index('Hello!')
np.testing.assert_equal(sanitize_index(pd.Series([1, 2, 3])), [1, 2, 3])
np.testing.assert_equal(sanitize_index((1, 2, 3)), [1, 2, 3])
def test_uneven_blockdims():
blockdims = ((31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30), (100,))
index = (slice(240, 270), slice(None))
dsk_out, bd_out = slice_array('in', 'out', blockdims, index)
sol = {('in', 0, 0): (getitem, ('out', 7, 0), (slice(28, 31, 1), slice(None))),
('in', 1, 0): (getitem, ('out', 8, 0), (slice(0, 27, 1), slice(None)))}
assert dsk_out == sol
assert bd_out == ((3, 27), (100,))
blockdims = ((31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30),) * 2
index = (slice(240, 270), slice(180, 230))
dsk_out, bd_out = slice_array('in', 'out', blockdims, index)
sol = {('in', 0, 0): (getitem, ('out', 7, 5), (slice(28, 31, 1), slice(29, 30, 1))),
('in', 0, 1): (getitem, ('out', 7, 6), (slice(28, 31, 1), slice(None))),
('in', 0, 2): (getitem, ('out', 7, 7), (slice(28, 31, 1), slice(0, 18, 1))),
('in', 1, 0): (getitem, ('out', 8, 5), (slice(0, 27, 1), slice(29, 30, 1))),
('in', 1, 1): (getitem, ('out', 8, 6), (slice(0, 27, 1), slice(None))),
('in', 1, 2): (getitem, ('out', 8, 7), (slice(0, 27, 1), slice(0, 18, 1)))}
assert dsk_out == sol
assert bd_out == ((3, 27), (1, 31, 18))
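# As a quick illustration of what slice_array produces (not a test): a hedged
# sketch using the same slice_array/getitem names already imported by this module.
# The exact task layout described below is indicative, not asserted.
def _example_slice_array_usage():
    # Slice elements 5..14 out of a 1-d array stored as two chunks of 10.
    dsk, bd = slice_array('y', 'x', ((10, 10),), (slice(5, 15),))
    # Roughly: ('y', 0) takes slice(5, 10) of ('x', 0) and ('y', 1) takes
    # slice(0, 5) of ('x', 1), giving output block dims ((5, 5),).
    return dsk, bd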
def test_oob_check():
x = da.ones(5, chunks=(2,))
with pytest.raises(IndexError):
x[6]
with pytest.raises(IndexError):
x[[6]]
with pytest.raises(IndexError):
x[-10]
with pytest.raises(IndexError):
x[[-10]]
with pytest.raises(IndexError):
x[0, 0]
@pytest.mark.parametrize('idx_chunks', [None, 3, 2, 1])
@pytest.mark.parametrize('x_chunks', [None, (3, 5), (2, 3), (1, 2), (1, 1)])
def test_index_with_int_dask_array(x_chunks, idx_chunks):
x = np.array([[10, 20, 30, 40, 50],
[60, 70, 80, 90, 100],
[110, 120, 130, 140, 150]])
idx = np.array([3, 0, 1])
expect = np.array([[40, 10, 20],
[90, 60, 70],
[140, 110, 120]])
if x_chunks is not None:
x = da.from_array(x, chunks=x_chunks)
if idx_chunks is not None:
idx = da.from_array(idx, chunks=idx_chunks)
assert_eq(x[:, idx], expect)
assert_eq(x.T[idx, :], expect.T)
@pytest.mark.parametrize('chunks', [1, 2, 3])
def test_index_with_int_dask_array_0d(chunks):
x = da.from_array([[10, 20, 30],
[40, 50, 60]], chunks=chunks)
idx0 = da.from_array(1, chunks=1)
assert_eq(x[idx0, :], x[1, :])
assert_eq(x[:, idx0], x[:, 1])
@pytest.mark.parametrize('chunks', [1, 2, 3, 4, 5])
def test_index_with_int_dask_array_nanchunks(chunks):
a = da.arange(-2, 3, chunks=chunks)
assert_eq(a[a.nonzero()], np.array([-2, -1, 1, 2]))
a = da.zeros(5, chunks=chunks)
assert_eq(a[a.nonzero()], np.array([]))
@pytest.mark.parametrize('chunks', [2, 4])
def test_index_with_int_dask_array_negindex(chunks):
a = da.arange(4, chunks=chunks)
idx = da.from_array([-1, -4], chunks=1)
assert_eq(a[idx], np.array([3, 0]))
@pytest.mark.parametrize('chunks', [2, 4])
def test_index_with_int_dask_array_indexerror(chunks):
a = da.arange(4, chunks=chunks)
idx = da.from_array([4], chunks=1)
with pytest.raises(IndexError):
a[idx].compute()
idx = da.from_array([-5], chunks=1)
with pytest.raises(IndexError):
a[idx].compute()
@pytest.mark.parametrize('dtype', ['int8', 'int16', 'int32', 'int64',
'uint8', 'uint16', 'uint32', 'uint64'])
def test_index_with_int_dask_array_dtypes(dtype):
a = da.from_array([10, 20, 30, 40], chunks=-1)
idx = da.from_array(np.array([1, 2]).astype(dtype), chunks=1)
assert_eq(a[idx], np.array([20, 30]))
def test_index_with_int_dask_array_nocompute():
def crash():
raise NotImplementedError()
x = da.arange(5, chunks=-1)
idx = da.Array({('x', 0): (crash,)}, name='x',
chunks=((2,), ), dtype=np.int64)
result = x[idx]
with pytest.raises(NotImplementedError):
result.compute()
def test_index_with_bool_dask_array():
x = np.arange(36).reshape((6, 6))
d = da.from_array(x, chunks=(3, 3))
ind = np.asarray([True, True, False, True, False, False], dtype=bool)
ind = da.from_array(ind, chunks=2)
for index in [ind, (slice(1, 9, 2), ind), (ind, slice(2, 8, 1))]:
x_index = dask.compute(index)[0]
assert_eq(x[x_index], d[index])
def test_index_with_bool_dask_array_2():
x = np.random.random((10, 10, 10))
ind = np.random.random(10) > 0.5
d = da.from_array(x, chunks=(3, 4, 5))
dind = da.from_array(ind, chunks=4)
index = [slice(1, 9, 1), slice(None)]
for i in range(x.ndim):
index2 = index[:]
index2.insert(i, dind)
index3 = index[:]
index3.insert(i, ind)
assert_eq(x[tuple(index3)], d[tuple(index2)])
@pytest.mark.xfail
def test_cull():
x = da.ones(1000, chunks=(10,))
for slc in [1, slice(0, 30), slice(0, None, 100)]:
y = x[slc]
assert len(y.dask) < len(x.dask)
@pytest.mark.parametrize('shape', [(2,), (2, 3), (2, 3, 5)])
@pytest.mark.parametrize('index', [(Ellipsis,),
(None, Ellipsis),
(Ellipsis, None),
(None, Ellipsis, None)])
def test_slicing_with_Nones(shape, index):
x = np.random.random(shape)
d = da.from_array(x, chunks=shape)
assert_eq(x[index], d[index])
indexers = [Ellipsis, slice(2), 0, 1, -2, -1, slice(-2, None), None]
def test_slicing_integer_no_warnings():
X = da.random.random((100, 2), (2, 2))
idx = np.array([0, 0, 1, 1])
with pytest.warns(None) as rec:
X[idx].compute()
assert len(rec) == 0
@pytest.mark.slow
def test_slicing_none_int_ellipsis():
shape = (2, 3, 5, 7, 11)
x = np.arange(np.prod(shape)).reshape(shape)
y = da.core.asarray(x)
for ind in itertools.product(indexers, indexers, indexers, indexers):
if ind.count(Ellipsis) > 1:
continue
assert_eq(x[ind], y[ind])
def test_None_overlap_int():
a, b, c, d = (0, slice(None, 2, None), None, Ellipsis)
shape = (2, 3, 5, 7, 11)
x = np.arange(np.prod(shape)).reshape(shape)
y = da.core.asarray(x)
xx = x[a, b, c, d]
yy = y[a, b, c, d]
assert_eq(xx, yy)
def test_negative_n_slicing():
assert_eq(da.ones(2, chunks=2)[-2], np.ones(2)[-2])
def test_negative_list_slicing():
x = np.arange(5)
dx = da.from_array(x, chunks=2)
assert_eq(dx[[0, -5]], x[[0, -5]])
assert_eq(dx[[4, -1]], x[[4, -1]])
def test_permit_oob_slices():
x = np.arange(5)
dx = da.from_array(x, chunks=2)
assert_eq(x[-102:], dx[-102:])
assert_eq(x[102:], dx[102:])
assert_eq(x[:102], dx[:102])
assert_eq(x[:-102], dx[:-102])
def test_normalize_index():
assert normalize_index((Ellipsis, None), (10,)) == (slice(None), None)
assert normalize_index(5, (np.nan,)) == (5,)
assert normalize_index(-5, (np.nan,)) == (-5,)
(result,) = normalize_index([-5, -2, 1], (np.nan,))
assert result.tolist() == [-5, -2, 1]
assert normalize_index(slice(-5, -2), (np.nan,)) == (slice(-5, -2),)
def test_take_semi_sorted():
x = da.ones(10, chunks=(5,))
index = np.arange(15) % 10
y = x[index]
assert y.chunks == ((5, 5, 5),)
@pytest.mark.parametrize('chunks,index,expected', [
(
(5, 5, 5),
np.arange(5, 15) % 10,
[(1, np.arange(5)),
(0, np.arange(5))]
),
(
(5, 5, 5, 5),
np.arange(20) // 2,
[(0, np.arange(10) // 2),
(1, np.arange(10) // 2)]
),
(
(10, 10),
[15, 2, 3, 15],
[(1, [5]),
(0, [2, 3]),
(1, [5])]
),
])
def test_slicing_plan(chunks, index, expected):
plan = slicing_plan(chunks, index)
assert len(plan) == len(expected)
for (i, x), (j, y) in zip(plan, expected):
assert i == j
assert len(x) == len(y)
assert (x == y).all()
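# slicing_plan is also useful on its own: it splits a fancy integer index into
# consecutive runs that fall inside the same chunk, preserving order. A hedged
# sketch (not a test), reusing the slicing_plan import exercised above:
def _example_slicing_plan_usage():
    # Two chunks of 5; request elements 7, 1 and 8.
    plan = slicing_plan((5, 5), np.asarray([7, 1, 8]))
    # Expected, roughly: [(1, [2]), (0, [1]), (1, [3])] -- chunk number paired
    # with the positions of the requested elements within that chunk.
    return plan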
def test_pathological_unsorted_slicing():
x = da.ones(100, chunks=10)
index = np.arange(100).reshape(10, 10).ravel(order='F')
with pytest.warns(da.PerformanceWarning) as info:
x[index]
assert '10' in str(info.list[0])
assert 'out-of-order' in str(info.list[0])
@pytest.mark.parametrize('params', [(2, 2, 1), (5, 3, 2)])
def test_setitem_with_different_chunks_preserves_shape(params):
array_size, chunk_size1, chunk_size2 = params
x = da.zeros(array_size, chunks=chunk_size1)
mask = da.zeros(array_size, chunks=chunk_size2)
x[mask] = 1
result = x.compute()
assert x.shape == result.shape
def test_gh3579():
assert_eq(np.arange(10)[0::-1], da.arange(10, chunks=3)[0::-1])
assert_eq(np.arange(10)[::-1], da.arange(10, chunks=3)[::-1])
@pytest.mark.parametrize('lock', [True, False])
@pytest.mark.parametrize('asarray', [True, False])
@pytest.mark.parametrize('fancy', [True, False])
def test_gh4043(lock, asarray, fancy):
a1 = da.from_array(np.zeros(3,), chunks=1, asarray=asarray, lock=lock, fancy=fancy)
a2 = da.from_array(np.ones(3,), chunks=1, asarray=asarray, lock=lock, fancy=fancy)
al = da.stack([a1, a2])
assert_eq(al, al)
| true
| true
|
f701e9533dc1976894d91db2b7c579ad252ba370
| 4,438
|
py
|
Python
|
zag/examples/distance_calculator.py
|
ToolsForHumans/taskflow
|
ce13d65e70213cb50ef19a714581997793b4e5fb
|
[
"Apache-2.0"
] | 1
|
2018-11-06T16:17:21.000Z
|
2018-11-06T16:17:21.000Z
|
zag/examples/distance_calculator.py
|
ToolsForHumans/zag
|
ce13d65e70213cb50ef19a714581997793b4e5fb
|
[
"Apache-2.0"
] | 45
|
2018-11-07T21:35:55.000Z
|
2021-01-08T06:26:50.000Z
|
zag/examples/distance_calculator.py
|
ToolsForHumans/taskflow
|
ce13d65e70213cb50ef19a714581997793b4e5fb
|
[
"Apache-2.0"
] | 3
|
2018-11-05T16:07:45.000Z
|
2019-09-01T07:52:16.000Z
|
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import math
import os
import sys
top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir))
sys.path.insert(0, top_dir)
from zag import engines
from zag.patterns import linear_flow
from zag import task
# INTRO: This shows how to use a task/atom's ability to take requirements from
# its execute function's default parameters, and how to provide those
# parameters via different methods when needed, in this case to calculate
# the distance between two points in 2D space.
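# In short, the 'a' and 'b' points consumed by DistanceTask.execute() below can
# arrive in three ways (a quick sketch of what the __main__ section demonstrates):
#
#   DistanceTask()                                 # rely on the execute() defaults (origin)
#   engines.run(flow, store={'a': Point(1, 1)})    # provide values through storage at run time
#   DistanceTask(inject={'a': Point(10, 10)})      # fix a value at task creation time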
# A 2D point.
Point = collections.namedtuple("Point", "x,y")
def is_near(val, expected, tolerance=0.001):
# Floats don't really provide equality...
if val > (expected + tolerance):
return False
if val < (expected - tolerance):
return False
return True
class DistanceTask(task.Task):
# See: http://en.wikipedia.org/wiki/Distance#Distance_in_Euclidean_space
default_provides = 'distance'
def execute(self, a=Point(0, 0), b=Point(0, 0)):
return math.sqrt(math.pow(b.x - a.x, 2) + math.pow(b.y - a.y, 2))
if __name__ == '__main__':
    # For these we rely on the execute() method's points being at the origin
    # by default (and we override them with 'store' values when we want) at
    # execution time (which then influences what is calculated).
any_distance = linear_flow.Flow("origin").add(DistanceTask())
results = engines.run(any_distance)
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
0.0,
is_near(results['distance'], 0.0)))
results = engines.run(any_distance, store={'a': Point(1, 1)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
1.4142,
is_near(results['distance'],
1.4142)))
results = engines.run(any_distance, store={'a': Point(10, 10)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
14.14199,
is_near(results['distance'],
14.14199)))
results = engines.run(any_distance,
store={'a': Point(5, 5), 'b': Point(10, 10)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
7.07106,
is_near(results['distance'],
7.07106)))
    # For this we use the ability to override the optional arguments at task
    # creation time so that we don't need to keep sending them in via the
    # 'store' argument like above (and we fix the new starting point 'a' at
    # (10, 10) instead of (0, 0)).
ten_distance = linear_flow.Flow("ten")
ten_distance.add(DistanceTask(inject={'a': Point(10, 10)}))
results = engines.run(ten_distance, store={'b': Point(10, 10)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
0.0,
is_near(results['distance'], 0.0)))
results = engines.run(ten_distance)
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
14.14199,
is_near(results['distance'],
14.14199)))
| 40.345455
| 78
| 0.557008
|
import collections
import math
import os
import sys
top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir))
sys.path.insert(0, top_dir)
from zag import engines
from zag.patterns import linear_flow
from zag import task
Point = collections.namedtuple("Point", "x,y")
def is_near(val, expected, tolerance=0.001):
if val > (expected + tolerance):
return False
if val < (expected - tolerance):
return False
return True
class DistanceTask(task.Task):
# See: http://en.wikipedia.org/wiki/Distance#Distance_in_Euclidean_space
default_provides = 'distance'
def execute(self, a=Point(0, 0), b=Point(0, 0)):
return math.sqrt(math.pow(b.x - a.x, 2) + math.pow(b.y - a.y, 2))
if __name__ == '__main__':
    # For these we rely on the execute() method's points being at the origin
    # by default (and we override them with 'store' values when we want) at
    # execution time (which then influences what is calculated).
any_distance = linear_flow.Flow("origin").add(DistanceTask())
results = engines.run(any_distance)
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
0.0,
is_near(results['distance'], 0.0)))
results = engines.run(any_distance, store={'a': Point(1, 1)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
1.4142,
is_near(results['distance'],
1.4142)))
results = engines.run(any_distance, store={'a': Point(10, 10)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
14.14199,
is_near(results['distance'],
14.14199)))
results = engines.run(any_distance,
store={'a': Point(5, 5), 'b': Point(10, 10)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
7.07106,
is_near(results['distance'],
7.07106)))
    # For this we use the ability to override the optional arguments at task
    # creation time so that we don't need to keep sending them in via 'store'.
ten_distance = linear_flow.Flow("ten")
ten_distance.add(DistanceTask(inject={'a': Point(10, 10)}))
results = engines.run(ten_distance, store={'b': Point(10, 10)})
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
0.0,
is_near(results['distance'], 0.0)))
results = engines.run(ten_distance)
print(results)
print("%s is near-enough to %s: %s" % (results['distance'],
14.14199,
is_near(results['distance'],
14.14199)))
| true
| true
|
f701eae7d9153c4940a9de47ed79a004e74e6560
| 33,232
|
py
|
Python
|
correlcalc/antpcf.py
|
rohinkumar/CorrelCalc
|
d7887448af8d3dc3170c00c0aae6ee2561b8a3d5
|
[
"MIT"
] | null | null | null |
correlcalc/antpcf.py
|
rohinkumar/CorrelCalc
|
d7887448af8d3dc3170c00c0aae6ee2561b8a3d5
|
[
"MIT"
] | null | null | null |
correlcalc/antpcf.py
|
rohinkumar/CorrelCalc
|
d7887448af8d3dc3170c00c0aae6ee2561b8a3d5
|
[
"MIT"
] | null | null | null |
__author__ = 'Rohin Kumar Y'
# Calculate anisotropic 2pCF
from tpcf import *
import scipy as sp
# atpcf(datfile, binspar, binsper, **kwargs) returns the anisotropic correlation (2D numpy array) and its Poisson error
# poserr(xi,DD) returns (1.0+xi)/np.sqrt(DD)
def atpcf(datfile, binspar, binsper, **kwargs):
"""Main function to calculate anisotropic 2pCF. Takes multiple arguments such as randfile, maskfile, calculation estimator etc. for different geometry, cosmology models
Usage of the package is given in jupyter notebook "Using correlcalc example-anisotropic.nb" and in `main.py`
All the methods in correlcalc can be imported using the following command
`from correlcalc import *`
    We first need to define bins (in $c/H_0$ units) to calculate the 2pCF. For example, to calculate the correlation between 0 and 180 Mpc in steps of 6 Mpc, we say
`bins=np.arange(0.002,0.06,0.002)`
To calculate anisotropic 2pCF using input data file (both ascii and fits files are supported), use `atpcf` method as follows
`correl3d, poserr=atpcf('/path/to/datfile.dat',binspar, binsper, randfile='/path/to/randomfile.dat', vtype='sigpi', weights=True)`
    If a random file is not available or not provided, we can generate a random catalog by providing the mangle mask file in `.ply` format and specifying the size of the catalog in multiples of the data catalog size (default 2x). To do this
`correl3d, poserr=atpcf('/path/to/datfile.dat', binspar, binsper, maskfile='/path/to/maskfile.ply', vtype='smu', weights='eq', randfact=3)`
This returns `correl3d` and `poserr` `numpy` arrays corresponding to anisotropic Two-point correlation and Poisson error
### Keyword Arguments
The following keyword arguments can be included as needed
#### Data file (Mandatory)
Data file of galaxy/quasar redshift survey must be passed as the first argument to both `tpcf` and `atpcf` methods.
**Supported filetypes**: ascii text files with columns, csv files or fits files are all supported. Most files provided by SDSS Value added catalogs should be directly usable.
**To contain**: Any type of file provided must at least have columns named **Z** (redshift), **RA** (Right Ascension), **DEC** (Declination). These column names can be in any case.
    If one intends to use the `weights=True` option (a must for accurate results), the data file must also contain radial weights with column title **radial_weight** or **WEIGHT_SYSTOT**
#### binspar (Mandatory)
A numpy array with ascending values in $c/H_0$ units (for distances) or $\delta z$ as per choice of `'vtype'` must be provided as the second argument to `atpcf` method.
#### binsper (Mandatory)
A numpy array with ascending values in $c/H_0$ units (for distances), $z\delta \theta$ or $\mu = \cos \alpha$ must be provided as the third argument to `atpcf` method.
#### `randfile=` Path to random file (semi-Optional)
If not provided, `maskfile=` argument must be given `.ply` file.
**Supported filetypes**: ascii text files with columns, csv files or fits files are all supported. Most files provided by SDSS Value added catalogs should be directly usable.
**To contain**: Any type of file provided must at least have columns named **Z** (redshift), **RA** (Right Ascension), **DEC** (Declination). These column names can be in any case.
    If one intends to use the `weights=True` option, the data file must also contain radial weights with column title **radial_weight** or **WEIGHT_SYSTOT**
**Beta Testing:** Beta support for other column titles for weights is added.
Also added is calculation of weights from n(z) during random catalog generation.
#### `mask=` Path to mangle polygon file (semi-Optional)
If not provided, `randfile=` argument must be provided.
**Supported filetypes**: `.ply` file containing Mangle polygons describing survey geometry in the standard format. Most files provided by SDSS Value added catalogs should be directly usable.
#### `randfact=` (Optional)
Size of the random catalog in integer multiples of size of data catalog if random catalog file is not provided. Default value is `2`
#### `weights=` (Optional)
    It is highly recommended to use the weights argument by providing `weights=True` or `weights='eq'` to obtain accurate two-point correlation calculations. This picks up radial weights in the prescribed format (with column title **radial_weight** or **WEIGHT_SYSTOT**) from the data and random files provided.
    `weights='eq'` sets equal weights and hence adds *+1* for each pair - this implementation is parallelized and is faster than the `weights=False` implementation on most machines
    If `weights=False`, by default *+1* will be added for each galaxy/random pair found within the bin instead of the total weight. For more details on weights and references, see http://www.sdss3.org/dr9/tutorials/lss_galaxy.php
#### Metrics in parallel and perpendicular directions
Calculates anisotropic 2pCF for the following cases.
#### `vtype=`
Valuation method
**Available options**:
`'smu'` (default)- Calculates 2pCF in s - mu
`'sigpi'` - Calculates 2pCF using parallel and perpendicular distances
    `'ap'` - Calculates 2pCF for small $\Delta \theta$ and $z \Delta\theta$. Results can be converted to any cosmology model of choice (ref: https://arxiv.org/pdf/1312.0003.pdf)
**Customization**
    Formulae for calculating distances in the parallel and perpendicular directions are taken from https://arxiv.org/pdf/1312.0003.pdf. Using the formulae in this paper, $\Delta z$ and $z \Delta \theta$ are computed in the `metrics.pyx` file for the cases mentioned above. `Cython` is chosen for the implementation to obtain faster results when building `BallTree`s, calculating `cdist` and reducing `query` time.
One can customize metric definitions as per one's need by editing the `metrics.pyx` file. After changing this compile it using `python metricsetup.py build_ext --inplace`
**To add:**
    Direct calculation of distances along the LOS and perpendicular to the LOS is to be added, to support standard-model cosmology and other popular models. For now, one needs to manually convert the angular bins to physical distances to get approximate results
#### `cosmology='lcdm'` (Optional)
Used to calculate co-moving distances from redshifts.
**Available options**:
`'lcdm'` (default)- for Lambda CDM model
`'lc'` - for $R_h=ct$ and linear coasting models
**To add**: `wcdm` and other popular cosmology models soon
#### `geometry='flat'` (Optional)
Used to calculate co-moving distances between a pair of objects
**Available options**:
`'flat'` (default)- for Lambda CDM model
`'open'`
`'close'`
#### `estimator=` (Optional)
**Available options**:
    `'dp'` - Davis-Peebles estimator (default, fastest)
    `'ls'` - Landy-Szalay estimator
`'ph'` - Peebles- Hauser estimator
`'hew'` - Hewitt estimator
`'h'` - Hamilton estimator
For more details on estimator formulae see https://arxiv.org/pdf/1211.6211.pdf
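    For quick reference (schematically, with DD, DR, RD, RR the weighted pair counts computed below and $f = N_r/N_d$), the estimators implemented in this module are
    $\xi_{dp} = f\,\frac{2\,DD}{DR+RD} - 1$, $\xi_{ph} = f^2\,\frac{DD}{RR} - 1$, $\xi_{ls} = f^2\,\frac{DD}{RR} - f\,\frac{DR+RD}{RR} + 1$, $\xi_{hew} = f^2\,\frac{DD}{RR} - 0.5\,f\,\frac{DR+RD}{RR}$ and $\xi_{h} = \frac{4\,DD\,RR}{(DR+RD)^2} - 1$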
"""
# Default function arguments
global binsparv
global binsperv
global maxrad
global dat
global datR
global Nd
global Nr
DD = DR = RD = RR = np.zeros((len(binspar)-1, len(binsper)-1))
weightsflag = True
useones = True
cosmology = 'lcdm'
sflag = True
geometry='flat'
filtermetric = flatdistsq
permetric = musqlcdmf
parmetric = flatdistsq
vtype = 'smu'
randcatfact = 2
estimator = 'dp'
binsparv = binspar**2
binsperv = binsper**2
randfile = None
maskfile = None
# Options for correl calculation estimators and cosmology models
mlist = ['dp', 'ls', 'ph', 'hew', 'h']
clist = ['lcdm', 'lc'] # to add wcdm
glist = ['flat', 'open', 'close']
parper = ['ap', 'sigpi', 'smu']
if kwargs is not None:
for key, value in kwargs.items():
# print (key, value)
if key.lower() == 'randfile':
randfile = value
elif key.lower() == 'randfact':
randcatfact = value
elif key.lower() == 'geometry':
if value.lower() in glist:
geometry = value.lower()
# geometry = 'flat'
# filtermetric = flatdistsq
# elif value.lower() == 'open':
# geometry = 'open'
# filtermetric = opendistsq
# elif value.lower() == 'close':
# geometry = 'close'
# filtermetric = closedistsq
elif key.lower() == 'cosmology':
if value.lower() in clist:
cosmology = value.lower()
else:
print("Incorrect Cosmology provided! Using 'lcdm' as default")
elif key.lower() == 'vtype':
if value.lower() in parper:
vtype = value.lower()
elif key.lower() == 'estimator':
if value.lower() in mlist:
estimator = value.lower()
else:
print("Incorrect estimator provided! Using 'dp' as default")
elif key.lower() == 'mask':
maskfile = value
elif key.lower() == 'weights':
if value is True:
weightsflag = True
useones = False
elif isinstance(value, str):
if value.lower() == 'eq':
weightsflag = True
useones = True
else:
weightsflag = False
else:
print ("key argument `%s` not valid" % key)
else:
print ("Refer documentation to enter valid keyword arguments")
if vtype == 'ap':
parmetric = APdz
binsparv = binspar
binsperv = binsper
sflag = False
filtermetric = APzdth
permetric = APzdth
maxrad = max(np.sqrt(binsparv**2 + binsperv**2))
elif vtype == 'smu':
# binsparv = binspar**2
# binsperv = binsper**2
maxrad = max(binsparv)
if geometry == 'open':
parmetric = opendistsq
filtermetric = opendistsq
if cosmology == 'lc':
permetric = musqlco
else:
permetric = musqlcdmo
elif geometry == 'close':
parmetric = closedistsq
filtermetric = closedistsq
if cosmology == 'lc':
permetric = musqlcc
else:
permetric = musqlcdmc
else:
parmetric = flatdistsq
filtermetric = flatdistsq
if cosmology == 'lc':
permetric = musqlcf
else:
permetric = musqlcdmf
elif vtype == 'sigpi':
# binsparv = binspar**2
# binsperv = binsper**2
maxrad = max(binsparv+binsperv)
if geometry == 'open':
filtermetric = opendistsq
if cosmology == 'lc':
parmetric = sparsqlc
permetric = spersqlco
else:
parmetric = sparsqlcdm
permetric = spersqlcdmo
elif geometry == 'close':
filtermetric = closedistsq
if cosmology == 'lc':
parmetric = sparsqlc
permetric = spersqlcc
else:
parmetric = sparsqlcdm
permetric = spersqlcdmc
else:
filtermetric = flatdistsq
if cosmology == 'lc':
parmetric = sparsqlc
permetric = spersqlcf
else:
parmetric = sparsqlcdm
permetric = spersqlcdmf
else:
print ("No valid valuation method provided. Using 'smu' as default")
print("Calculating Anisotropic Correlation function with the following parameters")
print("data file=")
print(datfile)
print("random file=")
print(randfile)
print("Random catalog size factor(if random file is None)=")
print(randcatfact)
print("mask/window file=")
print(maskfile)
print ("Cosmology=")
print(cosmology)
print ("Geometry=")
print (geometry)
print("Weights=")
print(weightsflag)
print ("Using ones as weights?=")
print (useones)
print("perpendicular metric=")
print(permetric)
print("parallel metric=")
print(parmetric)
print("Correl estimator=")
print(estimator)
print("Valuation type=")
print(vtype)
print ("binsparv=")
print (binsparv)
print ("binsperv=")
print (binsperv)
print("---------------------------------------------------------------------------")
if sflag is False:
# Prepare dat from data file
dat, weights = datprepz(datfile, 'data', cosmology)
Nd = len(dat)
# Prepare datR from random file or generate a random catalog
if randfile is None:
randcatsize = randcatfact*Nd
if maskfile is None:
print ("Mask file compulsory. Please provide mask='maskfilepath.ply'")
else:
datR, rweights = randcatprepz(datfile, randcatsize, maskfile, cosmology)
else:
datR, rweights = datprepz(randfile, 'random', cosmology)
else:
# Prepare dat from data file
dat, weights = datprep(datfile, 'data', cosmology)
Nd = len(dat)
# Prepare datR from random file or generate a random catalog
if randfile is None:
randcatsize = randcatfact*Nd
if maskfile is None:
print ("Mask file compulsory. Please provide mask='maskfilepath.ply'")
else:
datR, rweights = randcatprep(datfile, randcatsize, maskfile, cosmology)
else:
datR, rweights = datprep(randfile, 'random', cosmology)
Nr = len(datR)
fact = (1.0*Nr)/Nd
global adbt
global arbt
print ("Creating BallTree for data points using ...")
print (filtermetric)
adbt = BallTree(dat, metric='pyfunc', func=filtermetric)
print ("Creating BallTree for random points using ...")
print (filtermetric)
arbt = BallTree(datR, metric='pyfunc', func=filtermetric)
rng = np.array([[min(binsparv), max(binsparv)], [min(binsperv), max(binsperv)]])
print ("Calculating anisotropic 2pCF...")
# Reference: arXiv: 1211.6211
if estimator == 'dp':
if weightsflag is False: # or len(weights) != Nd
# print (weightsflag)
# print(len(weights))
# print(len(datR))
DD = aDDcalc(dat, binsparv, binsperv, parmetric, permetric, rng)
DR = aDRcalc(dat, datR, binsparv, binsperv, parmetric, permetric, rng)
RD = aRDcalc(dat, datR, binsparv, binsperv, parmetric, permetric, rng)
else:
# if len(rweights)!=len(datR):
# DD = aDDwcalc(dat, binsq, parmetric, permetric, rng, weights)
if useones is True or len(weights) != Nd:
weights = np.ones(Nd)
rweights = np.ones(Nr)
print ("Calculating anisotropic DD with weights (parallelized)...\n DD=")
DD = amulti_autocp(dat, binsparv, binsperv, parmetric, permetric, rng, weights, Nd, pcpus)
# DR = aRDwcalc(dat, datR, binsq, parmetric, permetric, rng, weights)
print ("Calculating anisotropic DR with weights (parallelized)...\n DR=")
DR = amulti_crosscp(dat, datR, binsparv, binsperv, parmetric, permetric, rng, weights, Nr, pcpus)
print ("Calculating anisotropic RD with weights (parallelized)...\n RD=")
RD = amulti_crosscpr(dat, datR, binsparv, binsperv, parmetric, permetric, rng, rweights, Nd, pcpus)
# else:
# DD=aDDwcalc(dat,binsq,parmetric,permetric,rng,weights)
# DR=aDRwcalc(dat,datR,binsq,parmetric,permetric,rng,weights,rweights)
print ("Using Davis-Peebles estimator")
correl = fact*(DD*2.0/(DR+RD))-1.0
elif estimator == 'ph':
if weightsflag is False: # or len(weights) != Nd or len(rweights) != len(datR):
DD = aDDcalc(dat, binsparv, binsperv, parmetric, permetric, rng)
RR = aRRcalc(datR, binsparv, binsperv, parmetric, permetric, rng)
else:
if useones is True or len(weights) != Nd:
weights = np.ones(Nd)
rweights = np.ones(Nr)
print ("Calculating anisotropic DD with weights (parallelized)...\n DD=")
# DD = aDDwcalc(dat, binsq, parmetric, permetric, rng, weights)
DD = amulti_autocp(dat, binsparv, binsperv, parmetric, permetric, rng, weights, Nd, pcpus)
# if len(rweights) != Nr:
# RR = aRRcalc(datR, binsparv, binsperv, parmetric, permetric, rng)
# else:
print ("Calculating anisotropic RR with weights (parallelized)...\n RR=")
RR = amulti_autocpr(datR, binsparv, binsperv, parmetric, permetric, rng, rweights, Nr, pcpus)
print ("Using Peebles-Hauser estimator")
correl = fact**2*(DD/RR)-1.0
else:
if weightsflag is False: # or len(weights) != Nd or len(rweights) != len(datR):
DD = aDDcalc(dat, binsparv, binsperv, parmetric, permetric, rng)
RR = aRRcalc(datR, binsparv, binsperv, parmetric, permetric, rng)
DR = aDRcalc(dat, datR, binsparv, binsperv, parmetric, permetric, rng)
RD = aRDcalc(dat, datR, binsparv, binsperv, parmetric, permetric, rng)
else:
if useones is True or len(weights) != Nd:
weights = np.ones(Nd)
rweights = np.ones(Nr)
print ("Calculating anisotropic DD with weights (parallelized)...\n DD=")
# DD = aDDwcalc(dat, binsq, parmetric, permetric, rng, weights)
DD = amulti_autocp(dat, binsparv, binsperv, parmetric, permetric, rng, weights, Nd, pcpus)
# print ("Calculating anisotropic RR with weights (parallelized)...\n RR=")
# RR = aRRwcalc(datR, binsq, parmetric, permetric, rng, rweights)
# RR = amulti_autocpr(datR, binsq, parmetric, permetric, rng, rweights, Nr, pcpus)
# DR = aRDwcalc(dat, datR, binsq, parmetric, permetric, rng, weights)
print ("Calculating anisotropic DR with weights (parallelized)...\n DR=")
DR = amulti_crosscp(dat, datR, binsparv, binsperv, parmetric, permetric, rng, weights, Nr, pcpus)
print ("Calculating anisotropic RD with weights (parallelized)...\n RD=")
RD = amulti_crosscpr(dat, datR, binsparv, binsperv, parmetric, permetric, rng, rweights, Nd, pcpus)
# if len(rweights) != Nr:
# RR = aRRcalc(datR, binsparv, binsperv, parmetric, permetric, rng)
# else:
print ("Calculating anisotropic RR with weights (parallelized)...\n RR=")
RR = amulti_autocpr(datR, binsparv, binsperv, parmetric, permetric, rng, rweights, Nr, pcpus)
if estimator == 'ls':
print ("Using Landy-Szalay estimator")
correl = fact**2*(DD/RR)-fact*(DR+RD)/RR+1.0
# correl = fact**2*(DD/RR)-2.0*fact*(DR/RR)+1.0
elif estimator == 'hew':
print ("Using Hewett estimator")
correl = fact**2*(DD/RR)-fact*0.5*(DR+RD)/RR
# correl = fact**2*(DD/RR)-fact*(DR/RR)
elif estimator == 'h':
print ("Using Hamilton estimator")
correl = (4.0*DD*RR)/(DR+RD)**2 - 1.0
# correl = (DD*RR)/DR**2 - 1.0
correlerr = poserr(correl, DD)
print("Anisotropic Two-point correlation=")
np.savetxt("aDD_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", DD)
np.savetxt("aDR_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", DR)
np.savetxt("aRD_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", RD)
np.savetxt("aRR_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", RR)
np.savetxt("abinspar_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", binspar)
np.savetxt("abinsper_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", binsper)
np.savetxt("atpcf_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", correl)
np.savetxt("atpcferr_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", correlerr)
print (correl, correlerr)
return correl, correlerr
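# A minimal usage sketch for atpcf(), mirroring the calls quoted in the docstring
# above; the file paths and bin edges are placeholders, not shipped data, and the
# call is wrapped in an (uncalled) function so nothing runs on import.
def _example_atpcf_usage():
    binspar = np.arange(0.002, 0.06, 0.002)   # parallel bins in c/H_0 units
    binsper = np.arange(0.002, 0.06, 0.002)   # perpendicular bins (for vtype='smu' these would instead be mu bins in [0, 1])
    return atpcf('/path/to/datfile.dat', binspar, binsper,
                 randfile='/path/to/randomfile.dat',
                 vtype='sigpi', weights=True, estimator='ls')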
def aDDcalc(dat, binspar, binsper, parmetric, permetric, rng):
print ("Calculating anisotropic DD...\n DD=")
dd = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(dat))):
ind = adbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], dat[j[j>i]], parmetric)[0]
# print("dist0")
# print dist0
dist1 = dist.cdist([dat[i], ], dat[j[j>i]], permetric)[0]
# print("dist1")
# print dist1
# print np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper))[0]
dd += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper))[0]
# print ("rng")
# print rng
# print("binspar")
# print binspar
# print("binsper")
# print binsper
# print dd
dd[dd == 0] = 1.0
# Nd = len(dat)
# DD = dd/(Nd*(Nd-1.0))
print (dd)
return dd
def aRRcalc(datR, binspar, binsper, parmetric, permetric, rng):
print ("Calculating anisotropic RR...\n RR=")
rr = np.zeros((len(binspar)-1, len(binsper)-1))
# rrbt = BallTree(datR, metric='pyfunc', func=permetric)
for i in tqdm(range(len(datR))):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], datR[j[j>i]], parmetric)[0]
dist1 = dist.cdist([datR[i], ], datR[j[j>i]], permetric)[0]
rr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper))[0]
rr[rr == 0] = 1.0
# Nr = len(datR)
# RR = rr/(Nr*(Nr-1.0))
print (rr)
return rr
def aDRcalc(dat, datR, binspar, binsper, parmetric, permetric, rng):
print ("Calculating anisotropic DR...\n DR=")
dr = np.zeros((len(binspar)-1, len(binsper)-1))
# rrbt = BallTree(datR, metric='pyfunc', func=permetric)
for i in tqdm(range(len(dat))):
ind = arbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], datR[j[j>i]], parmetric)[0]
dist1 = dist.cdist([dat[i], ], datR[j[j>i]], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper))[0]
dr[dr == 0] = 1.0
# Nd = len(dat)
# Nr = len(datR)
# DR = dr/(Nd*Nr)
print (dr)
return dr
def aRDcalc(dat, datR, binspar, binsper, parmetric, permetric, rng):
print ("Calculating anisotropic RD...\n RD=")
rd = np.zeros((len(binspar)-1, len(binsper)-1))
# rrbt = BallTree(datR, metric='pyfunc', func=permetric)
for i in tqdm(range(len(datR))):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], dat[j[j>i]], parmetric)[0]
dist1 = dist.cdist([datR[i], ], dat[j[j>i]], permetric)[0]
rd += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper))[0]
rd[rd == 0] = 1.0
# Nd = len(dat)
# Nr = len(datR)
# DR = dr/(Nd*Nr)
print (rd)
return rd
def aDDwcalc(dat, binspar, binsper, parmetric, permetric, rng, weights):
print ("Calculating anisotropic DD with weights...\n DD=")
dd = np.zeros((len(binspar)-1, len(binsper)-1))
# ddbt = BallTree(dat, metric='pyfunc', func=permetric)
for i in tqdm(range(len(dat))):
ind = adbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], dat[j[j>i]], parmetric)[0]
dist1 = dist.cdist([dat[i], ], dat[j[j>i]], permetric)[0]
dd += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=weights[j[j>i]])[0]
dd[dd == 0] = 1.0
# Nd = len(dat)
# DD = dd/(Nd*(Nd-1.0)) # factor of 2 cancels with 1/2 that needs to be done to remove double counting of pairs
# print (dd)
return dd
def aRRwcalc(datR, binspar, binsper, parmetric, permetric, rng, rweights):
print ("Calculating anisotropic RR with weights...\n RR=")
rr = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(datR))):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], datR[j[j>i]], parmetric)[0]
dist1 = dist.cdist([datR[i], ], datR[j[j>i]], permetric)[0]
rr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=rweights[j[j>i]])[0]
rr[rr == 0] = 1.0
# Nr = len(datR)
# RR = rr/(Nr*(Nr-1.0)) # factor of 2 cancels with 1/2 that needs to be done to remove double counting of pairs
# print (rr)
return rr
def aDRwcalc(dat, datR, binspar, binsper, parmetric, permetric, rng, rweights):
print ("Calculating anisotropic DR with weights...\n DR=")
dr = np.zeros((len(binspar)-1, len(binsper)-1))
# rrbt = BallTree(datR, metric='pyfunc', func=permetric)
for i in tqdm(range(len(dat))):
ind = arbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], datR[j], parmetric)[0]
dist1 = dist.cdist([dat[i], ], datR[j], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=rweights[j])[0]
dr[dr == 0] = 1.0
# Nd = len(dat)
# Nr = len(datR)
# DR = dr/(Nd*Nr)
# print (dr/2.0)
return dr/2.0
def aRDwcalc(dat, datR, binspar, binsper, parmetric, permetric, rng, weights):
print ("Calculating anisotropic RD with weights...\n DR=")
dr = np.zeros((len(binspar)-1, len(binsper)-1))
# bt = BallTree(dat, metric='pyfunc', func=permetric)
for i in tqdm(range(len(datR))):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], dat[j], parmetric)[0]
dist1 = dist.cdist([datR[i], ], dat[j], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=weights[j])[0]
dr[dr == 0] = 1.0
# DR = dr/(Nd*Nr)
# print (dr/2.0)
return dr/2.0
def aDDwcalcp(dat, binspar, binsper, parmetric, permetric, rng, weights, rNd, multi=False, queue=0):
dd = np.zeros((len(binspar)-1, len(binsper)-1))
# ddbt = BallTree(dat, metric='pyfunc', func=permetric)
for i in tqdm(rNd):
ind = adbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], dat[j[j>i]], parmetric)[0]
dist1 = dist.cdist([dat[i], ], dat[j[j>i]], permetric)[0]
dd += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=weights[j[j>i]])[0]
if multi:
queue.put(dd)
else:
return dd
# print (DD)
return dd
def aRRwcalcp(datR, binspar, binsper, parmetric, permetric, rng, rweights, rNr, multi=False, queue=0):
rr = np.zeros((len(binspar)-1, len(binsper)-1))
# rrbt = BallTree(datR, metric='pyfunc', func=permetric)
for i in tqdm(rNr):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], datR[j[j>i]], parmetric)[0]
dist1 = dist.cdist([datR[i], ], datR[j[j>i]], permetric)[0]
rr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=rweights[j[j>i]])[0]
if multi:
queue.put(rr)
else:
return rr
# rr[rr == 0] = 1.0
# Nr = len(datR)
# RR = rr/(Nr*(Nr-1.0)) # factor of 2 cancels with 1/2 that needs to be done to remove double counting of pairs
# print (RR)
return rr
def aDRwcalcp(dat, datR, binspar, binsper, parmetric, permetric, rng, rweights, rNd, multi=False, queue=0):
# print ("Calculating anisotropic DR with weights (parallelized)...\n DR=")
dr = np.zeros((len(binspar)-1, len(binsper)-1))
# rrbt = BallTree(datR, metric='pyfunc', func=permetric)
for i in tqdm(rNd):
ind = arbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], datR[j], parmetric)[0]
dist1 = dist.cdist([dat[i], ], datR[j], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=rweights[j])[0]
if multi:
queue.put(dr)
else:
return dr
# print (DR)
return dr
def aRDwcalcp(dat, datR, binspar, binsper, parmetric, permetric, rng, weights, rNr, multi=False, queue=0):
# print ("Calculating anisotropic RD with weights (parallelized)...\n DR=")
dr = np.zeros((len(binspar)-1, len(binsper)-1))
# bt = BallTree(dat, metric='pyfunc', func=permetric)
for i in tqdm(rNr):
ind = adbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], dat[j], parmetric)[0]
dist1 = dist.cdist([datR[i], ], dat[j], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=weights[j])[0]
if multi:
queue.put(dr)
else:
return dr
return dr
def amulti_autocp(dat, binspar, binsper, parmetric, permetric, rng, weights, Nd, CORES=pcpus):
DD = np.zeros((len(binspar)-1, len(binsper)-1))
queues = [RetryQueue() for i in range(CORES)]
args = [(dat, binspar, binsper, parmetric, permetric, rng, weights, range(int(Nd*i/CORES),int(Nd*(i+1)/CORES)), True, queues[i]) for i in range(CORES)]
jobs = [Process(target=aDDwcalcp, args=(a)) for a in args]
for j in jobs: j.start()
for q in queues: DD += q.get()
for j in jobs: j.join()
DD[DD == 0] = 1.0
# DD = DD/(Nd*(Nd-1.0)) # factor of 2 cancels with 1/2 that needs to be done to remove double counting of pairs
print (DD)
return DD
def amulti_autocpr(datR, binspar, binsper, parmetric, permetric, rng, rweights, Nr, CORES=pcpus):
RR = np.zeros((len(binspar)-1, len(binsper)-1))
queues = [RetryQueue() for i in range(CORES)]
args = [(datR, binspar, binsper, parmetric, permetric, rng, rweights, range(int(Nr*i/CORES),int(Nr*(i+1)/CORES)), True, queues[i]) for i in range(CORES)]
jobs = [Process(target=aRRwcalcp, args=(a)) for a in args]
for j in jobs: j.start()
for q in queues: RR += q.get()
for j in jobs: j.join()
RR[RR == 0] = 1.0
# RR = RR/(Nr*(Nr-1.0)) # factor of 2 cancels with 1/2 that needs to be done to remove double counting of pairs
print (RR)
return RR
def amulti_crosscp(dat, datR, binspar, binsper, parmetric, permetric, rng, weights, Nr, CORES=pcpus):
RD = np.zeros((len(binspar)-1, len(binsper)-1))
queues = [RetryQueue() for i in range(CORES)]
args = [(dat, datR, binspar, binsper, parmetric, permetric, rng, weights, range(int(Nr*i/CORES), int(Nr*(i+1)/CORES)), True, queues[i]) for i in range(CORES)]
jobs = [Process(target=aRDwcalcp, args=(a)) for a in args]
for j in jobs: j.start()
for q in queues: RD += q.get()
for j in jobs: j.join()
RD[RD == 0] = 1.0
# Nd=len(dat)
# DR = DR/(Nd*Nr)
print (RD/2.0)
return RD/2.0
def amulti_crosscpr(dat, datR, binspar, binsper, parmetric, permetric, rng, rweights, Nd, CORES=pcpus):
DR = np.zeros((len(binspar)-1, len(binsper)-1))
queues = [RetryQueue() for i in range(CORES)]
args = [(dat, datR, binspar, binsper, parmetric, permetric, rng, rweights, range(int(Nd*i/CORES), int(Nd*(i+1)/CORES)), True, queues[i]) for i in range(CORES)]
jobs = [Process(target=aDRwcalcp, args=(a)) for a in args]
for j in jobs: j.start()
for q in queues: DR += q.get()
for j in jobs: j.join()
DR[DR == 0] = 1.0
# Nd=len(dat)
# DR = DR/(Nd*Nr)
print (DR/2.0)
return DR/2.0
def ximonopole(correlsmu, mu):
xi0 = np.sum(correlsmu*sp.special.legendre(0)(mu),axis=1)/len(mu)
np.savetxt("xi0.txt",xi0)
return xi0
def xidipole(correlsmu, mu):
xi2 = np.sum(5.0*correlsmu*sp.special.legendre(2)(mu),axis=1)/len(mu)
np.savetxt("xi2.txt",xi2)
return xi2
def xiquadpole(correlsmu, mu):
xi4 = np.sum(9.0*correlsmu*sp.special.legendre(4)(mu),axis=1)/len(mu)
np.savetxt("xi4.txt",xi4)
return xi4
def beta(correlsmu, mu):
xis0 = ximonopole(correlsmu,mu)
xis2 = xidipole(correlsmu,mu)
xis4 = xiquadpole(correlsmu,mu)
xir = xis0*sp.special.legendre(0)(mu) + xis2*sp.special.legendre(2)(mu) + xis4*sp.special.legendre(4)(mu)
r = xir/xis0
return 5.0/3.0*(np.sqrt(1.8*r-0.8)-1.0)
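# For reference, the three functions above implement the usual multipole expansion
# of xi(s, mu), with the integral over mu approximated by a mean over the sampled
# mu bins (a sketch of the underlying formula, assuming mu is sampled on [0, 1]):
#
#   xi_l(s) = (2l + 1)/2 * Integral_{-1}^{1} xi(s, mu) P_l(mu) dmu
#           ~ (2l + 1) * mean_over_mu[ xi(s, mu) * P_l(mu) ]
#
# so ximonopole, xidipole and xiquadpole return the l = 0, 2 and 4 multipoles
# (monopole, quadrupole and hexadecapole respectively), as the legendre orders show.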
# def beta(correlsmu, mu):
# betav =
| 41.748744
| 401
| 0.604327
|
__author__ = 'Rohin Kumar Y'
from tpcf import *
import scipy as sp
def atpcf(datfile, binspar, binsper, **kwargs):
global binsparv
global binsperv
global maxrad
global dat
global datR
global Nd
global Nr
DD = DR = RD = RR = np.zeros((len(binspar)-1, len(binsper)-1))
weightsflag = True
useones = True
cosmology = 'lcdm'
sflag = True
geometry='flat'
filtermetric = flatdistsq
permetric = musqlcdmf
parmetric = flatdistsq
vtype = 'smu'
randcatfact = 2
estimator = 'dp'
binsparv = binspar**2
binsperv = binsper**2
randfile = None
maskfile = None
mlist = ['dp', 'ls', 'ph', 'hew', 'h']
    clist = ['lcdm', 'lc']
    glist = ['flat', 'open', 'close']
parper = ['ap', 'sigpi', 'smu']
if kwargs is not None:
for key, value in kwargs.items():
if key.lower() == 'randfile':
randfile = value
elif key.lower() == 'randfact':
randcatfact = value
elif key.lower() == 'geometry':
if value.lower() in glist:
geometry = value.lower()
elif key.lower() == 'cosmology':
if value.lower() in clist:
cosmology = value.lower()
else:
print("Incorrect Cosmology provided! Using 'lcdm' as default")
elif key.lower() == 'vtype':
if value.lower() in parper:
vtype = value.lower()
elif key.lower() == 'estimator':
if value.lower() in mlist:
estimator = value.lower()
else:
print("Incorrect estimator provided! Using 'dp' as default")
elif key.lower() == 'mask':
maskfile = value
elif key.lower() == 'weights':
if value is True:
weightsflag = True
useones = False
elif isinstance(value, str):
if value.lower() == 'eq':
weightsflag = True
useones = True
else:
weightsflag = False
else:
print ("key argument `%s` not valid" % key)
else:
print ("Refer documentation to enter valid keyword arguments")
if vtype == 'ap':
parmetric = APdz
binsparv = binspar
binsperv = binsper
sflag = False
filtermetric = APzdth
permetric = APzdth
maxrad = max(np.sqrt(binsparv**2 + binsperv**2))
elif vtype == 'smu':
maxrad = max(binsparv)
if geometry == 'open':
parmetric = opendistsq
filtermetric = opendistsq
if cosmology == 'lc':
permetric = musqlco
else:
permetric = musqlcdmo
elif geometry == 'close':
parmetric = closedistsq
filtermetric = closedistsq
if cosmology == 'lc':
permetric = musqlcc
else:
permetric = musqlcdmc
else:
parmetric = flatdistsq
filtermetric = flatdistsq
if cosmology == 'lc':
permetric = musqlcf
else:
permetric = musqlcdmf
elif vtype == 'sigpi':
maxrad = max(binsparv+binsperv)
if geometry == 'open':
filtermetric = opendistsq
if cosmology == 'lc':
parmetric = sparsqlc
permetric = spersqlco
else:
parmetric = sparsqlcdm
permetric = spersqlcdmo
elif geometry == 'close':
filtermetric = closedistsq
if cosmology == 'lc':
parmetric = sparsqlc
permetric = spersqlcc
else:
parmetric = sparsqlcdm
permetric = spersqlcdmc
else:
filtermetric = flatdistsq
if cosmology == 'lc':
parmetric = sparsqlc
permetric = spersqlcf
else:
parmetric = sparsqlcdm
permetric = spersqlcdmf
else:
print ("No valid valuation method provided. Using 'smu' as default")
print("Calculating Anisotropic Correlation function with the following parameters")
print("data file=")
print(datfile)
print("random file=")
print(randfile)
print("Random catalog size factor(if random file is None)=")
print(randcatfact)
print("mask/window file=")
print(maskfile)
print ("Cosmology=")
print(cosmology)
print ("Geometry=")
print (geometry)
print("Weights=")
print(weightsflag)
print ("Using ones as weights?=")
print (useones)
print("perpendicular metric=")
print(permetric)
print("parallel metric=")
print(parmetric)
print("Correl estimator=")
print(estimator)
print("Valuation type=")
print(vtype)
print ("binsparv=")
print (binsparv)
print ("binsperv=")
print (binsperv)
print("---------------------------------------------------------------------------")
if sflag is False:
dat, weights = datprepz(datfile, 'data', cosmology)
Nd = len(dat)
if randfile is None:
randcatsize = randcatfact*Nd
if maskfile is None:
print ("Mask file compulsory. Please provide mask='maskfilepath.ply'")
else:
datR, rweights = randcatprepz(datfile, randcatsize, maskfile, cosmology)
else:
datR, rweights = datprepz(randfile, 'random', cosmology)
else:
dat, weights = datprep(datfile, 'data', cosmology)
Nd = len(dat)
if randfile is None:
randcatsize = randcatfact*Nd
if maskfile is None:
print ("Mask file compulsory. Please provide mask='maskfilepath.ply'")
else:
datR, rweights = randcatprep(datfile, randcatsize, maskfile, cosmology)
else:
datR, rweights = datprep(randfile, 'random', cosmology)
Nr = len(datR)
fact = (1.0*Nr)/Nd
global adbt
global arbt
print ("Creating BallTree for data points using ...")
print (filtermetric)
adbt = BallTree(dat, metric='pyfunc', func=filtermetric)
print ("Creating BallTree for random points using ...")
print (filtermetric)
arbt = BallTree(datR, metric='pyfunc', func=filtermetric)
rng = np.array([[min(binsparv), max(binsparv)], [min(binsperv), max(binsperv)]])
print ("Calculating anisotropic 2pCF...")
if estimator == 'dp':
        if weightsflag is False:
            DD = aDDcalc(dat, binsparv, binsperv, parmetric, permetric, rng)
DR = aDRcalc(dat, datR, binsparv, binsperv, parmetric, permetric, rng)
RD = aRDcalc(dat, datR, binsparv, binsperv, parmetric, permetric, rng)
else:
if useones is True or len(weights) != Nd:
weights = np.ones(Nd)
rweights = np.ones(Nr)
print ("Calculating anisotropic DD with weights (parallelized)...\n DD=")
DD = amulti_autocp(dat, binsparv, binsperv, parmetric, permetric, rng, weights, Nd, pcpus)
print ("Calculating anisotropic DR with weights (parallelized)...\n DR=")
DR = amulti_crosscp(dat, datR, binsparv, binsperv, parmetric, permetric, rng, weights, Nr, pcpus)
print ("Calculating anisotropic RD with weights (parallelized)...\n RD=")
RD = amulti_crosscpr(dat, datR, binsparv, binsperv, parmetric, permetric, rng, rweights, Nd, pcpus)
print ("Using Davis-Peebles estimator")
correl = fact*(DD*2.0/(DR+RD))-1.0
elif estimator == 'ph':
        if weightsflag is False:
            DD = aDDcalc(dat, binsparv, binsperv, parmetric, permetric, rng)
RR = aRRcalc(datR, binsparv, binsperv, parmetric, permetric, rng)
else:
if useones is True or len(weights) != Nd:
weights = np.ones(Nd)
rweights = np.ones(Nr)
print ("Calculating anisotropic DD with weights (parallelized)...\n DD=")
DD = amulti_autocp(dat, binsparv, binsperv, parmetric, permetric, rng, weights, Nd, pcpus)
print ("Calculating anisotropic RR with weights (parallelized)...\n RR=")
RR = amulti_autocpr(datR, binsparv, binsperv, parmetric, permetric, rng, rweights, Nr, pcpus)
print ("Using Peebles-Hauser estimator")
correl = fact**2*(DD/RR)-1.0
else:
        if weightsflag is False:
            DD = aDDcalc(dat, binsparv, binsperv, parmetric, permetric, rng)
RR = aRRcalc(datR, binsparv, binsperv, parmetric, permetric, rng)
DR = aDRcalc(dat, datR, binsparv, binsperv, parmetric, permetric, rng)
RD = aRDcalc(dat, datR, binsparv, binsperv, parmetric, permetric, rng)
else:
if useones is True or len(weights) != Nd:
weights = np.ones(Nd)
rweights = np.ones(Nr)
print ("Calculating anisotropic DD with weights (parallelized)...\n DD=")
DD = amulti_autocp(dat, binsparv, binsperv, parmetric, permetric, rng, weights, Nd, pcpus)
print ("Calculating anisotropic DR with weights (parallelized)...\n DR=")
DR = amulti_crosscp(dat, datR, binsparv, binsperv, parmetric, permetric, rng, weights, Nr, pcpus)
print ("Calculating anisotropic RD with weights (parallelized)...\n RD=")
RD = amulti_crosscpr(dat, datR, binsparv, binsperv, parmetric, permetric, rng, rweights, Nd, pcpus)
print ("Calculating anisotropic RR with weights (parallelized)...\n RR=")
RR = amulti_autocpr(datR, binsparv, binsperv, parmetric, permetric, rng, rweights, Nr, pcpus)
if estimator == 'ls':
print ("Using Landy-Szalay estimator")
correl = fact**2*(DD/RR)-fact*(DR+RD)/RR+1.0
elif estimator == 'hew':
print ("Using Hewett estimator")
correl = fact**2*(DD/RR)-fact*0.5*(DR+RD)/RR
elif estimator == 'h':
print ("Using Hamilton estimator")
correl = (4.0*DD*RR)/(DR+RD)**2 - 1.0
correlerr = poserr(correl, DD)
print("Anisotropic Two-point correlation=")
np.savetxt("aDD_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", DD)
np.savetxt("aDR_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", DR)
np.savetxt("aRD_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", RD)
np.savetxt("aRR_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", RR)
np.savetxt("abinspar_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", binspar)
np.savetxt("abinsper_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", binsper)
np.savetxt("atpcf_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", correl)
np.savetxt("atpcferr_"+str(cosmology)+"_"+str(geometry)+"_"+str(vtype)+"_"+str(estimator)+".txt", correlerr)
print (correl, correlerr)
return correl, correlerr
def aDDcalc(dat, binspar, binsper, parmetric, permetric, rng):
print ("Calculating anisotropic DD...\n DD=")
dd = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(dat))):
ind = adbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], dat[j[j>i]], parmetric)[0]
dist1 = dist.cdist([dat[i], ], dat[j[j>i]], permetric)[0]
dd += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper))[0]
dd[dd == 0] = 1.0
print (dd)
return dd
def aRRcalc(datR, binspar, binsper, parmetric, permetric, rng):
print ("Calculating anisotropic RR...\n RR=")
rr = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(datR))):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], datR[j[j>i]], parmetric)[0]
dist1 = dist.cdist([datR[i], ], datR[j[j>i]], permetric)[0]
rr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper))[0]
rr[rr == 0] = 1.0
print (rr)
return rr
def aDRcalc(dat, datR, binspar, binsper, parmetric, permetric, rng):
print ("Calculating anisotropic DR...\n DR=")
dr = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(dat))):
ind = arbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], datR[j[j>i]], parmetric)[0]
dist1 = dist.cdist([dat[i], ], datR[j[j>i]], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper))[0]
dr[dr == 0] = 1.0
print (dr)
return dr
def aRDcalc(dat, datR, binspar, binsper, parmetric, permetric, rng):
print ("Calculating anisotropic RD...\n RD=")
rd = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(datR))):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], dat[j[j>i]], parmetric)[0]
dist1 = dist.cdist([datR[i], ], dat[j[j>i]], permetric)[0]
rd += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper))[0]
rd[rd == 0] = 1.0
print (rd)
return rd
def aDDwcalc(dat, binspar, binsper, parmetric, permetric, rng, weights):
print ("Calculating anisotropic DD with weights...\n DD=")
dd = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(dat))):
ind = adbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], dat[j[j>i]], parmetric)[0]
dist1 = dist.cdist([dat[i], ], dat[j[j>i]], permetric)[0]
dd += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=weights[j[j>i]])[0]
dd[dd == 0] = 1.0
return dd
def aRRwcalc(datR, binspar, binsper, parmetric, permetric, rng, rweights):
print ("Calculating anisotropic RR with weights...\n RR=")
rr = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(datR))):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], datR[j[j>i]], parmetric)[0]
dist1 = dist.cdist([datR[i], ], datR[j[j>i]], permetric)[0]
rr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=rweights[j[j>i]])[0]
rr[rr == 0] = 1.0
return rr
def aDRwcalc(dat, datR, binspar, binsper, parmetric, permetric, rng, rweights):
print ("Calculating anisotropic DR with weights...\n DR=")
dr = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(dat))):
ind = arbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], datR[j], parmetric)[0]
dist1 = dist.cdist([dat[i], ], datR[j], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=rweights[j])[0]
dr[dr == 0] = 1.0
return dr/2.0
def aRDwcalc(dat, datR, binspar, binsper, parmetric, permetric, rng, weights):
print ("Calculating anisotropic RD with weights...\n DR=")
dr = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(range(len(datR))):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], dat[j], parmetric)[0]
dist1 = dist.cdist([datR[i], ], dat[j], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=weights[j])[0]
dr[dr == 0] = 1.0
return dr/2.0
def aDDwcalcp(dat, binspar, binsper, parmetric, permetric, rng, weights, rNd, multi=False, queue=0):
dd = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(rNd):
ind = adbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], dat[j[j>i]], parmetric)[0]
dist1 = dist.cdist([dat[i], ], dat[j[j>i]], permetric)[0]
dd += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=weights[j[j>i]])[0]
if multi:
queue.put(dd)
else:
return dd
return dd
def aRRwcalcp(datR, binspar, binsper, parmetric, permetric, rng, rweights, rNr, multi=False, queue=0):
rr = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(rNr):
ind = arbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], datR[j[j>i]], parmetric)[0]
dist1 = dist.cdist([datR[i], ], datR[j[j>i]], permetric)[0]
rr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=rweights[j[j>i]])[0]
if multi:
queue.put(rr)
else:
return rr
return rr
def aDRwcalcp(dat, datR, binspar, binsper, parmetric, permetric, rng, rweights, rNd, multi=False, queue=0):
dr = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(rNd):
ind = arbt.query_radius(dat[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([dat[i], ], datR[j], parmetric)[0]
dist1 = dist.cdist([dat[i], ], datR[j], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=rweights[j])[0]
if multi:
queue.put(dr)
else:
return dr
return dr
def aRDwcalcp(dat, datR, binspar, binsper, parmetric, permetric, rng, weights, rNr, multi=False, queue=0):
dr = np.zeros((len(binspar)-1, len(binsper)-1))
for i in tqdm(rNr):
ind = adbt.query_radius(datR[i].reshape(1, -1), maxrad)
for j in ind:
dist0 = dist.cdist([datR[i], ], dat[j], parmetric)[0]
dist1 = dist.cdist([datR[i], ], dat[j], permetric)[0]
dr += np.histogram2d(dist0, dist1, range=rng, bins=(binspar, binsper), weights=weights[j])[0]
if multi:
queue.put(dr)
else:
return dr
return dr
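# The amulti_* drivers below split the point indices across CORES worker
# processes; each worker fills its own 2-D pair-count histogram via the
# *wcalcp helpers above and pushes it onto a queue, and the parent sums the
# partial histograms before replacing empty bins with 1.0.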
def amulti_autocp(dat, binspar, binsper, parmetric, permetric, rng, weights, Nd, CORES=pcpus):
DD = np.zeros((len(binspar)-1, len(binsper)-1))
queues = [RetryQueue() for i in range(CORES)]
args = [(dat, binspar, binsper, parmetric, permetric, rng, weights, range(int(Nd*i/CORES),int(Nd*(i+1)/CORES)), True, queues[i]) for i in range(CORES)]
jobs = [Process(target=aDDwcalcp, args=(a)) for a in args]
for j in jobs: j.start()
for q in queues: DD += q.get()
for j in jobs: j.join()
DD[DD == 0] = 1.0
print (DD)
return DD
def amulti_autocpr(datR, binspar, binsper, parmetric, permetric, rng, rweights, Nr, CORES=pcpus):
RR = np.zeros((len(binspar)-1, len(binsper)-1))
queues = [RetryQueue() for i in range(CORES)]
args = [(datR, binspar, binsper, parmetric, permetric, rng, rweights, range(int(Nr*i/CORES),int(Nr*(i+1)/CORES)), True, queues[i]) for i in range(CORES)]
jobs = [Process(target=aRRwcalcp, args=(a)) for a in args]
for j in jobs: j.start()
for q in queues: RR += q.get()
for j in jobs: j.join()
RR[RR == 0] = 1.0
print (RR)
return RR
def amulti_crosscp(dat, datR, binspar, binsper, parmetric, permetric, rng, weights, Nr, CORES=pcpus):
RD = np.zeros((len(binspar)-1, len(binsper)-1))
queues = [RetryQueue() for i in range(CORES)]
args = [(dat, datR, binspar, binsper, parmetric, permetric, rng, weights, range(int(Nr*i/CORES), int(Nr*(i+1)/CORES)), True, queues[i]) for i in range(CORES)]
jobs = [Process(target=aRDwcalcp, args=(a)) for a in args]
for j in jobs: j.start()
for q in queues: RD += q.get()
for j in jobs: j.join()
RD[RD == 0] = 1.0
print (RD/2.0)
return RD/2.0
def amulti_crosscpr(dat, datR, binspar, binsper, parmetric, permetric, rng, rweights, Nd, CORES=pcpus):
DR = np.zeros((len(binspar)-1, len(binsper)-1))
queues = [RetryQueue() for i in range(CORES)]
args = [(dat, datR, binspar, binsper, parmetric, permetric, rng, rweights, range(int(Nd*i/CORES), int(Nd*(i+1)/CORES)), True, queues[i]) for i in range(CORES)]
jobs = [Process(target=aDRwcalcp, args=(a)) for a in args]
for j in jobs: j.start()
for q in queues: DR += q.get()
for j in jobs: j.join()
DR[DR == 0] = 1.0
print (DR/2.0)
return DR/2.0
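# Legendre-multipole projections of xi(s, mu): each moment below is the mu-bin
# average of xi weighted by (2l+1) * P_l(mu) (a factor of 1 for l=0, 5 for l=2
# and 9 for l=4), and each result is also written to an xi<l>.txt file.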
def ximonopole(correlsmu, mu):
xi0 = np.sum(correlsmu*sp.special.legendre(0)(mu),axis=1)/len(mu)
np.savetxt("xi0.txt",xi0)
return xi0
def xidipole(correlsmu, mu):
xi2 = np.sum(5.0*correlsmu*sp.special.legendre(2)(mu),axis=1)/len(mu)
np.savetxt("xi2.txt",xi2)
return xi2
def xiquadpole(correlsmu, mu):
xi4 = np.sum(9.0*correlsmu*sp.special.legendre(4)(mu),axis=1)/len(mu)
np.savetxt("xi4.txt",xi4)
return xi4
def beta(correlsmu, mu):
xis0 = ximonopole(correlsmu,mu)
xis2 = xidipole(correlsmu,mu)
xis4 = xiquadpole(correlsmu,mu)
xir = xis0*sp.special.legendre(0)(mu) + xis2*sp.special.legendre(2)(mu) + xis4*sp.special.legendre(4)(mu)
r = xir/xis0
return 5.0/3.0*(np.sqrt(1.8*r-0.8)-1.0)
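# Usage sketch (illustrative only): projecting a xi(s, mu) grid onto its
# multipoles with the helpers above. The grid shape and mu binning below are
# assumptions; `np` is the numpy module this file already uses. Note that each
# helper also writes an xi<l>.txt file to the working directory.
if __name__ == '__main__':
    example_mu = np.linspace(0.0, 1.0, 100)           # mu bin centres (assumed)
    example_xi_smu = np.zeros((50, len(example_mu)))  # placeholder xi(s, mu) grid
    xi0 = ximonopole(example_xi_smu, example_mu)      # l = 0 moment per s bin
    xi2 = xidipole(example_xi_smu, example_mu)        # l = 2 moment (factor 5)
    xi4 = xiquadpole(example_xi_smu, example_mu)      # l = 4 moment (factor 9)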
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f701eb659d29808c9a1f074f5e5d862a3df1c0a1 | size: 4,076 | ext: py | lang: Python
path: tests/test_resource_synchronization.py | repo: TUDelft-CITG/Hydraulic-Infrastructure-Realisation | head hexsha: c5888ef8f8bd1676536e268701dbb974e6f87c40 | licenses: ["MIT"]
stars: 6 (2019-11-14T08:12:08.000Z to 2021-04-08T11:13:35.000Z) | issues: 16 (2019-06-25T16:44:13.000Z to 2022-02-15T18:05:28.000Z) | forks: 8 (2019-07-03T08:28:26.000Z to 2021-07-12T08:11:53.000Z)
"""Test package."""
import shapely.geometry
import simpy
import openclsim.core as core
import openclsim.model as model
from .test_utils import assert_log
def test_test_resource_synchronization():
"""Test resource Synchronization."""
simulation_start = 0
my_env = simpy.Environment(initial_time=simulation_start)
registry = {}
Site = type(
"Site",
(
core.Identifiable,
core.Log,
core.Locatable,
core.HasContainer,
core.HasResource,
),
{},
)
TransportProcessingResource = type(
"TransportProcessingResource",
(
core.Identifiable,
core.Log,
core.ContainerDependentMovable,
core.Processor,
core.HasResource,
core.LoadingFunction,
core.UnloadingFunction,
),
{},
)
location_from_site = shapely.geometry.Point(4.18055556, 52.18664444)
from_site = Site(
env=my_env,
name="Winlocatie",
ID="6dbbbdf4-4589-11e9-a501-b469212bff5d",
geometry=location_from_site,
capacity=10,
level=8,
)
hopper1 = TransportProcessingResource(
env=my_env,
name="Hopper 01",
ID="6dbbbdf6-4589-11e9-95a2-b469212bff5b",
geometry=location_from_site,
loading_rate=1,
unloading_rate=1,
capacity=4,
compute_v=lambda x: 10,
)
hopper2 = TransportProcessingResource(
env=my_env,
name="Hopper 02",
ID="5dbbbdf6-4589-11e9-95a2-b469212bff5b",
geometry=location_from_site,
loading_rate=1,
unloading_rate=1,
capacity=4,
compute_v=lambda x: 10,
)
requested_resources1 = {}
activity1 = model.ShiftAmountActivity(
env=my_env,
name="Transfer1",
ID="6dbbbdf7-4589-11e9-bf3b-b469212bff52",
registry=registry,
processor=hopper1,
origin=from_site,
destination=hopper1,
amount=1,
duration=20,
requested_resources=requested_resources1,
)
seq_activity1 = model.SequentialActivity(
env=my_env,
name="Sequential process1",
ID="6dbbbdf7-4589-11e9-bf3b-b469212bff60",
registry=registry,
sub_processes=[activity1],
requested_resources=requested_resources1,
)
while1 = model.WhileActivity(
env=my_env,
name="while1",
ID="6dbbbdf7-4589-11e9-bf3b-b469212bff5g",
registry=registry,
sub_processes=[seq_activity1],
requested_resources=requested_resources1,
condition_event=[
{
"or": [
{"type": "container", "concept": hopper1, "state": "full"},
{"type": "container", "concept": from_site, "state": "empty"},
]
}
],
)
activity2 = model.ShiftAmountActivity(
env=my_env,
name="Transfer2",
ID="5dbbbdf7-4589-11e9-bf3b-b469212bff52",
registry=registry,
processor=hopper2,
origin=from_site,
destination=hopper2,
amount=1,
duration=20,
)
seq_activity2 = model.SequentialActivity(
env=my_env,
name="Sequential process2",
ID="5dbbbdf7-4589-11e9-bf3b-b469212bff60",
registry=registry,
sub_processes=[activity2],
)
while2 = model.WhileActivity(
env=my_env,
name="while2",
ID="5dbbbdf7-4589-11e9-bf3b-b469212bff5g",
registry=registry,
sub_processes=[seq_activity2],
condition_event=[
{
"or": [
{"type": "container", "concept": hopper2, "state": "full"},
{"type": "container", "concept": from_site, "state": "empty"},
]
}
],
)
model.register_processes([while1, while2])
my_env.run()
assert my_env.now == 160
assert_log(from_site)
assert_log(while1)
assert_log(while2)
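# Minimal simpy sketch (illustrative, not part of the test above): two workers
# contending for a single-capacity resource, the low-level pattern that the
# resource-synchronization test exercises through its shared
# requested_resources dictionary. Durations are made up; simpy is already
# imported at the top of this file.
def _resource_contention_sketch():
    def worker(env, resource, work_time):
        with resource.request() as req:
            yield req                      # wait until the shared resource is free
            yield env.timeout(work_time)   # hold the resource while "working"

    env = simpy.Environment()
    shared = simpy.Resource(env, capacity=1)
    env.process(worker(env, shared, 20))
    env.process(worker(env, shared, 20))
    env.run()
    return env.now  # 40: the second worker had to wait for the first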
avg_line_length: 25.63522 | max_line_length: 82 | alphanum_fraction: 0.569676
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f701ebc777f0e75bb15805fc60c334c03e73cfd1 | size: 590 | ext: py | lang: Python
path: Hello-producer/producer.py | repo: mydevground/ApacheKafkaPython | head hexsha: 96da0e5d0294a9823776f9622fca6a3b46ff06bb | licenses: ["MIT"]
stars: 5 (2021-06-06T13:25:47.000Z to 2022-01-16T15:48:22.000Z) | issues: null | forks: 2 (2021-09-21T17:34:18.000Z to 2021-10-01T21:40:10.000Z)
from confluent_kafka import Producer
import socket
if __name__ == '__main__':
print("Starting Kafka Producer")
producer_config = {'client.id': socket.gethostname(),
'bootstrap.servers': 'localhost:9092'}
print("Creating Producer")
producer = Producer(producer_config)
print("Producing Kafka Message")
for i in range(1, 101):
for j in range(1, 10001):
producer.produce('hello-producer', key=str(j*i), value="Simple Message-" + str(j*i))
producer.poll()
producer.flush()
print("Finished Kafka Producer")
avg_line_length: 28.095238 | max_line_length: 96 | alphanum_fraction: 0.640678
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f701ec8d1673c733a95290013d8b9a3737b4b8a1 | size: 35,290 | ext: py | lang: Python
path: pytests/security/SecretsMgmtTests.py | repo: sumedhpb/testrunner | head hexsha: 9ff887231c75571624abc31a3fb5248110e01203 | licenses: ["Apache-2.0"]
stars: 14 (2015-02-06T02:47:57.000Z to 2020-03-14T15:06:05.000Z) | issues: 3 (2019-02-27T19:29:11.000Z to 2021-06-02T02:14:27.000Z) | forks: 108 (2015-03-26T08:58:49.000Z to 2022-03-21T05:21:39.000Z)
from membase.api.rest_client import RestConnection, RestHelper
import urllib.request, urllib.parse, urllib.error
import json
from remote.remote_util import RemoteMachineShellConnection, RemoteMachineHelper
from newupgradebasetest import NewUpgradeBaseTest
from security.auditmain import audit
import subprocess
import socket
import fileinput
import sys
from subprocess import Popen, PIPE
from .SecretsMasterBase import SecretsMasterBase
from basetestcase import BaseTestCase
import _thread
from testconstants import STANDARD_BUCKET_PORT
from membase.api.rest_client import RestConnection, Bucket, RestHelper
from membase.api.exception import BucketCreationException
from membase.helper.bucket_helper import BucketOperationHelper
from couchbase_helper.documentgenerator import BlobGenerator
class SecretsMgmtTests(BaseTestCase):
def setUp(self):
super(SecretsMgmtTests, self).setUp()
self.secretmgmt_base_obj = SecretsMasterBase(self.master)
self.password = self.input.param('password', 'p@ssword')
enable_audit = self.input.param('audit', None)
if enable_audit:
Audit = audit(host=self.master)
currentState = Audit.getAuditStatus()
self.log.info("Current status of audit on ip - {0} is {1}".format(self.master.ip, currentState))
if not currentState:
self.log.info("Enabling Audit ")
Audit.setAuditEnable('true')
self.sleep(30)
def tearDown(self):
self.log.info("---------------Into Teardown---------------")
for server in self.servers:
self.secretmgmt_base_obj = SecretsMasterBase(server)
self.secretmgmt_base_obj.set_password(server, "")
self.secretmgmt_base_obj.change_config_to_orginal(server, "")
log_dir = (self.secretmgmt_base_obj.get_log_dir(server))[1:-1]
babysitter_file = str(log_dir + "/babysitter.log")
shell = RemoteMachineShellConnection(server)
command = str(" mv " + babysitter_file + " " + log_dir + "/babysitterOLD.log")
shell.execute_command(command=command)
self.print_memcached_ip()
shell.disconnect()
super(SecretsMgmtTests, self).tearDown()
def suite_setUp(self):
self.log.info("---------------Suite Setup---------------")
def suite_tearDown(self):
self.log.info("---------------Suite Teardown---------------")
def print_memcached_ip(self):
shell = RemoteMachineShellConnection(self.master)
o, _ = shell.execute_command("ps aux | grep 'memcached' | awk '{print $2}'")
if o:
mem_pid = o[0]
shell.disconnect()
def test_evn_variable(self):
self.secretmgmt_base_obj.set_password(self.master, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_return = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_return, "Babysitter.log does not contain node initialization code")
def test_multiple_prompt_3times(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
# self.secretmgmt_base_obj.incorrect_password(self.master,cmd="/opt/couchbase/bin/cbmaster_password")
temp_result = self.secretmgmt_base_obj.incorrect_password(self.master, cmd=cmd)
self.assertTrue(temp_result, "Issue with passing incorrect password 3 times")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
print('Process Memcached is not running')
# shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
shell.execute_command(
"export CB_MASTER_PASSWORD=" + self.password + "; /opt/couchbase/etc/couchbase_init.d start")
def test_multiple_prompt_enter_correct_2retries(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
# self.secretmgmt_base_obj.incorrect_password(self.master, cmd="/opt/couchbase/bin/cbmaster_password",
# retries_number=2,input_correct_pass=True,correct_pass=self.password)
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.incorrect_password(self.master, cmd=cmd,
retries_number=2, input_correct_pass=True,
correct_pass=self.password)
self.assertTrue(temp_result, "Issue with incorrect password for 2 times and then correct password")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
def test_multiple_prompt_enter_correct_1retries(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
# self.secretmgmt_base_obj.incorrect_password(self.master, cmd="/opt/couchbase/bin/cbmaster_password",
# retries_number=1, input_correct_pass=True, correct_pass='temp')
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.incorrect_password(self.master, cmd=cmd,
retries_number=1, input_correct_pass=True,
correct_pass=self.password)
self.assertTrue(temp_result, "Issue with incorrect password for 1 times and then correct password")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
def test_prompt_enter_correct_password(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
shell.disconnect()
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
# self.secretmgmt_base_obj.incorrect_password(self.master, cmd="/opt/couchbase/bin/cbmaster_password",
# retries_number=1, input_correct_pass=True, correct_pass='temp')
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.correct_password_on_prompt(self.master, self.password, cmd=cmd)
self.assertTrue(temp_result, "Issue with passing in correct password on prompt")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
def test_env_variable_change_pass(self):
new_pass = self.input.param("new_password", "new_p@ssw0rd")
self.secretmgmt_base_obj.set_password(self.master, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
self.secretmgmt_base_obj.set_password(self.master, new_pass)
self.secretmgmt_base_obj.restart_server_with_env(self.master, new_pass)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def generate_pass(self):
type = self.input.param("type", 'char')
pass_length = self.input.param('pass_length', 10)
num_pass = self.input.param('num_pass', 10)
if type in ('char', 'int', 'ext'):
pass_list = self.secretmgmt_base_obj.generate_password_simple(type, pass_length, num_pass)
else:
pass_list = self.secretmgmt_base_obj.generate_password_dual(type, pass_length, num_pass)
for item in pass_list:
item = item.decode('ISO-8859-1').strip()
self.secretmgmt_base_obj.set_password(self.master, item)
self.secretmgmt_base_obj.restart_server_with_env(self.master, item)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def generate_pass_file(self):
with open("./pytests/security/password_list.txt") as f:
for item in f:
item = item.decode('ISO-8859-1').strip()
self.secretmgmt_base_obj.set_password(self.master, item)
self.secretmgmt_base_obj.restart_server_with_env(self.master, item)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def test_cluster_rebalance_in_env_var(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
temp_result = self.cluster.rebalance(self.servers, servers_in, [])
self.assertTrue(temp_result, "Rebalance-in did not complete with password setup node")
def test_cluster_rebalance_out(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
self.cluster.rebalance(self.servers, servers_in, [])
servers_out = self.servers[2:]
temp_result = self.cluster.rebalance(self.servers, [], servers_out)
self.assertTrue(temp_result, 'Rebalance-out did not complete with password node setup')
def test_cluster_rebalance_in_prompt(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password, startup_type='prompt')
temp_result = self.cluster.rebalance(self.servers, servers_in, [])
self.assertTrue(temp_result, 'Rebalance-in did not complete with password node setup')
def test_cluster_rebalance_out_prompt(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password, startup_type='prompt')
self.cluster.rebalance(self.servers, servers_in, [])
servers_out = self.servers[2:]
temp_result = self.cluster.rebalance(self.servers, [], servers_out)
self.assertTrue(temp_result, 'Rebalance-out did not complete with password node setup')
def test_cluster_rebalance_in_diff_modes(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
extra_pass = self.input.param('extra_pass', 'p@ssw0rd1')
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, extra_pass, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
temp_result = self.cluster.rebalance(self.servers, servers_in, [])
self.assertTrue(temp_result, 'Rebalance-in did not complete with password node setup')
def test_cluster_rebalance_out_diff_modes(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
extra_pass = self.input.param('extra_pass', 'p@ssw0rd1')
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, extra_pass, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
self.cluster.rebalance(self.servers, servers_in, [])
servers_out = self.servers[2:]
temp_result = self.cluster.rebalance(self.servers, [], servers_out)
self.assertTrue(temp_result, 'Rebalance-out did not complete with password node setup')
# services_in=kv-index-n1ql,nodes_init=1,nodes_in=3
def test_cluster_rebalance_in_env_var_services(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
        self.assertTrue(rebalance.result(), "Issue with Rebalance in with different services")
# services_in=kv-index-n1ql,nodes_init=1,nodes_in=3
def test_cluster_rebalance_in_diff_type_var_services(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, self.password, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
self.assertTrue(rebalance.result(), "Rebalance in with different servers")
# services_in=kv-index-n1ql,nodes_init=1,nodes_in=3
def test_cluster_rebalance_out_env_var_services(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
print(("result of rebalance is {0}".format(rebalance.result())))
servers_out = self.servers[2:]
rebalance = self.cluster.async_rebalance(self.servers, [], servers_out)
print(("result of rebalance is {0}".format(rebalance.result())))
self.assertTrue(rebalance.result(), "Rebalance out with different service")
# services_in=kv-index-n1ql,nodes_init=1,nodes_in=3
def test_cluster_rebalance_out_diff_type_var_services(self):
extra_pass = self.input.param("extra_pass", 'p@ssw0rd01')
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, extra_pass, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
rebalance.result()
servers_out = self.servers[1:]
rebalance = self.cluster.async_rebalance(self.servers, [], servers_out)
print((rebalance.result()))
self.assertTrue(rebalance.result(), "Rebalance in and out with different servers")
# services_init = kv - kv:n1ql - index - kv:index, nodes_init = 4, nodes_out = 1, nodes_out_dist = kv:1, graceful = False
# services_init = kv - kv:n1ql - index - kv:index, nodes_init = 4, nodes_out = 1, nodes_out_dist = kv:1, graceful = False,recoveryType=delta
def test_failover_add_back(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
try:
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
self.sleep(30)
rest = RestConnection(self.master)
self.graceful = self.input.param('graceful', False)
recoveryType = self.input.param("recoveryType", "full")
self.find_nodes_in_list()
self.generate_map_nodes_out_dist()
servr_out = self.nodes_out_list
nodes_all = rest.node_statuses()
failover_task = self.cluster.async_failover([self.master],
failover_nodes=servr_out, graceful=self.graceful)
failover_task.result()
nodes_all = rest.node_statuses()
nodes = []
if servr_out[0].ip == "127.0.0.1":
for failover_node in servr_out:
nodes.extend([node for node in nodes_all
if (str(node.port) == failover_node.port)])
else:
for failover_node in servr_out:
nodes.extend([node for node in nodes_all
if node.ip == failover_node.ip])
for node in nodes:
self.log.info(node)
rest.add_back_node(node.id)
rest.set_recovery_type(otpNode=node.id, recoveryType=recoveryType)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [], [])
self.assertTrue(rebalance.result(), "Failover with different servers")
except Exception as ex:
raise
# services_init=kv-kv-index-index:n1ql,nodes_init=4,nodes_out=1,nodes_out_dist=kv:1,graceful=True
def test_failover(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
try:
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, 'temp')
self.sleep(30)
self.find_nodes_in_list()
self.generate_map_nodes_out_dist()
servr_out = self.nodes_out_list
print(servr_out)
self.graceful = self.input.param('graceful', False)
failover_task = self.cluster.async_failover([self.master],
failover_nodes=servr_out, graceful=self.graceful)
failover_task.result()
self.log.info("Rebalance first time")
rebalance = self.cluster.rebalance(self.servers[:self.nodes_init], [], [])
self.log.info("Rebalance Second time")
rebalance = self.cluster.rebalance(self.servers[:self.nodes_init], [], [])
except Exception as ex:
raise
# services_init=kv-kv-index-index:n1ql,nodes_init=4,targetProcess=memcached
# services_init=kv-kv-index-index:n1ql,nodes_init=4,targetProcess=babysitter
def kill_process(self):
self.targetProcess = self.input.param("targetProcess", 'memcached')
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
for servers in self.servers:
remote = RemoteMachineShellConnection(servers)
if self.targetProcess == "memcached":
remote.kill_memcached()
else:
remote.terminate_process(process_name=self.targetProcess)
for servers in self.servers:
self.secretmgmt_base_obj.restart_server_with_env(servers, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Issue with server restart after killing of process")
def restart_server(self):
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
for servers in self.servers:
self.secretmgmt_base_obj.restart_server_with_env(servers, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Issue with server restart of server")
# services_init=kv-kv-index-index:n1ql,nodes_init=4,default_bucket=False,bucket_type=sasl
# services_init=kv-kv-index-index:n1ql,nodes_init=4,default_bucket=False,bucket_type=standard
# services_init=kv-kv-index-index:n1ql,nodes_init=4,default_bucket=False,bucket_type=standard,password=a@cd#efgh@
# services_init=kv-kv-index-index:n1ql,nodes_init=4,default_bucket=False,bucket_type=standard,password=a@cd#efgh@
def test_bucket_create_password(self, bucket_name='secretsbucket', num_replicas=1, bucket_size=100):
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
bucket_type = self.input.param("bucket_type", 'couchbase')
tasks = []
if bucket_type == 'couchbase':
# self.cluster.create_sasl_bucket(self.master, bucket_name, self.password, num_replicas)
rest = RestConnection(self.master)
rest.create_bucket(bucket_name, ramQuotaMB=100)
elif bucket_type == 'standard':
self.cluster.create_standard_bucket(self.master, bucket_name, STANDARD_BUCKET_PORT + 1,
bucket_size)
elif bucket_type == "memcached":
tasks.append(
self.cluster.async_create_memcached_bucket(self.master, bucket_name, STANDARD_BUCKET_PORT + 1,
bucket_size))
for task in tasks:
self.assertTrue(task.result(), "Issue with bucket creation")
else:
self.log.error('Bucket type not specified')
return
self.assertTrue(BucketOperationHelper.wait_for_bucket_creation(bucket_name, RestConnection(self.master)),
msg='failed to start up bucket with name "{0}'.format(bucket_name))
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
install_path = self.secretmgmt_base_obj._get_install_path(self.master)
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, '/config/config.dat',
self.password)
self.assertTrue(temp_result, "Password found in config.dat")
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, 'isasl.pw', self.password)
self.assertTrue(temp_result, "Password found in isasl.pw")
def test_bucket_edit_password(self, bucket_name='secretsbucket', num_replicas=1, bucket_size=100):
updated_pass = "p@ssw0rd_updated"
rest = RestConnection(self.master)
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
bucket_type = self.input.param("bucket_type", 'standard')
tasks = []
if bucket_type == 'sasl':
self.cluster.create_sasl_bucket(self.master, bucket_name, self.password, num_replicas, bucket_size)
self.sleep(10)
rest.change_bucket_props(bucket_name, saslPassword=updated_pass)
else:
self.log.error('Bucket type not specified')
return
self.assertTrue(BucketOperationHelper.wait_for_bucket_creation(bucket_name, RestConnection(self.master)),
msg='failed to start up bucket with name "{0}'.format(bucket_name))
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
install_path = self.secretmgmt_base_obj._get_install_path(self.master)
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, '/config/config.dat',
updated_pass)
self.assertTrue(temp_result, "Password found in config.dat")
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, 'isasl.pw', updated_pass)
self.assertTrue(temp_result, "Password found in isasl.pw")
def test_cli_setting(self):
temp_result = self.secretmgmt_base_obj.execute_cli(self.master, new_password=self.password)
self.assertTrue(temp_result, "Output of the command is not correct")
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def test_cbcollect(self):
rest = RestConnection(self.master)
bucket_name = 'cbcollectbucket'
num_replicas = 1
bucket_size = 100
# self.cluster.create_sasl_bucket(self.master, bucket_name, self.password, num_replicas, bucket_size)
rest.create_bucket(bucket_name, ramQuotaMB=100)
result = self.secretmgmt_base_obj.generate_cb_collect(self.master, "cbcollect.zip", self.password)
self.assertTrue(result, "Bucket password appears in the cbcollect info")
def rotate_data_key(self):
temp_result = self.secretmgmt_base_obj.read_ns_config(self.master)
self.assertTrue(temp_result, "Config.dat is not refereshed after data key")
def cli_rotate_key(self):
temp_result = self.secretmgmt_base_obj.execute_cli_rotate_key(self.master)
self.assertTrue(temp_result, "Issue with rotate key on cli side")
def audit_change_password(self):
self.secretmgmt_base_obj.set_password(self.master, self.password)
Audit = audit(eventID='8233', host=self.master)
expectedResults = {"real_userid:source": "ns_server", "real_userid:user": "Administrator",
"ip": self.ipAddress, "port": 123456}
        fieldVerification, valueVerification = Audit.validateEvents(expectedResults)
self.assertTrue(fieldVerification, "One of the fields is not matching")
self.assertTrue(valueVerification, "Values for one of the fields is not matching")
    def audit_rotate_key(self):
self.secretmgmt_base_obj.execute_cli_rotate_key(self.master)
Audit = audit(eventID='8234', host=self.master)
expectedResults = {"real_userid:source": "ns_server", "real_userid:user": "Administrator",
"ip": self.ipAddress, "port": 123456}
        fieldVerification, valueVerification = Audit.validateEvents(expectedResults)
self.assertTrue(fieldVerification, "One of the fields is not matching")
self.assertTrue(valueVerification, "Values for one of the fields is not matching")
class SecretsMgmtUpgrade(NewUpgradeBaseTest):
def setUp(self):
super(SecretsMgmtUpgrade, self).setUp()
self.initial_version = self.input.param("initial_version", '4.1.0-5005')
self.upgrade_version = self.input.param("upgrade_version", "4.6.0-3467")
self.secretmgmt_base_obj = SecretsMasterBase(self.master)
self.password = self.input.param('password', 'password')
def tearDown(self):
super(SecretsMgmtUpgrade, self).tearDown()
def upgrade_all_nodes(self):
servers_in = self.servers[1:]
self._install(self.servers)
self.cluster.rebalance(self.servers, servers_in, [])
upgrade_threads = self._async_update(upgrade_version=self.upgrade_version, servers=self.servers)
for threads in upgrade_threads:
threads.join()
for server in self.servers:
self.secretmgmt_base_obj.setup_pass_node(server, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
for server in self.servers:
rest = RestConnection(server)
temp = rest.cluster_status()
self.log.info("Initial status of {0} cluster is {1}".format(server.ip, temp['nodes'][0]['status']))
while (temp['nodes'][0]['status'] == 'warmup'):
self.log.info("Waiting for cluster to become healthy")
self.sleep(5)
temp = rest.cluster_status()
self.log.info("current status of {0} is {1}".format(server.ip, temp['nodes'][0]['status']))
def upgrade_all_nodes_post_463(self):
servers_in = self.servers[1:]
self._install(self.servers)
self.cluster.rebalance(self.servers, servers_in, [])
for server in self.servers:
self.secretmgmt_base_obj.setup_pass_node(server, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
upgrade_threads = self._async_update(upgrade_version=self.upgrade_version, servers=self.servers)
for threads in upgrade_threads:
threads.join()
for server in self.servers:
rest = RestConnection(server)
temp = rest.cluster_status()
self.log.info("Initial status of {0} cluster is {1}".format(server.ip, temp['nodes'][0]['status']))
while (temp['nodes'][0]['status'] == 'warmup'):
self.log.info("Waiting for cluster to become healthy")
self.sleep(5)
temp = rest.cluster_status()
self.log.info("current status of {0} is {1}".format(server.ip, temp['nodes'][0]['status']))
def upgrade_half_nodes(self):
serv_upgrade = self.servers[2:4]
servers_in = self.servers[1:]
self._install(self.servers)
self.cluster.rebalance(self.servers, servers_in, [])
upgrade_threads = self._async_update(upgrade_version=self.upgrade_version, servers=serv_upgrade)
for threads in upgrade_threads:
threads.join()
for server in serv_upgrade:
rest = RestConnection(server)
temp = rest.cluster_status()
self.log.info("Initial status of {0} cluster is {1}".format(server.ip, temp['nodes'][0]['status']))
while (temp['nodes'][0]['status'] == 'warmup'):
self.log.info("Waiting for cluster to become healthy")
self.sleep(5)
temp = rest.cluster_status()
self.log.info("current status of {0} is {1}".format(server.ip, temp['nodes'][0]['status']))
avg_line_length: 58.330579 | max_line_length: 144 | alphanum_fraction: 0.662992
from membase.api.rest_client import RestConnection, RestHelper
import urllib.request, urllib.parse, urllib.error
import json
from remote.remote_util import RemoteMachineShellConnection, RemoteMachineHelper
from newupgradebasetest import NewUpgradeBaseTest
from security.auditmain import audit
import subprocess
import socket
import fileinput
import sys
from subprocess import Popen, PIPE
from .SecretsMasterBase import SecretsMasterBase
from basetestcase import BaseTestCase
import _thread
from testconstants import STANDARD_BUCKET_PORT
from membase.api.rest_client import RestConnection, Bucket, RestHelper
from membase.api.exception import BucketCreationException
from membase.helper.bucket_helper import BucketOperationHelper
from couchbase_helper.documentgenerator import BlobGenerator
class SecretsMgmtTests(BaseTestCase):
def setUp(self):
super(SecretsMgmtTests, self).setUp()
self.secretmgmt_base_obj = SecretsMasterBase(self.master)
self.password = self.input.param('password', 'p@ssword')
enable_audit = self.input.param('audit', None)
if enable_audit:
Audit = audit(host=self.master)
currentState = Audit.getAuditStatus()
self.log.info("Current status of audit on ip - {0} is {1}".format(self.master.ip, currentState))
if not currentState:
self.log.info("Enabling Audit ")
Audit.setAuditEnable('true')
self.sleep(30)
def tearDown(self):
self.log.info("---------------Into Teardown---------------")
for server in self.servers:
self.secretmgmt_base_obj = SecretsMasterBase(server)
self.secretmgmt_base_obj.set_password(server, "")
self.secretmgmt_base_obj.change_config_to_orginal(server, "")
log_dir = (self.secretmgmt_base_obj.get_log_dir(server))[1:-1]
babysitter_file = str(log_dir + "/babysitter.log")
shell = RemoteMachineShellConnection(server)
command = str(" mv " + babysitter_file + " " + log_dir + "/babysitterOLD.log")
shell.execute_command(command=command)
self.print_memcached_ip()
shell.disconnect()
super(SecretsMgmtTests, self).tearDown()
def suite_setUp(self):
self.log.info("---------------Suite Setup---------------")
def suite_tearDown(self):
self.log.info("---------------Suite Teardown---------------")
def print_memcached_ip(self):
shell = RemoteMachineShellConnection(self.master)
o, _ = shell.execute_command("ps aux | grep 'memcached' | awk '{print $2}'")
if o:
mem_pid = o[0]
shell.disconnect()
def test_evn_variable(self):
self.secretmgmt_base_obj.set_password(self.master, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_return = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_return, "Babysitter.log does not contain node initialization code")
def test_multiple_prompt_3times(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.incorrect_password(self.master, cmd=cmd)
self.assertTrue(temp_result, "Issue with passing incorrect password 3 times")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
print('Process Memcached is not running')
shell.execute_command(
"export CB_MASTER_PASSWORD=" + self.password + "; /opt/couchbase/etc/couchbase_init.d start")
def test_multiple_prompt_enter_correct_2retries(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.incorrect_password(self.master, cmd=cmd,
retries_number=2, input_correct_pass=True,
correct_pass=self.password)
self.assertTrue(temp_result, "Issue with incorrect password for 2 times and then correct password")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
def test_multiple_prompt_enter_correct_1retries(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.incorrect_password(self.master, cmd=cmd,
retries_number=1, input_correct_pass=True,
correct_pass=self.password)
self.assertTrue(temp_result, "Issue with incorrect password for 1 times and then correct password")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
def test_prompt_enter_correct_password(self):
try:
self.secretmgmt_base_obj.set_password(self.master, self.password)
shell = RemoteMachineShellConnection(self.master)
shell.execute_command("/opt/couchbase/etc/couchbase_init.d stop")
shell.disconnect()
self.secretmgmt_base_obj.start_server_prompt_diff_window(self.master)
self.sleep(10)
cmd = "/opt/couchbase/bin/couchbase-cli master-password -c localhost:8091 -u Administrator -p password --send-password"
temp_result = self.secretmgmt_base_obj.correct_password_on_prompt(self.master, self.password, cmd=cmd)
self.assertTrue(temp_result, "Issue with passing in correct password on prompt")
finally:
for server in self.servers:
shell = RemoteMachineShellConnection(server)
if (RemoteMachineHelper(shell).is_process_running('memcached') is None):
shell.set_environment_variable("CB_MASTER_PASSWORD", self.password)
def test_env_variable_change_pass(self):
new_pass = self.input.param("new_password", "new_p@ssw0rd")
self.secretmgmt_base_obj.set_password(self.master, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
self.secretmgmt_base_obj.set_password(self.master, new_pass)
self.secretmgmt_base_obj.restart_server_with_env(self.master, new_pass)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def generate_pass(self):
type = self.input.param("type", 'char')
pass_length = self.input.param('pass_length', 10)
num_pass = self.input.param('num_pass', 10)
if type in ('char', 'int', 'ext'):
pass_list = self.secretmgmt_base_obj.generate_password_simple(type, pass_length, num_pass)
else:
pass_list = self.secretmgmt_base_obj.generate_password_dual(type, pass_length, num_pass)
for item in pass_list:
item = item.decode('ISO-8859-1').strip()
self.secretmgmt_base_obj.set_password(self.master, item)
self.secretmgmt_base_obj.restart_server_with_env(self.master, item)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def generate_pass_file(self):
with open("./pytests/security/password_list.txt") as f:
for item in f:
item = item.decode('ISO-8859-1').strip()
self.secretmgmt_base_obj.set_password(self.master, item)
self.secretmgmt_base_obj.restart_server_with_env(self.master, item)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def test_cluster_rebalance_in_env_var(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
temp_result = self.cluster.rebalance(self.servers, servers_in, [])
self.assertTrue(temp_result, "Rebalance-in did not complete with password setup node")
def test_cluster_rebalance_out(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
self.cluster.rebalance(self.servers, servers_in, [])
servers_out = self.servers[2:]
temp_result = self.cluster.rebalance(self.servers, [], servers_out)
self.assertTrue(temp_result, 'Rebalance-out did not complete with password node setup')
def test_cluster_rebalance_in_prompt(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password, startup_type='prompt')
temp_result = self.cluster.rebalance(self.servers, servers_in, [])
self.assertTrue(temp_result, 'Rebalance-in did not complete with password node setup')
def test_cluster_rebalance_out_prompt(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password, startup_type='prompt')
self.cluster.rebalance(self.servers, servers_in, [])
servers_out = self.servers[2:]
temp_result = self.cluster.rebalance(self.servers, [], servers_out)
self.assertTrue(temp_result, 'Rebalance-out did not complete with password node setup')
def test_cluster_rebalance_in_diff_modes(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
extra_pass = self.input.param('extra_pass', 'p@ssw0rd1')
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, extra_pass, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
temp_result = self.cluster.rebalance(self.servers, servers_in, [])
self.assertTrue(temp_result, 'Rebalance-in did not complete with password node setup')
def test_cluster_rebalance_out_diff_modes(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
extra_pass = self.input.param('extra_pass', 'p@ssw0rd1')
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, extra_pass, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
self.cluster.rebalance(self.servers, servers_in, [])
servers_out = self.servers[2:]
temp_result = self.cluster.rebalance(self.servers, [], servers_out)
self.assertTrue(temp_result, 'Rebalance-out did not complete with password node setup')
def test_cluster_rebalance_in_env_var_services(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
        self.assertTrue(rebalance.result(), "Issue with Rebalance in with different services")
def test_cluster_rebalance_in_diff_type_var_services(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, self.password, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
self.assertTrue(rebalance.result(), "Rebalance in with different servers")
def test_cluster_rebalance_out_env_var_services(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
for servers in servers_in:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
print(("result of rebalance is {0}".format(rebalance.result())))
servers_out = self.servers[2:]
rebalance = self.cluster.async_rebalance(self.servers, [], servers_out)
print(("result of rebalance is {0}".format(rebalance.result())))
self.assertTrue(rebalance.result(), "Rebalance out with different service")
def test_cluster_rebalance_out_diff_type_var_services(self):
extra_pass = self.input.param("extra_pass", 'p@ssw0rd01')
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
self.find_nodes_in_list()
servers_in = self.servers[1:]
server_env_var = servers_in[0]
server_prompt = servers_in[1]
server_plain = servers_in[2]
self.secretmgmt_base_obj.setup_pass_node(server_env_var, self.password)
self.secretmgmt_base_obj.setup_pass_node(server_prompt, extra_pass, startup_type='prompt')
self.secretmgmt_base_obj.setup_pass_node(server_plain, startup_type='simple')
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list, [],
services=self.services_in)
rebalance.result()
servers_out = self.servers[1:]
rebalance = self.cluster.async_rebalance(self.servers, [], servers_out)
        print(rebalance.result())
self.assertTrue(rebalance.result(), "Rebalance in and out with different servers")
def test_failover_add_back(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
try:
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
self.sleep(30)
rest = RestConnection(self.master)
self.graceful = self.input.param('graceful', False)
recoveryType = self.input.param("recoveryType", "full")
self.find_nodes_in_list()
self.generate_map_nodes_out_dist()
servr_out = self.nodes_out_list
nodes_all = rest.node_statuses()
failover_task = self.cluster.async_failover([self.master],
failover_nodes=servr_out, graceful=self.graceful)
failover_task.result()
nodes_all = rest.node_statuses()
nodes = []
if servr_out[0].ip == "127.0.0.1":
for failover_node in servr_out:
nodes.extend([node for node in nodes_all
if (str(node.port) == failover_node.port)])
else:
for failover_node in servr_out:
nodes.extend([node for node in nodes_all
if node.ip == failover_node.ip])
for node in nodes:
self.log.info(node)
rest.add_back_node(node.id)
rest.set_recovery_type(otpNode=node.id, recoveryType=recoveryType)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [], [])
self.assertTrue(rebalance.result(), "Failover with different servers")
except Exception as ex:
raise
def test_failover(self):
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
try:
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, 'temp')
self.sleep(30)
self.find_nodes_in_list()
self.generate_map_nodes_out_dist()
servr_out = self.nodes_out_list
print(servr_out)
self.graceful = self.input.param('graceful', False)
failover_task = self.cluster.async_failover([self.master],
failover_nodes=servr_out, graceful=self.graceful)
failover_task.result()
self.log.info("Rebalance first time")
rebalance = self.cluster.rebalance(self.servers[:self.nodes_init], [], [])
self.log.info("Rebalance Second time")
rebalance = self.cluster.rebalance(self.servers[:self.nodes_init], [], [])
except Exception as ex:
raise
def kill_process(self):
self.targetProcess = self.input.param("targetProcess", 'memcached')
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
for servers in self.servers:
remote = RemoteMachineShellConnection(servers)
if self.targetProcess == "memcached":
remote.kill_memcached()
else:
remote.terminate_process(process_name=self.targetProcess)
for servers in self.servers:
self.secretmgmt_base_obj.restart_server_with_env(servers, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Issue with server restart after killing of process")
def restart_server(self):
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
for servers in self.servers:
self.secretmgmt_base_obj.restart_server_with_env(servers, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log",
"Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Issue with server restart of server")
def test_bucket_create_password(self, bucket_name='secretsbucket', num_replicas=1, bucket_size=100):
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
bucket_type = self.input.param("bucket_type", 'couchbase')
tasks = []
if bucket_type == 'couchbase':
rest = RestConnection(self.master)
rest.create_bucket(bucket_name, ramQuotaMB=100)
elif bucket_type == 'standard':
self.cluster.create_standard_bucket(self.master, bucket_name, STANDARD_BUCKET_PORT + 1,
bucket_size)
elif bucket_type == "memcached":
tasks.append(
self.cluster.async_create_memcached_bucket(self.master, bucket_name, STANDARD_BUCKET_PORT + 1,
bucket_size))
for task in tasks:
self.assertTrue(task.result(), "Issue with bucket creation")
else:
self.log.error('Bucket type not specified')
return
        self.assertTrue(BucketOperationHelper.wait_for_bucket_creation(bucket_name, RestConnection(self.master)),
                        msg='failed to start up bucket with name "{0}"'.format(bucket_name))
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
install_path = self.secretmgmt_base_obj._get_install_path(self.master)
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, '/config/config.dat',
self.password)
self.assertTrue(temp_result, "Password found in config.dat")
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, 'isasl.pw', self.password)
self.assertTrue(temp_result, "Password found in isasl.pw")
def test_bucket_edit_password(self, bucket_name='secretsbucket', num_replicas=1, bucket_size=100):
updated_pass = "p@ssw0rd_updated"
rest = RestConnection(self.master)
for servers in self.servers:
self.secretmgmt_base_obj.setup_pass_node(servers, self.password)
bucket_type = self.input.param("bucket_type", 'standard')
tasks = []
if bucket_type == 'sasl':
self.cluster.create_sasl_bucket(self.master, bucket_name, self.password, num_replicas, bucket_size)
self.sleep(10)
rest.change_bucket_props(bucket_name, saslPassword=updated_pass)
else:
self.log.error('Bucket type not specified')
return
        self.assertTrue(BucketOperationHelper.wait_for_bucket_creation(bucket_name, RestConnection(self.master)),
                        msg='failed to start up bucket with name "{0}"'.format(bucket_name))
gen_load = BlobGenerator('buckettest', 'buckettest-', self.value_size, start=0, end=self.num_items)
self._load_all_buckets(self.master, gen_load, "create", 0)
install_path = self.secretmgmt_base_obj._get_install_path(self.master)
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, '/config/config.dat',
updated_pass)
self.assertTrue(temp_result, "Password found in config.dat")
temp_result = self.secretmgmt_base_obj.check_config_files(self.master, install_path, 'isasl.pw', updated_pass)
self.assertTrue(temp_result, "Password found in isasl.pw")
def test_cli_setting(self):
temp_result = self.secretmgmt_base_obj.execute_cli(self.master, new_password=self.password)
self.assertTrue(temp_result, "Output of the command is not correct")
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted. Waiting for shutdown request")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
def test_cbcollect(self):
rest = RestConnection(self.master)
bucket_name = 'cbcollectbucket'
num_replicas = 1
bucket_size = 100
rest.create_bucket(bucket_name, ramQuotaMB=100)
result = self.secretmgmt_base_obj.generate_cb_collect(self.master, "cbcollect.zip", self.password)
self.assertTrue(result, "Bucket password appears in the cbcollect info")
def rotate_data_key(self):
temp_result = self.secretmgmt_base_obj.read_ns_config(self.master)
self.assertTrue(temp_result, "Config.dat is not refereshed after data key")
def cli_rotate_key(self):
temp_result = self.secretmgmt_base_obj.execute_cli_rotate_key(self.master)
self.assertTrue(temp_result, "Issue with rotate key on cli side")
    def audit_change_password(self):
        # Validates the audit event (eventID 8233) generated when the master password is set.
        self.secretmgmt_base_obj.set_password(self.master, self.password)
        Audit = audit(eventID='8233', host=self.master)
        expectedResults = {"real_userid:source": "ns_server", "real_userid:user": "Administrator",
                           "ip": self.ipAddress, "port": 123456}
        fieldVerification, valueVerification = Audit.validateEvents(expectedResults)
        self.assertTrue(fieldVerification, "One of the fields is not matching")
        self.assertTrue(valueVerification, "Values for one of the fields is not matching")
    def audit_rotate_data_key(self):
        # Validates the audit event (eventID 8234) generated when the data key is rotated via the CLI.
        self.secretmgmt_base_obj.execute_cli_rotate_key(self.master)
        Audit = audit(eventID='8234', host=self.master)
        expectedResults = {"real_userid:source": "ns_server", "real_userid:user": "Administrator",
                           "ip": self.ipAddress, "port": 123456}
        fieldVerification, valueVerification = Audit.validateEvents(expectedResults)
        self.assertTrue(fieldVerification, "One of the fields is not matching")
        self.assertTrue(valueVerification, "Values for one of the fields is not matching")
class SecretsMgmtUpgrade(NewUpgradeBaseTest):
def setUp(self):
super(SecretsMgmtUpgrade, self).setUp()
self.initial_version = self.input.param("initial_version", '4.1.0-5005')
self.upgrade_version = self.input.param("upgrade_version", "4.6.0-3467")
self.secretmgmt_base_obj = SecretsMasterBase(self.master)
self.password = self.input.param('password', 'password')
def tearDown(self):
super(SecretsMgmtUpgrade, self).tearDown()
def upgrade_all_nodes(self):
servers_in = self.servers[1:]
self._install(self.servers)
self.cluster.rebalance(self.servers, servers_in, [])
upgrade_threads = self._async_update(upgrade_version=self.upgrade_version, servers=self.servers)
for threads in upgrade_threads:
threads.join()
for server in self.servers:
self.secretmgmt_base_obj.setup_pass_node(server, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
for server in self.servers:
rest = RestConnection(server)
temp = rest.cluster_status()
self.log.info("Initial status of {0} cluster is {1}".format(server.ip, temp['nodes'][0]['status']))
while (temp['nodes'][0]['status'] == 'warmup'):
self.log.info("Waiting for cluster to become healthy")
self.sleep(5)
temp = rest.cluster_status()
self.log.info("current status of {0} is {1}".format(server.ip, temp['nodes'][0]['status']))
def upgrade_all_nodes_post_463(self):
servers_in = self.servers[1:]
self._install(self.servers)
self.cluster.rebalance(self.servers, servers_in, [])
for server in self.servers:
self.secretmgmt_base_obj.setup_pass_node(server, self.password)
self.secretmgmt_base_obj.restart_server_with_env(self.master, self.password)
temp_result = self.secretmgmt_base_obj.check_log_files(self.master, "/babysitter.log", "Booted")
self.assertTrue(temp_result, "Babysitter.log does not contain node initialization code")
upgrade_threads = self._async_update(upgrade_version=self.upgrade_version, servers=self.servers)
for threads in upgrade_threads:
threads.join()
for server in self.servers:
rest = RestConnection(server)
temp = rest.cluster_status()
self.log.info("Initial status of {0} cluster is {1}".format(server.ip, temp['nodes'][0]['status']))
while (temp['nodes'][0]['status'] == 'warmup'):
self.log.info("Waiting for cluster to become healthy")
self.sleep(5)
temp = rest.cluster_status()
self.log.info("current status of {0} is {1}".format(server.ip, temp['nodes'][0]['status']))
def upgrade_half_nodes(self):
serv_upgrade = self.servers[2:4]
servers_in = self.servers[1:]
self._install(self.servers)
self.cluster.rebalance(self.servers, servers_in, [])
upgrade_threads = self._async_update(upgrade_version=self.upgrade_version, servers=serv_upgrade)
for threads in upgrade_threads:
threads.join()
for server in serv_upgrade:
rest = RestConnection(server)
temp = rest.cluster_status()
self.log.info("Initial status of {0} cluster is {1}".format(server.ip, temp['nodes'][0]['status']))
while (temp['nodes'][0]['status'] == 'warmup'):
self.log.info("Waiting for cluster to become healthy")
self.sleep(5)
temp = rest.cluster_status()
self.log.info("current status of {0} is {1}".format(server.ip, temp['nodes'][0]['status']))
| true
| true
|
f701ece674622dd3e1e9f5d4513a6d4ada63ad22
| 246
|
py
|
Python
|
test_app/urls.py
|
arielcalzadadeveloper/django-test-app
|
c304d3be20f0280405229635fc01bce6f5732383
|
[
"MIT"
] | null | null | null |
test_app/urls.py
|
arielcalzadadeveloper/django-test-app
|
c304d3be20f0280405229635fc01bce6f5732383
|
[
"MIT"
] | null | null | null |
test_app/urls.py
|
arielcalzadadeveloper/django-test-app
|
c304d3be20f0280405229635fc01bce6f5732383
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
from test_app.views.home import Home
from test_app.views.ajax import Ajax
app_name = "test_app"
urlpatterns = [
url(regex=r"^$", view=Home, name="home"),
url(regex=r"^ajax$", view=Ajax, name="ajax"),
]
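# Illustrative note, not part of the original file: with app_name = "test_app" these
# routes are addressed through the namespace when reversing, e.g.
#   from django.urls import reverse
#   reverse("test_app:home")  # "/" relative to wherever this urlconf is included
#   reverse("test_app:ajax")  # "ajax" under the same include() prefix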
| 20.5
| 49
| 0.691057
|
from django.conf.urls import url
from test_app.views.home import Home
from test_app.views.ajax import Ajax
app_name = "test_app"
urlpatterns = [
url(regex=r"^$", view=Home, name="home"),
url(regex=r"^ajax$", view=Ajax, name="ajax"),
]
| true
| true
|
f701ed7fcc38600056f8b02f2241a501d702625b
| 598
|
py
|
Python
|
watchmen/common/security/algorithm/algorithm_date.py
|
Insurance-Metrics-Measure-Advisory/watchman-data-connector
|
8ecab0c5b28174f1611e51deba8d94a42f53d51d
|
[
"MIT"
] | 125
|
2021-03-13T07:39:46.000Z
|
2022-02-28T03:14:17.000Z
|
pipeline/common/security/algorithm/algorithm_date.py
|
Indexical-Metrics-Measure-Advisory/watchmen-pipeline-engine
|
fb39695d2f7a1c5212a3871b04c4a8a9f03ee16e
|
[
"MIT"
] | null | null | null |
pipeline/common/security/algorithm/algorithm_date.py
|
Indexical-Metrics-Measure-Advisory/watchmen-pipeline-engine
|
fb39695d2f7a1c5212a3871b04c4a8a9f03ee16e
|
[
"MIT"
] | 17
|
2021-03-13T07:31:58.000Z
|
2021-05-20T09:38:02.000Z
|
import arrow
def __mask_day(date_str):
return date_str[:8] + "**"
def __mask_month(date_str):
return date_str[:5] + "**" + date_str[7:]
def encrypt_day(value_, params=None):
date = arrow.get(value_)
date_str = date.format('YYYY-MM-DD')
return __mask_day(date_str)
def encrypt_month(value_, params=None):
date = arrow.get(value_)
date_str = date.format('YYYY-MM-DD')
return __mask_month(date_str)
def encrypt_month_day(value_, params=None):
date = arrow.get(value_)
date_str = date.format('YYYY-MM-DD')
return __mask_day(__mask_month(date_str))
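# Minimal usage sketch (illustrative, not part of the original module), showing the
# masking behaviour implied by the helpers above:
#   encrypt_day("2021-03-13")        # -> "2021-03-**"
#   encrypt_month("2021-03-13")      # -> "2021-**-13"
#   encrypt_month_day("2021-03-13")  # -> "2021-**-**"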
| 21.357143
| 45
| 0.685619
|
import arrow
def __mask_day(date_str):
return date_str[:8] + "**"
def __mask_month(date_str):
return date_str[:5] + "**" + date_str[7:]
def encrypt_day(value_, params=None):
date = arrow.get(value_)
date_str = date.format('YYYY-MM-DD')
return __mask_day(date_str)
def encrypt_month(value_, params=None):
date = arrow.get(value_)
date_str = date.format('YYYY-MM-DD')
return __mask_month(date_str)
def encrypt_month_day(value_, params=None):
date = arrow.get(value_)
date_str = date.format('YYYY-MM-DD')
return __mask_day(__mask_month(date_str))
| true
| true
|
f701ee0891de92a9e4c2d1f4a5f6b50e45dc2dab
| 1,048
|
py
|
Python
|
bot.py
|
ijoosong/pycascadediscordbot
|
2a04bc021165c5310d65428332862a4852009d92
|
[
"MIT"
] | 1
|
2022-02-05T18:37:59.000Z
|
2022-02-05T18:37:59.000Z
|
bot.py
|
ijoosong/pycascadediscordbot
|
2a04bc021165c5310d65428332862a4852009d92
|
[
"MIT"
] | null | null | null |
bot.py
|
ijoosong/pycascadediscordbot
|
2a04bc021165c5310d65428332862a4852009d92
|
[
"MIT"
] | null | null | null |
import discord
import config
import requests
client = discord.Client()
@client.event
async def on_ready():
for guild_id in client.guilds:
if guild_id.name == config.DISCORD_GUILD_NAME:
break
print(
f'{client.user} is connected to {guild_id.name}(id: {guild_id.id})'
)
@client.event
async def on_message(message):
if message.author == client.user:
return
wordbank = ['cat', 'puppy', 'bunny', 'giraffe', 'poop']
if message.content == 'pycascade':
response = 'Hello everyone! Welcome and have a great time!'
await message.channel.send(response)
elif message.content in wordbank:
await message.channel.send("please don't use bad words")
elif 'pokemon' in message.content:
# input: pokemon pikachu
pokemon = message.content.split()[1]
req = requests.get(f"https://getpokemonweakness.azurewebsites.net/api/getweakness?pokemon={pokemon}")
await message.channel.send(req.content)
client.run(config.DISCORD_BOT_TOKEN)
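# The config module imported above is not part of this file; a minimal sketch of the
# attributes it must expose (placeholder values, not taken from the original repo):
#   DISCORD_GUILD_NAME = "my-guild"
#   DISCORD_BOT_TOKEN = "<discord bot token>"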
| 32.75
| 109
| 0.666031
|
import discord
import config
import requests
client = discord.Client()
@client.event
async def on_ready():
for guild_id in client.guilds:
if guild_id.name == config.DISCORD_GUILD_NAME:
break
print(
f'{client.user} is connected to {guild_id.name}(id: {guild_id.id})'
)
@client.event
async def on_message(message):
if message.author == client.user:
return
wordbank = ['cat', 'puppy', 'bunny', 'giraffe', 'poop']
if message.content == 'pycascade':
response = 'Hello everyone! Welcome and have a great time!'
await message.channel.send(response)
elif message.content in wordbank:
await message.channel.send("please don't use bad words")
elif 'pokemon' in message.content:
# input: pokemon pikachu
pokemon = message.content.split()[1]
req = requests.get(f"https://getpokemonweakness.azurewebsites.net/api/getweakness?pokemon={pokemon}")
await message.channel.send(req.content)
client.run(config.DISCORD_BOT_TOKEN)
| true
| true
|
f701f070e1babd894c04e750b2e0413d9f6581a5
| 779
|
py
|
Python
|
deskbookingsystem42/users/migrations/0002_auto_20210907_1827.py
|
rollinger/deskbooking42
|
bb032148fce9f36ac3ac8e57a59783a394d5c17c
|
[
"MIT"
] | 1
|
2021-09-03T19:01:16.000Z
|
2021-09-03T19:01:16.000Z
|
deskbookingsystem42/users/migrations/0002_auto_20210907_1827.py
|
rollinger/deskbooking42
|
bb032148fce9f36ac3ac8e57a59783a394d5c17c
|
[
"MIT"
] | 1
|
2022-03-30T18:21:27.000Z
|
2022-03-30T18:21:27.000Z
|
deskbookingsystem42/users/migrations/0002_auto_20210907_1827.py
|
rollinger/deskbooking42
|
bb032148fce9f36ac3ac8e57a59783a394d5c17c
|
[
"MIT"
] | 1
|
2021-09-05T13:45:05.000Z
|
2021-09-05T13:45:05.000Z
|
# Generated by Django 3.1.13 on 2021-09-07 16:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, default=False, max_length=200)),
],
),
migrations.AddField(
model_name='user',
name='role',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='users.role'),
),
]
| 28.851852
| 122
| 0.591784
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, default=False, max_length=200)),
],
),
migrations.AddField(
model_name='user',
name='role',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='users.role'),
),
]
| true
| true
|
f701f0b73b4b02efa8588306921412084c0c3993
| 4,217
|
py
|
Python
|
kittycad/api/file/create_file_conversion.py
|
KittyCAD/kittycad.py
|
7f7460d366dbd55fce50e5faa4a032b62e4baae4
|
[
"MIT"
] | 1
|
2022-02-06T05:07:25.000Z
|
2022-02-06T05:07:25.000Z
|
kittycad/api/file/create_file_conversion.py
|
KittyCAD/kittycad.py
|
7f7460d366dbd55fce50e5faa4a032b62e4baae4
|
[
"MIT"
] | 7
|
2022-02-04T11:29:25.000Z
|
2022-03-07T01:37:26.000Z
|
kittycad/api/file/create_file_conversion.py
|
KittyCAD/kittycad.py
|
7f7460d366dbd55fce50e5faa4a032b62e4baae4
|
[
"MIT"
] | null | null | null |
from typing import Any, Dict, Optional, Union, cast
import httpx
from ...client import Client
from ...models.file_conversion_with_output import FileConversionWithOutput
from ...models.error import Error
from ...models.file_conversion_output_format import FileConversionOutputFormat
from ...models.file_conversion_source_format import FileConversionSourceFormat
from ...types import Response
def _get_kwargs(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Dict[str, Any]:
url = "{}/file/conversion/{src_format}/{output_format}".format(client.base_url, output_format=output_format, src_format=src_format)
headers: Dict[str, Any] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
return {
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
"content": body,
}
def _parse_response(*, response: httpx.Response) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
if response.status_code == 201:
response_201 = FileConversionWithOutput.from_dict(response.json())
return response_201
if response.status_code == 400:
response_4XX = Error.from_dict(response.json())
return response_4XX
if response.status_code == 500:
response_5XX = Error.from_dict(response.json())
return response_5XX
return None
def _build_response(*, response: httpx.Response) -> Response[Union[Any, FileConversionWithOutput, Error]]:
return Response(
status_code=response.status_code,
content=response.content,
headers=response.headers,
parsed=_parse_response(response=response),
)
def sync_detailed(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
response = httpx.post(
verify=client.verify_ssl,
**kwargs,
)
return _build_response(response=response)
def sync(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
""" Convert a CAD file from one format to another. If the file being converted is larger than 30MB, it will be performed asynchronously.
If the conversion is performed synchronously, the contents of the converted file (`output`) will be returned as a base64 encoded string.
If the conversion is performed asynchronously, the `id` of the conversion will be returned. You can use the `id` returned from the request to get status information about the async conversion from the `/file/conversions/{id}` endpoint. """
return sync_detailed(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
).parsed
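# Minimal usage sketch for the synchronous helper above (illustrative only; the Client
# constructor arguments and the enum member names are assumptions, not taken from this
# module):
#   client = Client(token="<api token>")
#   with open("part.stl", "rb") as f:
#       converted = sync(
#           output_format=FileConversionOutputFormat.OBJ,
#           src_format=FileConversionSourceFormat.STL,
#           body=f.read(),
#           client=client,
#       )
#   # Per the docstring above, small files are converted synchronously and the result
#   # carries the converted file in its `output` field as a base64 encoded string;
#   # larger files return only an `id`, which can be polled via /file/conversions/{id}.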
async def asyncio_detailed(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
response = await _client.post(**kwargs)
return _build_response(response=response)
async def asyncio(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
""" Convert a CAD file from one format to another. If the file being converted is larger than 30MB, it will be performed asynchronously.
If the conversion is performed synchronously, the contents of the converted file (`output`) will be returned as a base64 encoded string.
If the conversion is performed asynchronously, the `id` of the conversion will be returned. You can use the `id` returned from the request to get status information about the async conversion from the `/file/conversions/{id}` endpoint. """
return (
await asyncio_detailed(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
).parsed
| 31.237037
| 239
| 0.77235
|
from typing import Any, Dict, Optional, Union, cast
import httpx
from ...client import Client
from ...models.file_conversion_with_output import FileConversionWithOutput
from ...models.error import Error
from ...models.file_conversion_output_format import FileConversionOutputFormat
from ...models.file_conversion_source_format import FileConversionSourceFormat
from ...types import Response
def _get_kwargs(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Dict[str, Any]:
url = "{}/file/conversion/{src_format}/{output_format}".format(client.base_url, output_format=output_format, src_format=src_format)
headers: Dict[str, Any] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
return {
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
"content": body,
}
def _parse_response(*, response: httpx.Response) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
if response.status_code == 201:
response_201 = FileConversionWithOutput.from_dict(response.json())
return response_201
if response.status_code == 400:
response_4XX = Error.from_dict(response.json())
return response_4XX
if response.status_code == 500:
response_5XX = Error.from_dict(response.json())
return response_5XX
return None
def _build_response(*, response: httpx.Response) -> Response[Union[Any, FileConversionWithOutput, Error]]:
return Response(
status_code=response.status_code,
content=response.content,
headers=response.headers,
parsed=_parse_response(response=response),
)
def sync_detailed(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
response = httpx.post(
verify=client.verify_ssl,
**kwargs,
)
return _build_response(response=response)
def sync(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
return sync_detailed(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
).parsed
async def asyncio_detailed(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Response[Union[Any, FileConversionWithOutput, Error]]:
kwargs = _get_kwargs(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
response = await _client.post(**kwargs)
return _build_response(response=response)
async def asyncio(
output_format: FileConversionOutputFormat,
src_format: FileConversionSourceFormat,
body: bytes,
*,
client: Client,
) -> Optional[Union[Any, FileConversionWithOutput, Error]]:
return (
await asyncio_detailed(
output_format=output_format,
src_format=src_format,
body=body,
client=client,
)
).parsed
| true
| true
|
f701f15d445c5d00ebc440579ff2c5e6d88fda23
| 428
|
py
|
Python
|
students/k3343/laboratory_works/Zhabrovets_Ekaterina/laboratory_work_1/workshops/migrations/0007_workshop_name.py
|
TonikX/ITMO_ICT_-WebProgramming_2020
|
ba566c1b3ab04585665c69860b713741906935a0
|
[
"MIT"
] | 10
|
2020-03-20T09:06:12.000Z
|
2021-07-27T13:06:02.000Z
|
students/k3343/laboratory_works/Zhabrovets_Ekaterina/laboratory_work_1/workshops/migrations/0007_workshop_name.py
|
TonikX/ITMO_ICT_-WebProgramming_2020
|
ba566c1b3ab04585665c69860b713741906935a0
|
[
"MIT"
] | 134
|
2020-03-23T09:47:48.000Z
|
2022-03-12T01:05:19.000Z
|
students/k3343/laboratory_works/Zhabrovets_Ekaterina/laboratory_work_1/workshops/migrations/0007_workshop_name.py
|
TonikX/ITMO_ICT_-WebProgramming_2020
|
ba566c1b3ab04585665c69860b713741906935a0
|
[
"MIT"
] | 71
|
2020-03-20T12:45:56.000Z
|
2021-10-31T19:22:25.000Z
|
# Generated by Django 3.0.5 on 2020-04-14 19:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0006_auto_20200414_2235'),
]
operations = [
migrations.AddField(
model_name='workshop',
name='name',
field=models.CharField(default='kkkk', max_length=100, verbose_name='Название'),
),
]
| 22.526316
| 92
| 0.614486
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0006_auto_20200414_2235'),
]
operations = [
migrations.AddField(
model_name='workshop',
name='name',
field=models.CharField(default='kkkk', max_length=100, verbose_name='Название'),
),
]
| true
| true
|
f701f1abff83536a090c2e94d28527084e8c64b6
| 64,646
|
py
|
Python
|
tests/admin_widgets/tests.py
|
ronpad/django
|
312174f9730cd594980de58e6b9db32cbdebf4ba
|
[
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | 2
|
2020-09-11T10:43:58.000Z
|
2021-03-04T15:31:40.000Z
|
tests/admin_widgets/tests.py
|
fabioruicci/django
|
6efc35b4fe3009666e56a60af0675d7d532bf4ff
|
[
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | null | null | null |
tests/admin_widgets/tests.py
|
fabioruicci/django
|
6efc35b4fe3009666e56a60af0675d7d532bf4ff
|
[
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | 1
|
2021-07-19T08:16:56.000Z
|
2021-07-19T08:16:56.000Z
|
import gettext
import os
import re
from datetime import datetime, timedelta
from importlib import import_module
import pytz
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.auth.models import User
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.models import (
CharField, DateField, DateTimeField, ManyToManyField, UUIDField,
)
from django.test import SimpleTestCase, TestCase, override_settings
from django.urls import reverse
from django.utils import translation
from .models import (
Advisor, Album, Band, Bee, Car, Company, Event, Honeycomb, Individual,
Inventory, Member, MyFileField, Profile, School, Student,
UnsafeLimitChoicesTo, VideoStream,
)
from .widgetadmin import site as widget_admin_site
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email=None)
cls.u2 = User.objects.create_user(username='testser', password='secret')
Car.objects.create(owner=cls.superuser, make='Volkswagen', model='Passat')
Car.objects.create(owner=cls.u2, make='BMW', model='M3')
class AdminFormfieldForDBFieldTests(SimpleTestCase):
"""
Tests for correct behavior of ModelAdmin.formfield_for_dbfield
"""
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):
"""
Helper to call formfield_for_dbfield for a given model and field name
and verify that the returned formfield is appropriate.
"""
# Override any settings on the model admin
class MyModelAdmin(admin.ModelAdmin):
pass
for k in admin_overrides:
setattr(MyModelAdmin, k, admin_overrides[k])
# Construct the admin, and ask it for a formfield
ma = MyModelAdmin(model, admin.site)
ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)
# "unwrap" the widget wrapper, if needed
if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):
widget = ff.widget.widget
else:
widget = ff.widget
self.assertIsInstance(widget, widgetclass)
# Return the formfield so that other tests can continue
return ff
def test_DateField(self):
self.assertFormfield(Event, 'start_date', widgets.AdminDateWidget)
def test_DateTimeField(self):
self.assertFormfield(Member, 'birthdate', widgets.AdminSplitDateTime)
def test_TimeField(self):
self.assertFormfield(Event, 'start_time', widgets.AdminTimeWidget)
def test_TextField(self):
self.assertFormfield(Event, 'description', widgets.AdminTextareaWidget)
def test_URLField(self):
self.assertFormfield(Event, 'link', widgets.AdminURLFieldWidget)
def test_IntegerField(self):
self.assertFormfield(Event, 'min_age', widgets.AdminIntegerFieldWidget)
def test_CharField(self):
self.assertFormfield(Member, 'name', widgets.AdminTextInputWidget)
def test_EmailField(self):
self.assertFormfield(Member, 'email', widgets.AdminEmailInputWidget)
def test_FileField(self):
self.assertFormfield(Album, 'cover_art', widgets.AdminFileWidget)
def test_ForeignKey(self):
self.assertFormfield(Event, 'main_band', forms.Select)
def test_raw_id_ForeignKey(self):
self.assertFormfield(Event, 'main_band', widgets.ForeignKeyRawIdWidget,
raw_id_fields=['main_band'])
def test_radio_fields_ForeignKey(self):
ff = self.assertFormfield(Event, 'main_band', widgets.AdminRadioSelect,
radio_fields={'main_band': admin.VERTICAL})
self.assertIsNone(ff.empty_label)
def test_many_to_many(self):
self.assertFormfield(Band, 'members', forms.SelectMultiple)
def test_raw_id_many_to_many(self):
self.assertFormfield(Band, 'members', widgets.ManyToManyRawIdWidget,
raw_id_fields=['members'])
def test_filtered_many_to_many(self):
self.assertFormfield(Band, 'members', widgets.FilteredSelectMultiple,
filter_vertical=['members'])
def test_formfield_overrides(self):
self.assertFormfield(Event, 'start_date', forms.TextInput,
formfield_overrides={DateField: {'widget': forms.TextInput}})
def test_formfield_overrides_widget_instances(self):
"""
Widget instances in formfield_overrides are not shared between
different fields. (#19423)
"""
class BandAdmin(admin.ModelAdmin):
formfield_overrides = {
CharField: {'widget': forms.TextInput(attrs={'size': '10'})}
}
ma = BandAdmin(Band, admin.site)
f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None)
f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None)
self.assertNotEqual(f1.widget, f2.widget)
self.assertEqual(f1.widget.attrs['maxlength'], '100')
self.assertEqual(f2.widget.attrs['maxlength'], '20')
self.assertEqual(f2.widget.attrs['size'], '10')
def test_formfield_overrides_m2m_filter_widget(self):
"""
The autocomplete_fields, raw_id_fields, filter_vertical, and
filter_horizontal widgets for ManyToManyFields may be overridden by
specifying a widget in formfield_overrides.
"""
class BandAdmin(admin.ModelAdmin):
filter_vertical = ['members']
formfield_overrides = {
ManyToManyField: {'widget': forms.CheckboxSelectMultiple},
}
ma = BandAdmin(Band, admin.site)
field = ma.formfield_for_dbfield(Band._meta.get_field('members'), request=None)
self.assertIsInstance(field.widget.widget, forms.CheckboxSelectMultiple)
def test_formfield_overrides_for_datetime_field(self):
"""
        Overriding the widget for DateTimeField doesn't override the default
form_class for that field (#26449).
"""
class MemberAdmin(admin.ModelAdmin):
formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}}
ma = MemberAdmin(Member, admin.site)
f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None)
self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime)
self.assertIsInstance(f1, forms.SplitDateTimeField)
def test_formfield_overrides_for_custom_field(self):
"""
formfield_overrides works for a custom field class.
"""
class AlbumAdmin(admin.ModelAdmin):
formfield_overrides = {MyFileField: {'widget': forms.TextInput()}}
ma = AlbumAdmin(Member, admin.site)
f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None)
self.assertIsInstance(f1.widget, forms.TextInput)
def test_field_with_choices(self):
self.assertFormfield(Member, 'gender', forms.Select)
def test_choices_with_radio_fields(self):
self.assertFormfield(Member, 'gender', widgets.AdminRadioSelect,
radio_fields={'gender': admin.VERTICAL})
def test_inheritance(self):
self.assertFormfield(Album, 'backside_art', widgets.AdminFileWidget)
def test_m2m_widgets(self):
"""m2m fields help text as it applies to admin app (#9321)."""
class AdvisorAdmin(admin.ModelAdmin):
filter_vertical = ['companies']
self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple,
filter_vertical=['companies'])
ma = AdvisorAdmin(Advisor, admin.site)
f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None)
self.assertEqual(
f.help_text,
'Hold down “Control”, or “Command” on a Mac, to select more than one.'
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
def test_filter_choices_by_request_user(self):
"""
Ensure the user can only see their own cars in the foreign key dropdown.
"""
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
self.assertNotContains(response, "BMW M3")
self.assertContains(response, "Volkswagen Passat")
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_changelist_ForeignKey(self):
response = self.client.get(reverse('admin:admin_widgets_car_changelist'))
self.assertContains(response, '/auth/user/add/')
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_nonexistent_target_id(self):
band = Band.objects.create(name='Bogey Blues')
pk = band.pk
band.delete()
post_data = {
"main_band": str(pk),
}
# Try posting with a nonexistent pk in a raw id field: this
# should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data)
self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.')
def test_invalid_target_id(self):
for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234):
# This should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), {"main_band": test_str})
self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.')
def test_url_params_from_lookup_dict_any_iterable(self):
lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})
lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})
self.assertEqual(lookup1, {'color__in': 'red,blue'})
self.assertEqual(lookup1, lookup2)
def test_url_params_from_lookup_dict_callable(self):
def my_callable():
return 'works'
lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable})
lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()})
self.assertEqual(lookup1, lookup2)
def test_label_and_url_for_value_invalid_uuid(self):
field = Bee._meta.get_field('honeycomb')
self.assertIsInstance(field.target_field, UUIDField)
widget = widgets.ForeignKeyRawIdWidget(field.remote_field, admin.site)
self.assertEqual(widget.label_and_url_for_value('invalid-uuid'), ('', ''))
class FilteredSelectMultipleWidgetTest(SimpleTestCase):
def test_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', False)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple name="test" class="selectfilter" '
'data-field-name="test\\" data-is-stacked="0">\n</select>'
)
def test_stacked_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', True)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple name="test" class="selectfilterstacked" '
'data-field-name="test\\" data-is-stacked="1">\n</select>'
)
class AdminDateWidgetTest(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminDateWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="vDateField" name="test" size="10">',
)
# pass attrs to widget
w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="myDateField" name="test" size="20">',
)
class AdminTimeWidgetTest(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminTimeWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="vTimeField" name="test" size="8">',
)
# pass attrs to widget
w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="myTimeField" name="test" size="20">',
)
class AdminSplitDateTimeWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminSplitDateTime()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Date: <input value="2007-12-01" type="text" class="vDateField" '
'name="test_0" size="10"><br>'
'Time: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8"></p>'
)
def test_localization(self):
w = widgets.AdminSplitDateTime()
with self.settings(USE_L10N=True), translation.override('de-at'):
w.is_localized = True
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Datum: <input value="01.12.2007" type="text" '
'class="vDateField" name="test_0"size="10"><br>'
'Zeit: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8"></p>'
)
class AdminURLWidgetTest(SimpleTestCase):
def test_get_context_validates_url(self):
w = widgets.AdminURLFieldWidget()
for invalid in ['', '/not/a/full/url/', 'javascript:alert("Danger XSS!")']:
with self.subTest(url=invalid):
self.assertFalse(w.get_context('name', invalid, {})['url_valid'])
self.assertTrue(w.get_context('name', 'http://example.com', {})['url_valid'])
def test_render(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', ''),
'<input class="vURLField" name="test" type="url">'
)
self.assertHTMLEqual(
w.render('test', 'http://example.com'),
'<p class="url">Currently:<a href="http://example.com">'
'http://example.com</a><br>'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example.com"></p>'
)
def test_render_idn(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', 'http://example-äüö.com'),
'<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">'
'http://example-äüö.com</a><br>'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example-äüö.com"></p>'
)
def test_render_quoting(self):
"""
WARNING: This test doesn't use assertHTMLEqual since it will get rid
of some escapes which are tested here!
"""
HREF_RE = re.compile('href="([^"]+)"')
VALUE_RE = re.compile('value="([^"]+)"')
TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>')
w = widgets.AdminURLFieldWidget()
output = w.render('test', 'http://example.com/<sometag>some-text</sometag>')
self.assertEqual(
HREF_RE.search(output)[1],
'http://example.com/%3Csometag%3Esome-text%3C/sometag%3E',
)
self.assertEqual(
TEXT_RE.search(output)[1],
'http://example.com/<sometag>some-text</sometag>',
)
self.assertEqual(
VALUE_RE.search(output)[1],
'http://example.com/<sometag>some-text</sometag>',
)
output = w.render('test', 'http://example-äüö.com/<sometag>some-text</sometag>')
self.assertEqual(
HREF_RE.search(output)[1],
'http://xn--example--7za4pnc.com/%3Csometag%3Esome-text%3C/sometag%3E',
)
self.assertEqual(
TEXT_RE.search(output)[1],
'http://example-äüö.com/<sometag>some-text</sometag>',
)
self.assertEqual(
VALUE_RE.search(output)[1],
'http://example-äüö.com/<sometag>some-text</sometag>',
)
output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"')
self.assertEqual(
HREF_RE.search(output)[1],
'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22',
)
self.assertEqual(
TEXT_RE.search(output)[1],
'http://www.example.com/%C3%A4"><script>'
'alert("XSS!")</script>"'
)
self.assertEqual(
VALUE_RE.search(output)[1],
'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"',
)
class AdminUUIDWidgetTests(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminUUIDInputWidget()
self.assertHTMLEqual(
w.render('test', '550e8400-e29b-41d4-a716-446655440000'),
'<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="vUUIDField" name="test">',
)
w = widgets.AdminUUIDInputWidget(attrs={'class': 'myUUIDInput'})
self.assertHTMLEqual(
w.render('test', '550e8400-e29b-41d4-a716-446655440000'),
'<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="myUUIDInput" name="test">',
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminFileWidgetTests(TestDataMixin, TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
band = Band.objects.create(name='Linkin Park')
cls.album = band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
def test_render(self):
w = widgets.AdminFileWidget()
self.assertHTMLEqual(
w.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id"> '
'<label for="test-clear_id">Clear</label></span><br>'
'Change: <input type="file" name="test"></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
self.assertHTMLEqual(
w.render('test', SimpleUploadedFile('test', b'content')),
'<input type="file" name="test">',
)
def test_render_required(self):
widget = widgets.AdminFileWidget()
widget.is_required = True
self.assertHTMLEqual(
widget.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a><br>'
'Change: <input type="file" name="test"></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
def test_render_disabled(self):
widget = widgets.AdminFileWidget(attrs={'disabled': True})
self.assertHTMLEqual(
widget.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id" disabled>'
'<label for="test-clear_id">Clear</label></span><br>'
'Change: <input type="file" name="test" disabled></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
def test_readonly_fields(self):
"""
File widgets should render as a link when they're marked "read only."
"""
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,)))
self.assertContains(
response,
'<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">'
r'albums\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url('')},
html=True,
)
self.assertNotContains(
response,
'<input type="file" name="cover_art" id="id_cover_art">',
html=True,
)
response = self.client.get(reverse('admin:admin_widgets_album_add'))
self.assertContains(
response,
'<div class="readonly"></div>',
html=True,
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ForeignKeyRawIdWidgetTest(TestCase):
def test_render(self):
band = Band.objects.create(name='Linkin Park')
band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
rel = Album._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', band.uuid, attrs={}),
'<input type="text" name="test" value="%(banduuid)s" '
'class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/band/?_to_field=uuid" class="related-lookup" '
'id="lookup_id_test" title="Lookup"></a> <strong>'
'<a href="/admin_widgets/band/%(bandpk)s/change/">Linkin Park</a>'
'</strong>' % {'banduuid': band.uuid, 'bandpk': band.pk}
)
def test_relations_to_non_primary_key(self):
# ForeignKeyRawIdWidget works with fields which aren't related to
# the model's primary key.
apple = Inventory.objects.create(barcode=86, name='Apple')
Inventory.objects.create(barcode=22, name='Pear')
core = Inventory.objects.create(
barcode=87, name='Core', parent=apple
)
rel = Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', core.parent_id, attrs={}),
'<input type="text" name="test" value="86" '
'class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong><a href="/admin_widgets/inventory/%(pk)s/change/">'
'Apple</a></strong>' % {'pk': apple.pk}
)
def test_fk_related_model_not_in_admin(self):
# FK to a model not registered with admin site. Raw ID widget should
# have no magnifying glass link. See #16542
big_honeycomb = Honeycomb.objects.create(location='Old tree')
big_honeycomb.bee_set.create()
rel = Bee._meta.get_field('honeycomb').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('honeycomb_widget', big_honeycomb.pk, attrs={}),
'<input type="text" name="honeycomb_widget" value="%(hcombpk)s">'
' <strong>%(hcomb)s</strong>'
% {'hcombpk': big_honeycomb.pk, 'hcomb': big_honeycomb}
)
def test_fk_to_self_model_not_in_admin(self):
# FK to self, not registered with admin site. Raw ID widget should have
# no magnifying glass link. See #16542
subject1 = Individual.objects.create(name='Subject #1')
Individual.objects.create(name='Child', parent=subject1)
rel = Individual._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('individual_widget', subject1.pk, attrs={}),
'<input type="text" name="individual_widget" value="%(subj1pk)s">'
' <strong>%(subj1)s</strong>'
% {'subj1pk': subject1.pk, 'subj1': subject1}
)
def test_proper_manager_for_label_lookup(self):
# see #9258
rel = Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
hidden = Inventory.objects.create(
barcode=93, name='Hidden', hidden=True
)
child_of_hidden = Inventory.objects.create(
barcode=94, name='Child of hidden', parent=hidden
)
self.assertHTMLEqual(
w.render('test', child_of_hidden.parent_id, attrs={}),
'<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong><a href="/admin_widgets/inventory/%(pk)s/change/">'
'Hidden</a></strong>' % {'pk': hidden.pk}
)
def test_render_unsafe_limit_choices_to(self):
rel = UnsafeLimitChoicesTo._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', None),
'<input type="text" name="test" class="vForeignKeyRawIdAdminField">\n'
'<a href="/admin_widgets/band/?name=%22%26%3E%3Cescapeme&_to_field=artist_ptr" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
)
def test_render_fk_as_pk_model(self):
rel = VideoStream._meta.get_field('release_event').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', None),
'<input type="text" name="test" class="vForeignKeyRawIdAdminField">\n'
'<a href="/admin_widgets/releaseevent/?_to_field=album" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ManyToManyRawIdWidgetTest(TestCase):
def test_render(self):
band = Band.objects.create(name='Linkin Park')
m1 = Member.objects.create(name='Chester')
m2 = Member.objects.create(name='Mike')
band.members.add(m1, m2)
rel = Band._meta.get_field('members').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', [m1.pk, m2.pk], attrs={}), (
'<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % {'m1pk': m1.pk, 'm2pk': m2.pk}
)
self.assertHTMLEqual(
w.render('test', [m1.pk]), (
'<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % {'m1pk': m1.pk}
)
def test_m2m_related_model_not_in_admin(self):
# M2M relationship with model not registered with admin site. Raw ID
# widget should have no magnifying glass link. See #16542
consultor1 = Advisor.objects.create(name='Rockstar Techie')
c1 = Company.objects.create(name='Doodle')
c2 = Company.objects.create(name='Pear')
consultor1.companies.add(c1, c2)
rel = Advisor._meta.get_field('companies').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('company_widget1', [c1.pk, c2.pk], attrs={}),
'<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s">' % {'c1pk': c1.pk, 'c2pk': c2.pk}
)
self.assertHTMLEqual(
w.render('company_widget2', [c1.pk]),
'<input type="text" name="company_widget2" value="%(c1pk)s">' % {'c1pk': c1.pk}
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class RelatedFieldWidgetWrapperTests(SimpleTestCase):
def test_no_can_add_related(self):
rel = Individual._meta.get_field('parent').remote_field
w = widgets.AdminRadioSelect()
# Used to fail with a name error.
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site)
self.assertFalse(w.can_add_related)
def test_select_multiple_widget_cant_change_delete_related(self):
rel = Individual._meta.get_field('parent').remote_field
widget = forms.SelectMultiple()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertFalse(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_on_delete_cascade_rel_cant_delete_related(self):
rel = Individual._meta.get_field('soulmate').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertTrue(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_custom_widget_render(self):
class CustomWidget(forms.Select):
def render(self, *args, **kwargs):
return 'custom render output'
rel = Album._meta.get_field('band').remote_field
widget = CustomWidget()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
output = wrapper.render('name', 'value')
self.assertIn('custom render output', output)
def test_widget_delegates_value_omitted_from_data(self):
class CustomWidget(forms.Select):
def value_omitted_from_data(self, data, files, name):
return False
rel = Album._meta.get_field('band').remote_field
widget = CustomWidget()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.value_omitted_from_data({}, {}, 'band'), False)
def test_widget_is_hidden(self):
rel = Album._meta.get_field('band').remote_field
widget = forms.HiddenInput()
widget.choices = ()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.is_hidden, True)
context = wrapper.get_context('band', None, {})
self.assertIs(context['is_hidden'], True)
output = wrapper.render('name', 'value')
# Related item links are hidden.
self.assertNotIn('<a ', output)
def test_widget_is_not_hidden(self):
rel = Album._meta.get_field('band').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.is_hidden, False)
context = wrapper.get_context('band', None, {})
self.assertIs(context['is_hidden'], False)
output = wrapper.render('name', 'value')
# Related item links are present.
self.assertIn('<a ', output)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminWidgetSeleniumTestCase(AdminSeleniumTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.u1 = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
class DateTimePickerSeleniumTests(AdminWidgetSeleniumTestCase):
def test_show_hide_date_time_picker_widgets(self):
"""
Pressing the ESC key or clicking on a widget value closes the date and
time picker widgets.
"""
from selenium.webdriver.common.keys import Keys
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# First, with the date picker widget ---------------------------------
cal_icon = self.selenium.find_element_by_id('calendarlink0')
# The date picker is hidden
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Click the calendar icon
cal_icon.click()
# The date picker is visible
self.assertTrue(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# The date picker is hidden again
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Click the calendar icon, then on the 15th of current month
cal_icon.click()
self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click()
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'),
datetime.today().strftime('%Y-%m-') + '15',
)
# Then, with the time picker widget ----------------------------------
time_icon = self.selenium.find_element_by_id('clocklink0')
# The time picker is hidden
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
# Click the time icon
time_icon.click()
# The time picker is visible
self.assertTrue(self.selenium.find_element_by_id('clockbox0').is_displayed())
self.assertEqual(
[
x.text for x in
self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")
],
['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']
)
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# The time picker is hidden again
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
# Click the time icon, then select the 'Noon' value
time_icon.click()
self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click()
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'),
'12:00:00',
)
def test_calendar_nonday_class(self):
"""
Ensure cells that are not days of the month have the `nonday` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# make sure the first and last 6 cells have class nonday
for td in tds[:6] + tds[-6:]:
self.assertEqual(td.get_attribute('class'), 'nonday')
def test_calendar_selected_class(self):
"""
Ensure the cell for the day in the input has the `selected` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify the selected cell
selected = tds[6]
self.assertEqual(selected.get_attribute('class'), 'selected')
self.assertEqual(selected.text, '1')
def test_calendar_no_selected_class(self):
"""
Ensure no cells are given the selected class when the field is empty.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify there are no cells with the selected class
selected = [td for td in tds if td.get_attribute('class') == 'selected']
self.assertEqual(len(selected), 0)
def test_calendar_show_date_from_input(self):
"""
The calendar shows the date from the input field for every locale
supported by Django.
"""
self.selenium.set_window_size(1024, 768)
self.admin_login(username='super', password='secret', login_url='/')
# Enter test data
member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M')
# Get month name translations for every locale
month_string = 'May'
path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale')
for language_code, language_name in settings.LANGUAGES:
try:
catalog = gettext.translation('djangojs', path, [language_code])
except OSError:
continue
if month_string in catalog._catalog:
month_name = catalog._catalog[month_string]
else:
month_name = month_string
# Get the expected caption
may_translation = month_name
expected_caption = '{:s} {:d}'.format(may_translation.upper(), 1984)
# Test with every locale
with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True):
# Open a page that has a date picker widget
url = reverse('admin:admin_widgets_member_change', args=(member.pk,))
self.selenium.get(self.live_server_url + url)
# Click on the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Make sure that the right month and year are displayed
self.wait_for_text('#calendarin0 caption', expected_caption)
@override_settings(TIME_ZONE='Asia/Singapore')
class DateTimePickerShortcutsSeleniumTests(AdminWidgetSeleniumTestCase):
def test_date_time_picker_shortcuts(self):
"""
date/time/datetime picker shortcuts work in the current time zone.
Refs #20663.
This test case is fairly tricky: it relies on Selenium still running the browser
in the default time zone "America/Chicago" despite `override_settings` changing
the time zone to "Asia/Singapore".
"""
self.admin_login(username='super', password='secret', login_url='/')
error_margin = timedelta(seconds=10)
# If we are neighbouring a DST, we add an hour of error margin.
tz = pytz.timezone('America/Chicago')
utc_now = datetime.now(pytz.utc)
tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname()
tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname()
if tz_yesterday != tz_tomorrow:
error_margin += timedelta(hours=1)
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
self.selenium.find_element_by_id('id_name').send_keys('test')
# Click on the "today" and "now" shortcuts.
shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts')
now = datetime.now()
for shortcut in shortcuts:
shortcut.find_element_by_tag_name('a').click()
# There is a time zone mismatch warning.
# Warning: This would effectively fail if the TIME_ZONE defined in the
# settings has the same UTC offset as "Asia/Singapore" because the
# mismatch warning would be rightfully missing from the page.
self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning')
# Submit the form.
with self.wait_page_loaded():
self.selenium.find_element_by_name('_save').click()
# Make sure that "now" in javascript is within 10 seconds
# from "now" on the server side.
member = Member.objects.get(name='test')
self.assertGreater(member.birthdate, now - error_margin)
self.assertLess(member.birthdate, now + error_margin)
# The above tests run with Asia/Singapore, which is on the positive side of
# UTC. Here we test with a time zone on the negative side.
@override_settings(TIME_ZONE='US/Eastern')
class DateTimePickerAltTimezoneSeleniumTests(DateTimePickerShortcutsSeleniumTests):
pass
class HorizontalVerticalFilterSeleniumTests(AdminWidgetSeleniumTestCase):
def setUp(self):
super().setUp()
self.lisa = Student.objects.create(name='Lisa')
self.john = Student.objects.create(name='John')
self.bob = Student.objects.create(name='Bob')
self.peter = Student.objects.create(name='Peter')
self.jenny = Student.objects.create(name='Jenny')
self.jason = Student.objects.create(name='Jason')
self.cliff = Student.objects.create(name='Cliff')
self.arthur = Student.objects.create(name='Arthur')
self.school = School.objects.create(name='School of Awesome')
def assertActiveButtons(self, mode, field_name, choose, remove, choose_all=None, remove_all=None):
choose_link = '#id_%s_add_link' % field_name
choose_all_link = '#id_%s_add_all_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
remove_all_link = '#id_%s_remove_all_link' % field_name
self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
if mode == 'horizontal':
self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)
def execute_basic_operations(self, mode, field_name):
original_url = self.selenium.current_url
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
choose_all_link = 'id_%s_add_all_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
remove_all_link = 'id_%s_remove_all_link' % field_name
# Initial positions ---------------------------------------------------
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id)])
self.assertActiveButtons(mode, field_name, False, False, True, True)
# Click 'Choose all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(choose_all_link).click()
elif mode == 'vertical':
# There's no 'Choose all' button in vertical mode, so individually
# select all options and click 'Choose'.
for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
option.click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertActiveButtons(mode, field_name, False, False, False, True)
# Click 'Remove all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(remove_all_link).click()
elif mode == 'vertical':
# There's no 'Remove all' button in vertical mode, so individually
# select all options and click 'Remove'.
for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
option.click()
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box, [
str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertSelectOptions(to_box, [])
self.assertActiveButtons(mode, field_name, False, False, True, False)
# Choose some options ------------------------------------------------
from_lisa_select_option = self.selenium.find_element_by_css_selector(
'{} > option[value="{}"]'.format(from_box, self.lisa.id)
)
# Check the title attribute is there for tool tips: ticket #20821
self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
self.select_option(from_box, str(self.lisa.id))
self.select_option(from_box, str(self.jason.id))
self.select_option(from_box, str(self.bob.id))
self.select_option(from_box, str(self.john.id))
self.assertActiveButtons(mode, field_name, True, False, True, False)
self.selenium.find_element_by_id(choose_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.bob.id),
str(self.jason.id), str(self.john.id),
])
# Check the tooltip is still there after moving: ticket #20821
to_lisa_select_option = self.selenium.find_element_by_css_selector(
'{} > option[value="{}"]'.format(to_box, self.lisa.id)
)
self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))
# Remove some options -------------------------------------------------
self.select_option(to_box, str(self.lisa.id))
self.select_option(to_box, str(self.bob.id))
self.assertActiveButtons(mode, field_name, False, True, True, True)
self.selenium.find_element_by_id(remove_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)
])
self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id)])
# Choose some more options --------------------------------------------
self.select_option(from_box, str(self.arthur.id))
self.select_option(from_box, str(self.cliff.id))
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id),
])
self.assertSelectOptions(to_box, [
str(self.jason.id), str(self.john.id),
str(self.arthur.id), str(self.cliff.id),
])
# Choose some more options --------------------------------------------
self.select_option(from_box, str(self.peter.id))
self.select_option(from_box, str(self.lisa.id))
# Confirm they're selected after clicking inactive buttons: ticket #26575
self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)])
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)])
# Unselect the options ------------------------------------------------
self.deselect_option(from_box, str(self.peter.id))
self.deselect_option(from_box, str(self.lisa.id))
# Choose some more options --------------------------------------------
self.select_option(to_box, str(self.jason.id))
self.select_option(to_box, str(self.john.id))
# Confirm they're selected after clicking inactive buttons: ticket #26575
self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)])
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)])
# Unselect the options ------------------------------------------------
self.deselect_option(to_box, str(self.jason.id))
self.deselect_option(to_box, str(self.john.id))
# Pressing buttons shouldn't change the URL.
self.assertEqual(self.selenium.current_url, original_url)
def test_basic(self):
self.selenium.set_window_size(1024, 768)
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))
self.wait_page_ready()
self.execute_basic_operations('vertical', 'students')
self.execute_basic_operations('horizontal', 'alumni')
# Save and check that everything is properly stored in the database ---
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_ready()
self.school = School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()), [self.arthur, self.cliff, self.jason, self.john])
self.assertEqual(list(self.school.alumni.all()), [self.arthur, self.cliff, self.jason, self.john])
def test_filter(self):
"""
Typing in the search box filters out options displayed in the 'from'
box.
"""
from selenium.webdriver.common.keys import Keys
self.selenium.set_window_size(1024, 768)
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))
for field_name in ['students', 'alumni']:
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
input = self.selenium.find_element_by_id('id_%s_input' % field_name)
# Initial values
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
# Typing in some characters filters out non-matching options
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys('R')
self.assertSelectOptions(from_box, [str(self.arthur.id)])
# Clearing the text box makes the other options reappear
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
# -----------------------------------------------------------------
# Choosing a filtered option sends it properly to the 'to' box.
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
self.select_option(from_box, str(self.jason.id))
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id)])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.peter.id), str(self.jason.id),
])
self.select_option(to_box, str(self.lisa.id))
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE]) # Clear text box
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jenny.id),
str(self.john.id), str(self.lisa.id),
])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
# -----------------------------------------------------------------
# Pressing enter on a filtered option sends it properly to
# the 'to' box.
self.select_option(to_box, str(self.jason.id))
self.selenium.find_element_by_id(remove_link).click()
input.send_keys('ja')
self.assertSelectOptions(from_box, [str(self.jason.id)])
input.send_keys([Keys.ENTER])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE])
# Save and check that everything is properly stored in the database ---
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.school = School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()), [self.jason, self.peter])
self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter])
def test_back_button_bug(self):
"""
Some browsers had a bug where navigating away from the change page
and then clicking the browser's back button would clear the
filter_horizontal/filter_vertical widgets (#13614).
"""
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,))
self.selenium.get(self.live_server_url + change_url)
# Navigate away and go back to the change form page.
self.selenium.find_element_by_link_text('Home').click()
self.selenium.back()
expected_unselected_values = [
str(self.arthur.id), str(self.bob.id), str(self.cliff.id),
str(self.jason.id), str(self.jenny.id), str(self.john.id),
]
expected_selected_values = [str(self.lisa.id), str(self.peter.id)]
# Everything is still in place
self.assertSelectOptions('#id_students_from', expected_unselected_values)
self.assertSelectOptions('#id_students_to', expected_selected_values)
self.assertSelectOptions('#id_alumni_from', expected_unselected_values)
self.assertSelectOptions('#id_alumni_to', expected_selected_values)
def test_refresh_page(self):
"""
Horizontal and vertical filter widgets keep selected options on page
reload (#22955).
"""
self.school.students.add(self.arthur, self.jason)
self.school.alumni.add(self.arthur, self.jason)
self.admin_login(username='super', password='secret', login_url='/')
change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,))
self.selenium.get(self.live_server_url + change_url)
options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option'))
self.assertEqual(options_len, 2)
# self.selenium.refresh() or send_keys(Keys.F5) does hard reload and
# doesn't replicate what happens when a user clicks the browser's
# 'Refresh' button.
with self.wait_page_loaded():
self.selenium.execute_script("location.reload()")
options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option'))
self.assertEqual(options_len, 2)
class AdminRawIdWidgetSeleniumTests(AdminWidgetSeleniumTestCase):
def setUp(self):
super().setUp()
Band.objects.create(id=42, name='Bogey Blues')
Band.objects.create(id=98, name='Green Potatoes')
def test_ForeignKey(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add'))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(self.selenium.find_element_by_id('id_main_band').get_attribute('value'), '')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the other selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '98')
def test_many_to_many(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add'))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'), '')
# Help text for the field is displayed
self.assertEqual(
self.selenium.find_element_by_css_selector('.field-supporting_bands div.help').text,
'Supporting Bands.'
)
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the two selected bands' ids
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42,98')
class RelatedFieldWidgetSeleniumTests(AdminWidgetSeleniumTestCase):
def test_ForeignKey_using_to_field(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_profile_add'))
main_window = self.selenium.current_window_handle
# Click the Add User button to add new
self.selenium.find_element_by_id('add_id_user').click()
self.wait_for_and_switch_to_popup()
password_field = self.selenium.find_element_by_id('id_password')
password_field.send_keys('password')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'newuser'
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
# The field now contains the new user
self.selenium.find_element_by_css_selector('#id_user option[value=newuser]')
# Click the Change User button to change it
self.selenium.find_element_by_id('change_id_user').click()
self.wait_for_and_switch_to_popup()
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'changednewuser'
username_field.clear()
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]')
# Go ahead and submit the form to make sure it works
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.wait_for_text('li.success', 'The profile “changednewuser” was added successfully.')
profiles = Profile.objects.all()
self.assertEqual(len(profiles), 1)
self.assertEqual(profiles[0].user.username, username_value)
| 44.522039 | 119 | 0.63891 |
import gettext
import os
import re
from datetime import datetime, timedelta
from importlib import import_module
import pytz
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.auth.models import User
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.models import (
CharField, DateField, DateTimeField, ManyToManyField, UUIDField,
)
from django.test import SimpleTestCase, TestCase, override_settings
from django.urls import reverse
from django.utils import translation
from .models import (
Advisor, Album, Band, Bee, Car, Company, Event, Honeycomb, Individual,
Inventory, Member, MyFileField, Profile, School, Student,
UnsafeLimitChoicesTo, VideoStream,
)
from .widgetadmin import site as widget_admin_site
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email=None)
cls.u2 = User.objects.create_user(username='testser', password='secret')
Car.objects.create(owner=cls.superuser, make='Volkswagen', model='Passat')
Car.objects.create(owner=cls.u2, make='BMW', model='M3')
class AdminFormfieldForDBFieldTests(SimpleTestCase):
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):
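# Helper: build a ModelAdmin with the given overrides, call
# formfield_for_dbfield() for the named field, unwrap a possible
# RelatedFieldWidgetWrapper, and check that the widget is an instance of
# the expected widget class. Returns the formfield.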
class MyModelAdmin(admin.ModelAdmin):
pass
for k in admin_overrides:
setattr(MyModelAdmin, k, admin_overrides[k])
ma = MyModelAdmin(model, admin.site)
ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)
if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):
widget = ff.widget.widget
else:
widget = ff.widget
self.assertIsInstance(widget, widgetclass)
return ff
def test_DateField(self):
self.assertFormfield(Event, 'start_date', widgets.AdminDateWidget)
def test_DateTimeField(self):
self.assertFormfield(Member, 'birthdate', widgets.AdminSplitDateTime)
def test_TimeField(self):
self.assertFormfield(Event, 'start_time', widgets.AdminTimeWidget)
def test_TextField(self):
self.assertFormfield(Event, 'description', widgets.AdminTextareaWidget)
def test_URLField(self):
self.assertFormfield(Event, 'link', widgets.AdminURLFieldWidget)
def test_IntegerField(self):
self.assertFormfield(Event, 'min_age', widgets.AdminIntegerFieldWidget)
def test_CharField(self):
self.assertFormfield(Member, 'name', widgets.AdminTextInputWidget)
def test_EmailField(self):
self.assertFormfield(Member, 'email', widgets.AdminEmailInputWidget)
def test_FileField(self):
self.assertFormfield(Album, 'cover_art', widgets.AdminFileWidget)
def test_ForeignKey(self):
self.assertFormfield(Event, 'main_band', forms.Select)
def test_raw_id_ForeignKey(self):
self.assertFormfield(Event, 'main_band', widgets.ForeignKeyRawIdWidget,
raw_id_fields=['main_band'])
def test_radio_fields_ForeignKey(self):
ff = self.assertFormfield(Event, 'main_band', widgets.AdminRadioSelect,
radio_fields={'main_band': admin.VERTICAL})
self.assertIsNone(ff.empty_label)
def test_many_to_many(self):
self.assertFormfield(Band, 'members', forms.SelectMultiple)
def test_raw_id_many_to_many(self):
self.assertFormfield(Band, 'members', widgets.ManyToManyRawIdWidget,
raw_id_fields=['members'])
def test_filtered_many_to_many(self):
self.assertFormfield(Band, 'members', widgets.FilteredSelectMultiple,
filter_vertical=['members'])
def test_formfield_overrides(self):
self.assertFormfield(Event, 'start_date', forms.TextInput,
formfield_overrides={DateField: {'widget': forms.TextInput}})
def test_formfield_overrides_widget_instances(self):
class BandAdmin(admin.ModelAdmin):
formfield_overrides = {
CharField: {'widget': forms.TextInput(attrs={'size': '10'})}
}
ma = BandAdmin(Band, admin.site)
f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None)
f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None)
self.assertNotEqual(f1.widget, f2.widget)
self.assertEqual(f1.widget.attrs['maxlength'], '100')
self.assertEqual(f2.widget.attrs['maxlength'], '20')
self.assertEqual(f2.widget.attrs['size'], '10')
def test_formfield_overrides_m2m_filter_widget(self):
class BandAdmin(admin.ModelAdmin):
filter_vertical = ['members']
formfield_overrides = {
ManyToManyField: {'widget': forms.CheckboxSelectMultiple},
}
ma = BandAdmin(Band, admin.site)
field = ma.formfield_for_dbfield(Band._meta.get_field('members'), request=None)
self.assertIsInstance(field.widget.widget, forms.CheckboxSelectMultiple)
def test_formfield_overrides_for_datetime_field(self):
class MemberAdmin(admin.ModelAdmin):
formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}}
ma = MemberAdmin(Member, admin.site)
f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None)
self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime)
self.assertIsInstance(f1, forms.SplitDateTimeField)
def test_formfield_overrides_for_custom_field(self):
class AlbumAdmin(admin.ModelAdmin):
formfield_overrides = {MyFileField: {'widget': forms.TextInput()}}
ma = AlbumAdmin(Member, admin.site)
f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None)
self.assertIsInstance(f1.widget, forms.TextInput)
def test_field_with_choices(self):
self.assertFormfield(Member, 'gender', forms.Select)
def test_choices_with_radio_fields(self):
self.assertFormfield(Member, 'gender', widgets.AdminRadioSelect,
radio_fields={'gender': admin.VERTICAL})
def test_inheritance(self):
self.assertFormfield(Album, 'backside_art', widgets.AdminFileWidget)
def test_m2m_widgets(self):
class AdvisorAdmin(admin.ModelAdmin):
filter_vertical = ['companies']
self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple,
filter_vertical=['companies'])
ma = AdvisorAdmin(Advisor, admin.site)
f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None)
self.assertEqual(
f.help_text,
'Hold down “Control”, or “Command” on a Mac, to select more than one.'
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
def test_filter_choices_by_request_user(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
self.assertNotContains(response, "BMW M3")
self.assertContains(response, "Volkswagen Passat")
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_changelist_ForeignKey(self):
response = self.client.get(reverse('admin:admin_widgets_car_changelist'))
self.assertContains(response, '/auth/user/add/')
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_nonexistent_target_id(self):
band = Band.objects.create(name='Bogey Blues')
pk = band.pk
band.delete()
post_data = {
"main_band": str(pk),
}
response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data)
self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.')
def test_invalid_target_id(self):
for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234):
# This should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), {"main_band": test_str})
self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.')
def test_url_params_from_lookup_dict_any_iterable(self):
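# Any iterable lookup value (tuple, list, ...) is serialized to a
# comma-separated string for the raw-id lookup URL.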
lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})
lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})
self.assertEqual(lookup1, {'color__in': 'red,blue'})
self.assertEqual(lookup1, lookup2)
def test_url_params_from_lookup_dict_callable(self):
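# Callable lookup values are called and their return value is used when
# building the URL params.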
def my_callable():
return 'works'
lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable})
lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()})
self.assertEqual(lookup1, lookup2)
def test_label_and_url_for_value_invalid_uuid(self):
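# An invalid UUID value for a UUIDField target returns empty label and
# URL instead of raising an exception.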
field = Bee._meta.get_field('honeycomb')
self.assertIsInstance(field.target_field, UUIDField)
widget = widgets.ForeignKeyRawIdWidget(field.remote_field, admin.site)
self.assertEqual(widget.label_and_url_for_value('invalid-uuid'), ('', ''))
class FilteredSelectMultipleWidgetTest(SimpleTestCase):
def test_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', False)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple name="test" class="selectfilter" '
'data-field-name="test\\" data-is-stacked="0">\n</select>'
)
def test_stacked_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', True)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple name="test" class="selectfilterstacked" '
'data-field-name="test\\" data-is-stacked="1">\n</select>'
)
class AdminDateWidgetTest(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminDateWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="vDateField" name="test" size="10">',
)
# pass attrs to widget
w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="myDateField" name="test" size="20">',
)
class AdminTimeWidgetTest(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminTimeWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="vTimeField" name="test" size="8">',
)
# pass attrs to widget
w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="myTimeField" name="test" size="20">',
)
class AdminSplitDateTimeWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminSplitDateTime()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Date: <input value="2007-12-01" type="text" class="vDateField" '
'name="test_0" size="10"><br>'
'Time: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8"></p>'
)
def test_localization(self):
w = widgets.AdminSplitDateTime()
with self.settings(USE_L10N=True), translation.override('de-at'):
w.is_localized = True
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Datum: <input value="01.12.2007" type="text" '
'class="vDateField" name="test_0" size="10"><br>'
'Zeit: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8"></p>'
)
class AdminURLWidgetTest(SimpleTestCase):
def test_get_context_validates_url(self):
w = widgets.AdminURLFieldWidget()
for invalid in ['', '/not/a/full/url/', 'javascript:alert("Danger XSS!")']:
with self.subTest(url=invalid):
self.assertFalse(w.get_context('name', invalid, {})['url_valid'])
self.assertTrue(w.get_context('name', 'http://example.com', {})['url_valid'])
def test_render(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', ''),
'<input class="vURLField" name="test" type="url">'
)
self.assertHTMLEqual(
w.render('test', 'http://example.com'),
'<p class="url">Currently:<a href="http://example.com">'
'http://example.com</a><br>'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example.com"></p>'
)
def test_render_idn(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', 'http://example-äüö.com'),
'<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">'
'http://example-äüö.com</a><br>'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example-äüö.com"></p>'
)
def test_render_quoting(self):
HREF_RE = re.compile('href="([^"]+)"')
VALUE_RE = re.compile('value="([^"]+)"')
TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>')
w = widgets.AdminURLFieldWidget()
output = w.render('test', 'http://example.com/<sometag>some-text</sometag>')
self.assertEqual(
HREF_RE.search(output)[1],
'http://example.com/%3Csometag%3Esome-text%3C/sometag%3E',
)
self.assertEqual(
TEXT_RE.search(output)[1],
'http://example.com/<sometag>some-text</sometag>',
)
self.assertEqual(
VALUE_RE.search(output)[1],
'http://example.com/<sometag>some-text</sometag>',
)
output = w.render('test', 'http://example-äüö.com/<sometag>some-text</sometag>')
self.assertEqual(
HREF_RE.search(output)[1],
'http://xn--example--7za4pnc.com/%3Csometag%3Esome-text%3C/sometag%3E',
)
self.assertEqual(
TEXT_RE.search(output)[1],
'http://example-äüö.com/<sometag>some-text</sometag>',
)
self.assertEqual(
VALUE_RE.search(output)[1],
'http://example-äüö.com/<sometag>some-text</sometag>',
)
output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"')
self.assertEqual(
HREF_RE.search(output)[1],
'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22',
)
self.assertEqual(
TEXT_RE.search(output)[1],
'http://www.example.com/%C3%A4"><script>'
'alert("XSS!")</script>"'
)
self.assertEqual(
VALUE_RE.search(output)[1],
'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"',
)
class AdminUUIDWidgetTests(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminUUIDInputWidget()
self.assertHTMLEqual(
w.render('test', '550e8400-e29b-41d4-a716-446655440000'),
'<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="vUUIDField" name="test">',
)
w = widgets.AdminUUIDInputWidget(attrs={'class': 'myUUIDInput'})
self.assertHTMLEqual(
w.render('test', '550e8400-e29b-41d4-a716-446655440000'),
'<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="myUUIDInput" name="test">',
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminFileWidgetTests(TestDataMixin, TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
band = Band.objects.create(name='Linkin Park')
cls.album = band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
def test_render(self):
w = widgets.AdminFileWidget()
self.assertHTMLEqual(
w.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id"> '
'<label for="test-clear_id">Clear</label></span><br>'
'Change: <input type="file" name="test"></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
self.assertHTMLEqual(
w.render('test', SimpleUploadedFile('test', b'content')),
'<input type="file" name="test">',
)
def test_render_required(self):
widget = widgets.AdminFileWidget()
widget.is_required = True
self.assertHTMLEqual(
widget.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a><br>'
'Change: <input type="file" name="test"></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
def test_render_disabled(self):
widget = widgets.AdminFileWidget(attrs={'disabled': True})
self.assertHTMLEqual(
widget.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id" disabled>'
'<label for="test-clear_id">Clear</label></span><br>'
'Change: <input type="file" name="test" disabled></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
def test_readonly_fields(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,)))
self.assertContains(
response,
'<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">'
r'albums\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url('')},
html=True,
)
self.assertNotContains(
response,
'<input type="file" name="cover_art" id="id_cover_art">',
html=True,
)
response = self.client.get(reverse('admin:admin_widgets_album_add'))
self.assertContains(
response,
'<div class="readonly"></div>',
html=True,
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ForeignKeyRawIdWidgetTest(TestCase):
def test_render(self):
band = Band.objects.create(name='Linkin Park')
band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
rel = Album._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', band.uuid, attrs={}),
'<input type="text" name="test" value="%(banduuid)s" '
'class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/band/?_to_field=uuid" class="related-lookup" '
'id="lookup_id_test" title="Lookup"></a> <strong>'
'<a href="/admin_widgets/band/%(bandpk)s/change/">Linkin Park</a>'
'</strong>' % {'banduuid': band.uuid, 'bandpk': band.pk}
)
def test_relations_to_non_primary_key(self):
# ForeignKeyRawIdWidget works with fields which aren't related to
# the model's primary key.
apple = Inventory.objects.create(barcode=86, name='Apple')
Inventory.objects.create(barcode=22, name='Pear')
core = Inventory.objects.create(
barcode=87, name='Core', parent=apple
)
rel = Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', core.parent_id, attrs={}),
'<input type="text" name="test" value="86" '
'class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong><a href="/admin_widgets/inventory/%(pk)s/change/">'
'Apple</a></strong>' % {'pk': apple.pk}
)
def test_fk_related_model_not_in_admin(self):
# FK to a model not registered with admin site. Raw ID widget should
# have no magnifying glass link. See #16542
big_honeycomb = Honeycomb.objects.create(location='Old tree')
big_honeycomb.bee_set.create()
rel = Bee._meta.get_field('honeycomb').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('honeycomb_widget', big_honeycomb.pk, attrs={}),
'<input type="text" name="honeycomb_widget" value="%(hcombpk)s">'
' <strong>%(hcomb)s</strong>'
% {'hcombpk': big_honeycomb.pk, 'hcomb': big_honeycomb}
)
def test_fk_to_self_model_not_in_admin(self):
# FK to self, not registered with admin site. Raw ID widget should have
# no magnifying glass link. See #16542
subject1 = Individual.objects.create(name='Subject #1')
Individual.objects.create(name='Child', parent=subject1)
rel = Individual._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('individual_widget', subject1.pk, attrs={}),
'<input type="text" name="individual_widget" value="%(subj1pk)s">'
' <strong>%(subj1)s</strong>'
% {'subj1pk': subject1.pk, 'subj1': subject1}
)
def test_proper_manager_for_label_lookup(self):
# see #9258
rel = Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
hidden = Inventory.objects.create(
barcode=93, name='Hidden', hidden=True
)
child_of_hidden = Inventory.objects.create(
barcode=94, name='Child of hidden', parent=hidden
)
self.assertHTMLEqual(
w.render('test', child_of_hidden.parent_id, attrs={}),
'<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong><a href="/admin_widgets/inventory/%(pk)s/change/">'
'Hidden</a></strong>' % {'pk': hidden.pk}
)
def test_render_unsafe_limit_choices_to(self):
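# Characters in limit_choices_to values that need escaping are
# URL-encoded in the related-lookup link.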
rel = UnsafeLimitChoicesTo._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', None),
'<input type="text" name="test" class="vForeignKeyRawIdAdminField">\n'
'<a href="/admin_widgets/band/?name=%22%26%3E%3Cescapeme&_to_field=artist_ptr" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
)
def test_render_fk_as_pk_model(self):
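# Rendering when the related model uses a foreign key as its primary
# key: the lookup link targets the related field (_to_field=album).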
rel = VideoStream._meta.get_field('release_event').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', None),
'<input type="text" name="test" class="vForeignKeyRawIdAdminField">\n'
'<a href="/admin_widgets/releaseevent/?_to_field=album" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ManyToManyRawIdWidgetTest(TestCase):
def test_render(self):
band = Band.objects.create(name='Linkin Park')
m1 = Member.objects.create(name='Chester')
m2 = Member.objects.create(name='Mike')
band.members.add(m1, m2)
rel = Band._meta.get_field('members').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', [m1.pk, m2.pk], attrs={}), (
'<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % {'m1pk': m1.pk, 'm2pk': m2.pk}
)
self.assertHTMLEqual(
w.render('test', [m1.pk]), (
'<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % {'m1pk': m1.pk}
)
def test_m2m_related_model_not_in_admin(self):
# M2M relationship with model not registered with admin site. Raw ID
# widget should have no magnifying glass link. See #16542
consultor1 = Advisor.objects.create(name='Rockstar Techie')
c1 = Company.objects.create(name='Doodle')
c2 = Company.objects.create(name='Pear')
consultor1.companies.add(c1, c2)
rel = Advisor._meta.get_field('companies').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('company_widget1', [c1.pk, c2.pk], attrs={}),
'<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s">' % {'c1pk': c1.pk, 'c2pk': c2.pk}
)
self.assertHTMLEqual(
w.render('company_widget2', [c1.pk]),
'<input type="text" name="company_widget2" value="%(c1pk)s">' % {'c1pk': c1.pk}
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class RelatedFieldWidgetWrapperTests(SimpleTestCase):
def test_no_can_add_related(self):
rel = Individual._meta.get_field('parent').remote_field
w = widgets.AdminRadioSelect()
# Used to fail with a name error.
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site)
self.assertFalse(w.can_add_related)
def test_select_multiple_widget_cant_change_delete_related(self):
rel = Individual._meta.get_field('parent').remote_field
widget = forms.SelectMultiple()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertFalse(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_on_delete_cascade_rel_cant_delete_related(self):
rel = Individual._meta.get_field('soulmate').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertTrue(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_custom_widget_render(self):
class CustomWidget(forms.Select):
def render(self, *args, **kwargs):
return 'custom render output'
rel = Album._meta.get_field('band').remote_field
widget = CustomWidget()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
output = wrapper.render('name', 'value')
self.assertIn('custom render output', output)
def test_widget_delegates_value_omitted_from_data(self):
class CustomWidget(forms.Select):
def value_omitted_from_data(self, data, files, name):
return False
rel = Album._meta.get_field('band').remote_field
widget = CustomWidget()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.value_omitted_from_data({}, {}, 'band'), False)
def test_widget_is_hidden(self):
rel = Album._meta.get_field('band').remote_field
widget = forms.HiddenInput()
widget.choices = ()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.is_hidden, True)
context = wrapper.get_context('band', None, {})
self.assertIs(context['is_hidden'], True)
output = wrapper.render('name', 'value')
# Related item links are hidden.
self.assertNotIn('<a ', output)
def test_widget_is_not_hidden(self):
rel = Album._meta.get_field('band').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.is_hidden, False)
context = wrapper.get_context('band', None, {})
self.assertIs(context['is_hidden'], False)
output = wrapper.render('name', 'value')
# Related item links are present.
self.assertIn('<a ', output)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminWidgetSeleniumTestCase(AdminSeleniumTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.u1 = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
class DateTimePickerSeleniumTests(AdminWidgetSeleniumTestCase):
def test_show_hide_date_time_picker_widgets(self):
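# Pressing the ESC key or clicking on a widget value closes the date
# and time picker widgets.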
from selenium.webdriver.common.keys import Keys
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# First, with the date picker widget ---------------------------------
cal_icon = self.selenium.find_element_by_id('calendarlink0')
# The date picker is hidden
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Click the calendar icon
cal_icon.click()
# The date picker is visible
self.assertTrue(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# The date picker is hidden again
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Click the calendar icon, then on the 15th of current month
cal_icon.click()
self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click()
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'),
datetime.today().strftime('%Y-%m-') + '15',
)
# Then, with the time picker widget ----------------------------------
time_icon = self.selenium.find_element_by_id('clocklink0')
# The time picker is hidden
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
# Click the time icon
time_icon.click()
# The time picker is visible
self.assertTrue(self.selenium.find_element_by_id('clockbox0').is_displayed())
self.assertEqual(
[
x.text for x in
self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")
],
['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']
)
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# The time picker is hidden again
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
# Click the time icon, then select the 'Noon' value
time_icon.click()
self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click()
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'),
'12:00:00',
)
def test_calendar_nonday_class(self):
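# Cells that are not days of the month get the `nonday` CSS class.
# Refs #4574.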
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# make sure the first and last 6 cells have class nonday
for td in tds[:6] + tds[-6:]:
self.assertEqual(td.get_attribute('class'), 'nonday')
def test_calendar_selected_class(self):
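# The cell for the day given in the input gets the `selected` CSS
# class. Refs #4574.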
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify the selected cell
selected = tds[6]
self.assertEqual(selected.get_attribute('class'), 'selected')
self.assertEqual(selected.text, '1')
def test_calendar_no_selected_class(self):
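# No cell gets the `selected` class when the date field is empty.
# Refs #4574.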
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify there are no cells with the selected class
selected = [td for td in tds if td.get_attribute('class') == 'selected']
self.assertEqual(len(selected), 0)
def test_calendar_show_date_from_input(self):
self.selenium.set_window_size(1024, 768)
self.admin_login(username='super', password='secret', login_url='/')
# Enter test data
member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M')
# Get month name translations for every locale
month_string = 'May'
path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale')
for language_code, language_name in settings.LANGUAGES:
try:
catalog = gettext.translation('djangojs', path, [language_code])
except OSError:
continue
if month_string in catalog._catalog:
month_name = catalog._catalog[month_string]
else:
month_name = month_string
# Get the expected caption
may_translation = month_name
expected_caption = '{:s} {:d}'.format(may_translation.upper(), 1984)
# Test with every locale
with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True):
# Open a page that has a date picker widget
url = reverse('admin:admin_widgets_member_change', args=(member.pk,))
self.selenium.get(self.live_server_url + url)
# Click on the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Make sure that the right month and year are displayed
self.wait_for_text('#calendarin0 caption', expected_caption)
@override_settings(TIME_ZONE='Asia/Singapore')
class DateTimePickerShortcutsSeleniumTests(AdminWidgetSeleniumTestCase):
def test_date_time_picker_shortcuts(self):
self.admin_login(username='super', password='secret', login_url='/')
error_margin = timedelta(seconds=10)
# If we are neighbouring a DST, we add an hour of error margin.
tz = pytz.timezone('America/Chicago')
utc_now = datetime.now(pytz.utc)
tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname()
tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname()
if tz_yesterday != tz_tomorrow:
error_margin += timedelta(hours=1)
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
self.selenium.find_element_by_id('id_name').send_keys('test')
# Click on the "today" and "now" shortcuts.
shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts')
now = datetime.now()
for shortcut in shortcuts:
shortcut.find_element_by_tag_name('a').click()
# There is a time zone mismatch warning.
# Warning: This would effectively fail if the TIME_ZONE defined in the
# settings has the same UTC offset as "Asia/Singapore" because the
# mismatch warning would be rightfully missing from the page.
self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning')
# Submit the form.
with self.wait_page_loaded():
self.selenium.find_element_by_name('_save').click()
# Make sure that "now" in javascript is within 10 seconds
# from "now" on the server side.
member = Member.objects.get(name='test')
self.assertGreater(member.birthdate, now - error_margin)
self.assertLess(member.birthdate, now + error_margin)
# The above tests run with Asia/Singapore which are on the positive side of
# UTC. Here we test with a timezone on the negative side.
@override_settings(TIME_ZONE='US/Eastern')
class DateTimePickerAltTimezoneSeleniumTests(DateTimePickerShortcutsSeleniumTests):
pass
class HorizontalVerticalFilterSeleniumTests(AdminWidgetSeleniumTestCase):
def setUp(self):
super().setUp()
self.lisa = Student.objects.create(name='Lisa')
self.john = Student.objects.create(name='John')
self.bob = Student.objects.create(name='Bob')
self.peter = Student.objects.create(name='Peter')
self.jenny = Student.objects.create(name='Jenny')
self.jason = Student.objects.create(name='Jason')
self.cliff = Student.objects.create(name='Cliff')
self.arthur = Student.objects.create(name='Arthur')
self.school = School.objects.create(name='School of Awesome')
def assertActiveButtons(self, mode, field_name, choose, remove, choose_all=None, remove_all=None):
choose_link = '#id_%s_add_link' % field_name
choose_all_link = '#id_%s_add_all_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
remove_all_link = '#id_%s_remove_all_link' % field_name
self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
if mode == 'horizontal':
self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)
def execute_basic_operations(self, mode, field_name):
original_url = self.selenium.current_url
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
choose_all_link = 'id_%s_add_all_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
remove_all_link = 'id_%s_remove_all_link' % field_name
# Initial positions ---------------------------------------------------
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id)])
self.assertActiveButtons(mode, field_name, False, False, True, True)
# Click 'Choose all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(choose_all_link).click()
elif mode == 'vertical':
# There's no 'Choose all' button in vertical mode, so individually select all options and click 'Choose'.
for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
option.click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertActiveButtons(mode, field_name, False, False, False, True)
if mode == 'horizontal':
self.selenium.find_element_by_id(remove_all_link).click()
elif mode == 'vertical':
# select all options and click 'Remove'.
for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
option.click()
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box, [
str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertSelectOptions(to_box, [])
self.assertActiveButtons(mode, field_name, False, False, True, False)
# Choose some options ------------------------------------------------
from_lisa_select_option = self.selenium.find_element_by_css_selector(
'{} > option[value="{}"]'.format(from_box, self.lisa.id)
)
# Check the title attribute is there for tool tips: ticket #20821
self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
self.select_option(from_box, str(self.lisa.id))
self.select_option(from_box, str(self.jason.id))
self.select_option(from_box, str(self.bob.id))
self.select_option(from_box, str(self.john.id))
self.assertActiveButtons(mode, field_name, True, False, True, False)
self.selenium.find_element_by_id(choose_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.bob.id),
str(self.jason.id), str(self.john.id),
])
# Check the tooltip is still there after moving: ticket #20821
to_lisa_select_option = self.selenium.find_element_by_css_selector(
'{} > option[value="{}"]'.format(to_box, self.lisa.id)
)
self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))
# Remove some options -------------------------------------------------
self.select_option(to_box, str(self.lisa.id))
self.select_option(to_box, str(self.bob.id))
self.assertActiveButtons(mode, field_name, False, True, True, True)
self.selenium.find_element_by_id(remove_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)
])
self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id)])
# Choose some more options --------------------------------------------
self.select_option(from_box, str(self.arthur.id))
self.select_option(from_box, str(self.cliff.id))
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id),
])
self.assertSelectOptions(to_box, [
str(self.jason.id), str(self.john.id),
str(self.arthur.id), str(self.cliff.id),
])
# Choose some more options --------------------------------------------
self.select_option(from_box, str(self.peter.id))
self.select_option(from_box, str(self.lisa.id))
# Confirm they're selected after clicking inactive buttons: ticket #26575
self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)])
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)])
self.deselect_option(from_box, str(self.peter.id))
self.deselect_option(from_box, str(self.lisa.id))
self.select_option(to_box, str(self.jason.id))
self.select_option(to_box, str(self.john.id))
self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)])
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)])
# Unselect the options ------------------------------------------------
self.deselect_option(to_box, str(self.jason.id))
self.deselect_option(to_box, str(self.john.id))
# Pressing buttons shouldn't change the URL.
self.assertEqual(self.selenium.current_url, original_url)
def test_basic(self):
self.selenium.set_window_size(1024, 768)
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))
self.wait_page_ready()
self.execute_basic_operations('vertical', 'students')
self.execute_basic_operations('horizontal', 'alumni')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_ready()
self.school = School.objects.get(id=self.school.id)
self.assertEqual(list(self.school.students.all()), [self.arthur, self.cliff, self.jason, self.john])
self.assertEqual(list(self.school.alumni.all()), [self.arthur, self.cliff, self.jason, self.john])
def test_filter(self):
from selenium.webdriver.common.keys import Keys
self.selenium.set_window_size(1024, 768)
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))
for field_name in ['students', 'alumni']:
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
input = self.selenium.find_element_by_id('id_%s_input' % field_name)
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys('R')
self.assertSelectOptions(from_box, [str(self.arthur.id)])
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
self.select_option(from_box, str(self.jason.id))
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id)])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.peter.id), str(self.jason.id),
])
self.select_option(to_box, str(self.lisa.id))
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jenny.id),
str(self.john.id), str(self.lisa.id),
])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
self.select_option(to_box, str(self.jason.id))
self.selenium.find_element_by_id(remove_link).click()
input.send_keys('ja')
self.assertSelectOptions(from_box, [str(self.jason.id)])
input.send_keys([Keys.ENTER])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE])
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.school = School.objects.get(id=self.school.id)
self.assertEqual(list(self.school.students.all()), [self.jason, self.peter])
self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter])
def test_back_button_bug(self):
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,))
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element_by_link_text('Home').click()
self.selenium.back()
expected_unselected_values = [
str(self.arthur.id), str(self.bob.id), str(self.cliff.id),
str(self.jason.id), str(self.jenny.id), str(self.john.id),
]
expected_selected_values = [str(self.lisa.id), str(self.peter.id)]
self.assertSelectOptions('#id_students_from', expected_unselected_values)
self.assertSelectOptions('#id_students_to', expected_selected_values)
self.assertSelectOptions('#id_alumni_from', expected_unselected_values)
self.assertSelectOptions('#id_alumni_to', expected_selected_values)
def test_refresh_page(self):
self.school.students.add(self.arthur, self.jason)
self.school.alumni.add(self.arthur, self.jason)
self.admin_login(username='super', password='secret', login_url='/')
change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,))
self.selenium.get(self.live_server_url + change_url)
options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option'))
self.assertEqual(options_len, 2)
with self.wait_page_loaded():
self.selenium.execute_script("location.reload()")
options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option'))
self.assertEqual(options_len, 2)
class AdminRawIdWidgetSeleniumTests(AdminWidgetSeleniumTestCase):
def setUp(self):
super().setUp()
Band.objects.create(id=42, name='Bogey Blues')
Band.objects.create(id=98, name='Green Potatoes')
def test_ForeignKey(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add'))
main_window = self.selenium.current_window_handle
self.assertEqual(self.selenium.find_element_by_id('id_main_band').get_attribute('value'), '')
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the other selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '98')
def test_many_to_many(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add'))
main_window = self.selenium.current_window_handle
self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'), '')
self.assertEqual(
self.selenium.find_element_by_css_selector('.field-supporting_bands div.help').text,
'Supporting Bands.'
)
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the two selected bands' ids
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42,98')
class RelatedFieldWidgetSeleniumTests(AdminWidgetSeleniumTestCase):
def test_ForeignKey_using_to_field(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_profile_add'))
main_window = self.selenium.current_window_handle
self.selenium.find_element_by_id('add_id_user').click()
self.wait_for_and_switch_to_popup()
password_field = self.selenium.find_element_by_id('id_password')
password_field.send_keys('password')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'newuser'
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
self.selenium.find_element_by_css_selector('#id_user option[value=newuser]')
self.selenium.find_element_by_id('change_id_user').click()
self.wait_for_and_switch_to_popup()
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'changednewuser'
username_field.clear()
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]')
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.wait_for_text('li.success', 'The profile “changednewuser” was added successfully.')
profiles = Profile.objects.all()
self.assertEqual(len(profiles), 1)
self.assertEqual(profiles[0].user.username, username_value)
# Dataset record: extra/tsfresh_examples/tsfresh/feature_selection/feature_selector.py (repo davidcrowland/layer_vb_tagging, MIT licence)
# -*- coding: utf-8 -*-
# This file as well as the whole tsfresh package are licenced under the MIT licence (see the LICENCE.txt)
# Maximilian Christ (maximilianchrist.com), Blue Yonder Gmbh, 2016
"""
Contains a feature selection method that evaluates the importance of the different extracted features. To do so,
for every feature the influence on the target is evaluated by a univariate test and the p-value is calculated.
The methods that calculate the p-values are called feature selectors.
Afterwards, the Benjamini Hochberg procedure, which is a multiple testing procedure, decides which features to keep
and which to cut off (solely based on the p-values).
"""
from __future__ import absolute_import, division, print_function
from functools import partial
from builtins import zip
from builtins import range
import os
import numpy as np
import pandas as pd
import logging
from multiprocessing import Pool
from tsfresh.feature_selection.significance_tests import target_binary_feature_real_test, \
target_real_feature_binary_test, target_real_feature_real_test, target_binary_feature_binary_test
from tsfresh import defaults
_logger = logging.getLogger(__name__)
def check_fs_sig_bh(X, y,
n_processes=defaults.N_PROCESSES,
chunksize=defaults.CHUNKSIZE,
fdr_level=defaults.FDR_LEVEL,
hypotheses_independent=defaults.HYPOTHESES_INDEPENDENT,
test_for_binary_target_real_feature=defaults.TEST_FOR_BINARY_TARGET_REAL_FEATURE):
"""
The wrapper function that calls the significance test functions in this package.
In total, for each feature from the input pandas.DataFrame a univariate feature significance test is conducted.
Those tests generate p-values that are then evaluated by the Benjamini Hochberg procedure to decide which features
to keep and which to delete.
We are testing
:math:`H_0` = the Feature is not relevant and can not be added
against
:math:`H_1` = the Feature is relevant and should be kept
or in other words
:math:`H_0` = Target and Feature are independent / the Feature has no influence on the target
:math:`H_1` = Target and Feature are associated / dependent
When the target is binary this becomes
:math:`H_0 = \\left( F_{\\text{target}=1} = F_{\\text{target}=0} \\right)`
:math:`H_1 = \\left( F_{\\text{target}=1} \\neq F_{\\text{target}=0} \\right)`
Where :math:`F` is the distribution of the feature (restricted to the two target classes).
In the same way we can state the hypothesis when the feature is binary
:math:`H_0 = \\left( T_{\\text{feature}=1} = T_{\\text{feature}=0} \\right)`
:math:`H_1 = \\left( T_{\\text{feature}=1} \\neq T_{\\text{feature}=0} \\right)`
Here :math:`T` is the distribution of the target.
TODO: And for real valued?
:param X: The DataFrame containing all the features and the target
:type X: pandas.DataFrame
:param y: The target vector
:type y: pandas.Series
:param test_for_binary_target_real_feature: Which test to be used for binary target, real feature
:type test_for_binary_target_real_feature: str
:param fdr_level: The FDR level that should be respected, this is the theoretical expected percentage of irrelevant
features among all created features.
:type fdr_level: float
:param hypotheses_independent: Can the significance of the features be assumed to be independent?
Normally, this should be set to False as the features are never
independent (e.g. mean and median)
:type hypotheses_independent: bool
:param n_processes: Number of processes to use during the p-value calculation
:type n_processes: int
:param chunksize: Size of the chunks submitted to the worker processes
:type chunksize: int
:return: A pandas.DataFrame with each column of the input DataFrame X as index with information on the significance
of this particular feature. The DataFrame has the columns
"Feature",
"type" (binary, real or const),
"p_value" (the significance of this feature as a p-value, lower means more significant)
"rejected" (if the Benjamini Hochberg procedure rejected this feature)
:rtype: pandas.DataFrame
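
Example (a minimal, illustrative sketch; the random data, the column names and
the import path are assumptions for demonstration only)::

    import numpy as np
    import pandas as pd
    from tsfresh.feature_selection.feature_selector import check_fs_sig_bh

    X = pd.DataFrame({'feature_1': np.random.randn(100),
                      'feature_2': np.random.randn(100)})
    y = pd.Series(np.random.randint(0, 2, 100))  # binary target
    df_sig = check_fs_sig_bh(X, y)
    relevant_features = df_sig.loc[df_sig['rejected'], 'Feature']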
"""
target_is_binary = len(set(y)) == 2
# TODO: solve the multiclass case. For a multiclass target the algorithm currently treats the problem as
# regression. Instead one could perform a binary one-versus-all classification.
# Only allow entries for which the target is known!
y = y.astype(float)
# ``y == np.NaN`` is always False, so use isnull() to really drop unknown targets.
known_target = ~y.isnull()
X, y = X.copy().loc[known_target, :], y[known_target]
# Create the DataFrame df_features containing the information about the different hypotheses
# Every row contains information over one feature column from X
df_features = pd.DataFrame()
df_features['Feature'] = list(set(X.columns))
df_features = df_features.set_index('Feature', drop=False)
# Add relevant columns to df_features
df_features["rejected"] = np.nan
df_features["type"] = np.nan
df_features["p_value"] = np.nan
# Calculate the feature significance in parallel
pool = Pool(n_processes)
# Helper function which wraps _calculate_p_value with many arguments already set
f = partial(_calculate_p_value, y=y,
target_is_binary=target_is_binary,
test_for_binary_target_real_feature=test_for_binary_target_real_feature)
results = pool.map(f, [X[feature] for feature in df_features['Feature']], chunksize=chunksize)
p_values_of_features = pd.DataFrame(results)
df_features.update(p_values_of_features)
pool.close()
pool.join()
# Perform the real feature rejection
if "const" in set(df_features.type):
df_features_bh = benjamini_hochberg_test(df_features.loc[~(df_features.type == "const")],
hypotheses_independent, fdr_level)
df_features = pd.concat([df_features_bh, df_features.loc[df_features.type == "const"]])
else:
df_features = benjamini_hochberg_test(df_features, hypotheses_independent, fdr_level)
# It is very important that we have a boolean "rejected" column, so we do a cast here to be sure
df_features["rejected"] = df_features["rejected"].astype("bool")
if defaults.WRITE_SELECTION_REPORT:
# Write results of BH - Test to file
if not os.path.exists(defaults.RESULT_DIR):
os.mkdir(defaults.RESULT_DIR)
with open(os.path.join(defaults.RESULT_DIR, "fs_bh_results.txt"), 'w') as file_out:
file_out.write(("Performed BH Test to control the false discovery rate(FDR); \n"
"FDR-Level={0};Hypothesis independent={1}\n"
).format(fdr_level, hypotheses_independent))
df_features.to_csv(index=False, path_or_buf=file_out, sep=';', float_format='%.4f')
return df_features
def _calculate_p_value(feature_column, y, target_is_binary, test_for_binary_target_real_feature):
"""
Internal helper function to calculate the p-value of a given feature using one of the dedicated
functions target_*_feature_*_test.
:param feature_column: the feature column.
:type feature_column: pandas.Series
:param y: the binary target vector
:type y: pandas.Series
:param target_is_binary: Whether the target is binary or not
:type target_is_binary: bool
:param test_for_binary_target_real_feature: The significance test to be used for binary target and real valued
features. Either ``'mann'`` for the Mann-Whitney-U test or ``'smir'``
for the Kolmogorov-Smirnov test.
:type test_for_binary_target_real_feature: str
:return: the p-value of the feature significance test and the type of the tested feature as a Series.
Lower p-values indicate a higher feature significance.
:rtype: pd.Series
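
Example (illustrative sketch; the random data is made up and ``'mann'`` selects
the Mann-Whitney-U test described above)::

    import numpy as np
    import pandas as pd

    feature = pd.Series(np.random.randn(50), name='feature_1')
    y = pd.Series(np.random.randint(0, 2, 50))
    res = _calculate_p_value(feature, y, target_is_binary=True,
                             test_for_binary_target_real_feature='mann')
    # res is a pd.Series named 'feature_1' with entries 'p_value' and 'type'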
"""
# Do not process constant features
if len(pd.unique(feature_column.values)) == 1:
_logger.warning("[test_feature_significance] Feature {} is constant".format(feature_column.name))
return pd.Series({"type": "const", "rejected": False}, name=feature_column.name)
else:
if target_is_binary:
# Decide if the current feature is binary or not
if len(set(feature_column.values)) == 2:
type = "binary"
p_value = target_binary_feature_binary_test(feature_column, y)
else:
type = "real"
p_value = target_binary_feature_real_test(feature_column, y, test_for_binary_target_real_feature)
else:
# Decide if the current feature is binary or not
if len(set(feature_column.values)) == 2:
type = "binary"
p_value = target_real_feature_binary_test(feature_column, y)
else:
type = "real"
p_value = target_real_feature_real_test(feature_column, y)
return pd.Series({"p_value": p_value, "type": type}, name=feature_column.name)
def benjamini_hochberg_test(df_pvalues, hypotheses_independent, fdr_level):
"""
This is an implementation of the Benjamini Hochberg procedure that decides which of the hypotheses belonging
to the different p-values in df_pvalues to reject. While doing so, this test controls the false discovery rate,
which is the expected ratio of false rejections to all rejections:
.. math::
FDR = \\mathbb{E} \\left [ \\frac{ |\\text{false rejections}| }{ |\\text{all rejections}|} \\right]
References
----------
.. [1] Benjamini, Yoav and Yekutieli, Daniel (2001).
The control of the false discovery rate in multiple testing under dependency.
Annals of statistics, 1165--1188
:param df_pvalues: This DataFrame should contain the p_values of the different hypotheses in a column named
"p_values".
:type df_pvalues: pandas.DataFrame
:param hypotheses_independent: Can the significance of the features be assumed to be independent?
Normally, this should be set to False as the features are never
independent (e.g. mean and median)
:type hypotheses_independent: bool
:param fdr_level: The FDR level that should be respected, this is the theoretical expected percentage of irrelevant
features among all created features.
:type fdr_level: float
:return: The same DataFrame as the input, but with an added boolean column "rejected".
:rtype: pandas.DataFrame
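
Example (illustrative sketch with made-up p-values)::

    import pandas as pd

    df = pd.DataFrame({'p_value': [0.001, 0.01, 0.2, 0.9]},
                      index=['f1', 'f2', 'f3', 'f4'])
    df = benjamini_hochberg_test(df, hypotheses_independent=True, fdr_level=0.05)
    # The thresholds are T = [0.0125, 0.025, 0.0375, 0.05]; only the two smallest
    # p-values fall below them, so 'rejected' is True for 'f1' and 'f2' only.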
"""
# Get auxiliary variables and vectors
df_pvalues = df_pvalues.sort_values(by="p_value")
m = len(df_pvalues)
K = list(range(1, m + 1))
# Calculate the weight vector C
if hypotheses_independent:
# c(k) = 1
C = [1] * m
else:
# c(k) = \sum_{i=1}^m 1/i
C = [sum([1.0 / i for i in range(1, k + 1)]) for k in K]
# Calculate the vector T to compare to the p_value
T = [fdr_level * k / m * 1.0 / c for k, c in zip(K, C)]
# Get the last rejected p_value
try:
k_max = list(df_pvalues.p_value <= T).index(False)
except ValueError:
k_max = m
# Add the column denoting if hypothesis was rejected
df_pvalues["rejected"] = [True] * k_max + [False] * (m - k_max)
return df_pvalues
# Dataset record: docs/conf.py (repo coordt/django-faq, BSD-3-Clause licence)
# -*- coding: utf-8 -*-
#
# django-faq documentation build configuration file, created by
# sphinx-quickstart on Sat Sep 17 13:09:21 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
intersphinx_mapping = {
'python': ('http://python.readthedocs.org/en/latest/', None),
'django': ('http://django.readthedocs.org/en/latest/', None),
'sphinx': ('http://sphinx.readthedocs.org/en/latest/', None),
}
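# With this mapping, documents in the project can cross-reference objects from
# the external inventories by prefixing the target with the mapping key, e.g.
# :class:`python:dict` (illustrative example only; not required by these docs).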
# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'django-faq'
copyright = '2012, Ben Spaulding'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
release = '0.8.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-faqdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-faq.tex', 'django-faq Documentation',
'Ben Spaulding', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-faq', 'django-faq Documentation',
['Ben Spaulding'], 1)
]
# Example configuration for intersphinx: refer to the Python standard library.
# (Commented out so it does not override the intersphinx_mapping defined above.)
# intersphinx_mapping = {'http://docs.python.org/': None}
# Dataset record: matplotlib/tests/test_backends_interactive.py (vendored site-packages copy in repo ZohaibZ/DataScience, MIT licence)
import importlib
import importlib.util
import json
import os
import signal
import subprocess
import sys
import time
import urllib.request
import pytest
import matplotlib as mpl
# Minimal smoke-testing of the backends for which the dependencies are
# PyPI-installable on CI. They are not available for all tested Python
# versions so we don't fail on missing backends.
def _get_testable_interactive_backends():
backends = []
for deps, backend in [
(["cairo", "gi"], "gtk3agg"),
(["cairo", "gi"], "gtk3cairo"),
(["PyQt5"], "qt5agg"),
(["PyQt5", "cairocffi"], "qt5cairo"),
(["PySide2"], "qt5agg"),
(["PySide2", "cairocffi"], "qt5cairo"),
(["tkinter"], "tkagg"),
(["wx"], "wx"),
(["wx"], "wxagg"),
(["matplotlib.backends._macosx"], "macosx"),
]:
reason = None
missing = [dep for dep in deps if not importlib.util.find_spec(dep)]
if sys.platform == "linux" and not os.environ.get("DISPLAY"):
reason = "$DISPLAY is unset"
elif missing:
reason = "{} cannot be imported".format(", ".join(missing))
elif backend == 'macosx' and os.environ.get('TF_BUILD'):
reason = "macosx backend fails on Azure"
if reason:
backend = pytest.param(
backend,
marks=pytest.mark.skip(
reason=f"Skipping {backend} because {reason}"))
elif backend.startswith('wx') and sys.platform == 'darwin':
# ignore on OSX because that's currently broken (github #16849)
backend = pytest.param(
backend,
marks=pytest.mark.xfail(reason='github #16849'))
backends.append(backend)
return backends
# Using a timer not only allows testing of timers (on other backends), but is
# also necessary on gtk3 and wx, where a direct call to key_press_event("q")
# from draw_event causes breakage due to the canvas widget being deleted too
# early. Also, gtk3 redefines key_press_event with a different signature, so
# we directly invoke it from the superclass instead.
_test_script = """\
import importlib
import importlib.util
import io
import json
import sys
from unittest import TestCase
import matplotlib as mpl
from matplotlib import pyplot as plt, rcParams
from matplotlib.backend_bases import FigureCanvasBase
rcParams.update({
"webagg.open_in_browser": False,
"webagg.port_retries": 1,
})
if len(sys.argv) >= 2: # Second argument is json-encoded rcParams.
rcParams.update(json.loads(sys.argv[1]))
backend = plt.rcParams["backend"].lower()
assert_equal = TestCase().assertEqual
assert_raises = TestCase().assertRaises
if backend.endswith("agg") and not backend.startswith(("gtk3", "web")):
# Force interactive framework setup.
plt.figure()
# Check that we cannot switch to a backend using another interactive
# framework, but can switch to a backend using cairo instead of agg, or a
# non-interactive backend. In the first case, we use tkagg as the "other"
# interactive backend as it is (essentially) guaranteed to be present.
# Moreover, don't test switching away from gtk3 (as Gtk.main_level() is
# not set up at this point yet) and webagg (which uses no interactive
# framework).
if backend != "tkagg":
with assert_raises(ImportError):
mpl.use("tkagg", force=True)
def check_alt_backend(alt_backend):
mpl.use(alt_backend, force=True)
fig = plt.figure()
assert_equal(
type(fig.canvas).__module__,
"matplotlib.backends.backend_{}".format(alt_backend))
if importlib.util.find_spec("cairocffi"):
check_alt_backend(backend[:-3] + "cairo")
check_alt_backend("svg")
mpl.use(backend, force=True)
fig, ax = plt.subplots()
assert_equal(
type(fig.canvas).__module__,
"matplotlib.backends.backend_{}".format(backend))
ax.plot([0, 1], [2, 3])
timer = fig.canvas.new_timer(1.) # Test that floats are cast to int as needed.
timer.add_callback(FigureCanvasBase.key_press_event, fig.canvas, "q")
# Trigger quitting upon draw.
fig.canvas.mpl_connect("draw_event", lambda event: timer.start())
fig.canvas.mpl_connect("close_event", print)
result = io.BytesIO()
fig.savefig(result, format='png')
plt.show()
# Ensure that the window is really closed.
plt.pause(0.5)
# Test that saving works after interactive window is closed, but the figure is
# not deleted.
result_after = io.BytesIO()
fig.savefig(result_after, format='png')
if not backend.startswith('qt5') and sys.platform == 'darwin':
# FIXME: This should be enabled everywhere once Qt5 is fixed on macOS to
# not resize incorrectly.
assert_equal(result.getvalue(), result_after.getvalue())
"""
_test_timeout = 10 # Empirically, 1s is not enough on Travis.
@pytest.mark.parametrize("backend", _get_testable_interactive_backends())
@pytest.mark.parametrize("toolbar", ["toolbar2", "toolmanager"])
@pytest.mark.flaky(reruns=3)
def test_interactive_backend(backend, toolbar):
if backend == "macosx":
if toolbar == "toolmanager":
pytest.skip("toolmanager is not implemented for macosx.")
if toolbar == "toolbar2" and os.environ.get('TRAVIS'):
# See https://github.com/matplotlib/matplotlib/issues/18213
pytest.skip("toolbar2 for macosx is buggy on Travis.")
proc = subprocess.run(
[sys.executable, "-c", _test_script,
json.dumps({"toolbar": toolbar})],
env={**os.environ, "MPLBACKEND": backend, "SOURCE_DATE_EPOCH": "0"},
timeout=_test_timeout,
stdout=subprocess.PIPE, universal_newlines=True)
if proc.returncode:
pytest.fail("The subprocess returned with non-zero exit status "
f"{proc.returncode}.")
assert proc.stdout.count("CloseEvent") == 1
@pytest.mark.skipif('TF_BUILD' in os.environ,
reason="this test fails an azure for unknown reasons")
@pytest.mark.skipif(os.name == "nt", reason="Cannot send SIGINT on Windows.")
def test_webagg():
pytest.importorskip("tornado")
proc = subprocess.Popen([sys.executable, "-c", _test_script],
env={**os.environ, "MPLBACKEND": "webagg",
"SOURCE_DATE_EPOCH": "0"})
url = "http://{}:{}".format(
mpl.rcParams["webagg.address"], mpl.rcParams["webagg.port"])
timeout = time.perf_counter() + _test_timeout
while True:
try:
retcode = proc.poll()
# check that the subprocess for the server is not dead
assert retcode is None
conn = urllib.request.urlopen(url)
break
except urllib.error.URLError:
if time.perf_counter() > timeout:
pytest.fail("Failed to connect to the webagg server.")
else:
continue
conn.close()
proc.send_signal(signal.SIGINT)
assert proc.wait(timeout=_test_timeout) == 0
| 36.340206
| 79
| 0.650355
|
import importlib
import importlib.util
import json
import os
import signal
import subprocess
import sys
import time
import urllib.request
import pytest
import matplotlib as mpl
def _get_testable_interactive_backends():
backends = []
for deps, backend in [
(["cairo", "gi"], "gtk3agg"),
(["cairo", "gi"], "gtk3cairo"),
(["PyQt5"], "qt5agg"),
(["PyQt5", "cairocffi"], "qt5cairo"),
(["PySide2"], "qt5agg"),
(["PySide2", "cairocffi"], "qt5cairo"),
(["tkinter"], "tkagg"),
(["wx"], "wx"),
(["wx"], "wxagg"),
(["matplotlib.backends._macosx"], "macosx"),
]:
reason = None
missing = [dep for dep in deps if not importlib.util.find_spec(dep)]
if sys.platform == "linux" and not os.environ.get("DISPLAY"):
reason = "$DISPLAY is unset"
elif missing:
reason = "{} cannot be imported".format(", ".join(missing))
elif backend == 'macosx' and os.environ.get('TF_BUILD'):
reason = "macosx backend fails on Azure"
if reason:
backend = pytest.param(
backend,
marks=pytest.mark.skip(
reason=f"Skipping {backend} because {reason}"))
elif backend.startswith('wx') and sys.platform == 'darwin':
# ignore on OSX because that's currently broken (github #16849)
backend = pytest.param(
backend,
marks=pytest.mark.xfail(reason='github #16849'))
backends.append(backend)
return backends
_test_script = """\
import importlib
import importlib.util
import io
import json
import sys
from unittest import TestCase
import matplotlib as mpl
from matplotlib import pyplot as plt, rcParams
from matplotlib.backend_bases import FigureCanvasBase
rcParams.update({
"webagg.open_in_browser": False,
"webagg.port_retries": 1,
})
if len(sys.argv) >= 2: # Second argument is json-encoded rcParams.
rcParams.update(json.loads(sys.argv[1]))
backend = plt.rcParams["backend"].lower()
assert_equal = TestCase().assertEqual
assert_raises = TestCase().assertRaises
if backend.endswith("agg") and not backend.startswith(("gtk3", "web")):
# Force interactive framework setup.
plt.figure()
# Check that we cannot switch to a backend using another interactive
# framework, but can switch to a backend using cairo instead of agg, or a
# non-interactive backend. In the first case, we use tkagg as the "other"
# interactive backend as it is (essentially) guaranteed to be present.
# Moreover, don't test switching away from gtk3 (as Gtk.main_level() is
# not set up at this point yet) and webagg (which uses no interactive
# framework).
if backend != "tkagg":
with assert_raises(ImportError):
mpl.use("tkagg", force=True)
def check_alt_backend(alt_backend):
mpl.use(alt_backend, force=True)
fig = plt.figure()
assert_equal(
type(fig.canvas).__module__,
"matplotlib.backends.backend_{}".format(alt_backend))
if importlib.util.find_spec("cairocffi"):
check_alt_backend(backend[:-3] + "cairo")
check_alt_backend("svg")
mpl.use(backend, force=True)
fig, ax = plt.subplots()
assert_equal(
type(fig.canvas).__module__,
"matplotlib.backends.backend_{}".format(backend))
ax.plot([0, 1], [2, 3])
timer = fig.canvas.new_timer(1.) # Test that floats are cast to int as needed.
timer.add_callback(FigureCanvasBase.key_press_event, fig.canvas, "q")
# Trigger quitting upon draw.
fig.canvas.mpl_connect("draw_event", lambda event: timer.start())
fig.canvas.mpl_connect("close_event", print)
result = io.BytesIO()
fig.savefig(result, format='png')
plt.show()
# Ensure that the window is really closed.
plt.pause(0.5)
# Test that saving works after interactive window is closed, but the figure is
# not deleted.
result_after = io.BytesIO()
fig.savefig(result_after, format='png')
if not backend.startswith('qt5') and sys.platform == 'darwin':
# FIXME: This should be enabled everywhere once Qt5 is fixed on macOS to
# not resize incorrectly.
assert_equal(result.getvalue(), result_after.getvalue())
"""
_test_timeout = 10 # Empirically, 1s is not enough on Travis.
@pytest.mark.parametrize("backend", _get_testable_interactive_backends())
@pytest.mark.parametrize("toolbar", ["toolbar2", "toolmanager"])
@pytest.mark.flaky(reruns=3)
def test_interactive_backend(backend, toolbar):
if backend == "macosx":
if toolbar == "toolmanager":
pytest.skip("toolmanager is not implemented for macosx.")
if toolbar == "toolbar2" and os.environ.get('TRAVIS'):
# See https://github.com/matplotlib/matplotlib/issues/18213
pytest.skip("toolbar2 for macosx is buggy on Travis.")
proc = subprocess.run(
[sys.executable, "-c", _test_script,
json.dumps({"toolbar": toolbar})],
env={**os.environ, "MPLBACKEND": backend, "SOURCE_DATE_EPOCH": "0"},
timeout=_test_timeout,
stdout=subprocess.PIPE, universal_newlines=True)
if proc.returncode:
pytest.fail("The subprocess returned with non-zero exit status "
f"{proc.returncode}.")
assert proc.stdout.count("CloseEvent") == 1
@pytest.mark.skipif('TF_BUILD' in os.environ,
reason="this test fails an azure for unknown reasons")
@pytest.mark.skipif(os.name == "nt", reason="Cannot send SIGINT on Windows.")
def test_webagg():
pytest.importorskip("tornado")
proc = subprocess.Popen([sys.executable, "-c", _test_script],
env={**os.environ, "MPLBACKEND": "webagg",
"SOURCE_DATE_EPOCH": "0"})
url = "http://{}:{}".format(
mpl.rcParams["webagg.address"], mpl.rcParams["webagg.port"])
timeout = time.perf_counter() + _test_timeout
while True:
try:
retcode = proc.poll()
# check that the subprocess for the server is not dead
assert retcode is None
conn = urllib.request.urlopen(url)
break
except urllib.error.URLError:
if time.perf_counter() > timeout:
pytest.fail("Failed to connect to the webagg server.")
else:
continue
conn.close()
proc.send_signal(signal.SIGINT)
assert proc.wait(timeout=_test_timeout) == 0
| true
| true
|
f701f37a4174ee79bfb8e774f9a1ee876fc707d0
| 1,077
|
py
|
Python
|
tests/settings.py
|
chefgonpachi/MISO
|
cf2109550be7e7ed6740ff736b1e1778dffaab9b
|
[
"Apache-2.0"
] | 80
|
2020-11-30T00:25:51.000Z
|
2022-03-15T12:02:37.000Z
|
brownie/tests/settings.py
|
Certora/miso
|
9575fdf8aeccf1d97ba389b8428194e66187b6c1
|
[
"Apache-2.0"
] | 8
|
2021-09-01T17:33:57.000Z
|
2022-02-25T20:11:27.000Z
|
brownie/tests/settings.py
|
Certora/miso
|
9575fdf8aeccf1d97ba389b8428194e66187b6c1
|
[
"Apache-2.0"
] | 30
|
2020-11-30T00:25:57.000Z
|
2022-01-09T06:01:27.000Z
|
TENPOW18 = 10 ** 18
TENPOW6 = 10 ** 6
ZERO_ADDRESS = '0x0000000000000000000000000000000000000000'
ETH_ADDRESS = '0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE'
AUCTION_TOKENS = 10000 * TENPOW18
AUCTION_TIME = 50000
AUCTION_START_PRICE = 100 * TENPOW18
AUCTION_RESERVE = 0.001 * TENPOW18
AUCTION_MINIMUM_COMMITMENT = 10 * TENPOW18
CROWDSALE_TOKENS = 10000 * TENPOW18
CROWDSALE_TOKENS_2 = 10 * TENPOW18
CROWDSALE_TIME = 50000
CROWDSALE_RATE = 0.001 * TENPOW18
CROWDSALE_RATE_2 = 1 * TENPOW18
CROWDSALE_GOAL = 10 * TENPOW18
CROWDSALE_GOAL_2 = 5 * TENPOW18
CROWDSALE_RATE_USDC = 0.0005 * TENPOW6
CROWDSALE_RATE_USDC_2 = 2 * TENPOW6
CROWDSALE_GOAL_USDC = 10 * TENPOW6
CROWDSALE_GOAL_USDC_2 = 5 * TENPOW6
SECONDS_IN_DAY = 24*60*60
TOKENS_TO_MINT = 1000 * TENPOW18
ETH_TO_DEPOSIT = 1 * TENPOW18
POOL_LAUNCH_DEADLINE = 10 * SECONDS_IN_DAY
POOL_LAUNCH_WINDOW = 3 * SECONDS_IN_DAY
POOL_LAUNCH_LOCKTIME = 30 * SECONDS_IN_DAY
POOL_LIQUIDITY_PERCENT = 100
HYPERBOLIC_AUCTION_FACTOR = 2
DOCUMENT_NAME = "MISO"
DOCUMENT_DATA = "MISO: Do you comply?"
USDC_TOKENS = 1000000 * TENPOW18
| 24.477273
| 59
| 0.793872
|
TENPOW18 = 10 ** 18
TENPOW6 = 10 ** 6
ZERO_ADDRESS = '0x0000000000000000000000000000000000000000'
ETH_ADDRESS = '0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE'
AUCTION_TOKENS = 10000 * TENPOW18
AUCTION_TIME = 50000
AUCTION_START_PRICE = 100 * TENPOW18
AUCTION_RESERVE = 0.001 * TENPOW18
AUCTION_MINIMUM_COMMITMENT = 10 * TENPOW18
CROWDSALE_TOKENS = 10000 * TENPOW18
CROWDSALE_TOKENS_2 = 10 * TENPOW18
CROWDSALE_TIME = 50000
CROWDSALE_RATE = 0.001 * TENPOW18
CROWDSALE_RATE_2 = 1 * TENPOW18
CROWDSALE_GOAL = 10 * TENPOW18
CROWDSALE_GOAL_2 = 5 * TENPOW18
CROWDSALE_RATE_USDC = 0.0005 * TENPOW6
CROWDSALE_RATE_USDC_2 = 2 * TENPOW6
CROWDSALE_GOAL_USDC = 10 * TENPOW6
CROWDSALE_GOAL_USDC_2 = 5 * TENPOW6
SECONDS_IN_DAY = 24*60*60
TOKENS_TO_MINT = 1000 * TENPOW18
ETH_TO_DEPOSIT = 1 * TENPOW18
POOL_LAUNCH_DEADLINE = 10 * SECONDS_IN_DAY
POOL_LAUNCH_WINDOW = 3 * SECONDS_IN_DAY
POOL_LAUNCH_LOCKTIME = 30 * SECONDS_IN_DAY
POOL_LIQUIDITY_PERCENT = 100
HYPERBOLIC_AUCTION_FACTOR = 2
DOCUMENT_NAME = "MISO"
DOCUMENT_DATA = "MISO: Do you comply?"
USDC_TOKENS = 1000000 * TENPOW18
| true
| true
|
f701f391b62bbe8b77e06a6851b313415c5310ce
| 9,763
|
py
|
Python
|
google/cloud/servicecontrol_v1/services/quota_controller/async_client.py
|
parthea/python-service-control
|
94c7fc51a5f84fe5c11d35ef6b7bd64462fa6b41
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/servicecontrol_v1/services/quota_controller/async_client.py
|
parthea/python-service-control
|
94c7fc51a5f84fe5c11d35ef6b7bd64462fa6b41
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/servicecontrol_v1/services/quota_controller/async_client.py
|
parthea/python-service-control
|
94c7fc51a5f84fe5c11d35ef6b7bd64462fa6b41
|
[
"Apache-2.0"
] | 1
|
2021-04-21T11:34:34.000Z
|
2021-04-21T11:34:34.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.servicecontrol_v1.types import metric_value
from google.cloud.servicecontrol_v1.types import quota_controller
from .transports.base import QuotaControllerTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import QuotaControllerGrpcAsyncIOTransport
from .client import QuotaControllerClient
class QuotaControllerAsyncClient:
"""`Google Quota Control
API <https://cloud.google.com/service-control/overview>`__
Allows clients to allocate and release quota against a `managed
service <https://cloud.google.com/service-management/reference/rpc/google.api/servicemanagement.v1#google.api.servicemanagement.v1.ManagedService>`__.
"""
_client: QuotaControllerClient
DEFAULT_ENDPOINT = QuotaControllerClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = QuotaControllerClient.DEFAULT_MTLS_ENDPOINT
common_billing_account_path = staticmethod(
QuotaControllerClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
QuotaControllerClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(QuotaControllerClient.common_folder_path)
parse_common_folder_path = staticmethod(
QuotaControllerClient.parse_common_folder_path
)
common_organization_path = staticmethod(
QuotaControllerClient.common_organization_path
)
parse_common_organization_path = staticmethod(
QuotaControllerClient.parse_common_organization_path
)
common_project_path = staticmethod(QuotaControllerClient.common_project_path)
parse_common_project_path = staticmethod(
QuotaControllerClient.parse_common_project_path
)
common_location_path = staticmethod(QuotaControllerClient.common_location_path)
parse_common_location_path = staticmethod(
QuotaControllerClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
QuotaControllerAsyncClient: The constructed client.
"""
return QuotaControllerClient.from_service_account_info.__func__(QuotaControllerAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
QuotaControllerAsyncClient: The constructed client.
"""
return QuotaControllerClient.from_service_account_file.__func__(QuotaControllerAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@property
def transport(self) -> QuotaControllerTransport:
"""Return the transport used by the client instance.
Returns:
QuotaControllerTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(QuotaControllerClient).get_transport_class, type(QuotaControllerClient)
)
def __init__(
self,
*,
credentials: credentials.Credentials = None,
transport: Union[str, QuotaControllerTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the quota controller client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.QuotaControllerTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = QuotaControllerClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def allocate_quota(
self,
request: quota_controller.AllocateQuotaRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> quota_controller.AllocateQuotaResponse:
r"""Attempts to allocate quota for the specified consumer. It should
be called before the operation is executed.
This method requires the ``servicemanagement.services.quota``
permission on the specified service. For more information, see
`Cloud IAM <https://cloud.google.com/iam>`__.
**NOTE:** The client **must** fail-open on server errors
``INTERNAL``, ``UNKNOWN``, ``DEADLINE_EXCEEDED``, and
``UNAVAILABLE``. To ensure system reliability, the server may
inject these errors to prohibit any hard dependency on the quota
functionality.
Args:
request (:class:`google.cloud.servicecontrol_v1.types.AllocateQuotaRequest`):
The request object. Request message for the
AllocateQuota method.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.servicecontrol_v1.types.AllocateQuotaResponse:
Response message for the
AllocateQuota method.
"""
# Create or coerce a protobuf request object.
request = quota_controller.AllocateQuotaRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.allocate_quota,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-service-control",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("QuotaControllerAsyncClient",)
| 40.342975
| 154
| 0.693434
|
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions
from google.api_core import exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials
from google.oauth2 import service_account
from google.cloud.servicecontrol_v1.types import metric_value
from google.cloud.servicecontrol_v1.types import quota_controller
from .transports.base import QuotaControllerTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import QuotaControllerGrpcAsyncIOTransport
from .client import QuotaControllerClient
class QuotaControllerAsyncClient:
_client: QuotaControllerClient
DEFAULT_ENDPOINT = QuotaControllerClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = QuotaControllerClient.DEFAULT_MTLS_ENDPOINT
common_billing_account_path = staticmethod(
QuotaControllerClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
QuotaControllerClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(QuotaControllerClient.common_folder_path)
parse_common_folder_path = staticmethod(
QuotaControllerClient.parse_common_folder_path
)
common_organization_path = staticmethod(
QuotaControllerClient.common_organization_path
)
parse_common_organization_path = staticmethod(
QuotaControllerClient.parse_common_organization_path
)
common_project_path = staticmethod(QuotaControllerClient.common_project_path)
parse_common_project_path = staticmethod(
QuotaControllerClient.parse_common_project_path
)
common_location_path = staticmethod(QuotaControllerClient.common_location_path)
parse_common_location_path = staticmethod(
QuotaControllerClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
return QuotaControllerClient.from_service_account_info.__func__(QuotaControllerAsyncClient, info, *args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
return QuotaControllerClient.from_service_account_file.__func__(QuotaControllerAsyncClient, filename, *args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> QuotaControllerTransport:
return self._client.transport
get_transport_class = functools.partial(
type(QuotaControllerClient).get_transport_class, type(QuotaControllerClient)
)
def __init__(
self,
*,
credentials: credentials.Credentials = None,
transport: Union[str, QuotaControllerTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
self._client = QuotaControllerClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def allocate_quota(
self,
request: quota_controller.AllocateQuotaRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> quota_controller.AllocateQuotaResponse:
request = quota_controller.AllocateQuotaRequest(request)
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.allocate_quota,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-service-control",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("QuotaControllerAsyncClient",)
| true
| true
|
f701f47df818f2b9c8dc438e57bfeecedf6e1d89
| 9,208
|
py
|
Python
|
octopus/platforms/EOS/wasm.py
|
ZarvisD/octopus
|
3e238721fccfec69a69a1635b8a0dc485e525e69
|
[
"MIT"
] | 2
|
2019-01-19T07:12:02.000Z
|
2021-08-14T13:23:37.000Z
|
octopus/platforms/EOS/wasm.py
|
ZarvisD/octopus
|
3e238721fccfec69a69a1635b8a0dc485e525e69
|
[
"MIT"
] | null | null | null |
octopus/platforms/EOS/wasm.py
|
ZarvisD/octopus
|
3e238721fccfec69a69a1635b8a0dc485e525e69
|
[
"MIT"
] | 1
|
2019-01-19T07:12:05.000Z
|
2019-01-19T07:12:05.000Z
|
# extracted from:
# * https://github.com/WebAssembly/design/blob/master/BinaryEncoding.md
# * https://webassembly.github.io/spec/core/binary/instructions.html
# * https://github.com/athre0z/wasm/blob/master/wasm/opcodes.py
from wasm.immtypes import *
from wasm.opcodes import INSN_ENTER_BLOCK, INSN_LEAVE_BLOCK, INSN_BRANCH, INSN_NO_FLOW
"""
TODO: add pops and pushes values per instruction
"""
_table = {
# opcode:(mnemonic/name, imm_struct, flags, pops, pushes, description)
0x00: ('unreachable', None, INSN_NO_FLOW, 0, 0, ''),
0x01: ('nop', None, 0, 0, 0, ''),
0x02: ('block', BlockImm(), INSN_ENTER_BLOCK, 0, 0, ''),
0x03: ('loop', BlockImm(), INSN_ENTER_BLOCK, 0, 0, ''),
0x04: ('if', BlockImm(), INSN_ENTER_BLOCK, 0, 0, ''),
0x05: ('else', None, INSN_ENTER_BLOCK | INSN_LEAVE_BLOCK, 0, 0, ''),
0x0b: ('end', None, INSN_LEAVE_BLOCK, 0, 0, ''),
0x0c: ('br', BranchImm(), INSN_BRANCH, 0, 0, ''),
0x0d: ('br_if', BranchImm(), INSN_BRANCH, 0, 0, ''),
0x0e: ('br_table', BranchTableImm(), INSN_BRANCH, 0, 0, ''),
0x0f: ('return', None, INSN_NO_FLOW, 0, 0, ''),
0x10: ('call', CallImm(), INSN_BRANCH, 0, 0, ''),
0x11: ('call_indirect', CallIndirectImm(), INSN_BRANCH, 0, 0, ''),
0x1a: ('drop', None, 0, 0, 0, ''),
0x1b: ('select', None, 0, 0, 0, ''),
0x20: ('get_local', LocalVarXsImm(), 0, 0, 0, ''),
0x21: ('set_local', LocalVarXsImm(), 0, 0, 0, ''),
0x22: ('tee_local', LocalVarXsImm(), 0, 0, 0, ''),
0x23: ('get_global', GlobalVarXsImm(), 0, 0, 0, ''),
0x24: ('set_global', GlobalVarXsImm(), 0, 0, 0, ''),
0x28: ('i32.load', MemoryImm(), 0, 0, 0, ''),
0x29: ('i64.load', MemoryImm(), 0, 0, 0, ''),
0x2a: ('f32.load', MemoryImm(), 0, 0, 0, ''),
0x2b: ('f64.load', MemoryImm(), 0, 0, 0, ''),
0x2c: ('i32.load8_s', MemoryImm(), 0, 0, 0, ''),
0x2d: ('i32.load8_u', MemoryImm(), 0, 0, 0, ''),
0x2e: ('i32.load16_s', MemoryImm(), 0, 0, 0, ''),
0x2f: ('i32.load16_u', MemoryImm(), 0, 0, 0, ''),
0x30: ('i64.load8_s', MemoryImm(), 0, 0, 0, ''),
0x31: ('i64.load8_u', MemoryImm(), 0, 0, 0, ''),
0x32: ('i64.load16_s', MemoryImm(), 0, 0, 0, ''),
0x33: ('i64.load16_u', MemoryImm(), 0, 0, 0, ''),
0x34: ('i64.load32_s', MemoryImm(), 0, 0, 0, ''),
0x35: ('i64.load32_u', MemoryImm(), 0, 0, 0, ''),
0x36: ('i32.store', MemoryImm(), 0, 0, 0, ''),
0x37: ('i64.store', MemoryImm(), 0, 0, 0, ''),
0x38: ('f32.store', MemoryImm(), 0, 0, 0, ''),
0x39: ('f64.store', MemoryImm(), 0, 0, 0, ''),
0x3a: ('i32.store8', MemoryImm(), 0, 0, 0, ''),
0x3b: ('i32.store16', MemoryImm(), 0, 0, 0, ''),
0x3c: ('i64.store8', MemoryImm(), 0, 0, 0, ''),
0x3d: ('i64.store16', MemoryImm(), 0, 0, 0, ''),
0x3e: ('i64.store32', MemoryImm(), 0, 0, 0, ''),
0x3f: ('current_memory', CurGrowMemImm(), 0, 0, 0, ''),
0x40: ('grow_memory', CurGrowMemImm(), 0, 0, 0, ''),
0x41: ('i32.const', I32ConstImm(), 0, 0, 0, ''),
0x42: ('i64.const', I64ConstImm(), 0, 0, 0, ''),
0x43: ('f32.const', F32ConstImm(), 0, 0, 0, ''),
0x44: ('f64.const', F64ConstImm(), 0, 0, 0, ''),
0x45: ('i32.eqz', None, 0, 0, 0, ''),
0x46: ('i32.eq', None, 0, 0, 0, ''),
0x47: ('i32.ne', None, 0, 0, 0, ''),
0x48: ('i32.lt_s', None, 0, 0, 0, ''),
0x49: ('i32.lt_u', None, 0, 0, 0, ''),
0x4a: ('i32.gt_s', None, 0, 0, 0, ''),
0x4b: ('i32.gt_u', None, 0, 0, 0, ''),
0x4c: ('i32.le_s', None, 0, 0, 0, ''),
0x4d: ('i32.le_u', None, 0, 0, 0, ''),
0x4e: ('i32.ge_s', None, 0, 0, 0, ''),
0x4f: ('i32.ge_u', None, 0, 0, 0, ''),
0x50: ('i64.eqz', None, 0, 0, 0, ''),
0x51: ('i64.eq', None, 0, 0, 0, ''),
0x52: ('i64.ne', None, 0, 0, 0, ''),
0x53: ('i64.lt_s', None, 0, 0, 0, ''),
0x54: ('i64.lt_u', None, 0, 0, 0, ''),
0x55: ('i64.gt_s', None, 0, 0, 0, ''),
0x56: ('i64.gt_u', None, 0, 0, 0, ''),
0x57: ('i64.le_s', None, 0, 0, 0, ''),
0x58: ('i64.le_u', None, 0, 0, 0, ''),
0x59: ('i64.ge_s', None, 0, 0, 0, ''),
0x5a: ('i64.ge_u', None, 0, 0, 0, ''),
0x5b: ('f32.eq', None, 0, 0, 0, ''),
0x5c: ('f32.ne', None, 0, 0, 0, ''),
0x5d: ('f32.lt', None, 0, 0, 0, ''),
0x5e: ('f32.gt', None, 0, 0, 0, ''),
0x5f: ('f32.le', None, 0, 0, 0, ''),
0x60: ('f32.ge', None, 0, 0, 0, ''),
0x61: ('f64.eq', None, 0, 0, 0, ''),
0x62: ('f64.ne', None, 0, 0, 0, ''),
0x63: ('f64.lt', None, 0, 0, 0, ''),
0x64: ('f64.gt', None, 0, 0, 0, ''),
0x65: ('f64.le', None, 0, 0, 0, ''),
0x66: ('f64.ge', None, 0, 0, 0, ''),
0x67: ('i32.clz', None, 0, 0, 0, ''),
0x68: ('i32.ctz', None, 0, 0, 0, ''),
0x69: ('i32.popcnt', None, 0, 0, 0, ''),
0x6a: ('i32.add', None, 0, 0, 0, ''),
0x6b: ('i32.sub', None, 0, 0, 0, ''),
0x6c: ('i32.mul', None, 0, 0, 0, ''),
0x6d: ('i32.div_s', None, 0, 0, 0, ''),
0x6e: ('i32.div_u', None, 0, 0, 0, ''),
0x6f: ('i32.rem_s', None, 0, 0, 0, ''),
0x70: ('i32.rem_u', None, 0, 0, 0, ''),
0x71: ('i32.and', None, 0, 0, 0, ''),
0x72: ('i32.or', None, 0, 0, 0, ''),
0x73: ('i32.xor', None, 0, 0, 0, ''),
0x74: ('i32.shl', None, 0, 0, 0, ''),
0x75: ('i32.shr_s', None, 0, 0, 0, ''),
0x76: ('i32.shr_u', None, 0, 0, 0, ''),
0x77: ('i32.rotl', None, 0, 0, 0, ''),
0x78: ('i32.rotr', None, 0, 0, 0, ''),
0x79: ('i64.clz', None, 0, 0, 0, ''),
0x7a: ('i64.ctz', None, 0, 0, 0, ''),
0x7b: ('i64.popcnt', None, 0, 0, 0, ''),
0x7c: ('i64.add', None, 0, 0, 0, ''),
0x7d: ('i64.sub', None, 0, 0, 0, ''),
0x7e: ('i64.mul', None, 0, 0, 0, ''),
0x7f: ('i64.div_s', None, 0, 0, 0, ''),
0x80: ('i64.div_u', None, 0, 0, 0, ''),
0x81: ('i64.rem_s', None, 0, 0, 0, ''),
0x82: ('i64.rem_u', None, 0, 0, 0, ''),
0x83: ('i64.and', None, 0, 0, 0, ''),
0x84: ('i64.or', None, 0, 0, 0, ''),
0x85: ('i64.xor', None, 0, 0, 0, ''),
0x86: ('i64.shl', None, 0, 0, 0, ''),
0x87: ('i64.shr_s', None, 0, 0, 0, ''),
0x88: ('i64.shr_u', None, 0, 0, 0, ''),
0x89: ('i64.rotl', None, 0, 0, 0, ''),
0x8a: ('i64.rotr', None, 0, 0, 0, ''),
0x8b: ('f32.abs', None, 0, 0, 0, ''),
0x8c: ('f32.neg', None, 0, 0, 0, ''),
0x8d: ('f32.ceil', None, 0, 0, 0, ''),
0x8e: ('f32.floor', None, 0, 0, 0, ''),
0x8f: ('f32.trunc', None, 0, 0, 0, ''),
0x90: ('f32.nearest', None, 0, 0, 0, ''),
0x91: ('f32.sqrt', None, 0, 0, 0, ''),
0x92: ('f32.add', None, 0, 0, 0, ''),
0x93: ('f32.sub', None, 0, 0, 0, ''),
0x94: ('f32.mul', None, 0, 0, 0, ''),
0x95: ('f32.div', None, 0, 0, 0, ''),
0x96: ('f32.min', None, 0, 0, 0, ''),
0x97: ('f32.max', None, 0, 0, 0, ''),
0x98: ('f32.copysign', None, 0, 0, 0, ''),
0x99: ('f64.abs', None, 0, 0, 0, ''),
0x9a: ('f64.neg', None, 0, 0, 0, ''),
0x9b: ('f64.ceil', None, 0, 0, 0, ''),
0x9c: ('f64.floor', None, 0, 0, 0, ''),
0x9d: ('f64.trunc', None, 0, 0, 0, ''),
0x9e: ('f64.nearest', None, 0, 0, 0, ''),
0x9f: ('f64.sqrt', None, 0, 0, 0, ''),
0xa0: ('f64.add', None, 0, 0, 0, ''),
0xa1: ('f64.sub', None, 0, 0, 0, ''),
0xa2: ('f64.mul', None, 0, 0, 0, ''),
0xa3: ('f64.div', None, 0, 0, 0, ''),
0xa4: ('f64.min', None, 0, 0, 0, ''),
0xa5: ('f64.max', None, 0, 0, 0, ''),
0xa6: ('f64.copysign', None, 0, 0, 0, ''),
0xa7: ('i32.wrap/i64', None, 0, 0, 0, ''),
0xa8: ('i32.trunc_s/f32', None, 0, 0, 0, ''),
0xa9: ('i32.trunc_u/f32', None, 0, 0, 0, ''),
0xaa: ('i32.trunc_s/f64', None, 0, 0, 0, ''),
0xab: ('i32.trunc_u/f64', None, 0, 0, 0, ''),
0xac: ('i64.extend_s/i32', None, 0, 0, 0, ''),
0xad: ('i64.extend_u/i32', None, 0, 0, 0, ''),
0xae: ('i64.trunc_s/f32', None, 0, 0, 0, ''),
0xaf: ('i64.trunc_u/f32', None, 0, 0, 0, ''),
0xb0: ('i64.trunc_s/f64', None, 0, 0, 0, ''),
0xb1: ('i64.trunc_u/f64', None, 0, 0, 0, ''),
0xb2: ('f32.convert_s/i32', None, 0, 0, 0, ''),
0xb3: ('f32.convert_u/i32', None, 0, 0, 0, ''),
0xb4: ('f32.convert_s/i64', None, 0, 0, 0, ''),
0xb5: ('f32.convert_u/i64', None, 0, 0, 0, ''),
0xb6: ('f32.demote/f64', None, 0, 0, 0, ''),
0xb7: ('f64.convert_s/i32', None, 0, 0, 0, ''),
0xb8: ('f64.convert_u/i32', None, 0, 0, 0, ''),
0xb9: ('f64.convert_s/i64', None, 0, 0, 0, ''),
0xba: ('f64.convert_u/i64', None, 0, 0, 0, ''),
0xbb: ('f64.promote/f32', None, 0, 0, 0, ''),
0xbc: ('i32.reinterpret/f32', None, 0, 0, 0, ''),
0xbd: ('i64.reinterpret/f64', None, 0, 0, 0, ''),
0xbe: ('f32.reinterpret/i32', None, 0, 0, 0, ''),
0xbf: ('f64.reinterpret/i64', None, 0, 0, 0, ''),
}
class Wasm(object):
"""Wasm bytecode."""
def __init__(self):
self.table = _table
self.reverse_table = self._get_reverse_table()
def _get_reverse_table(self):
"""Build an internal table used in the assembler."""
# opcode:(mnemonic/name, imm_struct, flags, pops, pushes, description)
reverse_table = {}
for (opcode, (mnemonic, imm_struct,
flags, pops, pushes, description)) in self.table.items():
reverse_table[mnemonic] = opcode, mnemonic, imm_struct, flags, pops, pushes, description
return reverse_table
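A short usage sketch of the two lookup directions defined above; the unpacking shapes follow directly from the tuple layouts in _table and _get_reverse_table, so the asserted values are read straight off the table.

wasm = Wasm()

# forward: opcode -> (mnemonic, imm_struct, flags, pops, pushes, description)
name, imm, flags, pops, pushes, desc = wasm.table[0x6a]
assert name == 'i32.add'

# reverse: mnemonic -> (opcode, mnemonic, imm_struct, flags, pops, pushes, description)
opcode, mnemonic, *rest = wasm.reverse_table['i32.add']
assert opcode == 0x6a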
| 42.43318
| 100
| 0.484796
|
from wasm.immtypes import *
from wasm.opcodes import INSN_ENTER_BLOCK, INSN_LEAVE_BLOCK, INSN_BRANCH, INSN_NO_FLOW
_table = {
0x00: ('unreachable', None, INSN_NO_FLOW, 0, 0, ''),
0x01: ('nop', None, 0, 0, 0, ''),
0x02: ('block', BlockImm(), INSN_ENTER_BLOCK, 0, 0, ''),
0x03: ('loop', BlockImm(), INSN_ENTER_BLOCK, 0, 0, ''),
0x04: ('if', BlockImm(), INSN_ENTER_BLOCK, 0, 0, ''),
0x05: ('else', None, INSN_ENTER_BLOCK | INSN_LEAVE_BLOCK, 0, 0, ''),
0x0b: ('end', None, INSN_LEAVE_BLOCK, 0, 0, ''),
0x0c: ('br', BranchImm(), INSN_BRANCH, 0, 0, ''),
0x0d: ('br_if', BranchImm(), INSN_BRANCH, 0, 0, ''),
0x0e: ('br_table', BranchTableImm(), INSN_BRANCH, 0, 0, ''),
0x0f: ('return', None, INSN_NO_FLOW, 0, 0, ''),
0x10: ('call', CallImm(), INSN_BRANCH, 0, 0, ''),
0x11: ('call_indirect', CallIndirectImm(), INSN_BRANCH, 0, 0, ''),
0x1a: ('drop', None, 0, 0, 0, ''),
0x1b: ('select', None, 0, 0, 0, ''),
0x20: ('get_local', LocalVarXsImm(), 0, 0, 0, ''),
0x21: ('set_local', LocalVarXsImm(), 0, 0, 0, ''),
0x22: ('tee_local', LocalVarXsImm(), 0, 0, 0, ''),
0x23: ('get_global', GlobalVarXsImm(), 0, 0, 0, ''),
0x24: ('set_global', GlobalVarXsImm(), 0, 0, 0, ''),
0x28: ('i32.load', MemoryImm(), 0, 0, 0, ''),
0x29: ('i64.load', MemoryImm(), 0, 0, 0, ''),
0x2a: ('f32.load', MemoryImm(), 0, 0, 0, ''),
0x2b: ('f64.load', MemoryImm(), 0, 0, 0, ''),
0x2c: ('i32.load8_s', MemoryImm(), 0, 0, 0, ''),
0x2d: ('i32.load8_u', MemoryImm(), 0, 0, 0, ''),
0x2e: ('i32.load16_s', MemoryImm(), 0, 0, 0, ''),
0x2f: ('i32.load16_u', MemoryImm(), 0, 0, 0, ''),
0x30: ('i64.load8_s', MemoryImm(), 0, 0, 0, ''),
0x31: ('i64.load8_u', MemoryImm(), 0, 0, 0, ''),
0x32: ('i64.load16_s', MemoryImm(), 0, 0, 0, ''),
0x33: ('i64.load16_u', MemoryImm(), 0, 0, 0, ''),
0x34: ('i64.load32_s', MemoryImm(), 0, 0, 0, ''),
0x35: ('i64.load32_u', MemoryImm(), 0, 0, 0, ''),
0x36: ('i32.store', MemoryImm(), 0, 0, 0, ''),
0x37: ('i64.store', MemoryImm(), 0, 0, 0, ''),
0x38: ('f32.store', MemoryImm(), 0, 0, 0, ''),
0x39: ('f64.store', MemoryImm(), 0, 0, 0, ''),
0x3a: ('i32.store8', MemoryImm(), 0, 0, 0, ''),
0x3b: ('i32.store16', MemoryImm(), 0, 0, 0, ''),
0x3c: ('i64.store8', MemoryImm(), 0, 0, 0, ''),
0x3d: ('i64.store16', MemoryImm(), 0, 0, 0, ''),
0x3e: ('i64.store32', MemoryImm(), 0, 0, 0, ''),
0x3f: ('current_memory', CurGrowMemImm(), 0, 0, 0, ''),
0x40: ('grow_memory', CurGrowMemImm(), 0, 0, 0, ''),
0x41: ('i32.const', I32ConstImm(), 0, 0, 0, ''),
0x42: ('i64.const', I64ConstImm(), 0, 0, 0, ''),
0x43: ('f32.const', F32ConstImm(), 0, 0, 0, ''),
0x44: ('f64.const', F64ConstImm(), 0, 0, 0, ''),
0x45: ('i32.eqz', None, 0, 0, 0, ''),
0x46: ('i32.eq', None, 0, 0, 0, ''),
0x47: ('i32.ne', None, 0, 0, 0, ''),
0x48: ('i32.lt_s', None, 0, 0, 0, ''),
0x49: ('i32.lt_u', None, 0, 0, 0, ''),
0x4a: ('i32.gt_s', None, 0, 0, 0, ''),
0x4b: ('i32.gt_u', None, 0, 0, 0, ''),
0x4c: ('i32.le_s', None, 0, 0, 0, ''),
0x4d: ('i32.le_u', None, 0, 0, 0, ''),
0x4e: ('i32.ge_s', None, 0, 0, 0, ''),
0x4f: ('i32.ge_u', None, 0, 0, 0, ''),
0x50: ('i64.eqz', None, 0, 0, 0, ''),
0x51: ('i64.eq', None, 0, 0, 0, ''),
0x52: ('i64.ne', None, 0, 0, 0, ''),
0x53: ('i64.lt_s', None, 0, 0, 0, ''),
0x54: ('i64.lt_u', None, 0, 0, 0, ''),
0x55: ('i64.gt_s', None, 0, 0, 0, ''),
0x56: ('i64.gt_u', None, 0, 0, 0, ''),
0x57: ('i64.le_s', None, 0, 0, 0, ''),
0x58: ('i64.le_u', None, 0, 0, 0, ''),
0x59: ('i64.ge_s', None, 0, 0, 0, ''),
0x5a: ('i64.ge_u', None, 0, 0, 0, ''),
0x5b: ('f32.eq', None, 0, 0, 0, ''),
0x5c: ('f32.ne', None, 0, 0, 0, ''),
0x5d: ('f32.lt', None, 0, 0, 0, ''),
0x5e: ('f32.gt', None, 0, 0, 0, ''),
0x5f: ('f32.le', None, 0, 0, 0, ''),
0x60: ('f32.ge', None, 0, 0, 0, ''),
0x61: ('f64.eq', None, 0, 0, 0, ''),
0x62: ('f64.ne', None, 0, 0, 0, ''),
0x63: ('f64.lt', None, 0, 0, 0, ''),
0x64: ('f64.gt', None, 0, 0, 0, ''),
0x65: ('f64.le', None, 0, 0, 0, ''),
0x66: ('f64.ge', None, 0, 0, 0, ''),
0x67: ('i32.clz', None, 0, 0, 0, ''),
0x68: ('i32.ctz', None, 0, 0, 0, ''),
0x69: ('i32.popcnt', None, 0, 0, 0, ''),
0x6a: ('i32.add', None, 0, 0, 0, ''),
0x6b: ('i32.sub', None, 0, 0, 0, ''),
0x6c: ('i32.mul', None, 0, 0, 0, ''),
0x6d: ('i32.div_s', None, 0, 0, 0, ''),
0x6e: ('i32.div_u', None, 0, 0, 0, ''),
0x6f: ('i32.rem_s', None, 0, 0, 0, ''),
0x70: ('i32.rem_u', None, 0, 0, 0, ''),
0x71: ('i32.and', None, 0, 0, 0, ''),
0x72: ('i32.or', None, 0, 0, 0, ''),
0x73: ('i32.xor', None, 0, 0, 0, ''),
0x74: ('i32.shl', None, 0, 0, 0, ''),
0x75: ('i32.shr_s', None, 0, 0, 0, ''),
0x76: ('i32.shr_u', None, 0, 0, 0, ''),
0x77: ('i32.rotl', None, 0, 0, 0, ''),
0x78: ('i32.rotr', None, 0, 0, 0, ''),
0x79: ('i64.clz', None, 0, 0, 0, ''),
0x7a: ('i64.ctz', None, 0, 0, 0, ''),
0x7b: ('i64.popcnt', None, 0, 0, 0, ''),
0x7c: ('i64.add', None, 0, 0, 0, ''),
0x7d: ('i64.sub', None, 0, 0, 0, ''),
0x7e: ('i64.mul', None, 0, 0, 0, ''),
0x7f: ('i64.div_s', None, 0, 0, 0, ''),
0x80: ('i64.div_u', None, 0, 0, 0, ''),
0x81: ('i64.rem_s', None, 0, 0, 0, ''),
0x82: ('i64.rem_u', None, 0, 0, 0, ''),
0x83: ('i64.and', None, 0, 0, 0, ''),
0x84: ('i64.or', None, 0, 0, 0, ''),
0x85: ('i64.xor', None, 0, 0, 0, ''),
0x86: ('i64.shl', None, 0, 0, 0, ''),
0x87: ('i64.shr_s', None, 0, 0, 0, ''),
0x88: ('i64.shr_u', None, 0, 0, 0, ''),
0x89: ('i64.rotl', None, 0, 0, 0, ''),
0x8a: ('i64.rotr', None, 0, 0, 0, ''),
0x8b: ('f32.abs', None, 0, 0, 0, ''),
0x8c: ('f32.neg', None, 0, 0, 0, ''),
0x8d: ('f32.ceil', None, 0, 0, 0, ''),
0x8e: ('f32.floor', None, 0, 0, 0, ''),
0x8f: ('f32.trunc', None, 0, 0, 0, ''),
0x90: ('f32.nearest', None, 0, 0, 0, ''),
0x91: ('f32.sqrt', None, 0, 0, 0, ''),
0x92: ('f32.add', None, 0, 0, 0, ''),
0x93: ('f32.sub', None, 0, 0, 0, ''),
0x94: ('f32.mul', None, 0, 0, 0, ''),
0x95: ('f32.div', None, 0, 0, 0, ''),
0x96: ('f32.min', None, 0, 0, 0, ''),
0x97: ('f32.max', None, 0, 0, 0, ''),
0x98: ('f32.copysign', None, 0, 0, 0, ''),
0x99: ('f64.abs', None, 0, 0, 0, ''),
0x9a: ('f64.neg', None, 0, 0, 0, ''),
0x9b: ('f64.ceil', None, 0, 0, 0, ''),
0x9c: ('f64.floor', None, 0, 0, 0, ''),
0x9d: ('f64.trunc', None, 0, 0, 0, ''),
0x9e: ('f64.nearest', None, 0, 0, 0, ''),
0x9f: ('f64.sqrt', None, 0, 0, 0, ''),
0xa0: ('f64.add', None, 0, 0, 0, ''),
0xa1: ('f64.sub', None, 0, 0, 0, ''),
0xa2: ('f64.mul', None, 0, 0, 0, ''),
0xa3: ('f64.div', None, 0, 0, 0, ''),
0xa4: ('f64.min', None, 0, 0, 0, ''),
0xa5: ('f64.max', None, 0, 0, 0, ''),
0xa6: ('f64.copysign', None, 0, 0, 0, ''),
0xa7: ('i32.wrap/i64', None, 0, 0, 0, ''),
0xa8: ('i32.trunc_s/f32', None, 0, 0, 0, ''),
0xa9: ('i32.trunc_u/f32', None, 0, 0, 0, ''),
0xaa: ('i32.trunc_s/f64', None, 0, 0, 0, ''),
0xab: ('i32.trunc_u/f64', None, 0, 0, 0, ''),
0xac: ('i64.extend_s/i32', None, 0, 0, 0, ''),
0xad: ('i64.extend_u/i32', None, 0, 0, 0, ''),
0xae: ('i64.trunc_s/f32', None, 0, 0, 0, ''),
0xaf: ('i64.trunc_u/f32', None, 0, 0, 0, ''),
0xb0: ('i64.trunc_s/f64', None, 0, 0, 0, ''),
0xb1: ('i64.trunc_u/f64', None, 0, 0, 0, ''),
0xb2: ('f32.convert_s/i32', None, 0, 0, 0, ''),
0xb3: ('f32.convert_u/i32', None, 0, 0, 0, ''),
0xb4: ('f32.convert_s/i64', None, 0, 0, 0, ''),
0xb5: ('f32.convert_u/i64', None, 0, 0, 0, ''),
0xb6: ('f32.demote/f64', None, 0, 0, 0, ''),
0xb7: ('f64.convert_s/i32', None, 0, 0, 0, ''),
0xb8: ('f64.convert_u/i32', None, 0, 0, 0, ''),
0xb9: ('f64.convert_s/i64', None, 0, 0, 0, ''),
0xba: ('f64.convert_u/i64', None, 0, 0, 0, ''),
0xbb: ('f64.promote/f32', None, 0, 0, 0, ''),
0xbc: ('i32.reinterpret/f32', None, 0, 0, 0, ''),
0xbd: ('i64.reinterpret/f64', None, 0, 0, 0, ''),
0xbe: ('f32.reinterpret/i32', None, 0, 0, 0, ''),
0xbf: ('f64.reinterpret/i64', None, 0, 0, 0, ''),
}
class Wasm(object):
def __init__(self):
self.table = _table
self.reverse_table = self._get_reverse_table()
def _get_reverse_table(self):
reverse_table = {}
for (opcode, (mnemonic, imm_struct,
flags, pops, pushes, description)) in self.table.items():
reverse_table[mnemonic] = opcode, mnemonic, imm_struct, flags, pops, pushes, description
return reverse_table
| true
| true
|
f701f57a6d66a5162c4485e2059098ca7ddc59f7
| 49,207
|
py
|
Python
|
tests/core_type/test_introspection.py
|
jhgg/graphql-py
|
47ad2ca029954423e4b13f5b4ef84f788e865f6f
|
[
"MIT"
] | 1
|
2021-04-28T21:35:02.000Z
|
2021-04-28T21:35:02.000Z
|
tests/core_type/test_introspection.py
|
jhgg/graphql-py
|
47ad2ca029954423e4b13f5b4ef84f788e865f6f
|
[
"MIT"
] | null | null | null |
tests/core_type/test_introspection.py
|
jhgg/graphql-py
|
47ad2ca029954423e4b13f5b4ef84f788e865f6f
|
[
"MIT"
] | null | null | null |
import json
from pytest import raises
from graphql.core import graphql
from graphql.core.error import format_error
from graphql.core.language.location import SourceLocation
from graphql.core.language.parser import parse
from graphql.core.execution import execute
from graphql.core.type import (
GraphQLSchema,
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLInputObjectType,
GraphQLInputObjectField,
GraphQLString,
GraphQLList,
GraphQLEnumType,
GraphQLEnumValue,
)
from graphql.core.type.introspection import TypeFieldResolvers
from graphql.core.validation.rules import ProvidedNonNullArguments
introspection_query = '''
query IntrospectionQuery {
__schema {
queryType { name }
mutationType { name }
types {
...FullType
}
directives {
name
args {
name
type { ...TypeRef }
defaultValue
}
onOperation
onFragment
onField
}
}
}
fragment FullType on __Type {
kind
name
fields {
name
args {
name
type { ...TypeRef }
defaultValue
}
type {
...TypeRef
}
isDeprecated
deprecationReason
}
inputFields {
name
type { ...TypeRef }
defaultValue
}
interfaces {
...TypeRef
}
enumValues {
name
isDeprecated
deprecationReason
}
possibleTypes {
...TypeRef
}
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
'''
def sort_lists(value):
if isinstance(value, dict):
new_mapping = []
for k in sorted(value.keys()):
new_mapping.append((k, sort_lists(value[k])))
return new_mapping
elif isinstance(value, list):
return sorted(map(sort_lists, value), key=repr)
return value
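# sort_lists normalises nested results for order-insensitive comparison: dicts
# become sorted (key, value) pairs and lists are sorted by repr, which is why
# the assertions below apply sort_lists(...) to both sides.  A tiny self-check
# illustrating that (this check is mine, not part of the original tests):
def test_sort_lists_ignores_ordering():
    a = {'fields': [{'name': 'b'}, {'name': 'a'}]}
    b = {'fields': [{'name': 'a'}, {'name': 'b'}]}
    assert sort_lists(a) == sort_lists(b)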
def test_executes_an_introspection_query():
EmptySchema = GraphQLSchema(GraphQLObjectType('QueryRoot', {}))
result = graphql(EmptySchema, introspection_query)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__schema': {'directives': [{'args': [{'defaultValue': None,
'name': 'if',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}}],
'name': 'include',
'onField': True,
'onFragment': True,
'onOperation': False},
{'args': [{'defaultValue': None,
'name': 'if',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}}],
'name': 'skip',
'onField': True,
'onFragment': True,
'onOperation': False}],
'mutationType': None,
'queryType': {'name': 'QueryRoot'},
'types': [{'enumValues': None,
'fields': [],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': 'QueryRoot',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'types',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type'}}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'queryType',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'mutationType',
'type': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'directives',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Directive'}}}}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Schema',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'kind',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'ENUM',
'name': '__TypeKind',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [{'defaultValue': 'false',
'name': 'includeDeprecated',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}],
'deprecationReason': None,
'isDeprecated': False,
'name': 'fields',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Field',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'interfaces',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'possibleTypes',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}}},
{'args': [{'defaultValue': 'false',
'name': 'includeDeprecated',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}],
'deprecationReason': None,
'isDeprecated': False,
'name': 'enumValues',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__EnumValue',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'inputFields',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__InputValue',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'ofType',
'type': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Type',
'possibleTypes': None},
{'enumValues': [{'deprecationReason': None,
'isDeprecated': False,
'name': 'SCALAR'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'OBJECT'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'INTERFACE'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'UNION'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'ENUM'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'INPUT_OBJECT'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'LIST'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'NON_NULL'}],
'fields': None,
'inputFields': None,
'interfaces': None,
'kind': 'ENUM',
'name': '__TypeKind',
'possibleTypes': None},
{'enumValues': None,
'fields': None,
'inputFields': None,
'interfaces': None,
'kind': 'SCALAR',
'name': 'String',
'possibleTypes': None},
{'enumValues': None,
'fields': None,
'inputFields': None,
'interfaces': None,
'kind': 'SCALAR',
'name': 'Boolean',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'args',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__InputValue'}}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'type',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'isDeprecated',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'deprecationReason',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Field',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'type',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'defaultValue',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__InputValue',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'isDeprecated',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'deprecationReason',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__EnumValue',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'args',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__InputValue'}}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'onOperation',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'onFragment',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'onField',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Directive',
'possibleTypes': None}]}})
def test_introspects_on_input_object():
TestInputObject = GraphQLInputObjectType('TestInputObject', {
'a': GraphQLInputObjectField(GraphQLString, default_value='foo'),
'b': GraphQLInputObjectField(GraphQLList(GraphQLString)),
})
TestType = GraphQLObjectType('TestType', {
'field': GraphQLField(
type=GraphQLString,
args={'complex': GraphQLArgument(TestInputObject)},
resolver=lambda obj, args, info: json.dumps(args.get('complex'))
)
})
schema = GraphQLSchema(TestType)
request = '''
{
__schema {
types {
kind
name
inputFields {
name
type { ...TypeRef }
defaultValue
}
}
}
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists({'kind': 'INPUT_OBJECT',
'name': 'TestInputObject',
'inputFields':
[{'name': 'a',
'type':
{'kind': 'SCALAR',
'name': 'String',
'ofType': None},
'defaultValue': '"foo"'},
{'name': 'b',
'type':
{'kind': 'LIST',
'name': None,
'ofType':
{'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
'defaultValue': None}]}) in \
sort_lists(result.data['__schema']['types'])
def test_supports_the_type_root_field():
TestType = GraphQLObjectType('TestType', {
'testField': GraphQLField(GraphQLString)
})
schema = GraphQLSchema(TestType)
request = '{ __type(name: "TestType") { name } }'
result = execute(schema, object(), parse(request))
assert not result.errors
assert result.data == {'__type': {'name': 'TestType'}}
def test_identifies_deprecated_fields():
TestType = GraphQLObjectType('TestType', {
'nonDeprecated': GraphQLField(GraphQLString),
'deprecated': GraphQLField(GraphQLString,
deprecation_reason='Removed in 1.0')
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestType") {
name
fields(includeDeprecated: true) {
name
isDeprecated
deprecationReason
}
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestType',
'fields': [
{'name': 'nonDeprecated', 'isDeprecated': False, 'deprecationReason': None},
{'name': 'deprecated', 'isDeprecated': True,
'deprecationReason': 'Removed in 1.0'},
]
}})
def test_respects_the_includedeprecated_parameter_for_fields():
TestType = GraphQLObjectType('TestType', {
'nonDeprecated': GraphQLField(GraphQLString),
'deprecated': GraphQLField(GraphQLString,
deprecation_reason='Removed in 1.0')
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestType") {
name
trueFields: fields(includeDeprecated: true) { name }
falseFields: fields(includeDeprecated: false) { name }
omittedFields: fields { name }
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestType',
'trueFields': [{'name': 'nonDeprecated'}, {'name': 'deprecated'}],
'falseFields': [{'name': 'nonDeprecated'}],
'omittedFields': [{'name': 'nonDeprecated'}],
}})
def test_identifies_deprecated_enum_values():
TestEnum = GraphQLEnumType('TestEnum', {
'NONDEPRECATED': 0,
'DEPRECATED': GraphQLEnumValue(1, deprecation_reason='Removed in 1.0'),
'ALSONONDEPRECATED': 2
})
TestType = GraphQLObjectType('TestType', {
'testEnum': GraphQLField(TestEnum)
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestEnum") {
name
enumValues(includeDeprecated: true) {
name
isDeprecated
deprecationReason
}
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestEnum',
'enumValues': [
{'name': 'NONDEPRECATED', 'isDeprecated': False, 'deprecationReason': None},
{'name': 'DEPRECATED', 'isDeprecated': True, 'deprecationReason': 'Removed in 1.0'},
{'name': 'ALSONONDEPRECATED', 'isDeprecated': False, 'deprecationReason': None},
]}})
def test_respects_the_includedeprecated_parameter_for_enum_values():
TestEnum = GraphQLEnumType('TestEnum', {
'NONDEPRECATED': 0,
'DEPRECATED': GraphQLEnumValue(1, deprecation_reason='Removed in 1.0'),
'ALSONONDEPRECATED': 2
})
TestType = GraphQLObjectType('TestType', {
'testEnum': GraphQLField(TestEnum)
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestEnum") {
name
trueValues: enumValues(includeDeprecated: true) { name }
falseValues: enumValues(includeDeprecated: false) { name }
omittedValues: enumValues { name }
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestEnum',
'trueValues': [{'name': 'NONDEPRECATED'}, {'name': 'DEPRECATED'},
{'name': 'ALSONONDEPRECATED'}],
'falseValues': [{'name': 'NONDEPRECATED'},
{'name': 'ALSONONDEPRECATED'}],
'omittedValues': [{'name': 'NONDEPRECATED'},
{'name': 'ALSONONDEPRECATED'}],
}})
def test_fails_as_expected_on_the_type_root_field_without_an_arg():
TestType = GraphQLObjectType('TestType', {
'testField': GraphQLField(GraphQLString)
})
schema = GraphQLSchema(TestType)
request = '''
{
__type {
name
}
}'''
result = graphql(schema, request)
expected_error = {'message': ProvidedNonNullArguments.missing_field_arg_message('__type', 'name', 'String!'),
'locations': [SourceLocation(line=3, column=9)]}
assert (expected_error in [format_error(error) for error in result.errors])
def test_exposes_descriptions_on_types_and_fields():
QueryRoot = GraphQLObjectType('QueryRoot', {})
schema = GraphQLSchema(QueryRoot)
request = '''{
schemaType: __type(name: "__Schema") {
name,
description,
fields {
name,
description
}
}
}
'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'schemaType': {
'name': '__Schema',
'description': 'A GraphQL Schema defines the capabilities of a ' +
'GraphQL server. It exposes all available types and ' +
'directives on the server, as well as the entry ' +
'points for query and mutation operations.',
'fields': [
{
'name': 'types',
'description': 'A list of all types supported by this server.'
},
{
'name': 'queryType',
'description': 'The type that query operations will be rooted at.'
},
{
'name': 'mutationType',
'description': 'If this server supports mutation, the type that ' +
'mutation operations will be rooted at.'
},
{
'name': 'directives',
'description': 'A list of all directives supported by this server.'
}
]
}})
def test_exposes_descriptions_on_enums():
QueryRoot = GraphQLObjectType('QueryRoot', {})
schema = GraphQLSchema(QueryRoot)
request = '''{
typeKindType: __type(name: "__TypeKind") {
name,
description,
enumValues {
name,
description
}
}
}
'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'typeKindType': {
'name': '__TypeKind',
'description': 'An enum describing what kind of type a given __Type is',
'enumValues': [
{
'description': 'Indicates this type is a scalar.',
'name': 'SCALAR'
},
{
'description': 'Indicates this type is an object. ' +
'`fields` and `interfaces` are valid fields.',
'name': 'OBJECT'
},
{
'description': 'Indicates this type is an interface. ' +
'`fields` and `possibleTypes` are valid fields.',
'name': 'INTERFACE'
},
{
'description': 'Indicates this type is a union. ' +
'`possibleTypes` is a valid field.',
'name': 'UNION'
},
{
'description': 'Indicates this type is an enum. ' +
'`enumValues` is a valid field.',
'name': 'ENUM'
},
{
'description': 'Indicates this type is an input object. ' +
'`inputFields` is a valid field.',
'name': 'INPUT_OBJECT'
},
{
'description': 'Indicates this type is a list. ' +
'`ofType` is a valid field.',
'name': 'LIST'
},
{
'description': 'Indicates this type is a non-null. ' +
'`ofType` is a valid field.',
'name': 'NON_NULL'
}
]
}})
def test_type_field_resolver_resolves_unknown_kind():
class Unk(object):
pass
with raises(ValueError) as excinfo:
TypeFieldResolvers.kind(Unk())
assert 'Unknown kind of type: ' in str(excinfo.value)
| 58.440618
| 133
| 0.25285
|
import json
from pytest import raises
from graphql.core import graphql
from graphql.core.error import format_error
from graphql.core.language.location import SourceLocation
from graphql.core.language.parser import parse
from graphql.core.execution import execute
from graphql.core.type import (
GraphQLSchema,
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLInputObjectType,
GraphQLInputObjectField,
GraphQLString,
GraphQLList,
GraphQLEnumType,
GraphQLEnumValue,
)
from graphql.core.type.introspection import TypeFieldResolvers
from graphql.core.validation.rules import ProvidedNonNullArguments
introspection_query = '''
query IntrospectionQuery {
__schema {
queryType { name }
mutationType { name }
types {
...FullType
}
directives {
name
args {
name
type { ...TypeRef }
defaultValue
}
onOperation
onFragment
onField
}
}
}
fragment FullType on __Type {
kind
name
fields {
name
args {
name
type { ...TypeRef }
defaultValue
}
type {
...TypeRef
}
isDeprecated
deprecationReason
}
inputFields {
name
type { ...TypeRef }
defaultValue
}
interfaces {
...TypeRef
}
enumValues {
name
isDeprecated
deprecationReason
}
possibleTypes {
...TypeRef
}
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
'''
def sort_lists(value):
if isinstance(value, dict):
new_mapping = []
for k in sorted(value.keys()):
new_mapping.append((k, sort_lists(value[k])))
return new_mapping
elif isinstance(value, list):
return sorted(map(sort_lists, value), key=repr)
return value
def test_executes_an_introspection_query():
EmptySchema = GraphQLSchema(GraphQLObjectType('QueryRoot', {}))
result = graphql(EmptySchema, introspection_query)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__schema': {'directives': [{'args': [{'defaultValue': None,
'name': 'if',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}}],
'name': 'include',
'onField': True,
'onFragment': True,
'onOperation': False},
{'args': [{'defaultValue': None,
'name': 'if',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}}],
'name': 'skip',
'onField': True,
'onFragment': True,
'onOperation': False}],
'mutationType': None,
'queryType': {'name': 'QueryRoot'},
'types': [{'enumValues': None,
'fields': [],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': 'QueryRoot',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'types',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type'}}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'queryType',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'mutationType',
'type': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'directives',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Directive'}}}}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Schema',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'kind',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'ENUM',
'name': '__TypeKind',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [{'defaultValue': 'false',
'name': 'includeDeprecated',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}],
'deprecationReason': None,
'isDeprecated': False,
'name': 'fields',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Field',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'interfaces',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'possibleTypes',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}}},
{'args': [{'defaultValue': 'false',
'name': 'includeDeprecated',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}],
'deprecationReason': None,
'isDeprecated': False,
'name': 'enumValues',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__EnumValue',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'inputFields',
'type': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__InputValue',
'ofType': None}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'ofType',
'type': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Type',
'possibleTypes': None},
{'enumValues': [{'deprecationReason': None,
'isDeprecated': False,
'name': 'SCALAR'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'OBJECT'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'INTERFACE'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'UNION'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'ENUM'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'INPUT_OBJECT'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'LIST'},
{'deprecationReason': None,
'isDeprecated': False,
'name': 'NON_NULL'}],
'fields': None,
'inputFields': None,
'interfaces': None,
'kind': 'ENUM',
'name': '__TypeKind',
'possibleTypes': None},
{'enumValues': None,
'fields': None,
'inputFields': None,
'interfaces': None,
'kind': 'SCALAR',
'name': 'String',
'possibleTypes': None},
{'enumValues': None,
'fields': None,
'inputFields': None,
'interfaces': None,
'kind': 'SCALAR',
'name': 'Boolean',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'args',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__InputValue'}}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'type',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'isDeprecated',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'deprecationReason',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Field',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'type',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__Type',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'defaultValue',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__InputValue',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'isDeprecated',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'deprecationReason',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__EnumValue',
'possibleTypes': None},
{'enumValues': None,
'fields': [{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'name',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'description',
'type': {'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'args',
'type': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'LIST',
'name': None,
'ofType': {'kind': 'NON_NULL',
'name': None,
'ofType': {'kind': 'OBJECT',
'name': '__InputValue'}}}}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'onOperation',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'onFragment',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}},
{'args': [],
'deprecationReason': None,
'isDeprecated': False,
'name': 'onField',
'type': {'kind': 'SCALAR',
'name': 'Boolean',
'ofType': None}}],
'inputFields': None,
'interfaces': [],
'kind': 'OBJECT',
'name': '__Directive',
'possibleTypes': None}]}})
def test_introspects_on_input_object():
TestInputObject = GraphQLInputObjectType('TestInputObject', {
'a': GraphQLInputObjectField(GraphQLString, default_value='foo'),
'b': GraphQLInputObjectField(GraphQLList(GraphQLString)),
})
TestType = GraphQLObjectType('TestType', {
'field': GraphQLField(
type=GraphQLString,
args={'complex': GraphQLArgument(TestInputObject)},
resolver=lambda obj, args, info: json.dumps(args.get('complex'))
)
})
schema = GraphQLSchema(TestType)
request = '''
{
__schema {
types {
kind
name
inputFields {
name
type { ...TypeRef }
defaultValue
}
}
}
}
fragment TypeRef on __Type {
kind
name
ofType {
kind
name
ofType {
kind
name
ofType {
kind
name
}
}
}
}
'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists({'kind': 'INPUT_OBJECT',
'name': 'TestInputObject',
'inputFields':
[{'name': 'a',
'type':
{'kind': 'SCALAR',
'name': 'String',
'ofType': None},
'defaultValue': '"foo"'},
{'name': 'b',
'type':
{'kind': 'LIST',
'name': None,
'ofType':
{'kind': 'SCALAR',
'name': 'String',
'ofType': None}},
'defaultValue': None}]}) in \
sort_lists(result.data['__schema']['types'])
def test_supports_the_type_root_field():
TestType = GraphQLObjectType('TestType', {
'testField': GraphQLField(GraphQLString)
})
schema = GraphQLSchema(TestType)
request = '{ __type(name: "TestType") { name } }'
result = execute(schema, object(), parse(request))
assert not result.errors
assert result.data == {'__type': {'name': 'TestType'}}
def test_identifies_deprecated_fields():
TestType = GraphQLObjectType('TestType', {
'nonDeprecated': GraphQLField(GraphQLString),
'deprecated': GraphQLField(GraphQLString,
deprecation_reason='Removed in 1.0')
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestType") {
name
fields(includeDeprecated: true) {
name
isDeprecated
deprecationReason
}
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestType',
'fields': [
{'name': 'nonDeprecated', 'isDeprecated': False, 'deprecationReason': None},
{'name': 'deprecated', 'isDeprecated': True,
'deprecationReason': 'Removed in 1.0'},
]
}})
def test_respects_the_includedeprecated_parameter_for_fields():
TestType = GraphQLObjectType('TestType', {
'nonDeprecated': GraphQLField(GraphQLString),
'deprecated': GraphQLField(GraphQLString,
deprecation_reason='Removed in 1.0')
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestType") {
name
trueFields: fields(includeDeprecated: true) { name }
falseFields: fields(includeDeprecated: false) { name }
omittedFields: fields { name }
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestType',
'trueFields': [{'name': 'nonDeprecated'}, {'name': 'deprecated'}],
'falseFields': [{'name': 'nonDeprecated'}],
'omittedFields': [{'name': 'nonDeprecated'}],
}})
def test_identifies_deprecated_enum_values():
TestEnum = GraphQLEnumType('TestEnum', {
'NONDEPRECATED': 0,
'DEPRECATED': GraphQLEnumValue(1, deprecation_reason='Removed in 1.0'),
'ALSONONDEPRECATED': 2
})
TestType = GraphQLObjectType('TestType', {
'testEnum': GraphQLField(TestEnum)
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestEnum") {
name
enumValues(includeDeprecated: true) {
name
isDeprecated
deprecationReason
}
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestEnum',
'enumValues': [
{'name': 'NONDEPRECATED', 'isDeprecated': False, 'deprecationReason': None},
{'name': 'DEPRECATED', 'isDeprecated': True, 'deprecationReason': 'Removed in 1.0'},
{'name': 'ALSONONDEPRECATED', 'isDeprecated': False, 'deprecationReason': None},
]}})
def test_respects_the_includedeprecated_parameter_for_enum_values():
TestEnum = GraphQLEnumType('TestEnum', {
'NONDEPRECATED': 0,
'DEPRECATED': GraphQLEnumValue(1, deprecation_reason='Removed in 1.0'),
'ALSONONDEPRECATED': 2
})
TestType = GraphQLObjectType('TestType', {
'testEnum': GraphQLField(TestEnum)
})
schema = GraphQLSchema(TestType)
request = '''{__type(name: "TestEnum") {
name
trueValues: enumValues(includeDeprecated: true) { name }
falseValues: enumValues(includeDeprecated: false) { name }
omittedValues: enumValues { name }
} }'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'__type': {
'name': 'TestEnum',
'trueValues': [{'name': 'NONDEPRECATED'}, {'name': 'DEPRECATED'},
{'name': 'ALSONONDEPRECATED'}],
'falseValues': [{'name': 'NONDEPRECATED'},
{'name': 'ALSONONDEPRECATED'}],
'omittedValues': [{'name': 'NONDEPRECATED'},
{'name': 'ALSONONDEPRECATED'}],
}})
def test_fails_as_expected_on_the_type_root_field_without_an_arg():
TestType = GraphQLObjectType('TestType', {
'testField': GraphQLField(GraphQLString)
})
schema = GraphQLSchema(TestType)
request = '''
{
__type {
name
}
}'''
result = graphql(schema, request)
expected_error = {'message': ProvidedNonNullArguments.missing_field_arg_message('__type', 'name', 'String!'),
'locations': [SourceLocation(line=3, column=9)]}
assert (expected_error in [format_error(error) for error in result.errors])
def test_exposes_descriptions_on_types_and_fields():
QueryRoot = GraphQLObjectType('QueryRoot', {})
schema = GraphQLSchema(QueryRoot)
request = '''{
schemaType: __type(name: "__Schema") {
name,
description,
fields {
name,
description
}
}
}
'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'schemaType': {
'name': '__Schema',
'description': 'A GraphQL Schema defines the capabilities of a ' +
'GraphQL server. It exposes all available types and ' +
'directives on the server, as well as the entry ' +
'points for query and mutation operations.',
'fields': [
{
'name': 'types',
'description': 'A list of all types supported by this server.'
},
{
'name': 'queryType',
'description': 'The type that query operations will be rooted at.'
},
{
'name': 'mutationType',
'description': 'If this server supports mutation, the type that ' +
'mutation operations will be rooted at.'
},
{
'name': 'directives',
'description': 'A list of all directives supported by this server.'
}
]
}})
def test_exposes_descriptions_on_enums():
QueryRoot = GraphQLObjectType('QueryRoot', {})
schema = GraphQLSchema(QueryRoot)
request = '''{
typeKindType: __type(name: "__TypeKind") {
name,
description,
enumValues {
name,
description
}
}
}
'''
result = graphql(schema, request)
assert not result.errors
assert sort_lists(result.data) == sort_lists({'typeKindType': {
'name': '__TypeKind',
'description': 'An enum describing what kind of type a given __Type is',
'enumValues': [
{
'description': 'Indicates this type is a scalar.',
'name': 'SCALAR'
},
{
'description': 'Indicates this type is an object. ' +
'`fields` and `interfaces` are valid fields.',
'name': 'OBJECT'
},
{
'description': 'Indicates this type is an interface. ' +
'`fields` and `possibleTypes` are valid fields.',
'name': 'INTERFACE'
},
{
'description': 'Indicates this type is a union. ' +
'`possibleTypes` is a valid field.',
'name': 'UNION'
},
{
'description': 'Indicates this type is an enum. ' +
'`enumValues` is a valid field.',
'name': 'ENUM'
},
{
'description': 'Indicates this type is an input object. ' +
'`inputFields` is a valid field.',
'name': 'INPUT_OBJECT'
},
{
'description': 'Indicates this type is a list. ' +
'`ofType` is a valid field.',
'name': 'LIST'
},
{
'description': 'Indicates this type is a non-null. ' +
'`ofType` is a valid field.',
'name': 'NON_NULL'
}
]
}})
def test_type_field_resolver_resolves_unknown_kind():
class Unk(object):
pass
with raises(ValueError) as excinfo:
TypeFieldResolvers.kind(Unk())
assert 'Unknown kind of type: ' in str(excinfo.value)
| true
| true
|
f701f5da9daa45c173263130bee6d726af6e96a9
| 8,044
|
py
|
Python
|
apps/meetup/views.py
|
VladimirFilonov/moscowdjango
|
e1e97139f0bff2d8a0f8b41f219e0416a4733d5e
|
[
"BSD-3-Clause"
] | 10
|
2015-10-01T18:41:50.000Z
|
2020-11-26T14:00:54.000Z
|
apps/meetup/views.py
|
VladimirFilonov/moscowdjango
|
e1e97139f0bff2d8a0f8b41f219e0416a4733d5e
|
[
"BSD-3-Clause"
] | 33
|
2016-01-13T08:52:54.000Z
|
2022-01-13T00:34:23.000Z
|
apps/meetup/views.py
|
VladimirFilonov/moscowdjango
|
e1e97139f0bff2d8a0f8b41f219e0416a4733d5e
|
[
"BSD-3-Clause"
] | 4
|
2015-10-04T12:35:03.000Z
|
2021-06-15T05:57:10.000Z
|
# coding=utf-8
import os
import sys
import django
from django.core.urlresolvers import reverse
from django.db import DatabaseError
from django.db.models import Count
from django.http import HttpResponse, Http404
from django.shortcuts import redirect, get_object_or_404
from django.utils import six
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.decorators.csrf import csrf_exempt
from .models import Talk, Photo, Speaker, Event, Tutorial, Vote
from .utils import subscribe_mail, validate_email, set_vote_cookie, can_vote
class IndexPage(ListView):
template_name = 'index.html'
context_object_name = 'events'
def get_queryset(self):
if self.request.user.is_staff:
qs = Event.objects.all()
else:
qs = Event.archived.all()
return qs.prefetch_related('talks', 'talks__speaker', 'talks__event')[:3]
def get_context_data(self, **kwargs):
context = super(IndexPage, self).get_context_data(**kwargs)
# TODO: choose how to select people for the index page
# I see two options:
# By last talks - Speaker.objects.order_by("-talks__event__id", "talk__position")[:9]
# Random: Speaker.objects.order_by("?")[:9]
context.update({
'speakers': Speaker.objects.order_by("?")[:10],
'main_event': Event.spotlight(self.request.user.is_staff),
'show_more_link': True,
'can_vote': can_vote(self.request)
})
return context
class EventsList(ListView):
template_name = 'event_list.html'
queryset = Event.visible.prefetch_related('talks', 'talks__speaker', 'talks__event')
context_object_name = 'events'
def get_queryset(self):
if self.request.user.is_staff:
qs = Event.objects.all()
else:
qs = Event.visible.all()
return qs.prefetch_related('talks', 'talks__speaker', 'talks__event')
class EventPage(DetailView):
template_name = 'event.html'
slug_url_kwarg = 'number'
slug_field = 'number'
def get_queryset(self):
if self.request.user.is_staff:
return Event.objects.all()
return Event.visible.all()
def get_object(self, queryset=None):
# Use a custom queryset if provided; this is required for subclasses
# like DateDetailView
if queryset is None:
queryset = self.get_queryset()
# Next, try looking up by primary key.
pk = self.kwargs.get(self.pk_url_kwarg)
slug = self.kwargs.get(self.slug_url_kwarg)
if pk is not None:
queryset = queryset.filter(pk=pk)
# Next, try looking up by slug.
if slug is not None and (pk is None or self.query_pk_and_slug):
slug_field = self.get_slug_field()
queryset = queryset.filter(**{slug_field: slug})
# If none of those are defined, it's an error.
if pk is None and slug is None:
raise AttributeError("Generic detail view %s must be called with "
"either an object pk or a slug."
% self.__class__.__name__)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except queryset.model.MultipleObjectsReturned:
obj = queryset.latest("date")
except queryset.model.DoesNotExist:
raise Http404
return obj
def get_context_data(self, **kwargs):
context = super(EventPage, self).get_context_data(**kwargs)
context.update({
'photos': context['event'].photos.all(),
'can_vote': can_vote(self.request),
})
return context
class TalkPage(DetailView):
template_name = 'talk.html'
slug_url_kwarg = 'talk_slug'
def get_queryset(self):
if self.request.user.is_staff:
return Talk.objects.select_related('event', 'speaker')
return Talk.objects.active().select_related('event', 'speaker')
def get(self, request, *args, **kwargs):
self.object = self.get_object()
# Redirect for non-canonical urls (meetup.legacy.urls)
if self.object.get_absolute_url() != request.path:
return redirect(self.object)
context = self.get_context_data(object=self.object)
return self.render_to_response(context)
class SpeakerList(ListView):
template_name = 'speakers.html'
queryset = Speaker.objects.all().order_by('name')
context_object_name = 'speakers'
class SpeakerPage(DetailView):
template_name = 'speaker.html'
def get_object(self, queryset=None):
return get_object_or_404(
Speaker.objects.prefetch_related('talks', 'talks__event'),
slug=self.kwargs['slug']
)
class AboutPage(TemplateView):
template_name = 'about.html'
def get_context_data(self, **kwargs):
context = super(AboutPage, self).get_context_data(**kwargs)
context.update({
'photos': Photo.objects.all().order_by('-pk')[:10]
})
return context
class LivePage(TemplateView):
template_name = 'live.html'
def get_context_data(self, **kwargs):
context = super(LivePage, self).get_context_data(**kwargs)
context.update({
'event': Event.spotlight(),
})
return context
class TutorialList(ListView):
template_name = 'tutorials.html'
queryset = Tutorial.objects.all().order_by('title')
context_object_name = 'tutorials'
class TutorialPage(DetailView):
template_name = 'tutorial.html'
model = Tutorial
class Py3Page(TemplateView):
template_name = 'py3.html'
def get_context_data(self, **kwargs):
context = super(Py3Page, self).get_context_data(**kwargs)
context.update({
'django': django.get_version(),
'python': sys.version,
'py3': six.PY3,
})
return context
class VoteResults(TemplateView):
template_name = 'vote_results.html'
def get_context_data(self, **kwargs):
context = super(VoteResults, self).get_context_data(**kwargs)
talks = Talk.objects.filter(event=Event.spotlight()).annotate(num_votes=Count("votes"))
talks_votes = [talk.num_votes for talk in talks]
votes_total = sum(talks_votes)
votes_max = max(talks_votes)
if votes_total:
for talk in talks:
talk.votes_percent = int(talk.num_votes * 100 / votes_total)
if talk.num_votes == votes_max:
talk.is_leader = True
context.update({
'talks': talks,
})
return context
@csrf_exempt
def ajax_vote(request, *args, **kwargs):
if request.method == 'POST':
if not can_vote(request):
return HttpResponse(u'Можно голосовать только за один доклад', status=409)
try:
event = Talk.objects.get(pk=kwargs['talk_id']).event
if not event.votable:
return HttpResponse('Voting is closed, sorry', status=409)
Vote.objects.create(talk_id=kwargs['talk_id'],
event=event,
ua=request.META.get('HTTP_USER_AGENT'),
ip=request.META.get('REMOTE_ADDR'))
response = HttpResponse(reverse('vote-results'))
response = set_vote_cookie(response)
return response
except DatabaseError:
return HttpResponse('DB error, sorry', status=402)
return HttpResponse('Only POST', status=402)
def confirm_ownership(request, *args, **kwargs):
content = os.environ.get('CONFIRM_OWNERSHIP_%s' % kwargs['filename'], None)
if content:
content_type = 'text/html' if kwargs['filename'].endswith('.html') else 'text/plain'
return HttpResponse(content, content_type=content_type)
else:
raise Http404
| 32.435484
| 95
| 0.632645
|
import os
import sys
import django
from django.core.urlresolvers import reverse
from django.db import DatabaseError
from django.db.models import Count
from django.http import HttpResponse, Http404
from django.shortcuts import redirect, get_object_or_404
from django.utils import six
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.decorators.csrf import csrf_exempt
from .models import Talk, Photo, Speaker, Event, Tutorial, Vote
from .utils import subscribe_mail, validate_email, set_vote_cookie, can_vote
class IndexPage(ListView):
template_name = 'index.html'
context_object_name = 'events'
def get_queryset(self):
if self.request.user.is_staff:
qs = Event.objects.all()
else:
qs = Event.archived.all()
return qs.prefetch_related('talks', 'talks__speaker', 'talks__event')[:3]
def get_context_data(self, **kwargs):
context = super(IndexPage, self).get_context_data(**kwargs)
context.update({
'speakers': Speaker.objects.order_by("?")[:10],
'main_event': Event.spotlight(self.request.user.is_staff),
'show_more_link': True,
'can_vote': can_vote(self.request)
})
return context
class EventsList(ListView):
template_name = 'event_list.html'
queryset = Event.visible.prefetch_related('talks', 'talks__speaker', 'talks__event')
context_object_name = 'events'
def get_queryset(self):
if self.request.user.is_staff:
qs = Event.objects.all()
else:
qs = Event.visible.all()
return qs.prefetch_related('talks', 'talks__speaker', 'talks__event')
class EventPage(DetailView):
template_name = 'event.html'
slug_url_kwarg = 'number'
slug_field = 'number'
def get_queryset(self):
if self.request.user.is_staff:
return Event.objects.all()
return Event.visible.all()
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
pk = self.kwargs.get(self.pk_url_kwarg)
slug = self.kwargs.get(self.slug_url_kwarg)
if pk is not None:
queryset = queryset.filter(pk=pk)
if slug is not None and (pk is None or self.query_pk_and_slug):
slug_field = self.get_slug_field()
queryset = queryset.filter(**{slug_field: slug})
if pk is None and slug is None:
raise AttributeError("Generic detail view %s must be called with "
"either an object pk or a slug."
% self.__class__.__name__)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except queryset.model.MultipleObjectsReturned:
obj = queryset.latest("date")
except queryset.model.DoesNotExist:
raise Http404
return obj
def get_context_data(self, **kwargs):
context = super(EventPage, self).get_context_data(**kwargs)
context.update({
'photos': context['event'].photos.all(),
'can_vote': can_vote(self.request),
})
return context
class TalkPage(DetailView):
template_name = 'talk.html'
slug_url_kwarg = 'talk_slug'
def get_queryset(self):
if self.request.user.is_staff:
return Talk.objects.select_related('event', 'speaker')
return Talk.objects.active().select_related('event', 'speaker')
def get(self, request, *args, **kwargs):
self.object = self.get_object()
# Redirect for non-canonical urls (meetup.legacy.urls)
if self.object.get_absolute_url() != request.path:
return redirect(self.object)
context = self.get_context_data(object=self.object)
return self.render_to_response(context)
class SpeakerList(ListView):
template_name = 'speakers.html'
queryset = Speaker.objects.all().order_by('name')
context_object_name = 'speakers'
class SpeakerPage(DetailView):
template_name = 'speaker.html'
def get_object(self, queryset=None):
return get_object_or_404(
Speaker.objects.prefetch_related('talks', 'talks__event'),
slug=self.kwargs['slug']
)
class AboutPage(TemplateView):
template_name = 'about.html'
def get_context_data(self, **kwargs):
context = super(AboutPage, self).get_context_data(**kwargs)
context.update({
'photos': Photo.objects.all().order_by('-pk')[:10]
})
return context
class LivePage(TemplateView):
template_name = 'live.html'
def get_context_data(self, **kwargs):
context = super(LivePage, self).get_context_data(**kwargs)
context.update({
'event': Event.spotlight(),
})
return context
class TutorialList(ListView):
template_name = 'tutorials.html'
queryset = Tutorial.objects.all().order_by('title')
context_object_name = 'tutorials'
class TutorialPage(DetailView):
template_name = 'tutorial.html'
model = Tutorial
class Py3Page(TemplateView):
template_name = 'py3.html'
def get_context_data(self, **kwargs):
context = super(Py3Page, self).get_context_data(**kwargs)
context.update({
'django': django.get_version(),
'python': sys.version,
'py3': six.PY3,
})
return context
class VoteResults(TemplateView):
template_name = 'vote_results.html'
def get_context_data(self, **kwargs):
context = super(VoteResults, self).get_context_data(**kwargs)
talks = Talk.objects.filter(event=Event.spotlight()).annotate(num_votes=Count("votes"))
talks_votes = [talk.num_votes for talk in talks]
votes_total = sum(talks_votes)
votes_max = max(talks_votes)
if votes_total:
for talk in talks:
talk.votes_percent = int(talk.num_votes * 100 / votes_total)
if talk.num_votes == votes_max:
talk.is_leader = True
context.update({
'talks': talks,
})
return context
@csrf_exempt
def ajax_vote(request, *args, **kwargs):
if request.method == 'POST':
if not can_vote(request):
return HttpResponse(u'Можно голосовать только за один доклад', status=409)
try:
event = Talk.objects.get(pk=kwargs['talk_id']).event
if not event.votable:
return HttpResponse('Voting is closed, sorry', status=409)
Vote.objects.create(talk_id=kwargs['talk_id'],
event=event,
ua=request.META.get('HTTP_USER_AGENT'),
ip=request.META.get('REMOTE_ADDR'))
response = HttpResponse(reverse('vote-results'))
response = set_vote_cookie(response)
return response
except DatabaseError:
return HttpResponse('DB error, sorry', status=402)
return HttpResponse('Only POST', status=402)
def confirm_ownership(request, *args, **kwargs):
content = os.environ.get('CONFIRM_OWNERSHIP_%s' % kwargs['filename'], None)
if content:
content_type = 'text/html' if kwargs['filename'].endswith('.html') else 'text/plain'
return HttpResponse(content, content_type=content_type)
else:
raise Http404
| true
| true
|
f701f680ba8554ccbd354efc16483113776c0acd
| 197
|
py
|
Python
|
codewars/7kyu/doha22/repeater/test.py
|
doha22/Training_one
|
0cd7cf86c7da0f6175834146296b763d1841766b
|
[
"MIT"
] | null | null | null |
codewars/7kyu/doha22/repeater/test.py
|
doha22/Training_one
|
0cd7cf86c7da0f6175834146296b763d1841766b
|
[
"MIT"
] | 2
|
2019-01-22T10:53:42.000Z
|
2019-01-31T08:02:48.000Z
|
codewars/7kyu/doha22/repeater/test.py
|
doha22/Training_one
|
0cd7cf86c7da0f6175834146296b763d1841766b
|
[
"MIT"
] | 13
|
2019-01-22T10:37:42.000Z
|
2019-01-25T13:30:43.000Z
|
import unittest
from repeater import repeater
def test_repeater(benchmark):
assert benchmark(repeater,'a',5) == 'aaaaa'
assert benchmark(repeater,'Wub', 6 ) == 'Wub Wub Wub Wub Wub Wub '
| 24.625
| 70
| 0.71066
|
import unittest
from repeater import repeater
def test_repeater(benchmark):
assert benchmark(repeater,'a',5) == 'aaaaa'
assert benchmark(repeater,'Wub', 6 ) == 'Wub Wub Wub Wub Wub Wub '
| true
| true
|
f701f7a9a7a4a33178b37d3b9fb2b490dd4d8458
| 243
|
py
|
Python
|
Chapter03/Setup As Admin.py
|
delgadoa1/Python-For-Offensive-Pentest
|
c768d61f15fccc77768eb400b5c0e36bebde9511
|
[
"MIT"
] | 62
|
2018-05-22T20:46:01.000Z
|
2022-03-30T11:25:58.000Z
|
Chapter03/Setup As Admin.py
|
delgadoa1/Python-For-Offensive-Pentest
|
c768d61f15fccc77768eb400b5c0e36bebde9511
|
[
"MIT"
] | null | null | null |
Chapter03/Setup As Admin.py
|
delgadoa1/Python-For-Offensive-Pentest
|
c768d61f15fccc77768eb400b5c0e36bebde9511
|
[
"MIT"
] | 61
|
2018-04-27T16:49:33.000Z
|
2022-03-27T17:59:32.000Z
|
from distutils.core import setup
import py2exe , sys, os
sys.argv.append("py2exe")
setup(
options = {'py2exe': {'bundle_files': 1}},
windows = [{'script': "DNS.py", 'uac_info': "requireAdministrator"}],
zipfile = None,
)
| 17.357143
| 73
| 0.625514
|
from distutils.core import setup
import py2exe , sys, os
sys.argv.append("py2exe")
setup(
options = {'py2exe': {'bundle_files': 1}},
windows = [{'script': "DNS.py", 'uac_info': "requireAdministrator"}],
zipfile = None,
)
| true
| true
|
f701f865b738ef986aac7ad75382e6e9b70325f2
| 6,277
|
py
|
Python
|
pyinfra_cli/inventory.py
|
ryanwersal/pyinfra
|
350c9053953531d1d258512f1e0761879df772fb
|
[
"MIT"
] | 1
|
2022-03-24T05:44:45.000Z
|
2022-03-24T05:44:45.000Z
|
pyinfra_cli/inventory.py
|
marinakravchenko21/pyinfra
|
6e14b039422e00ebc68110eabbc6a3a543c96279
|
[
"MIT"
] | null | null | null |
pyinfra_cli/inventory.py
|
marinakravchenko21/pyinfra
|
6e14b039422e00ebc68110eabbc6a3a543c96279
|
[
"MIT"
] | 1
|
2021-11-12T18:36:01.000Z
|
2021-11-12T18:36:01.000Z
|
from os import listdir, path
from types import GeneratorType
import six
from pyinfra import logger, pseudo_inventory
from pyinfra.api.inventory import Inventory
from pyinfra_cli.util import exec_file
# Hosts in an inventory can be just the hostname or a tuple (hostname, data)
ALLOWED_HOST_TYPES = tuple(
six.string_types + (tuple,),
)
# Group data can be any "core" Python type
ALLOWED_DATA_TYPES = tuple(
six.integer_types
+ (six.text_type, six.binary_type)
+ (bool, dict, list, set, tuple, float, complex),
)
def _is_inventory_group(key, value):
'''
Verify that a module-level variable (key = value) is a valid inventory group.
'''
if (
key.startswith('_')
or not isinstance(value, (list, tuple, GeneratorType))
):
return False
# If the group is a tuple of (hosts, data), check the hosts
if isinstance(value, tuple):
value = value[0]
# Expand any generators of hosts
if isinstance(value, GeneratorType):
value = list(value)
return all(
isinstance(item, ALLOWED_HOST_TYPES)
for item in value
)
def _is_group_data(key, value):
'''
Verify that a module-level variable (key = value) is a valid bit of group data.
'''
return (
isinstance(value, ALLOWED_DATA_TYPES)
and not key.startswith('_')
)
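# --- Hedged example (added for illustration; group names and hostnames are hypothetical) ---
# Given the checks above, an inventory file could define a group either as a
# plain list of hostnames or as a (hosts, data) tuple, with data values limited
# to the core types in ALLOWED_DATA_TYPES, e.g.:
#
#     web_servers = ['web-1.example.com', ('web-2.example.com', {'ssh_port': 2222})]
#     db_servers = (['db-1.example.com'], {'env': 'staging'})
#
# Both shapes satisfy _is_inventory_group, and simple values like these would
# satisfy _is_group_data when placed in a group_data/*.py file.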
def _get_group_data(deploy_dir):
group_data = {}
group_data_directory = path.join(deploy_dir, 'group_data')
if path.exists(group_data_directory):
files = listdir(group_data_directory)
for file in files:
if not file.endswith('.py'):
continue
group_data_file = path.join(group_data_directory, file)
group_name = path.basename(file)[:-3]
logger.debug('Looking for group data in: {0}'.format(group_data_file))
# Read the file's locals into a dict
attrs = exec_file(group_data_file, return_locals=True)
group_data[group_name] = {
key: value
for key, value in six.iteritems(attrs)
if _is_group_data(key, value)
}
return group_data
def _get_groups_from_filename(inventory_filename):
attrs = exec_file(inventory_filename, return_locals=True)
return {
key: value
for key, value in six.iteritems(attrs)
if _is_inventory_group(key, value)
}
def make_inventory(
inventory_filename,
deploy_dir=None,
ssh_port=None,
ssh_user=None,
ssh_key=None,
ssh_key_password=None,
ssh_password=None,
winrm_username=None,
winrm_password=None,
winrm_port=None,
):
'''
Builds a ``pyinfra.api.Inventory`` from the filesystem. If the file does not
exist, the given string is treated as a comma separated list of hostnames.
'''
if ssh_port is not None:
ssh_port = int(ssh_port)
file_groupname = None
# If we're not a valid file we assume a list of comma separated hostnames
if not path.exists(inventory_filename):
groups = {
'all': inventory_filename.split(','),
}
else:
groups = _get_groups_from_filename(inventory_filename)
# Used to set all the hosts to an additional group - that of the filename
# ie inventories/dev.py means all the hosts are in the dev group, if not present
file_groupname = path.basename(inventory_filename).rsplit('.')[0]
all_data = {}
if 'all' in groups:
all_hosts = groups.pop('all')
if isinstance(all_hosts, tuple):
all_hosts, all_data = all_hosts
# Build all out of the existing hosts if not defined
else:
all_hosts = []
for hosts in groups.values():
# Groups can be a list of hosts or tuple of (hosts, data)
hosts = hosts[0] if isinstance(hosts, tuple) else hosts
for host in hosts:
# Hosts can be a hostname or tuple of (hostname, data)
hostname = host[0] if isinstance(host, tuple) else host
if hostname not in all_hosts:
all_hosts.append(hostname)
groups['all'] = (all_hosts, all_data)
# Apply the filename group if not already defined
if file_groupname and file_groupname not in groups:
groups[file_groupname] = all_hosts
# In pyinfra an inventory is a combination of (hostnames + data). However, in CLI
# mode we want to define this in separate files (inventory / group data). The
# issue is we want inventory access within the group data files - but at this point
# we're not ready to make an Inventory. So here we just create a fake one, and
# attach it to pseudo_inventory while we import the data files.
logger.debug('Creating fake inventory...')
fake_groups = {
# In API mode groups *must* be tuples of (hostnames, data)
name: group if isinstance(group, tuple) else (group, {})
for name, group in six.iteritems(groups)
}
fake_inventory = Inventory((all_hosts, all_data), **fake_groups)
pseudo_inventory.set(fake_inventory)
# Get all group data (group_data/*.py)
group_data = _get_group_data(deploy_dir)
# Reset the pseudo inventory
pseudo_inventory.reset()
# For each group load up any data
for name, hosts in six.iteritems(groups):
data = {}
if isinstance(hosts, tuple):
hosts, data = hosts
if name in group_data:
data.update(group_data.pop(name))
# Attach to group object
groups[name] = (hosts, data)
# Loop back through any leftover group data and create an empty (for now)
# group - this is because inventory @connectors can attach arbitrary groups
# to hosts, so we need to support that.
for name, data in six.iteritems(group_data):
groups[name] = ([], data)
return Inventory(
groups.pop('all'),
ssh_user=ssh_user,
ssh_key=ssh_key,
ssh_key_password=ssh_key_password,
ssh_port=ssh_port,
ssh_password=ssh_password,
winrm_username=winrm_username,
winrm_password=winrm_password,
winrm_port=winrm_port,
**groups
), file_groupname and file_groupname.lower()
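# --- Hedged usage sketch (added for illustration; paths and hostnames are hypothetical) ---
# make_inventory returns an (Inventory, group_name) pair, e.g.:
#
#     inventory, group = make_inventory('inventories/dev.py', deploy_dir='.')    # group == 'dev'
#     inventory, group = make_inventory('web-1.example.com,web-2.example.com',
#                                       deploy_dir='.')                          # group is None
#
# The first form reads group definitions from the file plus group_data/*.py;
# the second treats the string as a comma separated host list.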
| 29.890476
| 88
| 0.644416
|
from os import listdir, path
from types import GeneratorType
import six
from pyinfra import logger, pseudo_inventory
from pyinfra.api.inventory import Inventory
from pyinfra_cli.util import exec_file
ALLOWED_HOST_TYPES = tuple(
six.string_types + (tuple,),
)
ALLOWED_DATA_TYPES = tuple(
six.integer_types
+ (six.text_type, six.binary_type)
+ (bool, dict, list, set, tuple, float, complex),
)
def _is_inventory_group(key, value):
if (
key.startswith('_')
or not isinstance(value, (list, tuple, GeneratorType))
):
return False
if isinstance(value, tuple):
value = value[0]
if isinstance(value, GeneratorType):
value = list(value)
return all(
isinstance(item, ALLOWED_HOST_TYPES)
for item in value
)
def _is_group_data(key, value):
return (
isinstance(value, ALLOWED_DATA_TYPES)
and not key.startswith('_')
)
def _get_group_data(deploy_dir):
group_data = {}
group_data_directory = path.join(deploy_dir, 'group_data')
if path.exists(group_data_directory):
files = listdir(group_data_directory)
for file in files:
if not file.endswith('.py'):
continue
group_data_file = path.join(group_data_directory, file)
group_name = path.basename(file)[:-3]
logger.debug('Looking for group data in: {0}'.format(group_data_file))
attrs = exec_file(group_data_file, return_locals=True)
group_data[group_name] = {
key: value
for key, value in six.iteritems(attrs)
if _is_group_data(key, value)
}
return group_data
def _get_groups_from_filename(inventory_filename):
attrs = exec_file(inventory_filename, return_locals=True)
return {
key: value
for key, value in six.iteritems(attrs)
if _is_inventory_group(key, value)
}
def make_inventory(
inventory_filename,
deploy_dir=None,
ssh_port=None,
ssh_user=None,
ssh_key=None,
ssh_key_password=None,
ssh_password=None,
winrm_username=None,
winrm_password=None,
winrm_port=None,
):
if ssh_port is not None:
ssh_port = int(ssh_port)
file_groupname = None
if not path.exists(inventory_filename):
groups = {
'all': inventory_filename.split(','),
}
else:
groups = _get_groups_from_filename(inventory_filename)
# Used to set all the hosts to an additional group - that of the filename
# ie inventories/dev.py means all the hosts are in the dev group, if not present
file_groupname = path.basename(inventory_filename).rsplit('.')[0]
all_data = {}
if 'all' in groups:
all_hosts = groups.pop('all')
if isinstance(all_hosts, tuple):
all_hosts, all_data = all_hosts
# Build all out of the existing hosts if not defined
else:
all_hosts = []
for hosts in groups.values():
# Groups can be a list of hosts or tuple of (hosts, data)
hosts = hosts[0] if isinstance(hosts, tuple) else hosts
for host in hosts:
# Hosts can be a hostname or tuple of (hostname, data)
hostname = host[0] if isinstance(host, tuple) else host
if hostname not in all_hosts:
all_hosts.append(hostname)
groups['all'] = (all_hosts, all_data)
# Apply the filename group if not already defined
if file_groupname and file_groupname not in groups:
groups[file_groupname] = all_hosts
# In pyinfra an inventory is a combination of (hostnames + data). However, in CLI
# mode we want to define this in separate files (inventory / group data). The
# issue is we want inventory access within the group data files - but at this point
# we're not ready to make an Inventory. So here we just create a fake one, and
logger.debug('Creating fake inventory...')
fake_groups = {
name: group if isinstance(group, tuple) else (group, {})
for name, group in six.iteritems(groups)
}
fake_inventory = Inventory((all_hosts, all_data), **fake_groups)
pseudo_inventory.set(fake_inventory)
group_data = _get_group_data(deploy_dir)
pseudo_inventory.reset()
for name, hosts in six.iteritems(groups):
data = {}
if isinstance(hosts, tuple):
hosts, data = hosts
if name in group_data:
data.update(group_data.pop(name))
groups[name] = (hosts, data)
for name, data in six.iteritems(group_data):
groups[name] = ([], data)
return Inventory(
groups.pop('all'),
ssh_user=ssh_user,
ssh_key=ssh_key,
ssh_key_password=ssh_key_password,
ssh_port=ssh_port,
ssh_password=ssh_password,
winrm_username=winrm_username,
winrm_password=winrm_password,
winrm_port=winrm_port,
**groups
), file_groupname and file_groupname.lower()
| true
| true
|
f701f8f535f597b8a4f9c0cbfcd41fb6de407627
| 338
|
py
|
Python
|
testprojects/tests/python/pants/constants_only/test_constants_only.py
|
anthonyjpratti/pants
|
d98e53af6ddd877861231bce8343f8204da0a9d1
|
[
"Apache-2.0"
] | 1
|
2020-08-26T03:30:31.000Z
|
2020-08-26T03:30:31.000Z
|
testprojects/tests/python/pants/constants_only/test_constants_only.py
|
anthonyjpratti/pants
|
d98e53af6ddd877861231bce8343f8204da0a9d1
|
[
"Apache-2.0"
] | 1
|
2021-09-02T21:06:31.000Z
|
2021-09-02T21:06:31.000Z
|
testprojects/tests/python/pants/constants_only/test_constants_only.py
|
anthonyjpratti/pants
|
d98e53af6ddd877861231bce8343f8204da0a9d1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
def test_constants_only():
try:
from pants.constants_only.constants import VALID_IDENTIFIERS # noqa
except ImportError as e:
assert False, 'Failed to correctly generate python package: %s' % e
| 33.8
| 71
| 0.760355
|
def test_constants_only():
try:
from pants.constants_only.constants import VALID_IDENTIFIERS
except ImportError as e:
assert False, 'Failed to correctly generate python package: %s' % e
| true
| true
|
f701f949821e1f0ee7ad7abeb6beb95d4ae102bf
| 409
|
py
|
Python
|
bglcapi/bgapi/user/parse.py
|
edgebr/python-bgapi
|
0aeb525edf605e892b20f5c3fb11269cce0c5bdf
|
[
"MIT"
] | null | null | null |
bglcapi/bgapi/user/parse.py
|
edgebr/python-bgapi
|
0aeb525edf605e892b20f5c3fb11269cce0c5bdf
|
[
"MIT"
] | null | null | null |
bglcapi/bgapi/user/parse.py
|
edgebr/python-bgapi
|
0aeb525edf605e892b20f5c3fb11269cce0c5bdf
|
[
"MIT"
] | null | null | null |
from struct import (unpack_from, calcsize)
from bglcapi.types import MessageType
from . import rsp
from . import evt
PARSE_MAP = {
MessageType.COMMAND_RESPONSE: {
0x00: rsp.message_to_target,
},
MessageType.EVENT: {
0x00: evt.message_to_host,
},
}
def from_binary(msg_type: int, msg_id: int, data: bytes, offset: int):
return PARSE_MAP[msg_type][msg_id](data, offset)
| 20.45
| 70
| 0.691932
|
from struct import (unpack_from, calcsize)
from bglcapi.types import MessageType
from . import rsp
from . import evt
PARSE_MAP = {
MessageType.COMMAND_RESPONSE: {
0x00: rsp.message_to_target,
},
MessageType.EVENT: {
0x00: evt.message_to_host,
},
}
def from_binary(msg_type: int, msg_id: int, data: bytes, offset: int):
return PARSE_MAP[msg_type][msg_id](data, offset)
| true
| true
|
f701f97e1f188d4e04e78e513ce8208e4d9f71ef
| 1,360
|
py
|
Python
|
deploy.py
|
blockchainhelppro/CelvinRost
|
aa2661747d06e4610928466521e4da1db77aeadc
|
[
"MIT"
] | 2
|
2018-08-15T21:27:59.000Z
|
2018-08-21T17:56:12.000Z
|
deploy.py
|
blockchainhelppro/CelvinRost
|
aa2661747d06e4610928466521e4da1db77aeadc
|
[
"MIT"
] | null | null | null |
deploy.py
|
blockchainhelppro/CelvinRost
|
aa2661747d06e4610928466521e4da1db77aeadc
|
[
"MIT"
] | 1
|
2021-12-06T04:03:32.000Z
|
2021-12-06T04:03:32.000Z
|
import itertools
import toposort
from populus.utils.contracts import (
compute_direct_dependency_graph,
compute_recursive_contract_dependencies,
)
def compute_deploy_order(dependency_graph):
"""
Given a dictionary that maps contracts to their dependencies,
determine the overall dependency ordering for that set of contracts.
"""
return toposort.toposort_flatten(dict(dependency_graph))
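# --- Hedged example (added for illustration; contract names are hypothetical) ---
# toposort_flatten emits dependencies before their dependents, so a library
# shared by two contracts is ordered first:
#
#     compute_deploy_order({'A': {'SafeMath'}, 'B': {'SafeMath', 'A'}, 'SafeMath': set()})
#     # -> ['SafeMath', 'A', 'B']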
def get_deploy_order(contracts_to_deploy, compiled_contracts):
# Extract any dependencies that exist due to library linking.
dependency_graph = compute_direct_dependency_graph(compiled_contracts.values())
global_deploy_order = compute_deploy_order(dependency_graph)
# Compute the full set of dependencies needed to deploy the desired
# contracts.
all_deploy_dependencies = set(itertools.chain.from_iterable(
compute_recursive_contract_dependencies(contract_name, dependency_graph)
for contract_name in contracts_to_deploy
))
all_contracts_to_deploy = all_deploy_dependencies.union(contracts_to_deploy)
# Now compute the order that the contracts should be deployed based on
# their dependencies.
deploy_order = tuple(
contract_name
for contract_name
in global_deploy_order
if contract_name in all_contracts_to_deploy
)
return deploy_order
| 33.170732
| 83
| 0.772794
|
import itertools
import toposort
from populus.utils.contracts import (
compute_direct_dependency_graph,
compute_recursive_contract_dependencies,
)
def compute_deploy_order(dependency_graph):
return toposort.toposort_flatten(dict(dependency_graph))
def get_deploy_order(contracts_to_deploy, compiled_contracts):
dependency_graph = compute_direct_dependency_graph(compiled_contracts.values())
global_deploy_order = compute_deploy_order(dependency_graph)
all_deploy_dependencies = set(itertools.chain.from_iterable(
compute_recursive_contract_dependencies(contract_name, dependency_graph)
for contract_name in contracts_to_deploy
))
all_contracts_to_deploy = all_deploy_dependencies.union(contracts_to_deploy)
deploy_order = tuple(
contract_name
for contract_name
in global_deploy_order
if contract_name in all_contracts_to_deploy
)
return deploy_order
| true
| true
|
f701f9d8c81d5e989359e6483b6e3216e93ad459
| 28,601
|
py
|
Python
|
machine_learning_model.py
|
prakass1/InteractiveSimilarityExplorer
|
2fa5fb91c7df6424b9ed777ef4373ed7094c2348
|
[
"MIT"
] | null | null | null |
machine_learning_model.py
|
prakass1/InteractiveSimilarityExplorer
|
2fa5fb91c7df6424b9ed777ef4373ed7094c2348
|
[
"MIT"
] | null | null | null |
machine_learning_model.py
|
prakass1/InteractiveSimilarityExplorer
|
2fa5fb91c7df6424b9ed777ef4373ed7094c2348
|
[
"MIT"
] | null | null | null |
import utility
import static_sim_functions as smf
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import *
from time_series_grp import TimeSeriesGroupProcessing
from RandomNeighbors import RandomNeighbors
from sklearn.neighbors import NearestNeighbors
from sklearn.model_selection import KFold
import ml_modelling_ts as ml_ts
'''
A run of the prediction approaches using the described methodologies; saves the neighborhood for the UI.
'''
def common_processing(df):
# Getting percentage between 0 to 1 rather than score values
df["tschq12"] = df["tschq12"].apply(lambda x: x / 100)
df["tschq16"] = df["tschq16"].apply(lambda x: x / 100)
df["tschq17"] = df["tschq17"].apply(lambda x: x / 100)
# Feature engineering family history
df["tschq04"] = df.apply(smf.create_cols_family_hist, axis=1)
return df
def get_common_cols(col1, col2):
common_elements = set(col1).intersection(col2)
return common_elements
import properties
import pandas as pd
def initial_processing():
# Read the csv of the tschq data and make the necessary things
tschq = pd.read_pickle(properties.data_location + "/input_pckl/" + "3_q.pckl")
# Clean the tschq05 question: where a row holds an abstracted (integer) value, substitute the most common value.
def filter_age(x):
if isinstance(x, int):
# Append the most common value obtained
return tschq["tschq05"].value_counts().head(1).index[0]
else:
return x
tschq["tschq05"] = tschq["tschq05"].apply(filter_age)
# Drop the questionnaire_id and created_at
tschq.drop(["questionnaire_id", "created_at"], axis=1, inplace=True)
# Lets read and join two questionnaires tschq and hq
hq = pd.read_pickle("data/input_pckl/4_q.pckl")
hq.isna().sum(axis=0)
# By looking at the output we are sure that hq05 and hq06 do not contribute much and can be dropped
hq.drop(["hq05", "hq06"], axis=1, inplace=True)
hq_df = hq.set_index("user_id")
df = tschq.join(hq_df.iloc[:, 2:], on="user_id")
drop_cols = ["tschq01", "tschq25", "tschq07-2",
"tschq13", "tschq04-1", "tschq04-2"]
# Getting percentage between 0 to 1 rather than score values
df["tschq12"] = df["tschq12"].apply(lambda x: x / 100)
df["tschq16"] = df["tschq16"].apply(lambda x: x / 100)
df["tschq17"] = df["tschq17"].apply(lambda x: x / 100)
df["tschq04"] = df.apply(smf.create_cols_family_hist, axis=1)
df.drop(drop_cols, axis=1, inplace=True)
# Set the heom object, while using the required similarity
# Alternative
# Categorical boolean mask
categorical_feature_mask = df.iloc[:, 1:].infer_objects().dtypes == object
other_feature_mask = df.iloc[:, 1:].infer_objects().dtypes != object
# filter categorical columns using mask and turn it into a list
categorical_cols = df.iloc[:, 1:].columns[categorical_feature_mask].tolist()
num_cols = df.iloc[:, 1:].columns[other_feature_mask].tolist()
cat_idx = [df.iloc[:, 1:].columns.get_loc(val) for val in categorical_cols]
num_idx = [df.iloc[:, 1:].columns.get_loc(val) for val in num_cols]
return cat_idx, num_idx, df
import os
import traceback
def save_data_objs(df, quest_cmbs="all"):
try:
if not os.path.isdir(properties.model_location + quest_cmbs):
os.makedirs(properties.model_location + quest_cmbs)
utility.save_model("".join(quest_cmbs + "/" + quest_cmbs + "_stat_q_data"), df)
encoded_combined_df = smf.preprocess(df, quest_cmbs, age_bin=False,
process_model_name="".join(quest_cmbs + "/" +
quest_cmbs + "_stat_q_data_oe_model"),
prediction=False)
# Save this encoded_data
utility.save_model("".join(quest_cmbs + "/" +
quest_cmbs + "_stat_q_data_encoded"), encoded_combined_df)
return encoded_combined_df
# Use this data to build the data over static data.
except Exception:
print(traceback.print_exc())
def weighted_average(distress_list):
average = np.asarray(distress_list, dtype=float).mean()
return average
# Function computes the weighted average as predictions for given prediction time point
def compute_weighted_avg(n_idx, encoded_data, pred_at_list, method="mean", dist_nn=None, wt_flag=False):
preds = list()
# Prediction for four time points
for pval in pred_at_list:
distress_list = list()
for vals in n_idx:
u_id = encoded_data["user_id"].iloc[vals]
user_ts = tsg_data.get_usr_mday_ts_predict(int(u_id))
# Column index 3 of the neighbor's series is s03 (tinnitus distress)
print("{}, {} Values ".format(int(pval), int(u_id)))
if len(user_ts) > int(pval):
value = user_ts[int(pval), :][3]
elif len(user_ts) <= int(pval):
value = user_ts[len(user_ts)-1, :][3]
distress_list.append(value)
if wt_flag:
print("Calling by weighted distance prediction for distress")
preds.append(weighted_distance_prediction(distress_list, dist_nn))
else:
print("Calling weighted average to predict distress")
preds.append(weighted_average(distress_list))
return preds
def weighted_distance_prediction(p_preds, distance):
# Inverse distance, so that the highest weight goes to the nearest neighbor and the least to the farthest (see the worked example after this function)
inv_dist = np.divide(1, distance)
#s03 - tinnitus distress weighted by distance is given as
s03_pred = (np.sum(np.multiply(p_preds, inv_dist)) / (np.sum(inv_dist)))
return s03_pred
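# --- Hedged worked example (illustrative numbers, not taken from the study data) ---
# With neighbor predictions p_preds = [0.2, 0.6] at distances [1.0, 3.0], the
# inverse-distance weights are [1.0, 1/3], so
# s03_pred = (0.2 * 1.0 + 0.6 * (1/3)) / (1.0 + 1/3) = 0.4 / 1.333... = 0.3
# i.e. the nearer neighbor dominates the estimate, as intended.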
def compute(test_nn, encoded_data,
pred_list, method="mean", dist_nn=None, wt_dist=False):
from sklearn.linear_model import LinearRegression
preds = list()
for point in pred_list:
nn_preds = list()
intercepts_list = list()
coeff_list = list()
for nn in test_nn:
u_id = encoded_data["user_id"].iloc[nn]
user_ts = tsg_data.get_usr_mday_ts_predict(int(u_id))
# Obtain the time series until time point and fit the data for linear regression
diff_arr = np.abs(np.subtract(point, user_ts[:, 1]))
diff_near_idx = np.where(diff_arr == diff_arr.min())
print("minimum to the time point is at -- ", diff_near_idx)
# Index of the nearest observation; handles users whose series is shorter than the reference point
usr_idx = diff_near_idx[0][0]
user_ts_p = user_ts[:usr_idx]
user_ts_df = pd.DataFrame(user_ts_p, columns=["day", "day_sess_index",
"s02", "s03", "s04",
"s05", "s06", "s07"])
X = user_ts_df[["day_sess_index"]]
# We show for tinnitus distress. This can be extended to other physiological variables as well.
y = user_ts_df[["s03"]]
# Fit on X axis as time and Y as the s03 predictive value.
reg_fit = LinearRegression(normalize=True)
reg_fit.fit(X, y)
# If weighted_distance is true, then predict by each of the nn_user and add to list. This will be used for
# calculating weighted_distance_predictions.
if wt_dist:
nn_pred = reg_fit.predict(np.asarray(point).reshape(1, -1))
nn_preds.append(nn_pred[0][0])
else:
intercepts_list.append(reg_fit.intercept_)
coeff_list.append(reg_fit.coef_)
if wt_dist:
print("Predicting the value of s03 for the user by a weighted average weighted by distance")
preds.append(weighted_distance_prediction(nn_preds, dist_nn))
else:
print("Predicting the value of s3 over the averaged slope and intercepts of "
"observations of the neighbors")
# y = mx + c, where m is the average slope of the neighbors and c is the average intercept obtained.
print("The equation to estimate s03 for the user is {}".format("".join(str(np.asarray(coeff_list).mean())) +
"* time_index + " +
str(np.asarray(intercepts_list).mean())))
y = np.multiply(np.asarray(coeff_list).mean(), point) + np.asarray(intercepts_list).mean()
preds.append(y)
return preds
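# --- Hedged worked example (illustrative numbers only) ---
# In the non-weighted branch above, the estimate pools the neighbors' fits:
# with slopes [0.02, 0.04] and intercepts [0.3, 0.5] at reference point = 10,
# y = mean([0.02, 0.04]) * 10 + mean([0.3, 0.5]) = 0.03 * 10 + 0.4 = 0.7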
def compute_linear_regression(test_nn, encoded_data, pred_list, method="mean"):
#test_nn = test_user_nn
#pred_list = prediction_at_list
from sklearn.linear_model import LinearRegression
preds = list()
for point in pred_list:
attr_list = list()
intercepts_list = list()
coeff_list = list()
for nn in test_nn:
u_id = encoded_data["user_id"].iloc[nn]
user_ts = tsg_data.get_m_day_ts_enumerate(int(11))
diff_arr = np.abs(np.subtract(point, user_ts[:, 1]))
diff_near_idx = np.where(diff_arr == diff_arr.min())
print(diff_near_idx)
# difference near index
usr_vals = np.array([user_ts[n_id] for n_id in diff_near_idx[0]])
if len(usr_vals) > 1:
value = usr_vals.mean(axis=0)
print("vavg" + str(value))
else:
value = usr_vals[0]
print("v" + str(value))
attr_list.append(value)
df = pd.DataFrame(user_ts)
df.columns = ["day", "day_session_id",
"s02", "s03",
"s04", "s05",
"s06", "s07"]
reg_model = LinearRegression(normalize=True)
user_x = df[["day_session_id", "s04", "s05", "s06"]].to_numpy()
user_s03 = df[["s03"]].to_numpy().ravel()
reg_model.fit(user_x, user_s03)
intercepts_list.append(reg_model.intercept_)
coeff_list.append(reg_model.coef_)
# y = mx + c, where m is the average slope of the neighbors and c is the average intercept obtained.
# convert coeff's to numpy for manipulations
numpy_attr_list = np.array(attr_list)
print(numpy_attr_list)
avg_np_attr_list = numpy_attr_list[:, 4:].mean(axis=0)
print(avg_np_attr_list)
numpy_coeff_list = np.array(coeff_list)
print(numpy_coeff_list)
print(numpy_coeff_list.mean(axis=0))
# day_session_id, s04, s05, s06 - use only the fitted independent features to estimate the dependent s03
y = np.multiply(numpy_coeff_list[:, 0].mean(), point) + \
np.multiply(numpy_coeff_list[:, 1].mean(), avg_np_attr_list[0]) + \
np.multiply(numpy_coeff_list[:, 2].mean(), avg_np_attr_list[1]) + \
np.multiply(numpy_coeff_list[:, 3].mean(), avg_np_attr_list[2]) + \
np.asarray(intercepts_list).mean()
preds.append(y)
print(preds)
return preds
# Create test label as ground truth at prediction point.
def create_y_labels(test_data, prediction_at, method="mean"):
y_test = list()
for i in range(0, len(test_data)):
test_ts_test1 = tsg_data.get_usr_mday_ts_predict(int(test_data.iloc[i]["user_id"]))
# print(len(test_ts_test1))
if len(test_ts_test1) >= prediction_at:
y_test.append(test_ts_test1[prediction_at - 1][2])
elif len(test_ts_test1) < prediction_at:
y_test.append(test_ts_test1[len(test_ts_test1) - 1][2])
return y_test
# Create reference points for multiple reference predictions
def get_pred_ref_points(user_id, ndays, method="mean"):
# Using the default tsg which is mean observations of the user
test_user_ts = tsg_data.get_usr_mday_ts_predict(user_id)
user_ts_idx = test_user_ts[:, 1]
# ["date", "time_idx", "s02", "s03", "s04", "s05", "s06", "s07]
user_distress = test_user_ts[:, 3]
# Near evaluation. Change this for farther evaluations
# Near -> 0.20, 0.10
# Far -> 1 - (Near)
# Near points are taken from the observed part of the sequence, since all users are known to have stayed until here.
#prediction_at = 10
# Far prediction point is the last N% of the test user time series
# It is tested for 0.75, 0.8, 0.9
prediction_at = round(len(user_ts_idx) * 0.80)
y_labels = user_distress[prediction_at:prediction_at + ndays].tolist()
prediction_at_list = user_ts_idx[prediction_at:prediction_at + ndays].tolist()
return y_labels, prediction_at_list
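# do_test: for every test user, derive the forecast reference points, predict s03 with both the
# (optionally distance-weighted) neighbor average and the neighbor linear regressions, and log
# the RMSE against the observed values to the text and csv writers.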
def do_test(test_data, out_writer, csv_out_writer,
ndays, near_idxs, encoded_data, fold_count="final",
method="mean", dist_nn=None, wt_dist_flag=False):
for i in range(0, len(test_data)):
user_id = int(test_data.iloc[i]["user_id"])
print("User- Id ", user_id)
y_labels, prediction_at_list = get_pred_ref_points(user_id, ndays, method=method)
# y_labels = create_y_labels(X_test, preds, method="mean")
# Weighting by inverse of neighbor
if wt_dist_flag:
test_user_nn = near_idxs[i]
test_user_dist = dist_nn[i]
pred_weighted_average = compute_weighted_avg(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=test_user_dist, wt_flag=wt_dist_flag)
pred_lr = compute(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=test_user_dist, wt_dist=wt_dist_flag)
else:
test_user_nn = near_idxs[i]
pred_weighted_average = compute_weighted_avg(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=None, wt_flag=False)
pred_lr = compute(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=None, wt_dist=False)
# calculate
if not fold_count == "final":
print("Evaluating for the fold-" + str(fold_count) + " for the forecast reference points - " +
str(prediction_at_list))
out_writer.write("Evaluating for the forecast reference points -- " +
str(prediction_at_list) + "for the method evaluation -- " + str(method) + "\n")
else:
print("Evaluating for forecast reference points - " +
str(prediction_at_list))
out_writer.write("Evaluating over the forecast reference points -- " +
str(prediction_at_list) + "for the method evaluation -- " + str(method) + "\n")
print("Computing RMSE for weighted average based predictions on the User -- " + str(user_id))
print("---------------------------------------------------------------")
out_writer.write("---------------------------------------------------------------\n")
print("RMSE -- ", np.sqrt(mean_squared_error(y_labels, pred_weighted_average)))
out_writer.write("RMSE -- " + str(np.sqrt(mean_squared_error(y_labels, pred_weighted_average))) + "\n")
# Writing to csv file
if not fold_count == "final":
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_weighted_average))) + "," +
"weighted_average" + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_weighted_average[0]) + "," + str(pred_weighted_average[1])
+ "," + str(pred_weighted_average[2]) + "\n"))
else:
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_weighted_average))) + "," +
"weighted_average" + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_weighted_average[0]) + "," + str(pred_weighted_average[1])
+ "," + str(pred_weighted_average[2]) + "\n"))
print("-----------------------------------------------------------------------------")
out_writer.write("---------------------------------------------------------------\n")
print("Computing RMSE for {} {} based predictions for the user -- {}"
.format(str("weighted_distance" + str(wt_dist_flag)), str("linear_regression"), str(user_id)))
out_writer.write("Computing RMSE for {} {} based predictions for the user -- {} \n"
.format(str("weighted_distance" + str(wt_dist_flag)), str("linear_regression"), str(user_id)))
print("RMSE -- ", np.sqrt(mean_squared_error(y_labels, pred_lr)))
out_writer.write("RMSE -- " + str(np.sqrt(mean_squared_error(y_labels, pred_lr))) + "\n")
print("---------------------------------------------------------------")
out_writer.write("---------------------------------------------------------------\n")
# Write to csv file
if not fold_count == "final":
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_lr))) + "," +
str("lr") + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_lr[0]) + "," + str(pred_lr[1]) + "," + str(
pred_lr[2]) + "\n"))
else:
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_lr))) + "," +
str("lr") + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_lr[0]) + "," + str(pred_lr[1]) + "," + str(
pred_lr[2]) + "\n"))
# Configuration: change the method below and re-execute to obtain the corresponding predictions.
eval_method = "mean"
# Daily readings for all test users default to the mean aggregation; predictions can be made over min - mean - max
tsg_data = TimeSeriesGroupProcessing(method=eval_method)
# For all combinations evaluation it must be set to True
quest_cmb_all = False
# Same random state needs to be maintained to get consistent test data over all combinations and repeatable results
random_state = 1220
# Number of points ahead at which tinnitus distress is predicted; 3 means 3 observation days ahead.
# The gap between points is at least a day and at most about 60 days, which is not a usual scenario.
ndays = 3
# Build the default NN with all the combination.
if not quest_cmb_all:
for key, val in properties.quest_comb.items():
# Build NN for each category
print("Building NN for the question combination -- " + str(key))
cat_idx, num_idx, combined_df = smf.initial_processing(key, val, append_synthethic=False)
# Build and get the knn NN for prediction over test instances.
# Save the data objs
encoded_data = save_data_objs(combined_df, key)
out_writer = open("".join("output/output_" + str(key) + "_" + str(eval_method) + "_heom_norm.txt"), "w+")
csv_out_writer = open("".join("output/output_" + str(key) + "_" + str(eval_method) + "_heom_norm.csv"), "w+")
csv_out_writer.write("".join("user_id,rmse,algorithm,"
"ref_p1,ref_p2,ref_p3,pred_p1,pred_p2,pred_p3\n"))
#Create a test set
X, test = train_test_split(encoded_data,
test_size=0.20,
random_state=random_state)
def filter_train_ids(x):
# print(x)
if x["user_id"] in train_user_ids:
return x
def filter_test_ids(x):
# print(x)
if x["user_id"] in test_user_ids:
return x
train_user_ids = X["user_id"].to_list()
X_train_data_ui = combined_df.apply(filter_train_ids, axis=1, result_type="broadcast").dropna()
X_train_data_ui["user_id"] = X_train_data_ui["user_id"].apply(int)
# Save the non encoded train data for visualization purposes
utility.save_model("".join(key + "/" + key + "_train_stat_q_data"), X_train_data_ui)
# filter and get the data to show to the UI for the test data.
test_user_ids = test["user_id"].to_list()
X_test_data_ui = combined_df.apply(filter_test_ids, axis=1, result_type="broadcast").dropna()
X_test_data_ui["user_id"] = X_test_data_ui["user_id"].apply(int)
# Save the data_ui object as json
#test_data = {}
#test_data["users"] = X_test_data_ui.to_dict("r")
#utility.save_data("".join("test_data_ui_" + key), test_data)
from HEOM import HEOM
# Can be done at prediction too.
from sklearn.metrics.pairwise import cosine_distances
from sklearn.linear_model import LinearRegression
from scipy.spatial.distance import pdist, squareform
from scipy.stats import zscore
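# Pairwise HEOM distances between the training instances; the mean distance is used as the
# neighborhood radius of the NearestNeighbors index that retrieves the 5 closest training
# users for every test user.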
heom = HEOM(X.to_numpy(), cat_idx, num_idx)
sim_matrix = pdist(X.to_numpy()[:, 1:], heom.heom_distance)
mean_heom_distance = sim_matrix.mean()
knn = NearestNeighbors(n_neighbors=5, metric=heom.heom_distance, radius=mean_heom_distance)
knn.fit(X.iloc[:, 1:])
dist, test_idx = knn.kneighbors(test.to_numpy()[:, 1:], n_neighbors=5)
# Execute without any varying for saving the KNN as pickle to be used by UI
do_test(test, out_writer, csv_out_writer, ndays, test_idx, X,
fold_count="final", method=eval_method, dist_nn=None, wt_dist_flag=False)
utility.save_model("".join(key + "/" + "knn_static"), knn)
utility.save_model("".join(key + "/" + "train_sim_data.pckl"), X)
out_writer.close()
csv_out_writer.close()
# All feature combinations
cat_idx, num_idx, combined_df = initial_processing()
# Build KNN for each category
print("Building KNN for the question combination -- " + str("overall"))
# Save the data objs
encoded_data = save_data_objs(combined_df, "overall")
X, test = train_test_split(encoded_data,
test_size=0.20,
random_state=random_state)
def filter_train_ids(x):
# print(x)
if x["user_id"] in train_user_ids:
return x
def filter_test_ids(x):
# print(x)
if x["user_id"] in test_user_ids:
return x
train_user_ids = X["user_id"].to_list()
X_train_data_ui = combined_df.apply(filter_train_ids, axis=1, result_type="broadcast").dropna()
X_train_data_ui["user_id"] = X_train_data_ui["user_id"].apply(int)
# Save in overall.
utility.save_model("".join("overall" + "/" + "overall" + "_train_stat_q_data"), X_train_data_ui)
# filter and get the data to show to the UI for the test data.
test_user_ids = test["user_id"].to_list()
X_test_data_ui = combined_df.apply(filter_test_ids, axis=1, result_type="broadcast").dropna()
X_test_data_ui["user_id"] = X_test_data_ui["user_id"].apply(int)
# Save the data_ui object as json
test_data = {}
test_data["users"] = X_test_data_ui.to_dict("r")
utility.save_data("test_data_ui_x_test", test_data)
# Save the results to out_writer
out_writer = open("output/overall_output_folds_" + str(eval_method) + ".txt", "w+")
csv_out_writer = open("output/overall_output_folds_" + str(eval_method) + ".csv", "w+")
# First get the time series for a given test patient and the reference point and iterate to evaluate
csv_out_writer.write("user_id,rmse,algorithm,"
"ref_p1,ref_p2,ref_p3,pred_p1,pred_p2,pred_p3\n")
# Split the data into train and test
from sklearn.model_selection import train_test_split
import utility
from HEOM import HEOM
#Can be done at prediction too.
from sklearn.metrics.pairwise import cosine_distances
from sklearn.linear_model import LinearRegression
from scipy.spatial.distance import pdist, squareform
from scipy.stats import zscore
heom = HEOM(X.to_numpy()[:, 1:], cat_idx, num_idx)
sim_matrix = pdist(X.to_numpy()[:, 1:], heom.heom_distance)
mean_heom_distance = sim_matrix.mean()
knn = NearestNeighbors(n_neighbors=5, metric=heom.heom_distance, radius=mean_heom_distance)
knn.fit(X.to_numpy()[:, 1:])
dist, idx_test = knn.kneighbors(test.to_numpy()[:, 1:], n_neighbors=5)
# First get the time series for a given test patient and the reference point and iterate to evaluate
do_test(test, out_writer, csv_out_writer, ndays, idx_test, X,
fold_count="final", method=eval_method, dist_nn=None, wt_dist_flag=False)
out_writer.close()
csv_out_writer.close()
# End save the nearest neighbor as data objects, so that can be used from the UI
utility.save_model("".join("overall/" + "knn_static"), knn)
utility.save_model("".join("overall" + "/" + "train_sim_data.pckl"), X)
'''
ML modelling based on s02 - loudness.
Note: This has to be run after the all-feature execution has completed, since we build upon a custom similarity matrix;
the same train/test split must be used so that the results can be verified from the application.
'''
# Create train and test containing same users in train and test as per static data. (Note: Run above code and then this
# because same set of train test users are used)
def splitData(dataset, test_user_ids):
train_data = dataset[~dataset["user_id"].isin(test_user_ids)]
test_data = dataset[dataset["user_id"].isin(test_user_ids)]
return train_data, test_data
# Save both train and test matrix
def save_ts_objs(train, test, location_name):
try:
if not os.path.isdir(properties.model_location + location_name):
os.makedirs(properties.model_location + location_name)
utility.save_model("".join(location_name + "/" + location_name + "_train_data"), train)
utility.save_model("".join(location_name + "/" + location_name + "_test_data"), test)
except Exception:
print(traceback.print_exc())
X = ml_ts.process_data(grouping="day")
# Calculate pairwise distance and create a dataframe for the same
from scipy.spatial.distance import pdist, squareform
# Cross validate here based on the same split of static data here.
# Note: Only one combination will be present
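# C[i][j] holds the pairwise distance between the EMA time series of user i and user j and is
# later used as a precomputed distance matrix for the NearestNeighbors search.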
C = np.zeros((X.shape[0], X.shape[0]))
for i in range(0, len(X)):
for j in range(0, len(X)):
dist = ml_ts.compute_dist(X[:, 1][i], X[:, 1][j])
C[i][j] = dist
C_df = pd.DataFrame(C)
#C_df.to_csv("sim_ema.csv")
# Threshold overall distance for making within radius
threshold_distance = sum(C_df.mean())/len(C_df)
user_ids = []
for val in X:
user_ids.append(val[0])
C_df["user_id"] = user_ids
train_data, test_data = splitData(C_df, test_user_ids)
# Save the time series data objects as dynamic_ts into model folder
save_ts_objs(train_data, test_data, "dynamic_ts")
out_writer = open("".join("output/output_ema_" + str(eval_method) + "_.txt"), "w+")
csv_out_writer = open("".join("output/output_ema_" + str(eval_method) + "_.csv"), "w+")
csv_out_writer.write("user_id,rmse,algorithm,"
"ref_p1,ref_p2,ref_p3,pred_p1,pred_p2,pred_p3\n")
# Test on the final test set. Note there is no varying K just to save the NN here.
# It should be noted that we use NearestNeighbors and not a k-nearest-neighbors classifier.
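# With metric="precomputed", fit() receives the train x train block of the distance matrix and
# kneighbors() the test x train block (columns restricted to the train index).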
knn_ema = NearestNeighbors(n_neighbors=5, metric="precomputed", radius=threshold_distance)
knn_ema.fit(train_data[train_data.index])
ema_dist, ema_idx = knn_ema.kneighbors(test_data[train_data.index], n_neighbors=5)
# First get the time series for a given test patient and the reference point and iterate to evaluate
do_test(test_data, out_writer, csv_out_writer, ndays, ema_idx, encoded_data,
fold_count="final", method=eval_method, dist_nn=None, wt_dist_flag=False)
# Close the writers
out_writer.close()
csv_out_writer.close()
# Save the similarity search index KNN
utility.save_model("".join("dynamic_ts" + "/" + "dynamic_ts" + "_knn"), knn_ema)
| 43.009023
| 120
| 0.619908
|
import utility
import static_sim_functions as smf
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import *
from time_series_grp import TimeSeriesGroupProcessing
from RandomNeighbors import RandomNeighbors
from sklearn.neighbors import NearestNeighbors
from sklearn.model_selection import KFold
import ml_modelling_ts as ml_ts
def common_processing(df):
df["tschq12"] = df["tschq12"].apply(lambda x: x / 100)
df["tschq16"] = df["tschq16"].apply(lambda x: x / 100)
df["tschq17"] = df["tschq17"].apply(lambda x: x / 100)
df["tschq04"] = df.apply(smf.create_cols_family_hist, axis=1)
return df
def get_common_cols(col1, col2):
common_elements = set(col1).intersection(col2)
return common_elements
import properties
import pandas as pd
def initial_processing():
tschq = pd.read_pickle(properties.data_location + "/input_pckl/" + "3_q.pckl")
def filter_age(x):
if isinstance(x, int):
return tschq["tschq05"].value_counts().head(1).index[0]
else:
return x
tschq["tschq05"] = tschq["tschq05"].apply(filter_age)
tschq.drop(["questionnaire_id", "created_at"], axis=1, inplace=True)
hq = pd.read_pickle("data/input_pckl/4_q.pckl")
hq.isna().sum(axis=0)
hq.drop(["hq05", "hq06"], axis=1, inplace=True)
hq_df = hq.set_index("user_id")
df = tschq.join(hq_df.iloc[:, 2:], on="user_id")
drop_cols = ["tschq01", "tschq25", "tschq07-2",
"tschq13", "tschq04-1", "tschq04-2"]
df["tschq12"] = df["tschq12"].apply(lambda x: x / 100)
df["tschq16"] = df["tschq16"].apply(lambda x: x / 100)
df["tschq17"] = df["tschq17"].apply(lambda x: x / 100)
df["tschq04"] = df.apply(smf.create_cols_family_hist, axis=1)
df.drop(drop_cols, axis=1, inplace=True)
categorical_feature_mask = df.iloc[:, 1:].infer_objects().dtypes == object
other_feature_mask = df.iloc[:, 1:].infer_objects().dtypes != object
categorical_cols = df.iloc[:, 1:].columns[categorical_feature_mask].tolist()
num_cols = df.iloc[:, 1:].columns[other_feature_mask].tolist()
cat_idx = [df.iloc[:, 1:].columns.get_loc(val) for val in categorical_cols]
num_idx = [df.iloc[:, 1:].columns.get_loc(val) for val in num_cols]
return cat_idx, num_idx, df
import os
import traceback
def save_data_objs(df, quest_cmbs="all"):
try:
if not os.path.isdir(properties.model_location + quest_cmbs):
os.makedirs(properties.model_location + quest_cmbs)
utility.save_model("".join(quest_cmbs + "/" + quest_cmbs + "_stat_q_data"), df)
encoded_combined_df = smf.preprocess(df, quest_cmbs, age_bin=False,
process_model_name="".join(quest_cmbs + "/" +
quest_cmbs + "_stat_q_data_oe_model"),
prediction=False)
utility.save_model("".join(quest_cmbs + "/" +
quest_cmbs + "_stat_q_data_encoded"), encoded_combined_df)
return encoded_combined_df
except Exception:
print(traceback.print_exc())
def weighted_average(distress_list):
average = np.asarray(distress_list, dtype=float).mean()
return average
def compute_weighted_avg(n_idx, encoded_data, pred_at_list, method="mean", dist_nn=None, wt_flag=False):
preds = list()
for pval in pred_at_list:
distress_list = list()
for vals in n_idx:
u_id = encoded_data["user_id"].iloc[vals]
user_ts = tsg_data.get_usr_mday_ts_predict(int(u_id))
print("{}, {} Values ".format(int(pval), int(u_id)))
if len(user_ts) > int(pval):
value = user_ts[int(pval), :][3]
elif len(user_ts) <= int(pval):
value = user_ts[len(user_ts)-1, :][3]
distress_list.append(value)
if wt_flag:
print("Calling by weighted distance prediction for distress")
preds.append(weighted_distance_prediction(distress_list, dist_nn))
else:
print("Calling weighted average to predict distress")
preds.append(weighted_average(distress_list))
return preds
def weighted_distance_prediction(p_preds, distance):
inv_dist = np.divide(1, distance)
s03_pred = (np.sum(np.multiply(p_preds, inv_dist)) / (np.sum(inv_dist)))
return s03_pred
def compute(test_nn, encoded_data,
pred_list, method="mean", dist_nn=None, wt_dist=False):
from sklearn.linear_model import LinearRegression
preds = list()
for point in pred_list:
nn_preds = list()
intercepts_list = list()
coeff_list = list()
for nn in test_nn:
u_id = encoded_data["user_id"].iloc[nn]
user_ts = tsg_data.get_usr_mday_ts_predict(int(u_id))
diff_arr = np.abs(np.subtract(point, user_ts[:, 1]))
diff_near_idx = np.where(diff_arr == diff_arr.min())
print("minimum to the time point is at -- ", diff_near_idx)
usr_idx = diff_near_idx[0][0]
user_ts_p = user_ts[:usr_idx]
user_ts_df = pd.DataFrame(user_ts_p, columns=["day", "day_sess_index",
"s02", "s03", "s04",
"s05", "s06", "s07"])
X = user_ts_df[["day_sess_index"]]
y = user_ts_df[["s03"]]
reg_fit = LinearRegression(normalize=True)
reg_fit.fit(X, y)
if wt_dist:
nn_pred = reg_fit.predict(np.asarray(point).reshape(1, -1))
nn_preds.append(nn_pred[0][0])
else:
intercepts_list.append(reg_fit.intercept_)
coeff_list.append(reg_fit.coef_)
if wt_dist:
print("Predicting the value of s03 for the user by a weighted average weighted by distance")
preds.append(weighted_distance_prediction(nn_preds, dist_nn))
else:
print("Predicting the value of s3 over the averaged slope and intercepts of "
"observations of the neighbors")
print("The equation to estimate s03 for the user is {}".format("".join(str(np.asarray(coeff_list).mean())) +
"* time_index + " +
str(np.asarray(intercepts_list).mean())))
y = np.multiply(np.asarray(coeff_list).mean(), point) + np.asarray(intercepts_list).mean()
preds.append(y)
return preds
def compute_linear_regression(test_nn, encoded_data, pred_list, method="mean"):
from sklearn.linear_model import LinearRegression
preds = list()
for point in pred_list:
attr_list = list()
intercepts_list = list()
coeff_list = list()
for nn in test_nn:
u_id = encoded_data["user_id"].iloc[nn]
user_ts = tsg_data.get_m_day_ts_enumerate(int(u_id))
diff_arr = np.abs(np.subtract(point, user_ts[:, 1]))
diff_near_idx = np.where(diff_arr == diff_arr.min())
print(diff_near_idx)
usr_vals = np.array([user_ts[n_id] for n_id in diff_near_idx[0]])
if len(usr_vals) > 1:
value = usr_vals.mean(axis=0)
print("vavg" + str(value))
else:
value = usr_vals[0]
print("v" + str(value))
attr_list.append(value)
df = pd.DataFrame(user_ts)
df.columns = ["day", "day_session_id",
"s02", "s03",
"s04", "s05",
"s06", "s07"]
reg_model = LinearRegression(normalize=True)
user_x = df[["day_session_id", "s04", "s05", "s06"]].to_numpy()
user_s03 = df[["s03"]].to_numpy().ravel()
reg_model.fit(user_x, user_s03)
intercepts_list.append(reg_model.intercept_)
coeff_list.append(reg_model.coef_)
numpy_attr_list = np.array(attr_list)
print(numpy_attr_list)
avg_np_attr_list = numpy_attr_list[:, 4:].mean(axis=0)
print(avg_np_attr_list)
numpy_coeff_list = np.array(coeff_list)
print(numpy_coeff_list)
print(numpy_coeff_list.mean(axis=0))
# day_session_id, s04, s05, s06 - use only the fitted independent features to estimate the dependent s03
y = np.multiply(numpy_coeff_list[:, 0].mean(), point) + \
np.multiply(numpy_coeff_list[:, 1].mean(), avg_np_attr_list[0]) + \
np.multiply(numpy_coeff_list[:, 2].mean(), avg_np_attr_list[1]) + \
np.multiply(numpy_coeff_list[:, 3].mean(), avg_np_attr_list[2]) + \
np.asarray(intercepts_list).mean()
preds.append(y)
print(preds)
return preds
# Create test label as ground truth at prediction point.
def create_y_labels(test_data, prediction_at, method="mean"):
y_test = list()
for i in range(0, len(test_data)):
test_ts_test1 = tsg_data.get_usr_mday_ts_predict(int(test_data.iloc[i]["user_id"]))
# print(len(test_ts_test1))
if len(test_ts_test1) >= prediction_at:
y_test.append(test_ts_test1[prediction_at - 1][2])
elif len(test_ts_test1) < prediction_at:
y_test.append(test_ts_test1[len(test_ts_test1) - 1][2])
return y_test
# Create reference points for multiple reference predictions
def get_pred_ref_points(user_id, ndays, method="mean"):
# Using the default tsg which is mean observations of the user
test_user_ts = tsg_data.get_usr_mday_ts_predict(user_id)
user_ts_idx = test_user_ts[:, 1]
# ["date", "time_idx", "s02", "s03", "s04", "s05", "s06", "s07]
user_distress = test_user_ts[:, 3]
# Near evaluation. Change this for farther evaluations
# Near -> 0.20, 0.10
# Far -> 1 - (Near)
# Near points are taken from the observed part of the sequence, since all users are known to have stayed until here.
#prediction_at = 10
# Far prediction point is the last N% of the test user time series
# It is tested for 0.75, 0.8, 0.9
prediction_at = round(len(user_ts_idx) * 0.80)
y_labels = user_distress[prediction_at:prediction_at + ndays].tolist()
prediction_at_list = user_ts_idx[prediction_at:prediction_at + ndays].tolist()
return y_labels, prediction_at_list
def do_test(test_data, out_writer, csv_out_writer,
ndays, near_idxs, encoded_data, fold_count="final",
method="mean", dist_nn=None, wt_dist_flag=False):
for i in range(0, len(test_data)):
user_id = int(test_data.iloc[i]["user_id"])
print("User- Id ", user_id)
y_labels, prediction_at_list = get_pred_ref_points(user_id, ndays, method=method)
# y_labels = create_y_labels(X_test, preds, method="mean")
# Weighting by inverse of neighbor
if wt_dist_flag:
test_user_nn = near_idxs[i]
test_user_dist = dist_nn[i]
pred_weighted_average = compute_weighted_avg(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=test_user_dist, wt_flag=wt_dist_flag)
pred_lr = compute(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=test_user_dist, wt_dist=wt_dist_flag)
else:
test_user_nn = near_idxs[i]
pred_weighted_average = compute_weighted_avg(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=None, wt_flag=False)
pred_lr = compute(test_user_nn, encoded_data, prediction_at_list,
method=method, dist_nn=None, wt_dist=False)
# calculate
if not fold_count == "final":
print("Evaluating for the fold-" + str(fold_count) + " for the forecast reference points - " +
str(prediction_at_list))
out_writer.write("Evaluating for the forecast reference points -- " +
str(prediction_at_list) + "for the method evaluation -- " + str(method) + "\n")
else:
print("Evaluating for forecast reference points - " +
str(prediction_at_list))
out_writer.write("Evaluating over the forecast reference points -- " +
str(prediction_at_list) + "for the method evaluation -- " + str(method) + "\n")
print("Computing RMSE for weighted average based predictions on the User -- " + str(user_id))
print("---------------------------------------------------------------")
out_writer.write("---------------------------------------------------------------\n")
print("RMSE -- ", np.sqrt(mean_squared_error(y_labels, pred_weighted_average)))
out_writer.write("RMSE -- " + str(np.sqrt(mean_squared_error(y_labels, pred_weighted_average))) + "\n")
# Writing to csv file
if not fold_count == "final":
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_weighted_average))) + "," +
"weighted_average" + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_weighted_average[0]) + "," + str(pred_weighted_average[1])
+ "," + str(pred_weighted_average[2]) + "\n"))
else:
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_weighted_average))) + "," +
"weighted_average" + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_weighted_average[0]) + "," + str(pred_weighted_average[1])
+ "," + str(pred_weighted_average[2]) + "\n"))
print("-----------------------------------------------------------------------------")
out_writer.write("---------------------------------------------------------------\n")
print("Computing RMSE for {} {} based predictions for the user -- {}"
.format(str("weighted_distance" + str(wt_dist_flag)), str("linear_regression"), str(user_id)))
out_writer.write("Computing RMSE for {} {} based predictions for the user -- {} \n"
.format(str("weighted_distance" + str(wt_dist_flag)), str("linear_regression"), str(user_id)))
print("RMSE -- ", np.sqrt(mean_squared_error(y_labels, pred_lr)))
out_writer.write("RMSE -- " + str(np.sqrt(mean_squared_error(y_labels, pred_lr))) + "\n")
print("---------------------------------------------------------------")
out_writer.write("---------------------------------------------------------------\n")
# Write to csv file
if not fold_count == "final":
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_lr))) + "," +
str("lr") + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_lr[0]) + "," + str(pred_lr[1]) + "," + str(
pred_lr[2]) + "\n"))
else:
csv_out_writer.write("".join(str(user_id) + "," +
str(np.sqrt(mean_squared_error(y_labels, pred_lr))) + "," +
str("lr") + ","
+ str(y_labels[0]) + "," + str(y_labels[1]) + "," + str(y_labels[2])
+ "," + str(pred_lr[0]) + "," + str(pred_lr[1]) + "," + str(
pred_lr[2]) + "\n"))
# Configuration: change the method below and re-execute to obtain the corresponding predictions.
eval_method = "mean"
# Daily readings for all test users default to the mean aggregation; predictions can be made over min - mean - max
tsg_data = TimeSeriesGroupProcessing(method=eval_method)
# For all combinations evaluation it must be set to True
quest_cmb_all = False
# Same random state needs to be maintained to get consistent test data over all combinations and repeatable results
random_state = 1220
# Number of points ahead at which tinnitus distress is predicted; 3 means 3 observation days ahead.
# The gap between points is at least a day and at most about 60 days, which is not a usual scenario.
ndays = 3
# Build the default NN with all the combination.
if not quest_cmb_all:
for key, val in properties.quest_comb.items():
# Build NN for each category
print("Building NN for the question combination -- " + str(key))
cat_idx, num_idx, combined_df = smf.initial_processing(key, val, append_synthethic=False)
# Build and get the knn NN for prediction over test instances.
# Save the data objs
encoded_data = save_data_objs(combined_df, key)
out_writer = open("".join("output/output_" + str(key) + "_" + str(eval_method) + "_heom_norm.txt"), "w+")
csv_out_writer = open("".join("output/output_" + str(key) + "_" + str(eval_method) + "_heom_norm.csv"), "w+")
csv_out_writer.write("".join("user_id,rmse,algorithm,"
"ref_p1,ref_p2,ref_p3,pred_p1,pred_p2,pred_p3\n"))
#Create a test set
X, test = train_test_split(encoded_data,
test_size=0.20,
random_state=random_state)
def filter_train_ids(x):
# print(x)
if x["user_id"] in train_user_ids:
return x
def filter_test_ids(x):
# print(x)
if x["user_id"] in test_user_ids:
return x
train_user_ids = X["user_id"].to_list()
X_train_data_ui = combined_df.apply(filter_train_ids, axis=1, result_type="broadcast").dropna()
X_train_data_ui["user_id"] = X_train_data_ui["user_id"].apply(int)
# Save the non encoded train data for visualization purposes
utility.save_model("".join(key + "/" + key + "_train_stat_q_data"), X_train_data_ui)
# filter and get the data to show to the UI for the test data.
test_user_ids = test["user_id"].to_list()
X_test_data_ui = combined_df.apply(filter_test_ids, axis=1, result_type="broadcast").dropna()
X_test_data_ui["user_id"] = X_test_data_ui["user_id"].apply(int)
# Save the data_ui object as json
#test_data = {}
#test_data["users"] = X_test_data_ui.to_dict("r")
#utility.save_data("".join("test_data_ui_" + key), test_data)
from HEOM import HEOM
# Can be done at prediction too.
from sklearn.metrics.pairwise import cosine_distances
from sklearn.linear_model import LinearRegression
from scipy.spatial.distance import pdist, squareform
from scipy.stats import zscore
heom = HEOM(X.to_numpy(), cat_idx, num_idx)
sim_matrix = pdist(X.to_numpy()[:, 1:], heom.heom_distance)
mean_heom_distance = sim_matrix.mean()
knn = NearestNeighbors(n_neighbors=5, metric=heom.heom_distance, radius=mean_heom_distance)
knn.fit(X.iloc[:, 1:])
dist, test_idx = knn.kneighbors(test.to_numpy()[:, 1:], n_neighbors=5)
# Execute without any varying for saving the KNN as pickle to be used by UI
do_test(test, out_writer, csv_out_writer, ndays, test_idx, X,
fold_count="final", method=eval_method, dist_nn=None, wt_dist_flag=False)
utility.save_model("".join(key + "/" + "knn_static"), knn)
utility.save_model("".join(key + "/" + "train_sim_data.pckl"), X)
out_writer.close()
csv_out_writer.close()
# All feature combinations
cat_idx, num_idx, combined_df = initial_processing()
# Build KNN for each category
print("Building KNN for the question combination -- " + str("overall"))
# Save the data objs
encoded_data = save_data_objs(combined_df, "overall")
X, test = train_test_split(encoded_data,
test_size=0.20,
random_state=random_state)
def filter_train_ids(x):
# print(x)
if x["user_id"] in train_user_ids:
return x
def filter_test_ids(x):
# print(x)
if x["user_id"] in test_user_ids:
return x
train_user_ids = X["user_id"].to_list()
X_train_data_ui = combined_df.apply(filter_train_ids, axis=1, result_type="broadcast").dropna()
X_train_data_ui["user_id"] = X_train_data_ui["user_id"].apply(int)
# Save in overall.
utility.save_model("".join("overall" + "/" + "overall" + "_train_stat_q_data"), X_train_data_ui)
# filter and get the data to show to the UI for the test data.
test_user_ids = test["user_id"].to_list()
X_test_data_ui = combined_df.apply(filter_test_ids, axis=1, result_type="broadcast").dropna()
X_test_data_ui["user_id"] = X_test_data_ui["user_id"].apply(int)
# Save the data_ui object as json
test_data = {}
test_data["users"] = X_test_data_ui.to_dict("r")
utility.save_data("test_data_ui_x_test", test_data)
# Save the results to out_writer
out_writer = open("output/overall_output_folds_" + str(eval_method) + ".txt", "w+")
csv_out_writer = open("output/overall_output_folds_" + str(eval_method) + ".csv", "w+")
# First get the time series for a given test patient and the reference point and iterate to evaluate
csv_out_writer.write("user_id,rmse,algorithm,"
"ref_p1,ref_p2,ref_p3,pred_p1,pred_p2,pred_p3\n")
# Split the data into train and test
from sklearn.model_selection import train_test_split
import utility
from HEOM import HEOM
#Can be done at prediction too.
from sklearn.metrics.pairwise import cosine_distances
from sklearn.linear_model import LinearRegression
from scipy.spatial.distance import pdist, squareform
from scipy.stats import zscore
heom = HEOM(X.to_numpy()[:, 1:], cat_idx, num_idx)
sim_matrix = pdist(X.to_numpy()[:, 1:], heom.heom_distance)
mean_heom_distance = sim_matrix.mean()
knn = NearestNeighbors(n_neighbors=5, metric=heom.heom_distance, radius=mean_heom_distance)
knn.fit(X.to_numpy()[:, 1:])
dist, idx_test = knn.kneighbors(test.to_numpy()[:, 1:], n_neighbors=5)
# First get the time series for a given test patient and the reference point and iterate to evaluate
do_test(test, out_writer, csv_out_writer, ndays, idx_test, X,
fold_count="final", method=eval_method, dist_nn=None, wt_dist_flag=False)
out_writer.close()
csv_out_writer.close()
# End save the nearest neighbor as data objects, so that can be used from the UI
utility.save_model("".join("overall/" + "knn_static"), knn)
utility.save_model("".join("overall" + "/" + "train_sim_data.pckl"), X)
# Create train and test containing same users in train and test as per static data. (Note: Run above code and then this
# because same set of train test users are used)
def splitData(dataset, test_user_ids):
train_data = dataset[~dataset["user_id"].isin(test_user_ids)]
test_data = dataset[dataset["user_id"].isin(test_user_ids)]
return train_data, test_data
# Save both train and test matrix
def save_ts_objs(train, test, location_name):
try:
if not os.path.isdir(properties.model_location + location_name):
os.makedirs(properties.model_location + location_name)
utility.save_model("".join(location_name + "/" + location_name + "_train_data"), train)
utility.save_model("".join(location_name + "/" + location_name + "_test_data"), test)
except Exception:
print(traceback.print_exc())
X = ml_ts.process_data(grouping="day")
# Calculate pairwise distance and create a dataframe for the same
from scipy.spatial.distance import pdist, squareform
# Cross validate here based on the same split of static data here.
# Note: Only one combination will be present
C = np.zeros((X.shape[0], X.shape[0]))
for i in range(0, len(X)):
for j in range(0, len(X)):
dist = ml_ts.compute_dist(X[:, 1][i], X[:, 1][j])
C[i][j] = dist
C_df = pd.DataFrame(C)
#C_df.to_csv("sim_ema.csv")
# Threshold overall distance for making within radius
threshold_distance = sum(C_df.mean())/len(C_df)
user_ids = []
for val in X:
user_ids.append(val[0])
C_df["user_id"] = user_ids
train_data, test_data = splitData(C_df, test_user_ids)
# Save the time series data objects as dynamic_ts into model folder
save_ts_objs(train_data, test_data, "dynamic_ts")
out_writer = open("".join("output/output_ema_" + str(eval_method) + "_.txt"), "w+")
csv_out_writer = open("".join("output/output_ema_" + str(eval_method) + "_.csv"), "w+")
csv_out_writer.write("user_id,rmse,algorithm,"
"ref_p1,ref_p2,ref_p3,pred_p1,pred_p2,pred_p3\n")
# Test on the final test set. Note there is no varying K just to save the NN here.
# It should be noted that we use NearestNeighbors and not a k-nearest-neighbors classifier.
knn_ema = NearestNeighbors(n_neighbors=5, metric="precomputed", radius=threshold_distance)
knn_ema.fit(train_data[train_data.index])
ema_dist, ema_idx = knn_ema.kneighbors(test_data[train_data.index], n_neighbors=5)
# First get the time series for a given test patient and the reference point and iterate to evaluate
do_test(test_data, out_writer, csv_out_writer, ndays, ema_idx, encoded_data,
fold_count="final", method=eval_method, dist_nn=None, wt_dist_flag=False)
# Close the writers
out_writer.close()
csv_out_writer.close()
# Save the similarity search index KNN
utility.save_model("".join("dynamic_ts" + "/" + "dynamic_ts" + "_knn"), knn_ema)
| true
| true
|
f701fac9269066e7967ab93ea5152f17e3b246d9
| 4,528
|
py
|
Python
|
gpsxml2png.py
|
felmoltor/kismet-heatmap
|
d145a865f80db16ad8c6d0bb1dd35e0238706f3b
|
[
"BSD-2-Clause"
] | 11
|
2016-10-13T13:40:59.000Z
|
2021-05-29T23:56:59.000Z
|
gpsxml2png.py
|
felmoltor/kismet-heatmap
|
d145a865f80db16ad8c6d0bb1dd35e0238706f3b
|
[
"BSD-2-Clause"
] | 1
|
2018-07-27T02:32:05.000Z
|
2018-12-25T10:12:25.000Z
|
gpsxml2png.py
|
felmoltor/kismet-heatmap
|
d145a865f80db16ad8c6d0bb1dd35e0238706f3b
|
[
"BSD-2-Clause"
] | 1
|
2021-11-19T15:22:19.000Z
|
2021-11-19T15:22:19.000Z
|
#!/usr/bin/env python2
"""
Copyright (c) 2016, Bliksem Labs B.V.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys
import numpy
from scipy.interpolate import griddata
import matplotlib.pyplot as plt
try:
from lxml import etree
except ImportError:
try:
# Python 2.5
import xml.etree.cElementTree as etree
except ImportError:
try:
# Python 2.5
import xml.etree.ElementTree as etree
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree
except ImportError:
try:
# normal ElementTree install
import elementtree.ElementTree as etree
except ImportError:
print("Failed to import ElementTree from any known place")
# Process the Kismet GPSXML into columns.
def parse_xml(filename):
tree = etree.parse(open(filename, 'rb'))
ts = []
bssid = []
signal = []
lat = []
lon = []
walked_lon = []
walked_lat = []
for z in tree.findall('.//gps-point'):
# A lon/lat filter might be applied here
# if float(z.get('lon')) < 3.942:
# continue
if z.get('bssid') == 'GP:SD:TR:AC:KL:OG':
walked_lon.append(float(z.get('lon')))
walked_lat.append(float(z.get('lat')))
elif z.get('signal_dbm') is not None:
bssid.append(z.get('bssid'))
ts.append(int(z.get('time-sec')))
lat.append(float(z.get('lat')))
lon.append(float(z.get('lon')))
signal.append(int(z.get('signal_dbm')))
return (ts, bssid, signal, lat, lon, walked_lon, walked_lat,)
# Draw parsed data on a surface
def draw_data(ts, bssid, signal, lat, lon, walked_lon, walked_lat):
# We create a grid of 1000x1000
grid_x, grid_y = numpy.mgrid[min(walked_lon):max(walked_lon):1000j, min(walked_lat):max(walked_lat):1000j]
# We want to draw all unique APs
bssids = list(set(bssid))
# For each BSSID...
for s in bssids:
points_lon = []
points_lat = []
values = []
h = []
# Apply all points on an intermediate surface
# so we can distinct points where we were, without reception
for i in range(0, len(bssid)):
if bssid[i] == s:
hc = hash((lon[i], lat[i]))
if hc not in h:
points_lon.append(lon[i])
points_lat.append(lat[i])
values.append(float(signal[i]))
h.append(hash((lon[i], lat[i])))
# Optional: apply -100dBm where we don't have gathered data
for i in range(0, len(walked_lon)):
hc = hash((walked_lon[i], walked_lat[i]))
if hc not in h:
points_lon.append(walked_lon[i])
points_lat.append(walked_lat[i])
values.append(float(-100))
h.append(hash((walked_lon[i], walked_lat[i])))
# Interpolate the data
grid = griddata((points_lon, points_lat), numpy.array(values), (grid_x, grid_y), method='cubic')
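# Note: with method='cubic', griddata fills grid cells outside the convex hull of the
# sample points with NaN by default, so the rendered image only covers the surveyed area.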
# Store the bitmap in the current folder.
plt.show()
plt.imsave('%s.png' % (s), grid.T)
# Calculate the World File for use in Qgis
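# The six values follow the usual world-file layout (pixel-size terms, rotation terms and the
# coordinates of the reference corner) so the PNG can be georeferenced alongside the map.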
a = ((max(walked_lon)-min(walked_lon))/1000)
b = 0
c = 0
d = ((max(walked_lat)-min(walked_lat))/1000)
e = min(walked_lon)
f = min(walked_lat)
# Write the World File
open('%s.pngw' % (s), 'w').write('%.16f\n%d\n%d\n%.16f\n%.16f\n%.16f' % (a, b, c, d, e, f,))
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage %s << /path/to/Kismet.gpsxml >>" % (sys.argv[0]))
sys.exit(-1)
draw_data(*parse_xml(sys.argv[1]))
| 29.594771
| 107
| 0.686396
|
import sys
import numpy
from scipy.interpolate import griddata
import matplotlib.pyplot as plt
try:
from lxml import etree
except ImportError:
try:
import xml.etree.cElementTree as etree
except ImportError:
try:
import xml.etree.ElementTree as etree
except ImportError:
try:
import cElementTree as etree
except ImportError:
try:
import elementtree.ElementTree as etree
except ImportError:
print("Failed to import ElementTree from any known place")
def parse_xml(filename):
tree = etree.parse(open(filename, 'rb'))
ts = []
bssid = []
signal = []
lat = []
lon = []
walked_lon = []
walked_lat = []
for z in tree.findall('.//gps-point'):
if z.get('bssid') == 'GP:SD:TR:AC:KL:OG':
walked_lon.append(float(z.get('lon')))
walked_lat.append(float(z.get('lat')))
elif z.get('signal_dbm') is not None:
bssid.append(z.get('bssid'))
ts.append(int(z.get('time-sec')))
lat.append(float(z.get('lat')))
lon.append(float(z.get('lon')))
signal.append(int(z.get('signal_dbm')))
return (ts, bssid, signal, lat, lon, walked_lon, walked_lat,)
def draw_data(ts, bssid, signal, lat, lon, walked_lon, walked_lat):
grid_x, grid_y = numpy.mgrid[min(walked_lon):max(walked_lon):1000j, min(walked_lat):max(walked_lat):1000j]
bssids = list(set(bssid))
for s in bssids:
points_lon = []
points_lat = []
values = []
h = []
for i in range(0, len(bssid)):
if bssid[i] == s:
hc = hash((lon[i], lat[i]))
if hc not in h:
points_lon.append(lon[i])
points_lat.append(lat[i])
values.append(float(signal[i]))
h.append(hash((lon[i], lat[i])))
for i in range(0, len(walked_lon)):
hc = hash((walked_lon[i], walked_lat[i]))
if hc not in h:
points_lon.append(walked_lon[i])
points_lat.append(walked_lat[i])
values.append(float(-100))
h.append(hash((walked_lon[i], walked_lat[i])))
# Interpolate the data
grid = griddata((points_lon, points_lat), numpy.array(values), (grid_x, grid_y), method='cubic')
# Store the bitmap in the current folder.
plt.show()
plt.imsave('%s.png' % (s), grid.T)
# Calculate the World File for use in Qgis
a = ((max(walked_lon)-min(walked_lon))/1000)
b = 0
c = 0
d = ((max(walked_lat)-min(walked_lat))/1000)
e = min(walked_lon)
f = min(walked_lat)
# Write the World File
open('%s.pngw' % (s), 'w').write('%.16f\n%d\n%d\n%.16f\n%.16f\n%.16f' % (a, b, c, d, e, f,))
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage %s << /path/to/Kismet.gpsxml >>" % (sys.argv[0]))
sys.exit(-1)
draw_data(*parse_xml(sys.argv[1]))
| true
| true
|
f701facbb5489dd40223e0d27d529651494be784
| 5,612
|
py
|
Python
|
netapp/santricity/models/symbol/thin_volume_reinitialize_descriptor.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 5
|
2016-08-23T17:52:22.000Z
|
2019-05-16T08:45:30.000Z
|
netapp/santricity/models/symbol/thin_volume_reinitialize_descriptor.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 2
|
2016-11-10T05:30:21.000Z
|
2019-04-05T15:03:37.000Z
|
netapp/santricity/models/symbol/thin_volume_reinitialize_descriptor.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 7
|
2016-08-25T16:11:44.000Z
|
2021-02-22T05:31:25.000Z
|
# coding: utf-8
"""
ThinVolumeReinitializeDescriptor.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from pprint import pformat
from six import iteritems
class ThinVolumeReinitializeDescriptor(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
ThinVolumeReinitializeDescriptor - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'thin_vol': 'str', # (required parameter)
'init_params': 'ThinVolumeReinitializeParams'
}
self.attribute_map = {
'thin_vol': 'thinVol', # (required parameter)
'init_params': 'initParams'
}
self._thin_vol = None
self._init_params = None
@property
def thin_vol(self):
"""
Gets the thin_vol of this ThinVolumeReinitializeDescriptor.
The thin volume to re-initialize.
:return: The thin_vol of this ThinVolumeReinitializeDescriptor.
:rtype: str
:required/optional: required
"""
return self._thin_vol
@thin_vol.setter
def thin_vol(self, thin_vol):
"""
Sets the thin_vol of this ThinVolumeReinitializeDescriptor.
The thin volume to re-initialize.
:param thin_vol: The thin_vol of this ThinVolumeReinitializeDescriptor.
:type: str
"""
self._thin_vol = thin_vol
@property
def init_params(self):
"""
Gets the init_params of this ThinVolumeReinitializeDescriptor.
Operation-specific parameters for the re-initialize.
:return: The init_params of this ThinVolumeReinitializeDescriptor.
:rtype: ThinVolumeReinitializeParams
:required/optional: required
"""
return self._init_params
@init_params.setter
def init_params(self, init_params):
"""
Sets the init_params of this ThinVolumeReinitializeDescriptor.
Operation-specific parameters for the re-initialize.
:param init_params: The init_params of this ThinVolumeReinitializeDescriptor.
:type: ThinVolumeReinitializeParams
"""
self._init_params = init_params
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
if self is None:
return None
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if self is None or other is None:
return None
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 36.921053
| 844
| 0.64273
|
from pprint import pformat
from six import iteritems
class ThinVolumeReinitializeDescriptor(object):
def __init__(self):
self.swagger_types = {
'thin_vol': 'str', 'init_params': 'ThinVolumeReinitializeParams'
}
self.attribute_map = {
'thin_vol': 'thinVol', 'init_params': 'initParams'
}
self._thin_vol = None
self._init_params = None
@property
def thin_vol(self):
return self._thin_vol
@thin_vol.setter
def thin_vol(self, thin_vol):
self._thin_vol = thin_vol
@property
def init_params(self):
return self._init_params
@init_params.setter
def init_params(self, init_params):
self._init_params = init_params
def to_dict(self):
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pformat(self.to_dict())
def __repr__(self):
if self is None:
return None
return self.to_str()
def __eq__(self, other):
if self is None or other is None:
return None
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true
| true
|
f701fae7234155f95bb3a662b5f3cf38b6fbec3f
| 9,618
|
py
|
Python
|
gamestonk_terminal/options/op_helpers.py
|
keshabb/GamestonkTerminal
|
a0acdfb13f806c35c82a7c4dc81ea98de52814e0
|
[
"MIT"
] | 1
|
2021-07-25T20:34:29.000Z
|
2021-07-25T20:34:29.000Z
|
gamestonk_terminal/options/op_helpers.py
|
TomiToivio/GamestonkTerminal
|
419c3691db220c467d2979b19ca308b3b800c0bd
|
[
"MIT"
] | 1
|
2022-02-10T06:49:37.000Z
|
2022-02-10T06:49:37.000Z
|
gamestonk_terminal/options/op_helpers.py
|
TomiToivio/GamestonkTerminal
|
419c3691db220c467d2979b19ca308b3b800c0bd
|
[
"MIT"
] | null | null | null |
"""Option helper functions"""
__docformat__ = "numpy"
import argparse
from typing import List
import pandas as pd
import numpy as np
from gamestonk_terminal.helper_funcs import (
parse_known_args_and_warn,
check_non_negative,
)
# pylint: disable=R1710
def load(other_args: List[str]) -> str:
"""Load ticker into object
Parameters
----------
other_args: List[str]
Agrparse arguments
Returns
-------
str:
Ticker
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="opload",
description="Load a ticker into option menu",
)
parser.add_argument(
"-t",
"--ticker",
action="store",
dest="ticker",
required="-h" not in other_args,
help="Stock ticker",
)
try:
if other_args:
if "-t" not in other_args and "-h" not in other_args:
other_args.insert(0, "-t")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return ""
print("")
return ns_parser.ticker
except Exception as e:
print(e, "\n")
return ""
except SystemExit:
print("")
return ""
# pylint: disable=no-else-return
def select_option_date(avalaiable_dates: List[str], other_args: List[str]) -> str:
"""Select an option date out of a supplied list
Parameters
----------
avalaiable_dates: List[str]
Possible date options
other_args: List[str]
Argparse arguments
Returns
-------
expiry_date: str
Selected expiry date
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="exp",
description="See and set expiration date",
)
parser.add_argument(
"-d",
"--date",
dest="n_date",
action="store",
type=int,
default=-1,
choices=range(len(avalaiable_dates)),
help="Select index for expiry date.",
)
parser.add_argument(
"-D",
dest="date",
type=str,
choices=avalaiable_dates + [""],
help="Select date (YYYY-MM-DD)",
default="",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-d")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return ""
# Print possible expiry dates
if ns_parser.n_date == -1 and not ns_parser.date:
print("\nAvailable expiry dates:")
for i, d in enumerate(avalaiable_dates):
print(f" {(2 - len(str(i))) * ' '}{i}. {d}")
print("")
return ""
# It means an expiry date was correctly selected
else:
if ns_parser.date:
if ns_parser.date in avalaiable_dates:
print(f"Expiraration set to {ns_parser.date} \n")
return ns_parser.date
else:
print("Expiration not an option")
return ""
else:
expiry_date = avalaiable_dates[ns_parser.n_date]
print(f"Expiraration set to {expiry_date} \n")
return expiry_date
except Exception as e:
print(e, "\n")
return ""
def get_loss_at_strike(strike: float, chain: pd.DataFrame) -> float:
"""Function to get the loss at the given expiry
Parameters
----------
strike: Union[int,float]
Value to calculate total loss at
chain: Dataframe:
Dataframe containing at least strike and openInterest
Returns
-------
loss: Union[float,int]
Total loss
"""
itm_calls = chain[chain.index < strike][["OI_call"]]
itm_calls["loss"] = (strike - itm_calls.index) * itm_calls["OI_call"]
call_loss = itm_calls["loss"].sum()
itm_puts = chain[chain.index > strike][["OI_put"]]
itm_puts["loss"] = (itm_puts.index - strike) * itm_puts["OI_put"]
put_loss = itm_puts.loss.sum()
loss = call_loss + put_loss
return loss
def calculate_max_pain(chain: pd.DataFrame) -> int:
"""Returns the max pain for a given call/put dataframe
Parameters
----------
chain: DataFrame
Dataframe to calculate value from
Returns
-------
max_pain : int
Max pain value
"""
strikes = np.array(chain.index)
if ("OI_call" not in chain.columns) or ("OI_put" not in chain.columns):
print("Incorrect columns. Unable to parse max pain")
return np.nan
loss = []
for price_at_exp in strikes:
loss.append(get_loss_at_strike(price_at_exp, chain))
chain["loss"] = loss
max_pain = chain["loss"].idxmin()
return max_pain
def vol(other_args: List[str]):
"""Parse volume argparse
Parameters
----------
other_args: List[str]
Argparse arguments
Returns
-------
ns_parser: argparse.Namespace
Parsed namespace
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="vol",
description="Plot volume. Volume refers to the number of contracts traded today.",
)
parser.add_argument(
"-m",
"--min",
default=-1,
type=check_non_negative,
help="Min strike to plot",
dest="min",
)
parser.add_argument(
"-M",
"--max",
default=-1,
type=check_non_negative,
help="Max strike to plot",
dest="max",
)
parser.add_argument(
"--calls",
action="store_true",
default=False,
dest="calls",
help="Flag to plot call options only",
)
parser.add_argument(
"--puts",
action="store_true",
default=False,
dest="puts",
help="Flag to plot put options only",
)
parser.add_argument(
"--source",
type=str,
default="tr",
choices=["tr", "yf"],
dest="source",
help="Source to get data from",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
return ns_parser
except Exception as e:
print(e, "\n")
def voi(other_args: List[str]):
"""Parse Volume + open interest argparse
Parameters
----------
other_args: List[str]
Argparse arguments
Returns
-------
ns_parser: argparse.Namespace
Parsed namespace
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="voi",
description="""
Plots Volume + Open Interest of calls vs puts.
""",
)
parser.add_argument(
"-v",
"--minv",
dest="min_vol",
type=check_non_negative,
default=-1,
help="minimum volume (considering open interest) threshold of the plot.",
)
parser.add_argument(
"-m",
"--min",
dest="min_sp",
type=check_non_negative,
default=-1,
help="minimum strike price to consider in the plot.",
)
parser.add_argument(
"-M",
"--max",
dest="max_sp",
type=check_non_negative,
default=-1,
help="maximum strike price to consider in the plot.",
)
parser.add_argument(
"--source",
type=str,
default="tr",
choices=["tr", "yf"],
dest="source",
help="Source to get data from",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return None
return ns_parser
except Exception as e:
print(e, "\n")
return None
def oi(other_args: List[str]):
"""Parse Open Interest argparse
Parameters
----------
other_args: List[str]
Argparse arguments
Returns
-------
ns_parser: argparse.Namespace
Parsed namespace
"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="oi",
description="Plot open interest. Open interest represents the number of contracts that exist.",
)
parser.add_argument(
"-m",
"--min",
default=-1,
type=check_non_negative,
help="Min strike to plot",
dest="min",
)
parser.add_argument(
"-M",
"--max",
default=-1,
type=check_non_negative,
help="Max strike to plot",
dest="max",
)
parser.add_argument(
"--calls",
action="store_true",
default=False,
dest="calls",
help="Flag to plot call options only",
)
parser.add_argument(
"--puts",
action="store_true",
default=False,
dest="puts",
help="Flag to plot put options only",
)
parser.add_argument(
"--source",
type=str,
default="tr",
choices=["tr", "yf"],
dest="source",
help="Source to get data from",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return None
return ns_parser
except Exception as e:
print(e, "\n")
return None
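# Hedged usage sketch (not in the original module): each parser above takes a
# raw argv-style list and returns an argparse.Namespace on success (None, or
# nothing at all, after a parse error), e.g. open interest between strikes 100
# and 200, calls only, sourced from yfinance:
# >>> ns = oi(["-m", "100", "-M", "200", "--calls", "--source", "yf"])
# >>> ns.source
# 'yf'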
| 23.288136
| 104
| 0.555521
|
__docformat__ = "numpy"
import argparse
from typing import List
import pandas as pd
import numpy as np
from gamestonk_terminal.helper_funcs import (
parse_known_args_and_warn,
check_non_negative,
)
def load(other_args: List[str]) -> str:
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="opload",
description="Load a ticker into option menu",
)
parser.add_argument(
"-t",
"--ticker",
action="store",
dest="ticker",
required="-h" not in other_args,
help="Stock ticker",
)
try:
if other_args:
if "-t" not in other_args and "-h" not in other_args:
other_args.insert(0, "-t")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return ""
print("")
return ns_parser.ticker
except Exception as e:
print(e, "\n")
return ""
except SystemExit:
print("")
return ""
def select_option_date(avalaiable_dates: List[str], other_args: List[str]) -> str:
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="exp",
description="See and set expiration date",
)
parser.add_argument(
"-d",
"--date",
dest="n_date",
action="store",
type=int,
default=-1,
choices=range(len(avalaiable_dates)),
help="Select index for expiry date.",
)
parser.add_argument(
"-D",
dest="date",
type=str,
choices=avalaiable_dates + [""],
help="Select date (YYYY-MM-DD)",
default="",
)
try:
if other_args:
if "-" not in other_args[0]:
other_args.insert(0, "-d")
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return ""
if ns_parser.n_date == -1 and not ns_parser.date:
print("\nAvailable expiry dates:")
for i, d in enumerate(avalaiable_dates):
print(f" {(2 - len(str(i))) * ' '}{i}. {d}")
print("")
return ""
else:
if ns_parser.date:
if ns_parser.date in avalaiable_dates:
print(f"Expiraration set to {ns_parser.date} \n")
return ns_parser.date
else:
print("Expiration not an option")
return ""
else:
expiry_date = avalaiable_dates[ns_parser.n_date]
print(f"Expiraration set to {expiry_date} \n")
return expiry_date
except Exception as e:
print(e, "\n")
return ""
def get_loss_at_strike(strike: float, chain: pd.DataFrame) -> float:
itm_calls = chain[chain.index < strike][["OI_call"]]
itm_calls["loss"] = (strike - itm_calls.index) * itm_calls["OI_call"]
call_loss = itm_calls["loss"].sum()
itm_puts = chain[chain.index > strike][["OI_put"]]
itm_puts["loss"] = (itm_puts.index - strike) * itm_puts["OI_put"]
put_loss = itm_puts.loss.sum()
loss = call_loss + put_loss
return loss
def calculate_max_pain(chain: pd.DataFrame) -> int:
strikes = np.array(chain.index)
if ("OI_call" not in chain.columns) or ("OI_put" not in chain.columns):
print("Incorrect columns. Unable to parse max pain")
return np.nan
loss = []
for price_at_exp in strikes:
loss.append(get_loss_at_strike(price_at_exp, chain))
chain["loss"] = loss
max_pain = chain["loss"].idxmin()
return max_pain
def vol(other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="vol",
description="Plot volume. Volume refers to the number of contracts traded today.",
)
parser.add_argument(
"-m",
"--min",
default=-1,
type=check_non_negative,
help="Min strike to plot",
dest="min",
)
parser.add_argument(
"-M",
"--max",
default=-1,
type=check_non_negative,
help="Max strike to plot",
dest="max",
)
parser.add_argument(
"--calls",
action="store_true",
default=False,
dest="calls",
help="Flag to plot call options only",
)
parser.add_argument(
"--puts",
action="store_true",
default=False,
dest="puts",
help="Flag to plot put options only",
)
parser.add_argument(
"--source",
type=str,
default="tr",
choices=["tr", "yf"],
dest="source",
help="Source to get data from",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return
return ns_parser
except Exception as e:
print(e, "\n")
def voi(other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="voi",
description="""
Plots Volume + Open Interest of calls vs puts.
""",
)
parser.add_argument(
"-v",
"--minv",
dest="min_vol",
type=check_non_negative,
default=-1,
help="minimum volume (considering open interest) threshold of the plot.",
)
parser.add_argument(
"-m",
"--min",
dest="min_sp",
type=check_non_negative,
default=-1,
help="minimum strike price to consider in the plot.",
)
parser.add_argument(
"-M",
"--max",
dest="max_sp",
type=check_non_negative,
default=-1,
help="maximum strike price to consider in the plot.",
)
parser.add_argument(
"--source",
type=str,
default="tr",
choices=["tr", "yf"],
dest="source",
help="Source to get data from",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return None
return ns_parser
except Exception as e:
print(e, "\n")
return None
def oi(other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="oi",
description="Plot open interest. Open interest represents the number of contracts that exist.",
)
parser.add_argument(
"-m",
"--min",
default=-1,
type=check_non_negative,
help="Min strike to plot",
dest="min",
)
parser.add_argument(
"-M",
"--max",
default=-1,
type=check_non_negative,
help="Max strike to plot",
dest="max",
)
parser.add_argument(
"--calls",
action="store_true",
default=False,
dest="calls",
help="Flag to plot call options only",
)
parser.add_argument(
"--puts",
action="store_true",
default=False,
dest="puts",
help="Flag to plot put options only",
)
parser.add_argument(
"--source",
type=str,
default="tr",
choices=["tr", "yf"],
dest="source",
help="Source to get data from",
)
try:
ns_parser = parse_known_args_and_warn(parser, other_args)
if not ns_parser:
return None
return ns_parser
except Exception as e:
print(e, "\n")
return None
| true
| true
|
f701fb01fdb4c37afc1d4e5189833e347bf52cf0
| 132
|
py
|
Python
|
conda_pack/__init__.py
|
calebho/conda-pack
|
06b55eff31bc1b8aab765b64924238fa73176793
|
[
"BSD-3-Clause"
] | null | null | null |
conda_pack/__init__.py
|
calebho/conda-pack
|
06b55eff31bc1b8aab765b64924238fa73176793
|
[
"BSD-3-Clause"
] | null | null | null |
conda_pack/__init__.py
|
calebho/conda-pack
|
06b55eff31bc1b8aab765b64924238fa73176793
|
[
"BSD-3-Clause"
] | null | null | null |
from . import _version
from .core import CondaEnv, CondaPackException, File, pack
__version__ = _version.get_versions()['version']
| 26.4
| 58
| 0.787879
|
from . import _version
from .core import CondaEnv, CondaPackException, File, pack
__version__ = _version.get_versions()['version']
| true
| true
|
f701fef52c14cd1ab59e97d3208e9fba872d50d7
| 14,368
|
py
|
Python
|
flare/framework/agent.py
|
skylian/flare
|
c920abcae975cc49c052f5f2abb6bbee5c39a11e
|
[
"Apache-2.0"
] | null | null | null |
flare/framework/agent.py
|
skylian/flare
|
c920abcae975cc49c052f5f2abb6bbee5c39a11e
|
[
"Apache-2.0"
] | null | null | null |
flare/framework/agent.py
|
skylian/flare
|
c920abcae975cc49c052f5f2abb6bbee5c39a11e
|
[
"Apache-2.0"
] | 1
|
2018-10-15T21:38:29.000Z
|
2018-10-15T21:38:29.000Z
|
from abc import ABCMeta, abstractmethod
from multiprocessing import Process, Value
import numpy as np
from flare.common.log import GameLogEntry
from flare.common.communicator import AgentCommunicator
from flare.common.replay_buffer import NoReplacementQueue, ReplayBuffer, Experience
class AgentHelper(object):
"""
AgentHelper abstracts some part of Agent's data processing and the I/O
communication between Agent and ComputationDataProcessor (CDP). It receives a
Communicator from one CDP and uses it to send data to the CDP.
"""
__metaclass__ = ABCMeta
def __init__(self, name, communicator, sample_interval):
assert isinstance(communicator, AgentCommunicator)
self.name = name
self.comm = communicator
self.counter = 0
assert sample_interval >= 2
self.sample_interval = sample_interval
def unpack_exps(self, exp_seqs):
"""
The input `exp_seqs` is always a list of sequences, each sequence
containing multiple Experience instances.
"""
def concat_lists(lists):
return [x for l in lists for x in l]
def extract_key(seq, k):
assert seq
return [e.val(k) for e in seq]
ret = dict(
inputs={},
next_inputs={},
next_alive={},
rewards={},
actions={},
next_actions={},
states=None,
next_states=None)
for k in self.input_keys:
ipt_seqs = [extract_key(exp_seq, k) for exp_seq in exp_seqs]
ret["inputs"][k] = [ipt_seq[:-1] for ipt_seq in ipt_seqs]
ret["next_inputs"][k] = [ipt_seq[1:] for ipt_seq in ipt_seqs]
for k in self.action_keys:
act_seqs = [extract_key(exp_seq, k) for exp_seq in exp_seqs]
ret["actions"][k] = [act_seq[:-1] for act_seq in act_seqs]
ret["next_actions"][k] = [act_seq[1:] for act_seq in act_seqs]
for k in self.reward_keys:
ret["rewards"][
k] = [extract_key(exp_seq[:-1], k) for exp_seq in exp_seqs]
if self.state_keys:
ret["states"] = dict()
ret["next_states"] = dict()
for k in self.state_keys:
## we only take the first(second) element of a seq for states(next_states)
ret["states"][
k] = [extract_key(exp_seq[:1], k)[0] for exp_seq in exp_seqs]
ret["next_states"][k] = [
extract_key(exp_seq[1:2], k)[0] for exp_seq in exp_seqs
]
ret["next_alive"]["alive"] \
= [extract_key(exp_seq[1:], "alive") for exp_seq in exp_seqs]
## HERE we decide whether the data are instances or seqs
## according to the existence of states
if not self.state_keys:
# sample instances
for k in ret.keys():
if ret[k] is not None:
for kk in ret[k].keys():
ret[k][kk] = concat_lists(ret[k][kk])
return ret, len(exp_seqs)
def predict(self, inputs, states=dict()):
"""
Process the input data (if necessary), send them to CDP for prediction,
and receive the outcome.
Args:
            inputs(dict): data used for prediction. It is the caller's job
            to make sure `inputs` contains all the data needed, in the
            right form.
"""
data = dict(inputs=inputs, states=states)
self.comm.put_prediction_data(data, 1)
ret = self.comm.get_prediction_return()
return ret
@abstractmethod
def add_experience(self, e):
"""
Implements how to record an experience.
Will be called by self.store_data()
"""
pass
def _store_data(self, alive, data):
"""
Store the past experience for later use, e.g., experience replay.
Args:
data(dict): data to store.
"""
assert isinstance(data, dict)
data["alive"] = [alive]
t = Experience(data)
self.add_experience(t)
self.counter += 1
if self.counter % self.sample_interval == 0:
return self.learn()
@abstractmethod
def sample_experiences(self):
"""
Implements how to retrieve experiences from past.
Will be called by self.learn()
"""
pass
def learn(self):
"""
Sample data from past experiences and send them to CDP for learning.
Optionally, it receives learning outcomes sent back from CW and does
some processing.
        Depending on the user's needs, this function can be called in three ways:
1. In Agent's run_one_episode
2. In store_data(), e.g., learning once every few steps
3. As a separate thread, e.g., using experience replay
"""
exp_seqs = self.sample_experiences()
if not exp_seqs:
return
data, size = self.unpack_exps(exp_seqs)
self.comm.put_training_data(data, size)
ret = self.comm.get_training_return()
return ret
class OnlineHelper(AgentHelper):
"""
Online helper. It calls `learn()` every `sample_interval`
steps.
While waiting for learning return, the calling `Agent` is blocked.
"""
def __init__(self, name, communicator, sample_interval=5):
super(OnlineHelper, self).__init__(name, communicator, sample_interval)
# NoReplacementQueue used to store past experience.
self.exp_queue = NoReplacementQueue()
@staticmethod
def exp_replay():
return False
def add_experience(self, e):
self.exp_queue.add(e)
def sample_experiences(self):
return self.exp_queue.sample()
class ExpReplayHelper(AgentHelper):
"""
    Example of applying experience replay. It starts a separate thread to
    run learn().
"""
def __init__(self,
name,
communicator,
buffer_capacity,
num_experiences,
sample_interval=5,
num_seqs=1):
super(ExpReplayHelper, self).__init__(name, communicator,
sample_interval)
# replay buffer for experience replay
self.replay_buffer = ReplayBuffer(buffer_capacity)
self.num_experiences = num_experiences
self.num_seqs = num_seqs
@staticmethod
def exp_replay():
return True
def add_experience(self, e):
self.replay_buffer.add(e)
def sample_experiences(self):
return self.replay_buffer.sample(self.num_experiences, self.num_seqs)
class Agent(Process):
"""
    Agent implements the control flow and logic of how a Robot interacts with
the environment and does computation. It is a subclass of Process. The entry
function of the Agent process is run().
Some members:
env: the environment
num_games: number of games to run
        learning: whether to learn or not (if False, only do testing)
helpers: a dictionary of `AgentHelper`, each corresponds to one
`ComputationTask`
log_q: communication channel between `Agent` and the centralized logger
running: the `Agent` will keep running as long as `running` is True.
"""
__metaclass__ = ABCMeta
def __init__(self, num_games, actrep, learning):
super(Agent, self).__init__()
self.id = -1 # just created, not added to the Robot yet
self.num_games = num_games
self.learning = learning
self.state_specs = None
self.helpers = {}
self.log_q = None
self.running = Value('i', 0)
self.daemon = True ## Process member
self.alive = 1
self.env_f = None
self.actrep = actrep
def set_env(self, env_class, *args, **kwargs):
"""
Set the environment for the agent. For now, only create a lambda
function. Once the agent process starts running, we will call this
function.
env_class: The environment class to create
args, kwargs: The arguments for creating the class
"""
self.env_f = lambda: env_class(*args, **kwargs)
def add_agent_helper(self, helper, input_keys, action_keys, state_keys,
reward_keys):
"""
Add an AgentHelper, with its name (also the name of its
        corresponding `ComputationTask`) as key.
"""
assert isinstance(helper, AgentHelper)
helper.input_keys = input_keys
helper.action_keys = action_keys
helper.state_keys = state_keys
helper.reward_keys = reward_keys
self.helpers[helper.name] = helper
def _make_zero_states(self, prop):
dtype = prop["dtype"] if "dtype" in prop else "float32"
return np.zeros(prop["shape"]).astype(dtype)
## The following three functions hide the `AgentHelper` from the users of
## `Agent`.
def predict(self, alg_name, inputs, states=dict()):
## Convert single instances to batches of size 1
## The reason for this conversion is that we want to reuse the
## _pack_data() and _unpack_data() of the CDP for handling both training
## and prediction data. These two functions assume that data are stored
## as mini batches instead of single instances in the prediction and learning
## queues.
inputs_ = {k: [v] for k, v in inputs.items()}
states_ = {k: [v] for k, v in states.items()}
prediction, next_states = self.helpers[alg_name].predict(inputs_,
states_)
## convert back to single instances
prediction = {k: v[0] for k, v in prediction.items()}
next_states = {k: v[0] for k, v in next_states.items()}
return prediction, next_states
def run(self):
"""
Default entry function of Agent process.
"""
assert self.env_f is not None, "You should first call self.set_env()!"
## Only call the env function now to make sure there is only one
## environment (OpenGL context) in each process
self.env = self.env_f()
self.running.value = 1
for i in range(self.num_games):
self._run_one_episode()
if not self.running.value:
return
self.running.value = 0
def _store_data(self, alg_name, data):
if self.learning: ## only store when the agent is learning
return self.helpers[alg_name]._store_data(self.alive, data)
def _run_one_episode(self):
def __store_data(observations, actions, states, rewards):
learning_ret = self._cts_store_data(observations, actions, states,
rewards) ## written by user
if learning_ret is not None:
for k, v in learning_ret.items():
self.log_entry.add_key(k, v)
observations = self._reset_env()
states = self._get_init_states() ## written by user
while self.alive and (not self.env.time_out()):
actions, next_states = self._cts_predict(
observations, states) ## written by user
assert isinstance(actions, dict)
assert isinstance(next_states, dict)
next_observations, rewards, next_game_over = self._step_env(
actions)
__store_data(observations, actions, states, rewards)
observations = next_observations
states = next_states
## next_game_over == 1: success
## next_game_over == -1: failure
self.alive = 1 - abs(next_game_over)
## self.alive: 0 -- success/failure
## 1 -- normal
## -1 -- timeout
if self.env.time_out():
self.alive = -1
actions, _ = self._cts_predict(observations, states)
zero_rewards = {k: [0] * len(v) for k, v in rewards.items()}
__store_data(observations, actions, states, zero_rewards)
        ## Record success. For games that do not have a definition of
## 'success' (e.g., 'breakout' never ends), this quantity will
## always be zero
self.log_entry.add_key("success", next_game_over > 0)
return self._total_reward()
def _reset_env(self):
self.alive = 1
## currently we only support a single logger for all CTs
self.log_entry = GameLogEntry(self.id, 'All')
obs = self.env.reset()
assert isinstance(obs, dict)
return obs
def _step_env(self, actions):
next_observations, rewards, next_game_over = self.env.step(actions,
self.actrep)
assert isinstance(next_observations, dict)
assert isinstance(rewards, dict)
self.log_entry.add_key("num_steps", 1)
self.log_entry.add_key("total_reward", sum(map(sum, rewards.values())))
return next_observations, rewards, next_game_over
def _total_reward(self):
self.log_q.put(self.log_entry)
return self.log_entry.total_reward
def _get_init_states(self):
"""
By default, there is no state. The user needs to override this function
to return a dictionary of init states if necessary.
"""
return dict()
@abstractmethod
def _cts_predict(self, observations, states):
"""
The user needs to override this function to specify how different CTs
make predictions given observations and states.
Output: actions: a dictionary of actions, each action being a vector
If the action is discrete, then it is a length-one
list of an integer.
states (optional): a dictionary of states, each state being a floating vector
"""
pass
@abstractmethod
def _cts_store_data(self, observations, actions, states, rewards):
"""
The user needs to override this function to specify how different CTs
store their corresponding experiences, by calling self._store_data().
Each input should be a dictionary.
"""
pass
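# Hedged illustration (not part of the original module): a minimal sketch of
# how the two abstract hooks are typically filled in by a concrete agent. The
# computation-task name "RL" is an assumption for the sketch only; a real agent
# depends on how its helpers and environment are wired up by the framework.
class SimpleAgent(Agent):
    def _cts_predict(self, observations, states):
        # Ask the "RL" computation task for actions given the observations;
        # recurrent states (if any) are passed through and returned updated.
        return self.predict("RL", observations, states)
    def _cts_store_data(self, observations, actions, states, rewards):
        # Merge one transition into a flat dict and hand it to the "RL"
        # helper; _store_data() may trigger learn() every sample_interval steps.
        data = dict()
        data.update(observations)
        data.update(actions)
        data.update(rewards)
        return self._store_data("RL", data)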
| 36.374684
| 93
| 0.600362
|
from abc import ABCMeta, abstractmethod
from multiprocessing import Process, Value
import numpy as np
from flare.common.log import GameLogEntry
from flare.common.communicator import AgentCommunicator
from flare.common.replay_buffer import NoReplacementQueue, ReplayBuffer, Experience
class AgentHelper(object):
__metaclass__ = ABCMeta
def __init__(self, name, communicator, sample_interval):
assert isinstance(communicator, AgentCommunicator)
self.name = name
self.comm = communicator
self.counter = 0
assert sample_interval >= 2
self.sample_interval = sample_interval
def unpack_exps(self, exp_seqs):
def concat_lists(lists):
return [x for l in lists for x in l]
def extract_key(seq, k):
assert seq
return [e.val(k) for e in seq]
ret = dict(
inputs={},
next_inputs={},
next_alive={},
rewards={},
actions={},
next_actions={},
states=None,
next_states=None)
for k in self.input_keys:
ipt_seqs = [extract_key(exp_seq, k) for exp_seq in exp_seqs]
ret["inputs"][k] = [ipt_seq[:-1] for ipt_seq in ipt_seqs]
ret["next_inputs"][k] = [ipt_seq[1:] for ipt_seq in ipt_seqs]
for k in self.action_keys:
act_seqs = [extract_key(exp_seq, k) for exp_seq in exp_seqs]
ret["actions"][k] = [act_seq[:-1] for act_seq in act_seqs]
ret["next_actions"][k] = [act_seq[1:] for act_seq in act_seqs]
for k in self.reward_keys:
ret["rewards"][
k] = [extract_key(exp_seq[:-1], k) for exp_seq in exp_seqs]
if self.state_keys:
ret["states"] = dict()
ret["next_states"] = dict()
for k in self.state_keys:
ret["states"][
k] = [extract_key(exp_seq[:1], k)[0] for exp_seq in exp_seqs]
ret["next_states"][k] = [
extract_key(exp_seq[1:2], k)[0] for exp_seq in exp_seqs
]
ret["next_alive"]["alive"] \
= [extract_key(exp_seq[1:], "alive") for exp_seq in exp_seqs]
if not self.state_keys:
for k in ret.keys():
if ret[k] is not None:
for kk in ret[k].keys():
ret[k][kk] = concat_lists(ret[k][kk])
return ret, len(exp_seqs)
def predict(self, inputs, states=dict()):
data = dict(inputs=inputs, states=states)
self.comm.put_prediction_data(data, 1)
ret = self.comm.get_prediction_return()
return ret
@abstractmethod
def add_experience(self, e):
pass
def _store_data(self, alive, data):
assert isinstance(data, dict)
data["alive"] = [alive]
t = Experience(data)
self.add_experience(t)
self.counter += 1
if self.counter % self.sample_interval == 0:
return self.learn()
@abstractmethod
def sample_experiences(self):
pass
def learn(self):
exp_seqs = self.sample_experiences()
if not exp_seqs:
return
data, size = self.unpack_exps(exp_seqs)
self.comm.put_training_data(data, size)
ret = self.comm.get_training_return()
return ret
class OnlineHelper(AgentHelper):
def __init__(self, name, communicator, sample_interval=5):
super(OnlineHelper, self).__init__(name, communicator, sample_interval)
self.exp_queue = NoReplacementQueue()
@staticmethod
def exp_replay():
return False
def add_experience(self, e):
self.exp_queue.add(e)
def sample_experiences(self):
return self.exp_queue.sample()
class ExpReplayHelper(AgentHelper):
def __init__(self,
name,
communicator,
buffer_capacity,
num_experiences,
sample_interval=5,
num_seqs=1):
super(ExpReplayHelper, self).__init__(name, communicator,
sample_interval)
self.replay_buffer = ReplayBuffer(buffer_capacity)
self.num_experiences = num_experiences
self.num_seqs = num_seqs
@staticmethod
def exp_replay():
return True
def add_experience(self, e):
self.replay_buffer.add(e)
def sample_experiences(self):
return self.replay_buffer.sample(self.num_experiences, self.num_seqs)
class Agent(Process):
__metaclass__ = ABCMeta
def __init__(self, num_games, actrep, learning):
super(Agent, self).__init__()
        self.id = -1
        self.num_games = num_games
self.learning = learning
self.state_specs = None
self.helpers = {}
self.log_q = None
self.running = Value('i', 0)
        self.daemon = True
        self.alive = 1
self.env_f = None
self.actrep = actrep
def set_env(self, env_class, *args, **kwargs):
self.env_f = lambda: env_class(*args, **kwargs)
def add_agent_helper(self, helper, input_keys, action_keys, state_keys,
reward_keys):
assert isinstance(helper, AgentHelper)
helper.input_keys = input_keys
helper.action_keys = action_keys
helper.state_keys = state_keys
helper.reward_keys = reward_keys
self.helpers[helper.name] = helper
def _make_zero_states(self, prop):
dtype = prop["dtype"] if "dtype" in prop else "float32"
return np.zeros(prop["shape"]).astype(dtype)
def predict(self, alg_name, inputs, states=dict()):
inputs_ = {k: [v] for k, v in inputs.items()}
states_ = {k: [v] for k, v in states.items()}
prediction, next_states = self.helpers[alg_name].predict(inputs_,
states_)
prediction = {k: v[0] for k, v in prediction.items()}
next_states = {k: v[0] for k, v in next_states.items()}
return prediction, next_states
def run(self):
assert self.env_f is not None, "You should first call self.set_env()!"
self.env = self.env_f()
self.running.value = 1
for i in range(self.num_games):
self._run_one_episode()
if not self.running.value:
return
self.running.value = 0
def _store_data(self, alg_name, data):
if self.learning: return self.helpers[alg_name]._store_data(self.alive, data)
def _run_one_episode(self):
def __store_data(observations, actions, states, rewards):
            learning_ret = self._cts_store_data(observations, actions, states,
                                                rewards)
            if learning_ret is not None:
for k, v in learning_ret.items():
self.log_entry.add_key(k, v)
observations = self._reset_env()
states = self._get_init_states()
while self.alive and (not self.env.time_out()):
actions, next_states = self._cts_predict(
                observations, states)
            assert isinstance(actions, dict)
assert isinstance(next_states, dict)
next_observations, rewards, next_game_over = self._step_env(
actions)
__store_data(observations, actions, states, rewards)
observations = next_observations
states = next_states
self.alive = 1 - abs(next_game_over)
if self.env.time_out():
self.alive = -1
actions, _ = self._cts_predict(observations, states)
zero_rewards = {k: [0] * len(v) for k, v in rewards.items()}
__store_data(observations, actions, states, zero_rewards)
self.log_entry.add_key("success", next_game_over > 0)
return self._total_reward()
def _reset_env(self):
self.alive = 1
self.log_entry = GameLogEntry(self.id, 'All')
obs = self.env.reset()
assert isinstance(obs, dict)
return obs
def _step_env(self, actions):
next_observations, rewards, next_game_over = self.env.step(actions,
self.actrep)
assert isinstance(next_observations, dict)
assert isinstance(rewards, dict)
self.log_entry.add_key("num_steps", 1)
self.log_entry.add_key("total_reward", sum(map(sum, rewards.values())))
return next_observations, rewards, next_game_over
def _total_reward(self):
self.log_q.put(self.log_entry)
return self.log_entry.total_reward
def _get_init_states(self):
return dict()
@abstractmethod
def _cts_predict(self, observations, states):
pass
@abstractmethod
def _cts_store_data(self, observations, actions, states, rewards):
pass
| true
| true
|
f701ff2a39c9302a8a5530d6b4a34c9cdf05ddd0
| 1,302
|
py
|
Python
|
custom_components/ge_home/devices/washer_dryer.py
|
olds/ha_gehome
|
5cb24deab64bcade45861da0497a84631845922c
|
[
"MIT"
] | 41
|
2021-08-02T02:15:54.000Z
|
2022-03-30T11:11:42.000Z
|
custom_components/ge_home/devices/washer_dryer.py
|
olds/ha_gehome
|
5cb24deab64bcade45861da0497a84631845922c
|
[
"MIT"
] | 46
|
2021-08-03T02:20:59.000Z
|
2022-03-30T11:17:15.000Z
|
custom_components/ge_home/devices/washer_dryer.py
|
olds/ha_gehome
|
5cb24deab64bcade45861da0497a84631845922c
|
[
"MIT"
] | 15
|
2021-08-31T00:21:33.000Z
|
2022-03-30T12:53:21.000Z
|
import logging
from typing import List
from homeassistant.helpers.entity import Entity
from gehomesdk import ErdCode, ErdApplianceType
from .washer import WasherApi
from .dryer import DryerApi
from ..entities import GeErdSensor, GeErdBinarySensor
_LOGGER = logging.getLogger(__name__)
class WasherDryerApi(WasherApi, DryerApi):
"""API class for washer/dryer objects"""
APPLIANCE_TYPE = ErdApplianceType.COMBINATION_WASHER_DRYER
def get_all_entities(self) -> List[Entity]:
base_entities = self.get_base_entities()
common_entities = [
GeErdSensor(self, ErdCode.LAUNDRY_MACHINE_STATE),
GeErdSensor(self, ErdCode.LAUNDRY_CYCLE),
GeErdSensor(self, ErdCode.LAUNDRY_SUB_CYCLE),
GeErdBinarySensor(self, ErdCode.LAUNDRY_END_OF_CYCLE),
GeErdSensor(self, ErdCode.LAUNDRY_TIME_REMAINING),
GeErdSensor(self, ErdCode.LAUNDRY_DELAY_TIME_REMAINING),
GeErdBinarySensor(self, ErdCode.LAUNDRY_DOOR),
GeErdBinarySensor(self, ErdCode.LAUNDRY_REMOTE_STATUS),
]
washer_entities = self.get_washer_entities()
dryer_entities = self.get_dryer_entities()
entities = base_entities + common_entities + washer_entities + dryer_entities
return entities
| 35.189189
| 85
| 0.725038
|
import logging
from typing import List
from homeassistant.helpers.entity import Entity
from gehomesdk import ErdCode, ErdApplianceType
from .washer import WasherApi
from .dryer import DryerApi
from ..entities import GeErdSensor, GeErdBinarySensor
_LOGGER = logging.getLogger(__name__)
class WasherDryerApi(WasherApi, DryerApi):
APPLIANCE_TYPE = ErdApplianceType.COMBINATION_WASHER_DRYER
def get_all_entities(self) -> List[Entity]:
base_entities = self.get_base_entities()
common_entities = [
GeErdSensor(self, ErdCode.LAUNDRY_MACHINE_STATE),
GeErdSensor(self, ErdCode.LAUNDRY_CYCLE),
GeErdSensor(self, ErdCode.LAUNDRY_SUB_CYCLE),
GeErdBinarySensor(self, ErdCode.LAUNDRY_END_OF_CYCLE),
GeErdSensor(self, ErdCode.LAUNDRY_TIME_REMAINING),
GeErdSensor(self, ErdCode.LAUNDRY_DELAY_TIME_REMAINING),
GeErdBinarySensor(self, ErdCode.LAUNDRY_DOOR),
GeErdBinarySensor(self, ErdCode.LAUNDRY_REMOTE_STATUS),
]
washer_entities = self.get_washer_entities()
dryer_entities = self.get_dryer_entities()
entities = base_entities + common_entities + washer_entities + dryer_entities
return entities
| true
| true
|
f701ffb04ee003deff62ba9a43c50dc9568f0b5f
| 6,275
|
py
|
Python
|
pyldpc/decoder.py
|
c-w-m/pyldpc
|
c7eb471359086b7336d7b40f11cc912f0daf0476
|
[
"BSD-3-Clause"
] | 69
|
2017-03-12T17:48:54.000Z
|
2022-03-25T13:18:46.000Z
|
pyldpc/decoder.py
|
hichamjanati/pyldpc
|
a821ccd1eb3a13b8a0f66ebba8d9923ce2f528ef
|
[
"BSD-3-Clause"
] | 20
|
2018-03-14T14:29:41.000Z
|
2021-12-29T15:40:27.000Z
|
pyldpc/decoder.py
|
c-w-m/pyldpc
|
c7eb471359086b7336d7b40f11cc912f0daf0476
|
[
"BSD-3-Clause"
] | 29
|
2017-12-26T22:07:34.000Z
|
2022-03-27T13:55:57.000Z
|
"""Decoding module."""
import numpy as np
import warnings
from . import utils
from numba import njit, int64, types, float64
def decode(H, y, snr, maxiter=1000):
"""Decode a Gaussian noise corrupted n bits message using BP algorithm.
Decoding is performed in parallel if multiple codewords are passed in y.
Parameters
----------
H: array (n_equations, n_code). Decoding matrix H.
y: array (n_code, n_messages) or (n_code,). Received message(s) in the
codeword space.
    snr: float. Signal to noise ratio in dB, used to set the channel noise variance.
    maxiter: int. Maximum number of iterations of the BP algorithm.
Returns
-------
x: array (n_code,) or (n_code, n_messages) the solutions in the
codeword space.
"""
m, n = H.shape
bits_hist, bits_values, nodes_hist, nodes_values = utils._bitsandnodes(H)
_n_bits = np.unique(H.sum(0))
_n_nodes = np.unique(H.sum(1))
if _n_bits * _n_nodes == 1:
solver = _logbp_numba_regular
bits_values = bits_values.reshape(n, -1)
nodes_values = nodes_values.reshape(m, -1)
else:
solver = _logbp_numba
var = 10 ** (-snr / 10)
if y.ndim == 1:
y = y[:, None]
# step 0: initialization
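    # Channel LLRs for BPSK over AWGN: Lc[k] = 2 * y[k] / sigma^2, where the
    # noise variance sigma^2 is recovered from the SNR in dB as 10 ** (-snr / 10).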
Lc = 2 * y / var
_, n_messages = y.shape
Lq = np.zeros(shape=(m, n, n_messages))
Lr = np.zeros(shape=(m, n, n_messages))
for n_iter in range(maxiter):
Lq, Lr, L_posteriori = solver(bits_hist, bits_values, nodes_hist,
nodes_values, Lc, Lq, Lr, n_iter)
x = np.array(L_posteriori <= 0).astype(int)
product = utils.incode(H, x)
if product:
break
if n_iter == maxiter - 1:
warnings.warn("""Decoding stopped before convergence. You may want
to increase maxiter""")
return x.squeeze()
output_type_log2 = types.Tuple((float64[:, :, :], float64[:, :, :],
float64[:, :]))
@njit(output_type_log2(int64[:], int64[:], int64[:], int64[:], float64[:, :],
float64[:, :, :], float64[:, :, :], int64), cache=True)
def _logbp_numba(bits_hist, bits_values, nodes_hist, nodes_values, Lc, Lq, Lr,
n_iter):
"""Perform inner ext LogBP solver."""
m, n, n_messages = Lr.shape
# step 1 : Horizontal
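    # Check-node (horizontal) update, i.e. the log-domain tanh rule:
    #   Lr[i, j] = log((1 + X) / (1 - X)),  X = prod over k in N(i)\{j} of tanh(Lq[i, k] / 2),
    # seeded with the channel LLRs Lc on the first iteration; the -1 / +1
    # branches below clamp the message when the product saturates to -1 or +1.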
bits_counter = 0
nodes_counter = 0
for i in range(m):
# ni = bits[i]
ff = bits_hist[i]
ni = bits_values[bits_counter: bits_counter + ff]
bits_counter += ff
for j in ni:
nij = ni[:]
X = np.ones(n_messages)
if n_iter == 0:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lc[nij[kk]])
else:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lq[i, nij[kk]])
num = 1 + X
denom = 1 - X
for ll in range(n_messages):
if num[ll] == 0:
Lr[i, j, ll] = -1
elif denom[ll] == 0:
Lr[i, j, ll] = 1
else:
Lr[i, j, ll] = np.log(num[ll] / denom[ll])
# step 2 : Vertical
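    # Variable-node (vertical) update: Lq[i, j] = Lc[j] plus the Lr messages
    # from every other check node containing bit j, i.e. the sum over N(j)\{i}.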
for j in range(n):
# mj = nodes[j]
ff = nodes_hist[j]
mj = nodes_values[nodes_counter: nodes_counter + ff]
nodes_counter += ff
for i in mj:
mji = mj[:]
Lq[i, j] = Lc[j]
for kk in range(len(mji)):
if mji[kk] != i:
Lq[i, j] += Lr[mji[kk], j]
# LLR a posteriori:
L_posteriori = np.zeros((n, n_messages))
nodes_counter = 0
for j in range(n):
ff = nodes_hist[j]
mj = nodes_values[nodes_counter: nodes_counter + ff]
nodes_counter += ff
L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
return Lq, Lr, L_posteriori
@njit(output_type_log2(int64[:], int64[:, :], int64[:], int64[:, :],
float64[:, :], float64[:, :, :], float64[:, :, :],
int64), cache=True)
def _logbp_numba_regular(bits_hist, bits_values, nodes_hist, nodes_values, Lc,
Lq, Lr, n_iter):
"""Perform inner ext LogBP solver."""
m, n, n_messages = Lr.shape
# step 1 : Horizontal
for i in range(m):
ni = bits_values[i]
for j in ni:
nij = ni[:]
X = np.ones(n_messages)
if n_iter == 0:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lc[nij[kk]])
else:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lq[i, nij[kk]])
num = 1 + X
denom = 1 - X
for ll in range(n_messages):
if num[ll] == 0:
Lr[i, j, ll] = -1
elif denom[ll] == 0:
Lr[i, j, ll] = 1
else:
Lr[i, j, ll] = np.log(num[ll] / denom[ll])
# step 2 : Vertical
for j in range(n):
mj = nodes_values[j]
for i in mj:
mji = mj[:]
Lq[i, j] = Lc[j]
for kk in range(len(mji)):
if mji[kk] != i:
Lq[i, j] += Lr[mji[kk], j]
# LLR a posteriori:
L_posteriori = np.zeros((n, n_messages))
for j in range(n):
mj = nodes_values[j]
L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
return Lq, Lr, L_posteriori
def get_message(tG, x):
"""Compute the original `n_bits` message from a `n_code` codeword `x`.
Parameters
----------
tG: array (n_code, n_bits) coding matrix tG.
x: array (n_code,) decoded codeword of length `n_code`.
Returns
-------
message: array (n_bits,). Original binary message.
"""
n, k = tG.shape
rtG, rx = utils.gausselimination(tG, x)
message = np.zeros(k).astype(int)
message[k - 1] = rx[k - 1]
for i in reversed(range(k - 1)):
message[i] = rx[i]
message[i] -= utils.binaryproduct(rtG[i, list(range(i+1, k))],
message[list(range(i+1, k))])
return abs(message)
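# Hedged end-to-end sketch (not part of the original module). It mirrors the
# package's documented usage and assumes the companion helpers make_ldpc and
# encode are importable from the top-level package with the signatures shown;
# only decode() and get_message() are defined in this file.
if __name__ == "__main__":
    from pyldpc import make_ldpc, encode
    n, d_v, d_c, snr = 15, 4, 5, 20
    H, G = make_ldpc(n, d_v, d_c, systematic=True, sparse=True)
    k = G.shape[1]
    message = np.random.randint(2, size=k)
    y = encode(G, message, snr)         # BPSK codeword plus Gaussian noise
    x = decode(H, y, snr, maxiter=100)  # decode back into the codeword space
    assert (get_message(G, x) == message).all()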
| 29.599057
| 79
| 0.492271
|
import numpy as np
import warnings
from . import utils
from numba import njit, int64, types, float64
def decode(H, y, snr, maxiter=1000):
m, n = H.shape
bits_hist, bits_values, nodes_hist, nodes_values = utils._bitsandnodes(H)
_n_bits = np.unique(H.sum(0))
_n_nodes = np.unique(H.sum(1))
if _n_bits * _n_nodes == 1:
solver = _logbp_numba_regular
bits_values = bits_values.reshape(n, -1)
nodes_values = nodes_values.reshape(m, -1)
else:
solver = _logbp_numba
var = 10 ** (-snr / 10)
if y.ndim == 1:
y = y[:, None]
Lc = 2 * y / var
_, n_messages = y.shape
Lq = np.zeros(shape=(m, n, n_messages))
Lr = np.zeros(shape=(m, n, n_messages))
for n_iter in range(maxiter):
Lq, Lr, L_posteriori = solver(bits_hist, bits_values, nodes_hist,
nodes_values, Lc, Lq, Lr, n_iter)
x = np.array(L_posteriori <= 0).astype(int)
product = utils.incode(H, x)
if product:
break
if n_iter == maxiter - 1:
warnings.warn("""Decoding stopped before convergence. You may want
to increase maxiter""")
return x.squeeze()
output_type_log2 = types.Tuple((float64[:, :, :], float64[:, :, :],
float64[:, :]))
@njit(output_type_log2(int64[:], int64[:], int64[:], int64[:], float64[:, :],
float64[:, :, :], float64[:, :, :], int64), cache=True)
def _logbp_numba(bits_hist, bits_values, nodes_hist, nodes_values, Lc, Lq, Lr,
n_iter):
m, n, n_messages = Lr.shape
bits_counter = 0
nodes_counter = 0
for i in range(m):
ff = bits_hist[i]
ni = bits_values[bits_counter: bits_counter + ff]
bits_counter += ff
for j in ni:
nij = ni[:]
X = np.ones(n_messages)
if n_iter == 0:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lc[nij[kk]])
else:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lq[i, nij[kk]])
num = 1 + X
denom = 1 - X
for ll in range(n_messages):
if num[ll] == 0:
Lr[i, j, ll] = -1
elif denom[ll] == 0:
Lr[i, j, ll] = 1
else:
Lr[i, j, ll] = np.log(num[ll] / denom[ll])
for j in range(n):
ff = nodes_hist[j]
mj = nodes_values[nodes_counter: nodes_counter + ff]
nodes_counter += ff
for i in mj:
mji = mj[:]
Lq[i, j] = Lc[j]
for kk in range(len(mji)):
if mji[kk] != i:
Lq[i, j] += Lr[mji[kk], j]
L_posteriori = np.zeros((n, n_messages))
nodes_counter = 0
for j in range(n):
ff = nodes_hist[j]
mj = nodes_values[nodes_counter: nodes_counter + ff]
nodes_counter += ff
L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
return Lq, Lr, L_posteriori
@njit(output_type_log2(int64[:], int64[:, :], int64[:], int64[:, :],
float64[:, :], float64[:, :, :], float64[:, :, :],
int64), cache=True)
def _logbp_numba_regular(bits_hist, bits_values, nodes_hist, nodes_values, Lc,
Lq, Lr, n_iter):
m, n, n_messages = Lr.shape
for i in range(m):
ni = bits_values[i]
for j in ni:
nij = ni[:]
X = np.ones(n_messages)
if n_iter == 0:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lc[nij[kk]])
else:
for kk in range(len(nij)):
if nij[kk] != j:
X *= np.tanh(0.5 * Lq[i, nij[kk]])
num = 1 + X
denom = 1 - X
for ll in range(n_messages):
if num[ll] == 0:
Lr[i, j, ll] = -1
elif denom[ll] == 0:
Lr[i, j, ll] = 1
else:
Lr[i, j, ll] = np.log(num[ll] / denom[ll])
for j in range(n):
mj = nodes_values[j]
for i in mj:
mji = mj[:]
Lq[i, j] = Lc[j]
for kk in range(len(mji)):
if mji[kk] != i:
Lq[i, j] += Lr[mji[kk], j]
L_posteriori = np.zeros((n, n_messages))
for j in range(n):
mj = nodes_values[j]
L_posteriori[j] = Lc[j] + Lr[mj, j].sum(axis=0)
return Lq, Lr, L_posteriori
def get_message(tG, x):
n, k = tG.shape
rtG, rx = utils.gausselimination(tG, x)
message = np.zeros(k).astype(int)
message[k - 1] = rx[k - 1]
for i in reversed(range(k - 1)):
message[i] = rx[i]
message[i] -= utils.binaryproduct(rtG[i, list(range(i+1, k))],
message[list(range(i+1, k))])
return abs(message)
| true
| true
|
f701ffdb06c403bc4f6a1e71c86342279da88863
| 69,260
|
py
|
Python
|
bitmex_swagger/api/order_api.py
|
silencewwt/bitmex-swagger-client
|
01403685eeb12eb27d53a0310d3bc7541793aa0f
|
[
"MIT"
] | 1
|
2018-08-04T15:05:43.000Z
|
2018-08-04T15:05:43.000Z
|
bitmex_swagger/api/order_api.py
|
silencewwt/bitmex-swagger
|
01403685eeb12eb27d53a0310d3bc7541793aa0f
|
[
"MIT"
] | null | null | null |
bitmex_swagger/api/order_api.py
|
silencewwt/bitmex-swagger
|
01403685eeb12eb27d53a0310d3bc7541793aa0f
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
BitMEX API
## REST API for the BitMEX Trading Platform [View Changelog](/app/apiChangelog) ---- #### Getting Started Base URI: [https://www.bitmex.com/api/v1](/api/v1) ##### Fetching Data All REST endpoints are documented below. You can try out any query right from this interface. Most table queries accept `count`, `start`, and `reverse` params. Set `reverse=true` to get rows newest-first. Additional documentation regarding filters, timestamps, and authentication is available in [the main API documentation](/app/restAPI). *All* table data is available via the [Websocket](/app/wsAPI). We highly recommend using the socket if you want to have the quickest possible data without being subject to ratelimits. ##### Return Types By default, all data is returned as JSON. Send `?_format=csv` to get CSV data or `?_format=xml` to get XML data. ##### Trade Data Queries *This is only a small subset of what is available, to get you started.* Fill in the parameters and click the `Try it out!` button to try any of these queries. * [Pricing Data](#!/Quote/Quote_get) * [Trade Data](#!/Trade/Trade_get) * [OrderBook Data](#!/OrderBook/OrderBook_getL2) * [Settlement Data](#!/Settlement/Settlement_get) * [Exchange Statistics](#!/Stats/Stats_history) Every function of the BitMEX.com platform is exposed here and documented. Many more functions are available. ##### Swagger Specification [⇩ Download Swagger JSON](swagger.json) ---- ## All API Endpoints Click to expand a section. # noqa: E501
OpenAPI spec version: 1.2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from bitmex_swagger.api_client import ApiClient
class OrderApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def order_amend(self, **kwargs): # noqa: E501
"""Amend the quantity or price of an open order. # noqa: E501
Send an `orderID` or `origClOrdID` to identify the order you wish to amend. Both order quantity and price can be amended. Only one `qty` field can be used to amend. Use the `leavesQty` field to specify how much of the order you wish to remain open. This can be useful if you want to adjust your position's delta by a certain amount, regardless of how much of the order has already filled. > A `leavesQty` can be used to make a \"Filled\" order live again, if it is received within 60 seconds of the fill. Use the `simpleOrderQty` and `simpleLeavesQty` fields to specify order size in Bitcoin, rather than contracts. These fields will round up to the nearest contract. Like order placement, amending can be done in bulk. Simply send a request to `PUT /api/v1/order/bulk` with a JSON body of the shape: `{\"orders\": [{...}, {...}]}`, each object containing the fields used in this endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_amend(async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID
:param str orig_cl_ord_id: Client Order ID. See POST /order.
:param str cl_ord_id: Optional new Client Order ID, requires `origClOrdID`.
:param float simple_order_qty: Optional order quantity in units of the underlying instrument (i.e. Bitcoin).
:param float order_qty: Optional order quantity in units of the instrument (i.e. contracts).
:param float simple_leaves_qty: Optional leaves quantity in units of the underlying instrument (i.e. Bitcoin). Useful for amending partially filled orders.
:param float leaves_qty: Optional leaves quantity in units of the instrument (i.e. contracts). Useful for amending partially filled orders.
:param float price: Optional limit price for 'Limit', 'StopLimit', and 'LimitIfTouched' orders.
:param float stop_px: Optional trigger price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders. Use a price below the current price for stop-sell orders and buy-if-touched orders.
:param float peg_offset_value: Optional trailing offset from the current price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders; use a negative offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price for 'Pegged' orders.
:param str text: Optional amend annotation. e.g. 'Adjust skew'.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_amend_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.order_amend_with_http_info(**kwargs) # noqa: E501
return data
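    # Hedged usage sketch (not part of the generated client): amend the limit
    # price of a resting order. The client instance and order ID below are
    # placeholders, and authentication must already be configured on the
    # underlying ApiClient.
    # >>> api = OrderApi(api_client)
    # >>> amended = api.order_amend(order_id="<uuid>", price=8000)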
def order_amend_with_http_info(self, **kwargs): # noqa: E501
"""Amend the quantity or price of an open order. # noqa: E501
Send an `orderID` or `origClOrdID` to identify the order you wish to amend. Both order quantity and price can be amended. Only one `qty` field can be used to amend. Use the `leavesQty` field to specify how much of the order you wish to remain open. This can be useful if you want to adjust your position's delta by a certain amount, regardless of how much of the order has already filled. > A `leavesQty` can be used to make a \"Filled\" order live again, if it is received within 60 seconds of the fill. Use the `simpleOrderQty` and `simpleLeavesQty` fields to specify order size in Bitcoin, rather than contracts. These fields will round up to the nearest contract. Like order placement, amending can be done in bulk. Simply send a request to `PUT /api/v1/order/bulk` with a JSON body of the shape: `{\"orders\": [{...}, {...}]}`, each object containing the fields used in this endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_amend_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID
:param str orig_cl_ord_id: Client Order ID. See POST /order.
:param str cl_ord_id: Optional new Client Order ID, requires `origClOrdID`.
:param float simple_order_qty: Optional order quantity in units of the underlying instrument (i.e. Bitcoin).
:param float order_qty: Optional order quantity in units of the instrument (i.e. contracts).
:param float simple_leaves_qty: Optional leaves quantity in units of the underlying instrument (i.e. Bitcoin). Useful for amending partially filled orders.
:param float leaves_qty: Optional leaves quantity in units of the instrument (i.e. contracts). Useful for amending partially filled orders.
:param float price: Optional limit price for 'Limit', 'StopLimit', and 'LimitIfTouched' orders.
:param float stop_px: Optional trigger price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders. Use a price below the current price for stop-sell orders and buy-if-touched orders.
:param float peg_offset_value: Optional trailing offset from the current price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders; use a negative offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price for 'Pegged' orders.
:param str text: Optional amend annotation. e.g. 'Adjust skew'.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['order_id', 'orig_cl_ord_id', 'cl_ord_id', 'simple_order_qty', 'order_qty', 'simple_leaves_qty', 'leaves_qty', 'price', 'stop_px', 'peg_offset_value', 'text'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_amend" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'order_id' in params:
form_params.append(('orderID', params['order_id'])) # noqa: E501
if 'orig_cl_ord_id' in params:
form_params.append(('origClOrdID', params['orig_cl_ord_id'])) # noqa: E501
if 'cl_ord_id' in params:
form_params.append(('clOrdID', params['cl_ord_id'])) # noqa: E501
if 'simple_order_qty' in params:
form_params.append(('simpleOrderQty', params['simple_order_qty'])) # noqa: E501
if 'order_qty' in params:
form_params.append(('orderQty', params['order_qty'])) # noqa: E501
if 'simple_leaves_qty' in params:
form_params.append(('simpleLeavesQty', params['simple_leaves_qty'])) # noqa: E501
if 'leaves_qty' in params:
form_params.append(('leavesQty', params['leaves_qty'])) # noqa: E501
if 'price' in params:
form_params.append(('price', params['price'])) # noqa: E501
if 'stop_px' in params:
form_params.append(('stopPx', params['stop_px'])) # noqa: E501
if 'peg_offset_value' in params:
form_params.append(('pegOffsetValue', params['peg_offset_value'])) # noqa: E501
if 'text' in params:
form_params.append(('text', params['text'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey', 'apiNonce', 'apiSignature'] # noqa: E501
return self.api_client.call_api(
'/order', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def order_amend_bulk(self, **kwargs): # noqa: E501
"""Amend multiple orders for the same symbol. # noqa: E501
Similar to POST /amend, but with multiple orders. `application/json` only. Ratelimited at 10%. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_amend_bulk(async=True)
>>> result = thread.get()
:param async bool
:param str orders: An array of orders.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_amend_bulk_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.order_amend_bulk_with_http_info(**kwargs) # noqa: E501
return data
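    # Hedged usage sketch (not part of the generated client): bulk-amend two
    # resting orders in one request; `orders` is passed as a JSON-encoded string
    # and the order IDs and prices below are placeholders.
    # >>> import json
    # >>> api.order_amend_bulk(orders=json.dumps([
    # ...     {"orderID": "<uuid-1>", "price": 8000},
    # ...     {"orderID": "<uuid-2>", "price": 8005},
    # ... ]))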
def order_amend_bulk_with_http_info(self, **kwargs): # noqa: E501
"""Amend multiple orders for the same symbol. # noqa: E501
Similar to POST /amend, but with multiple orders. `application/json` only. Ratelimited at 10%. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_amend_bulk_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str orders: An array of orders.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orders'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_amend_bulk" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'orders' in params:
form_params.append(('orders', params['orders'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey', 'apiNonce', 'apiSignature'] # noqa: E501
return self.api_client.call_api(
'/order/bulk', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def order_cancel(self, **kwargs): # noqa: E501
"""Cancel order(s). Send multiple order IDs to cancel in bulk. # noqa: E501
Either an orderID or a clOrdID must be provided. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel(async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID(s).
:param str cl_ord_id: Client Order ID(s). See POST /order.
:param str text: Optional cancellation annotation. e.g. 'Spread Exceeded'.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_cancel_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.order_cancel_with_http_info(**kwargs) # noqa: E501
return data
def order_cancel_with_http_info(self, **kwargs): # noqa: E501
"""Cancel order(s). Send multiple order IDs to cancel in bulk. # noqa: E501
Either an orderID or a clOrdID must be provided. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID(s).
:param str cl_ord_id: Client Order ID(s). See POST /order.
:param str text: Optional cancellation annotation. e.g. 'Spread Exceeded'.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['order_id', 'cl_ord_id', 'text'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_cancel" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'order_id' in params:
form_params.append(('orderID', params['order_id'])) # noqa: E501
if 'cl_ord_id' in params:
form_params.append(('clOrdID', params['cl_ord_id'])) # noqa: E501
if 'text' in params:
form_params.append(('text', params['text'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey', 'apiNonce', 'apiSignature'] # noqa: E501
return self.api_client.call_api(
'/order', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def order_cancel_all(self, **kwargs): # noqa: E501
"""Cancels all of your orders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_all(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Optional symbol. If provided, only cancels orders for that symbol.
:param str filter: Optional filter for cancellation. Use to only cancel some orders, e.g. `{\"side\": \"Buy\"}`.
:param str text: Optional cancellation annotation. e.g. 'Spread Exceeded'
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_cancel_all_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.order_cancel_all_with_http_info(**kwargs) # noqa: E501
return data
def order_cancel_all_with_http_info(self, **kwargs): # noqa: E501
"""Cancels all of your orders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_all_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Optional symbol. If provided, only cancels orders for that symbol.
:param str filter: Optional filter for cancellation. Use to only cancel some orders, e.g. `{\"side\": \"Buy\"}`.
:param str text: Optional cancellation annotation. e.g. 'Spread Exceeded'
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['symbol', 'filter', 'text'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_cancel_all" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'symbol' in params:
form_params.append(('symbol', params['symbol'])) # noqa: E501
if 'filter' in params:
form_params.append(('filter', params['filter'])) # noqa: E501
if 'text' in params:
form_params.append(('text', params['text'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey', 'apiNonce', 'apiSignature'] # noqa: E501
return self.api_client.call_api(
'/order/all', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
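    # A minimal usage sketch for the cancel-all endpoint above, assuming
    # `api` is a configured OrderApi; the symbol and the JSON filter mirror
    # the docstring examples and are illustrative only:
    #
    #     api.order_cancel_all(symbol='XBTUSD',
    #                          filter='{"side": "Buy"}',
    #                          text='Cancelling all resting buys')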
def order_cancel_all_after(self, timeout, **kwargs): # noqa: E501
"""Automatically cancel all your orders after a specified timeout. # noqa: E501
Useful as a dead-man's switch to ensure your orders are canceled in case of an outage. If called repeatedly, the existing offset will be canceled and a new one will be inserted in its place. Example usage: call this route at 15s intervals with an offset of 60000 (60s). If this route is not called within 60 seconds, all your orders will be automatically canceled. This is also available via [WebSocket](https://www.bitmex.com/app/wsAPI#Dead-Mans-Switch-Auto-Cancel). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_all_after(timeout, async=True)
>>> result = thread.get()
:param async bool
:param float timeout: Timeout in ms. Set to 0 to cancel this timer. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_cancel_all_after_with_http_info(timeout, **kwargs) # noqa: E501
else:
(data) = self.order_cancel_all_after_with_http_info(timeout, **kwargs) # noqa: E501
return data
def order_cancel_all_after_with_http_info(self, timeout, **kwargs): # noqa: E501
"""Automatically cancel all your orders after a specified timeout. # noqa: E501
Useful as a dead-man's switch to ensure your orders are canceled in case of an outage. If called repeatedly, the existing offset will be canceled and a new one will be inserted in its place. Example usage: call this route at 15s intervals with an offset of 60000 (60s). If this route is not called within 60 seconds, all your orders will be automatically canceled. This is also available via [WebSocket](https://www.bitmex.com/app/wsAPI#Dead-Mans-Switch-Auto-Cancel). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_all_after_with_http_info(timeout, async=True)
>>> result = thread.get()
:param async bool
:param float timeout: Timeout in ms. Set to 0 to cancel this timer. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['timeout'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_cancel_all_after" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'timeout' is set
if ('timeout' not in params or
params['timeout'] is None):
raise ValueError("Missing the required parameter `timeout` when calling `order_cancel_all_after`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'timeout' in params:
form_params.append(('timeout', params['timeout'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey', 'apiNonce', 'apiSignature'] # noqa: E501
return self.api_client.call_api(
'/order/cancelAllAfter', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
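    # A sketch of the dead-man's-switch pattern described in the docstring
    # above: re-arm a 60 s timer every 15 s so that orders survive only while
    # the process keeps checking in. Assumes `api` is a configured OrderApi
    # and `keep_running()` is an application-defined liveness check:
    #
    #     import time
    #     while keep_running():
    #         api.order_cancel_all_after(timeout=60000)  # 60 s offset
    #         time.sleep(15)                             # refresh well before expiry
    #     api.order_cancel_all_after(timeout=0)          # disarm on clean shutdown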
def order_close_position(self, symbol, **kwargs): # noqa: E501
"""Close a position. [Deprecated, use POST /order with execInst: 'Close'] # noqa: E501
If no `price` is specified, a market order will be submitted to close the whole of your position. This will also close all other open orders in this symbol. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_close_position(symbol, async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Symbol of position to close. (required)
:param float price: Optional limit price.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_close_position_with_http_info(symbol, **kwargs) # noqa: E501
else:
(data) = self.order_close_position_with_http_info(symbol, **kwargs) # noqa: E501
return data
def order_close_position_with_http_info(self, symbol, **kwargs): # noqa: E501
"""Close a position. [Deprecated, use POST /order with execInst: 'Close'] # noqa: E501
If no `price` is specified, a market order will be submitted to close the whole of your position. This will also close all other open orders in this symbol. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_close_position_with_http_info(symbol, async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Symbol of position to close. (required)
:param float price: Optional limit price.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['symbol', 'price'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_close_position" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'symbol' is set
if ('symbol' not in params or
params['symbol'] is None):
raise ValueError("Missing the required parameter `symbol` when calling `order_close_position`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'symbol' in params:
form_params.append(('symbol', params['symbol'])) # noqa: E501
if 'price' in params:
form_params.append(('price', params['price'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey', 'apiNonce', 'apiSignature'] # noqa: E501
return self.api_client.call_api(
'/order/closePosition', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
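    # A usage sketch for this (deprecated) close-position endpoint, plus the
    # replacement the docstring recommends (POST /order with execInst 'Close').
    # Assumes `api` is a configured OrderApi; symbol and price are illustrative:
    #
    #     api.order_close_position(symbol='XBTUSD')              # market close
    #     api.order_close_position(symbol='XBTUSD', price=9500)  # limit close
    #     api.order_new(symbol='XBTUSD', ord_type='Market',      # preferred route
    #                   exec_inst='Close')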
def order_get_orders(self, **kwargs): # noqa: E501
"""Get your orders. # noqa: E501
To get open orders only, send {\"open\": true} in the filter param. See <a href=\"http://www.onixs.biz/fix-dictionary/5.0.SP2/msgType_D_68.html\">the FIX Spec</a> for explanations of these fields. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_get_orders(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. Send a bare series (e.g. XBU) to get data for the nearest expiring contract in that series. You can also send a timeframe, e.g. `XBU:monthly`. Timeframes are `daily`, `weekly`, `monthly`, `quarterly`, and `biquarterly`.
:param str filter: Generic table filter. Send JSON key/value pairs, such as `{\"key\": \"value\"}`. You can key on individual fields, and do more advanced querying on timestamps. See the [Timestamp Docs](https://www.bitmex.com/app/restAPI#Timestamp-Filters) for more details.
        :param str columns: Array of column names to fetch. If omitted, will return all columns. Note that this method will always return item keys, even when not specified, so you may receive more columns than you expect.
:param float count: Number of results to fetch.
:param float start: Starting point for results.
:param bool reverse: If true, will sort results newest first.
:param datetime start_time: Starting date filter for results.
:param datetime end_time: Ending date filter for results.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_get_orders_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.order_get_orders_with_http_info(**kwargs) # noqa: E501
return data
def order_get_orders_with_http_info(self, **kwargs): # noqa: E501
"""Get your orders. # noqa: E501
To get open orders only, send {\"open\": true} in the filter param. See <a href=\"http://www.onixs.biz/fix-dictionary/5.0.SP2/msgType_D_68.html\">the FIX Spec</a> for explanations of these fields. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_get_orders_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. Send a bare series (e.g. XBU) to get data for the nearest expiring contract in that series. You can also send a timeframe, e.g. `XBU:monthly`. Timeframes are `daily`, `weekly`, `monthly`, `quarterly`, and `biquarterly`.
:param str filter: Generic table filter. Send JSON key/value pairs, such as `{\"key\": \"value\"}`. You can key on individual fields, and do more advanced querying on timestamps. See the [Timestamp Docs](https://www.bitmex.com/app/restAPI#Timestamp-Filters) for more details.
        :param str columns: Array of column names to fetch. If omitted, will return all columns. Note that this method will always return item keys, even when not specified, so you may receive more columns than you expect.
:param float count: Number of results to fetch.
:param float start: Starting point for results.
:param bool reverse: If true, will sort results newest first.
:param datetime start_time: Starting date filter for results.
:param datetime end_time: Ending date filter for results.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['symbol', 'filter', 'columns', 'count', 'start', 'reverse', 'start_time', 'end_time'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_get_orders" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'symbol' in params:
query_params.append(('symbol', params['symbol'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'columns' in params:
query_params.append(('columns', params['columns'])) # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'reverse' in params:
query_params.append(('reverse', params['reverse'])) # noqa: E501
if 'start_time' in params:
query_params.append(('startTime', params['start_time'])) # noqa: E501
if 'end_time' in params:
query_params.append(('endTime', params['end_time'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey', 'apiNonce', 'apiSignature'] # noqa: E501
return self.api_client.call_api(
'/order', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
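    # A usage sketch for querying orders, following the docstring above: the
    # `filter` parameter is a JSON string, so '{"open": true}' restricts the
    # result to open orders. Assumes `api` is a configured OrderApi:
    #
    #     open_orders = api.order_get_orders(symbol='XBTUSD',
    #                                        filter='{"open": true}',
    #                                        reverse=True,
    #                                        count=100)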
def order_new(self, symbol, **kwargs): # noqa: E501
"""Create a new order. # noqa: E501
## Placing Orders This endpoint is used for placing orders. See individual fields below for more details on their use. #### Order Types All orders require a `symbol`. All other fields are optional except when otherwise specified. These are the valid `ordType`s: * **Limit**: The default order type. Specify an `orderQty` and `price`. * **Market**: A traditional Market order. A Market order will execute until filled or your bankruptcy price is reached, at which point it will cancel. * **MarketWithLeftOverAsLimit**: A market order that, after eating through the order book as far as permitted by available margin, will become a limit order. The difference between this type and `Market` only affects the behavior in thin books. Upon reaching the deepest possible price, if there is quantity left over, a `Market` order will cancel the remaining quantity. `MarketWithLeftOverAsLimit` will keep the remaining quantity in the books as a `Limit`. * **Stop**: A Stop Market order. Specify an `orderQty` and `stopPx`. When the `stopPx` is reached, the order will be entered into the book. * On sell orders, the order will trigger if the triggering price is lower than the `stopPx`. On buys, higher. * Note: Stop orders do not consume margin until triggered. Be sure that the required margin is available in your account so that it may trigger fully. * `Close` Stops don't require an `orderQty`. See Execution Instructions below. * **StopLimit**: Like a Stop Market, but enters a Limit order instead of a Market order. Specify an `orderQty`, `stopPx`, and `price`. * **MarketIfTouched**: Similar to a Stop, but triggers are done in the opposite direction. Useful for Take Profit orders. * **LimitIfTouched**: As above; use for Take Profit Limit orders. #### Execution Instructions The following `execInst`s are supported. If using multiple, separate with a comma (e.g. `LastPrice,Close`). * **ParticipateDoNotInitiate**: Also known as a Post-Only order. If this order would have executed on placement, it will cancel instead. * **MarkPrice, LastPrice, IndexPrice**: Used by stop and if-touched orders to determine the triggering price. Use only one. By default, `'MarkPrice'` is used. Also used for Pegged orders to define the value of `'LastPeg'`. * **ReduceOnly**: A `'ReduceOnly'` order can only reduce your position, not increase it. If you have a `'ReduceOnly'` limit order that rests in the order book while the position is reduced by other orders, then its order quantity will be amended down or canceled. If there are multiple `'ReduceOnly'` orders the least aggressive will be amended first. * **Close**: `'Close'` implies `'ReduceOnly'`. A `'Close'` order will cancel other active limit orders with the same side and symbol if the open quantity exceeds the current position. This is useful for stops: by canceling these orders, a `'Close'` Stop is ensured to have the margin required to execute, and can only execute up to the full size of your position. If `orderQty` is not specified, a `'Close'` order has an `orderQty` equal to your current position's size. * Note that a `Close` order without an `orderQty` requires a `side`, so that BitMEX knows if it should trigger above or below the `stopPx`. #### Linked Orders Linked Orders are an advanced capability. It is very powerful, but its use requires careful coding and testing. Please follow this document carefully and use the [Testnet Exchange](https://testnet.bitmex.com) while developing. BitMEX offers four advanced Linked Order types: * **OCO**: *One Cancels the Other*. 
A very flexible version of the standard Stop / Take Profit technique. Multiple orders may be linked together using a single `clOrdLinkID`. Send a `contingencyType` of `OneCancelsTheOther` on the orders. The first order that fully or partially executes (or activates for `Stop` orders) will cancel all other orders with the same `clOrdLinkID`. * **OTO**: *One Triggers the Other*. Send a `contingencyType` of `'OneTriggersTheOther'` on the primary order and then subsequent orders with the same `clOrdLinkID` will be not be triggered until the primary order fully executes. * **OUOA**: *One Updates the Other Absolute*. Send a `contingencyType` of `'OneUpdatesTheOtherAbsolute'` on the orders. Then as one order has a execution, other orders with the same `clOrdLinkID` will have their order quantity amended down by the execution quantity. * **OUOP**: *One Updates the Other Proportional*. Send a `contingencyType` of `'OneUpdatesTheOtherProportional'` on the orders. Then as one order has a execution, other orders with the same `clOrdLinkID` will have their order quantity reduced proportionally by the fill percentage. #### Trailing Stops You may use `pegPriceType` of `'TrailingStopPeg'` to create Trailing Stops. The pegged `stopPx` will move as the market moves away from the peg, and freeze as the market moves toward it. To use, combine with `pegOffsetValue` to set the `stopPx` of your order. The peg is set to the triggering price specified in the `execInst` (default `'MarkPrice'`). Use a negative offset for stop-sell and buy-if-touched orders. Requires `ordType`: `'Stop', 'StopLimit', 'MarketIfTouched', 'LimitIfTouched'`. #### Simple Quantities Send a `simpleOrderQty` instead of an `orderQty` to create an order denominated in the underlying currency. This is useful for opening up a position with 1 XBT of exposure without having to calculate how many contracts it is. #### Rate Limits See the [Bulk Order Documentation](#!/Order/Order_newBulk) if you need to place multiple orders at the same time. Bulk orders require fewer risk checks in the trading engine and thus are ratelimited at **1/10** the normal rate. You can also improve your reactivity to market movements while staying under your ratelimit by using the [Amend](#!/Order/Order_amend) and [Amend Bulk](#!/Order/Order_amendBulk) endpoints. This allows you to stay in the market and avoids the cancel/replace cycle. #### Tracking Your Orders If you want to keep track of order IDs yourself, set a unique `clOrdID` per order. This `clOrdID` will come back as a property on the order and any related executions (including on the WebSocket), and can be used to get or cancel the order. Max length is 36 characters. You can also change the `clOrdID` by amending an order, supplying an `origClOrdID`, and your desired new ID as the `clOrdID` param, like so: ``` # Amends an order's leavesQty, and updates its clOrdID to \"def-456\" PUT /api/v1/order {\"origClOrdID\": \"abc-123\", \"clOrdID\": \"def-456\", \"leavesQty\": 1000} ``` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_new(symbol, async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. e.g. 'XBTUSD'. (required)
:param str side: Order side. Valid options: Buy, Sell. Defaults to 'Buy' unless `orderQty` or `simpleOrderQty` is negative.
:param float simple_order_qty: Order quantity in units of the underlying instrument (i.e. Bitcoin).
:param float order_qty: Order quantity in units of the instrument (i.e. contracts).
:param float price: Optional limit price for 'Limit', 'StopLimit', and 'LimitIfTouched' orders.
:param float display_qty: Optional quantity to display in the book. Use 0 for a fully hidden order.
:param float stop_px: Optional trigger price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders. Use a price below the current price for stop-sell orders and buy-if-touched orders. Use `execInst` of 'MarkPrice' or 'LastPrice' to define the current price used for triggering.
:param str cl_ord_id: Optional Client Order ID. This clOrdID will come back on the order and any related executions.
:param str cl_ord_link_id: Optional Client Order Link ID for contingent orders.
:param float peg_offset_value: Optional trailing offset from the current price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders; use a negative offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price for 'Pegged' orders.
:param str peg_price_type: Optional peg price type. Valid options: LastPeg, MidPricePeg, MarketPeg, PrimaryPeg, TrailingStopPeg.
:param str ord_type: Order type. Valid options: Market, Limit, Stop, StopLimit, MarketIfTouched, LimitIfTouched, MarketWithLeftOverAsLimit, Pegged. Defaults to 'Limit' when `price` is specified. Defaults to 'Stop' when `stopPx` is specified. Defaults to 'StopLimit' when `price` and `stopPx` are specified.
:param str time_in_force: Time in force. Valid options: Day, GoodTillCancel, ImmediateOrCancel, FillOrKill. Defaults to 'GoodTillCancel' for 'Limit', 'StopLimit', 'LimitIfTouched', and 'MarketWithLeftOverAsLimit' orders.
:param str exec_inst: Optional execution instructions. Valid options: ParticipateDoNotInitiate, AllOrNone, MarkPrice, IndexPrice, LastPrice, Close, ReduceOnly, Fixed. 'AllOrNone' instruction requires `displayQty` to be 0. 'MarkPrice', 'IndexPrice' or 'LastPrice' instruction valid for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders.
:param str contingency_type: Optional contingency type for use with `clOrdLinkID`. Valid options: OneCancelsTheOther, OneTriggersTheOther, OneUpdatesTheOtherAbsolute, OneUpdatesTheOtherProportional.
:param str text: Optional order annotation. e.g. 'Take profit'.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_new_with_http_info(symbol, **kwargs) # noqa: E501
else:
(data) = self.order_new_with_http_info(symbol, **kwargs) # noqa: E501
return data
def order_new_with_http_info(self, symbol, **kwargs): # noqa: E501
"""Create a new order. # noqa: E501
## Placing Orders This endpoint is used for placing orders. See individual fields below for more details on their use. #### Order Types All orders require a `symbol`. All other fields are optional except when otherwise specified. These are the valid `ordType`s: * **Limit**: The default order type. Specify an `orderQty` and `price`. * **Market**: A traditional Market order. A Market order will execute until filled or your bankruptcy price is reached, at which point it will cancel. * **MarketWithLeftOverAsLimit**: A market order that, after eating through the order book as far as permitted by available margin, will become a limit order. The difference between this type and `Market` only affects the behavior in thin books. Upon reaching the deepest possible price, if there is quantity left over, a `Market` order will cancel the remaining quantity. `MarketWithLeftOverAsLimit` will keep the remaining quantity in the books as a `Limit`. * **Stop**: A Stop Market order. Specify an `orderQty` and `stopPx`. When the `stopPx` is reached, the order will be entered into the book. * On sell orders, the order will trigger if the triggering price is lower than the `stopPx`. On buys, higher. * Note: Stop orders do not consume margin until triggered. Be sure that the required margin is available in your account so that it may trigger fully. * `Close` Stops don't require an `orderQty`. See Execution Instructions below. * **StopLimit**: Like a Stop Market, but enters a Limit order instead of a Market order. Specify an `orderQty`, `stopPx`, and `price`. * **MarketIfTouched**: Similar to a Stop, but triggers are done in the opposite direction. Useful for Take Profit orders. * **LimitIfTouched**: As above; use for Take Profit Limit orders. #### Execution Instructions The following `execInst`s are supported. If using multiple, separate with a comma (e.g. `LastPrice,Close`). * **ParticipateDoNotInitiate**: Also known as a Post-Only order. If this order would have executed on placement, it will cancel instead. * **MarkPrice, LastPrice, IndexPrice**: Used by stop and if-touched orders to determine the triggering price. Use only one. By default, `'MarkPrice'` is used. Also used for Pegged orders to define the value of `'LastPeg'`. * **ReduceOnly**: A `'ReduceOnly'` order can only reduce your position, not increase it. If you have a `'ReduceOnly'` limit order that rests in the order book while the position is reduced by other orders, then its order quantity will be amended down or canceled. If there are multiple `'ReduceOnly'` orders the least aggressive will be amended first. * **Close**: `'Close'` implies `'ReduceOnly'`. A `'Close'` order will cancel other active limit orders with the same side and symbol if the open quantity exceeds the current position. This is useful for stops: by canceling these orders, a `'Close'` Stop is ensured to have the margin required to execute, and can only execute up to the full size of your position. If `orderQty` is not specified, a `'Close'` order has an `orderQty` equal to your current position's size. * Note that a `Close` order without an `orderQty` requires a `side`, so that BitMEX knows if it should trigger above or below the `stopPx`. #### Linked Orders Linked Orders are an advanced capability. It is very powerful, but its use requires careful coding and testing. Please follow this document carefully and use the [Testnet Exchange](https://testnet.bitmex.com) while developing. BitMEX offers four advanced Linked Order types: * **OCO**: *One Cancels the Other*. 
A very flexible version of the standard Stop / Take Profit technique. Multiple orders may be linked together using a single `clOrdLinkID`. Send a `contingencyType` of `OneCancelsTheOther` on the orders. The first order that fully or partially executes (or activates for `Stop` orders) will cancel all other orders with the same `clOrdLinkID`. * **OTO**: *One Triggers the Other*. Send a `contingencyType` of `'OneTriggersTheOther'` on the primary order and then subsequent orders with the same `clOrdLinkID` will be not be triggered until the primary order fully executes. * **OUOA**: *One Updates the Other Absolute*. Send a `contingencyType` of `'OneUpdatesTheOtherAbsolute'` on the orders. Then as one order has a execution, other orders with the same `clOrdLinkID` will have their order quantity amended down by the execution quantity. * **OUOP**: *One Updates the Other Proportional*. Send a `contingencyType` of `'OneUpdatesTheOtherProportional'` on the orders. Then as one order has a execution, other orders with the same `clOrdLinkID` will have their order quantity reduced proportionally by the fill percentage. #### Trailing Stops You may use `pegPriceType` of `'TrailingStopPeg'` to create Trailing Stops. The pegged `stopPx` will move as the market moves away from the peg, and freeze as the market moves toward it. To use, combine with `pegOffsetValue` to set the `stopPx` of your order. The peg is set to the triggering price specified in the `execInst` (default `'MarkPrice'`). Use a negative offset for stop-sell and buy-if-touched orders. Requires `ordType`: `'Stop', 'StopLimit', 'MarketIfTouched', 'LimitIfTouched'`. #### Simple Quantities Send a `simpleOrderQty` instead of an `orderQty` to create an order denominated in the underlying currency. This is useful for opening up a position with 1 XBT of exposure without having to calculate how many contracts it is. #### Rate Limits See the [Bulk Order Documentation](#!/Order/Order_newBulk) if you need to place multiple orders at the same time. Bulk orders require fewer risk checks in the trading engine and thus are ratelimited at **1/10** the normal rate. You can also improve your reactivity to market movements while staying under your ratelimit by using the [Amend](#!/Order/Order_amend) and [Amend Bulk](#!/Order/Order_amendBulk) endpoints. This allows you to stay in the market and avoids the cancel/replace cycle. #### Tracking Your Orders If you want to keep track of order IDs yourself, set a unique `clOrdID` per order. This `clOrdID` will come back as a property on the order and any related executions (including on the WebSocket), and can be used to get or cancel the order. Max length is 36 characters. You can also change the `clOrdID` by amending an order, supplying an `origClOrdID`, and your desired new ID as the `clOrdID` param, like so: ``` # Amends an order's leavesQty, and updates its clOrdID to \"def-456\" PUT /api/v1/order {\"origClOrdID\": \"abc-123\", \"clOrdID\": \"def-456\", \"leavesQty\": 1000} ``` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_new_with_http_info(symbol, async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. e.g. 'XBTUSD'. (required)
:param str side: Order side. Valid options: Buy, Sell. Defaults to 'Buy' unless `orderQty` or `simpleOrderQty` is negative.
:param float simple_order_qty: Order quantity in units of the underlying instrument (i.e. Bitcoin).
:param float order_qty: Order quantity in units of the instrument (i.e. contracts).
:param float price: Optional limit price for 'Limit', 'StopLimit', and 'LimitIfTouched' orders.
:param float display_qty: Optional quantity to display in the book. Use 0 for a fully hidden order.
:param float stop_px: Optional trigger price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders. Use a price below the current price for stop-sell orders and buy-if-touched orders. Use `execInst` of 'MarkPrice' or 'LastPrice' to define the current price used for triggering.
:param str cl_ord_id: Optional Client Order ID. This clOrdID will come back on the order and any related executions.
:param str cl_ord_link_id: Optional Client Order Link ID for contingent orders.
:param float peg_offset_value: Optional trailing offset from the current price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders; use a negative offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price for 'Pegged' orders.
:param str peg_price_type: Optional peg price type. Valid options: LastPeg, MidPricePeg, MarketPeg, PrimaryPeg, TrailingStopPeg.
:param str ord_type: Order type. Valid options: Market, Limit, Stop, StopLimit, MarketIfTouched, LimitIfTouched, MarketWithLeftOverAsLimit, Pegged. Defaults to 'Limit' when `price` is specified. Defaults to 'Stop' when `stopPx` is specified. Defaults to 'StopLimit' when `price` and `stopPx` are specified.
:param str time_in_force: Time in force. Valid options: Day, GoodTillCancel, ImmediateOrCancel, FillOrKill. Defaults to 'GoodTillCancel' for 'Limit', 'StopLimit', 'LimitIfTouched', and 'MarketWithLeftOverAsLimit' orders.
:param str exec_inst: Optional execution instructions. Valid options: ParticipateDoNotInitiate, AllOrNone, MarkPrice, IndexPrice, LastPrice, Close, ReduceOnly, Fixed. 'AllOrNone' instruction requires `displayQty` to be 0. 'MarkPrice', 'IndexPrice' or 'LastPrice' instruction valid for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders.
:param str contingency_type: Optional contingency type for use with `clOrdLinkID`. Valid options: OneCancelsTheOther, OneTriggersTheOther, OneUpdatesTheOtherAbsolute, OneUpdatesTheOtherProportional.
:param str text: Optional order annotation. e.g. 'Take profit'.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['symbol', 'side', 'simple_order_qty', 'order_qty', 'price', 'display_qty', 'stop_px', 'cl_ord_id', 'cl_ord_link_id', 'peg_offset_value', 'peg_price_type', 'ord_type', 'time_in_force', 'exec_inst', 'contingency_type', 'text'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_new" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'symbol' is set
if ('symbol' not in params or
params['symbol'] is None):
raise ValueError("Missing the required parameter `symbol` when calling `order_new`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'symbol' in params:
form_params.append(('symbol', params['symbol'])) # noqa: E501
if 'side' in params:
form_params.append(('side', params['side'])) # noqa: E501
if 'simple_order_qty' in params:
form_params.append(('simpleOrderQty', params['simple_order_qty'])) # noqa: E501
if 'order_qty' in params:
form_params.append(('orderQty', params['order_qty'])) # noqa: E501
if 'price' in params:
form_params.append(('price', params['price'])) # noqa: E501
if 'display_qty' in params:
form_params.append(('displayQty', params['display_qty'])) # noqa: E501
if 'stop_px' in params:
form_params.append(('stopPx', params['stop_px'])) # noqa: E501
if 'cl_ord_id' in params:
form_params.append(('clOrdID', params['cl_ord_id'])) # noqa: E501
if 'cl_ord_link_id' in params:
form_params.append(('clOrdLinkID', params['cl_ord_link_id'])) # noqa: E501
if 'peg_offset_value' in params:
form_params.append(('pegOffsetValue', params['peg_offset_value'])) # noqa: E501
if 'peg_price_type' in params:
form_params.append(('pegPriceType', params['peg_price_type'])) # noqa: E501
if 'ord_type' in params:
form_params.append(('ordType', params['ord_type'])) # noqa: E501
if 'time_in_force' in params:
form_params.append(('timeInForce', params['time_in_force'])) # noqa: E501
if 'exec_inst' in params:
form_params.append(('execInst', params['exec_inst'])) # noqa: E501
if 'contingency_type' in params:
form_params.append(('contingencyType', params['contingency_type'])) # noqa: E501
if 'text' in params:
form_params.append(('text', params['text'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey', 'apiNonce', 'apiSignature'] # noqa: E501
return self.api_client.call_api(
'/order', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
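    # Usage sketches for order placement, assuming `api` is a configured
    # OrderApi; symbols, prices and quantities are illustrative only:
    #
    #     # Post-only limit bid (cancels instead of taking liquidity).
    #     api.order_new(symbol='XBTUSD', order_qty=100, price=9000,
    #                   exec_inst='ParticipateDoNotInitiate')
    #
    #     # Trailing stop-sell pegged 50 USD below the mark price, per the
    #     # "Trailing Stops" section of the docstring above.
    #     api.order_new(symbol='XBTUSD', side='Sell', order_qty=100,
    #                   ord_type='Stop', peg_price_type='TrailingStopPeg',
    #                   peg_offset_value=-50, exec_inst='MarkPrice')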
def order_new_bulk(self, **kwargs): # noqa: E501
"""Create multiple new orders for the same symbol. # noqa: E501
This endpoint is used for placing bulk orders. Valid order types are Market, Limit, Stop, StopLimit, MarketIfTouched, LimitIfTouched, MarketWithLeftOverAsLimit, and Pegged. Each individual order object in the array should have the same properties as an individual POST /order call. This endpoint is much faster for getting many orders into the book at once. Because it reduces load on BitMEX systems, this endpoint is ratelimited at `ceil(0.1 * orders)`. Submitting 10 orders via a bulk order call will only count as 1 request, 15 as 2, 32 as 4, and so on. For now, only `application/json` is supported on this endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_new_bulk(async=True)
>>> result = thread.get()
:param async bool
:param str orders: An array of orders.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_new_bulk_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.order_new_bulk_with_http_info(**kwargs) # noqa: E501
return data
def order_new_bulk_with_http_info(self, **kwargs): # noqa: E501
"""Create multiple new orders for the same symbol. # noqa: E501
This endpoint is used for placing bulk orders. Valid order types are Market, Limit, Stop, StopLimit, MarketIfTouched, LimitIfTouched, MarketWithLeftOverAsLimit, and Pegged. Each individual order object in the array should have the same properties as an individual POST /order call. This endpoint is much faster for getting many orders into the book at once. Because it reduces load on BitMEX systems, this endpoint is ratelimited at `ceil(0.1 * orders)`. Submitting 10 orders via a bulk order call will only count as 1 request, 15 as 2, 32 as 4, and so on. For now, only `application/json` is supported on this endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_new_bulk_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str orders: An array of orders.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['orders'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_new_bulk" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'orders' in params:
form_params.append(('orders', params['orders'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey', 'apiNonce', 'apiSignature'] # noqa: E501
return self.api_client.call_api(
'/order/bulk', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
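    # A usage sketch for bulk placement, per the docstring above: only
    # application/json is accepted and `orders` is a JSON string holding an
    # array of order objects (fields as in a single POST /order call).
    # Assumes `api` is a configured OrderApi; the orders are illustrative:
    #
    #     import json
    #     api.order_new_bulk(orders=json.dumps([
    #         {'symbol': 'XBTUSD', 'orderQty': 100, 'price': 9000},
    #         {'symbol': 'XBTUSD', 'orderQty': 100, 'price': 8900},
    #     ]))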
"""
BitMEX API
## REST API for the BitMEX Trading Platform [View Changelog](/app/apiChangelog) ---- #### Getting Started Base URI: [https://www.bitmex.com/api/v1](/api/v1) ##### Fetching Data All REST endpoints are documented below. You can try out any query right from this interface. Most table queries accept `count`, `start`, and `reverse` params. Set `reverse=true` to get rows newest-first. Additional documentation regarding filters, timestamps, and authentication is available in [the main API documentation](/app/restAPI). *All* table data is available via the [Websocket](/app/wsAPI). We highly recommend using the socket if you want to have the quickest possible data without being subject to ratelimits. ##### Return Types By default, all data is returned as JSON. Send `?_format=csv` to get CSV data or `?_format=xml` to get XML data. ##### Trade Data Queries *This is only a small subset of what is available, to get you started.* Fill in the parameters and click the `Try it out!` button to try any of these queries. * [Pricing Data](#!/Quote/Quote_get) * [Trade Data](#!/Trade/Trade_get) * [OrderBook Data](#!/OrderBook/OrderBook_getL2) * [Settlement Data](#!/Settlement/Settlement_get) * [Exchange Statistics](#!/Stats/Stats_history) Every function of the BitMEX.com platform is exposed here and documented. Many more functions are available. ##### Swagger Specification [⇩ Download Swagger JSON](swagger.json) ---- ## All API Endpoints Click to expand a section. # noqa: E501
OpenAPI spec version: 1.2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re
import six
from bitmex_swagger.api_client import ApiClient
class OrderApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
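    # A minimal construction sketch, assuming the package's ApiClient has been
    # configured elsewhere with valid BitMEX credentials (credential wiring is
    # not shown in this module and is an assumption here):
    #
    #     from bitmex_swagger.api_client import ApiClient
    #     api = OrderApi(ApiClient())   # OrderApi() would use a default, unconfigured client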
    def order_amend(self, **kwargs):
        """Amend the quantity or price of an open order.  # noqa: E501
Send an `orderID` or `origClOrdID` to identify the order you wish to amend. Both order quantity and price can be amended. Only one `qty` field can be used to amend. Use the `leavesQty` field to specify how much of the order you wish to remain open. This can be useful if you want to adjust your position's delta by a certain amount, regardless of how much of the order has already filled. > A `leavesQty` can be used to make a \"Filled\" order live again, if it is received within 60 seconds of the fill. Use the `simpleOrderQty` and `simpleLeavesQty` fields to specify order size in Bitcoin, rather than contracts. These fields will round up to the nearest contract. Like order placement, amending can be done in bulk. Simply send a request to `PUT /api/v1/order/bulk` with a JSON body of the shape: `{\"orders\": [{...}, {...}]}`, each object containing the fields used in this endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_amend(async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID
:param str orig_cl_ord_id: Client Order ID. See POST /order.
:param str cl_ord_id: Optional new Client Order ID, requires `origClOrdID`.
:param float simple_order_qty: Optional order quantity in units of the underlying instrument (i.e. Bitcoin).
:param float order_qty: Optional order quantity in units of the instrument (i.e. contracts).
:param float simple_leaves_qty: Optional leaves quantity in units of the underlying instrument (i.e. Bitcoin). Useful for amending partially filled orders.
:param float leaves_qty: Optional leaves quantity in units of the instrument (i.e. contracts). Useful for amending partially filled orders.
:param float price: Optional limit price for 'Limit', 'StopLimit', and 'LimitIfTouched' orders.
:param float stop_px: Optional trigger price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders. Use a price below the current price for stop-sell orders and buy-if-touched orders.
:param float peg_offset_value: Optional trailing offset from the current price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders; use a negative offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price for 'Pegged' orders.
:param str text: Optional amend annotation. e.g. 'Adjust skew'.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_amend_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.order_amend_with_http_info(**kwargs) # noqa: E501
return data
def order_amend_with_http_info(self, **kwargs): # noqa: E501
"""Amend the quantity or price of an open order. # noqa: E501
Send an `orderID` or `origClOrdID` to identify the order you wish to amend. Both order quantity and price can be amended. Only one `qty` field can be used to amend. Use the `leavesQty` field to specify how much of the order you wish to remain open. This can be useful if you want to adjust your position's delta by a certain amount, regardless of how much of the order has already filled. > A `leavesQty` can be used to make a \"Filled\" order live again, if it is received within 60 seconds of the fill. Use the `simpleOrderQty` and `simpleLeavesQty` fields to specify order size in Bitcoin, rather than contracts. These fields will round up to the nearest contract. Like order placement, amending can be done in bulk. Simply send a request to `PUT /api/v1/order/bulk` with a JSON body of the shape: `{\"orders\": [{...}, {...}]}`, each object containing the fields used in this endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_amend_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID
:param str orig_cl_ord_id: Client Order ID. See POST /order.
:param str cl_ord_id: Optional new Client Order ID, requires `origClOrdID`.
:param float simple_order_qty: Optional order quantity in units of the underlying instrument (i.e. Bitcoin).
:param float order_qty: Optional order quantity in units of the instrument (i.e. contracts).
:param float simple_leaves_qty: Optional leaves quantity in units of the underlying instrument (i.e. Bitcoin). Useful for amending partially filled orders.
:param float leaves_qty: Optional leaves quantity in units of the instrument (i.e. contracts). Useful for amending partially filled orders.
:param float price: Optional limit price for 'Limit', 'StopLimit', and 'LimitIfTouched' orders.
:param float stop_px: Optional trigger price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders. Use a price below the current price for stop-sell orders and buy-if-touched orders.
:param float peg_offset_value: Optional trailing offset from the current price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders; use a negative offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price for 'Pegged' orders.
:param str text: Optional amend annotation. e.g. 'Adjust skew'.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
        all_params = ['order_id', 'orig_cl_ord_id', 'cl_ord_id', 'simple_order_qty', 'order_qty', 'simple_leaves_qty', 'leaves_qty', 'price', 'stop_px', 'peg_offset_value', 'text']
        all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_amend" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'order_id' in params:
            form_params.append(('orderID', params['order_id']))
        if 'orig_cl_ord_id' in params:
            form_params.append(('origClOrdID', params['orig_cl_ord_id']))
        if 'cl_ord_id' in params:
            form_params.append(('clOrdID', params['cl_ord_id']))
        if 'simple_order_qty' in params:
            form_params.append(('simpleOrderQty', params['simple_order_qty']))
        if 'order_qty' in params:
            form_params.append(('orderQty', params['order_qty']))
        if 'simple_leaves_qty' in params:
            form_params.append(('simpleLeavesQty', params['simple_leaves_qty']))
        if 'leaves_qty' in params:
            form_params.append(('leavesQty', params['leaves_qty']))
        if 'price' in params:
            form_params.append(('price', params['price']))
        if 'stop_px' in params:
            form_params.append(('stopPx', params['stop_px']))
        if 'peg_offset_value' in params:
            form_params.append(('pegOffsetValue', params['peg_offset_value']))
        if 'text' in params:
form_params.append(('text', params['text']))
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        header_params['Content-Type'] = self.api_client.select_header_content_type(
            ['application/json', 'application/x-www-form-urlencoded'])
auth_settings = ['apiKey', 'apiNonce', 'apiSignature']
return self.api_client.call_api(
'/order', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
            response_type='Order',
            auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
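    # A usage sketch for amending, per the docstring above: identify the order
    # via orderID or origClOrdID and send only the fields to change. Assumes
    # `api` is a configured OrderApi; the IDs, quantities and price below are
    # illustrative only:
    #
    #     api.order_amend(orig_cl_ord_id='abc-123', cl_ord_id='def-456',
    #                     leaves_qty=1000)                    # keep 1000 contracts open
    #     api.order_amend(order_id='<orderID>', price=9100)   # move the price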
    def order_amend_bulk(self, **kwargs):
        """Amend multiple orders for the same symbol.  # noqa: E501
Similar to POST /amend, but with multiple orders. `application/json` only. Ratelimited at 10%. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_amend_bulk(async=True)
>>> result = thread.get()
:param async bool
:param str orders: An array of orders.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
            return self.order_amend_bulk_with_http_info(**kwargs)
        else:
            (data) = self.order_amend_bulk_with_http_info(**kwargs)
            return data
    def order_amend_bulk_with_http_info(self, **kwargs):
        """Amend multiple orders for the same symbol.  # noqa: E501
Similar to POST /amend, but with multiple orders. `application/json` only. Ratelimited at 10%. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_amend_bulk_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str orders: An array of orders.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
        all_params = ['orders']
        all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_amend_bulk" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'orders' in params:
form_params.append(('orders', params['orders']))
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        header_params['Content-Type'] = self.api_client.select_header_content_type(
            ['application/json', 'application/x-www-form-urlencoded'])
auth_settings = ['apiKey', 'apiNonce', 'apiSignature']
return self.api_client.call_api(
'/order/bulk', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
            response_type='list[Order]',
            auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
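    # A usage sketch for bulk amend, mirroring the bulk-placement call: `orders`
    # is a JSON string whose objects carry the same fields as a single amend
    # (assumed camelCase keys, as in the REST body). `api` is assumed to be a
    # configured OrderApi and the IDs/values are illustrative:
    #
    #     import json
    #     api.order_amend_bulk(orders=json.dumps([
    #         {'orderID': '<uuid-1>', 'price': 9050},
    #         {'orderID': '<uuid-2>', 'leavesQty': 500},
    #     ]))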
    def order_cancel(self, **kwargs):
        """Cancel order(s). Send multiple order IDs to cancel in bulk.  # noqa: E501
Either an orderID or a clOrdID must be provided. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel(async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID(s).
:param str cl_ord_id: Client Order ID(s). See POST /order.
:param str text: Optional cancellation annotation. e.g. 'Spread Exceeded'.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
            return self.order_cancel_with_http_info(**kwargs)
        else:
            (data) = self.order_cancel_with_http_info(**kwargs)
            return data
    def order_cancel_with_http_info(self, **kwargs):
        """Cancel order(s). Send multiple order IDs to cancel in bulk.  # noqa: E501
Either an orderID or a clOrdID must be provided. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str order_id: Order ID(s).
:param str cl_ord_id: Client Order ID(s). See POST /order.
:param str text: Optional cancellation annotation. e.g. 'Spread Exceeded'.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
        all_params = ['order_id', 'cl_ord_id', 'text']
        all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_cancel" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'order_id' in params:
            form_params.append(('orderID', params['order_id']))
        if 'cl_ord_id' in params:
            form_params.append(('clOrdID', params['cl_ord_id']))
        if 'text' in params:
form_params.append(('text', params['text']))
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        header_params['Content-Type'] = self.api_client.select_header_content_type(
            ['application/json', 'application/x-www-form-urlencoded'])
auth_settings = ['apiKey', 'apiNonce', 'apiSignature']
return self.api_client.call_api(
'/order', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]', auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
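    # Usage sketch (hedged): cancelling by client order ID instead of orderID, as
    # the docstring allows. `api` is an assumed, already-authenticated instance of
    # this Order API class; the IDs and annotation are illustrative only.
    #
    #   cancelled = api.order_cancel(cl_ord_id='abc-123', text='Spread Exceeded')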
    def order_cancel_all(self, **kwargs):
        """Cancels all of your orders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_all(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Optional symbol. If provided, only cancels orders for that symbol.
:param str filter: Optional filter for cancellation. Use to only cancel some orders, e.g. `{\"side\": \"Buy\"}`.
:param str text: Optional cancellation annotation. e.g. 'Spread Exceeded'
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
            return self.order_cancel_all_with_http_info(**kwargs)
        else:
            (data) = self.order_cancel_all_with_http_info(**kwargs)
            return data
    def order_cancel_all_with_http_info(self, **kwargs):
        """Cancels all of your orders. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_all_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Optional symbol. If provided, only cancels orders for that symbol.
:param str filter: Optional filter for cancellation. Use to only cancel some orders, e.g. `{\"side\": \"Buy\"}`.
:param str text: Optional cancellation annotation. e.g. 'Spread Exceeded'
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
        all_params = ['symbol', 'filter', 'text']
        all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_cancel_all" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'symbol' in params:
            form_params.append(('symbol', params['symbol']))
        if 'filter' in params:
            form_params.append(('filter', params['filter']))
        if 'text' in params:
form_params.append(('text', params['text']))
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json', 'application/x-www-form-urlencoded'])
auth_settings = ['apiKey', 'apiNonce', 'apiSignature']
return self.api_client.call_api(
'/order/all', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]', auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
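    # Usage sketch (hedged): cancelling only one side of the book through the JSON
    # `filter` parameter described in the docstring. `api` is an assumed, configured
    # instance of this class.
    #
    #   import json
    #   api.order_cancel_all(symbol='XBTUSD', filter=json.dumps({'side': 'Buy'}))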
    def order_cancel_all_after(self, timeout, **kwargs):
        """Automatically cancel all your orders after a specified timeout. # noqa: E501
Useful as a dead-man's switch to ensure your orders are canceled in case of an outage. If called repeatedly, the existing offset will be canceled and a new one will be inserted in its place. Example usage: call this route at 15s intervals with an offset of 60000 (60s). If this route is not called within 60 seconds, all your orders will be automatically canceled. This is also available via [WebSocket](https://www.bitmex.com/app/wsAPI#Dead-Mans-Switch-Auto-Cancel). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_all_after(timeout, async=True)
>>> result = thread.get()
:param async bool
:param float timeout: Timeout in ms. Set to 0 to cancel this timer. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_cancel_all_after_with_http_info(timeout, **kwargs) # noqa: E501
else:
(data) = self.order_cancel_all_after_with_http_info(timeout, **kwargs) # noqa: E501
return data
def order_cancel_all_after_with_http_info(self, timeout, **kwargs): # noqa: E501
"""Automatically cancel all your orders after a specified timeout. # noqa: E501
Useful as a dead-man's switch to ensure your orders are canceled in case of an outage. If called repeatedly, the existing offset will be canceled and a new one will be inserted in its place. Example usage: call this route at 15s intervals with an offset of 60000 (60s). If this route is not called within 60 seconds, all your orders will be automatically canceled. This is also available via [WebSocket](https://www.bitmex.com/app/wsAPI#Dead-Mans-Switch-Auto-Cancel). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_cancel_all_after_with_http_info(timeout, async=True)
>>> result = thread.get()
:param async bool
:param float timeout: Timeout in ms. Set to 0 to cancel this timer. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
        all_params = ['timeout']
        all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_cancel_all_after" % key
)
params[key] = val
del params['kwargs']
if ('timeout' not in params or
params['timeout'] is None):
raise ValueError("Missing the required parameter `timeout` when calling `order_cancel_all_after`")
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'timeout' in params:
form_params.append(('timeout', params['timeout']))
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json', 'application/x-www-form-urlencoded'])
auth_settings = ['apiKey', 'apiNonce', 'apiSignature']
return self.api_client.call_api(
'/order/cancelAllAfter', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
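    # Usage sketch (hedged): re-arming the dead-man's switch on a timer, per the
    # docstring's suggestion of a 60 s offset refreshed every 15 s. `api` is an
    # assumed, authenticated instance of this class and `trading` is an
    # illustrative loop flag, not part of the generated client.
    #
    #   import time
    #   while trading:
    #       api.order_cancel_all_after(timeout=60000)   # keep orders alive for 60 s
    #       time.sleep(15)
    #   api.order_cancel_all_after(timeout=0)           # disarm on clean shutdown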
    def order_close_position(self, symbol, **kwargs):
        """Close a position. [Deprecated, use POST /order with execInst: 'Close'] # noqa: E501
If no `price` is specified, a market order will be submitted to close the whole of your position. This will also close all other open orders in this symbol. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_close_position(symbol, async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Symbol of position to close. (required)
:param float price: Optional limit price.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
            return self.order_close_position_with_http_info(symbol, **kwargs)
        else:
            (data) = self.order_close_position_with_http_info(symbol, **kwargs)
            return data
    def order_close_position_with_http_info(self, symbol, **kwargs):
        """Close a position. [Deprecated, use POST /order with execInst: 'Close'] # noqa: E501
If no `price` is specified, a market order will be submitted to close the whole of your position. This will also close all other open orders in this symbol. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_close_position_with_http_info(symbol, async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Symbol of position to close. (required)
:param float price: Optional limit price.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
        all_params = ['symbol', 'price']
        all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_close_position" % key
)
params[key] = val
del params['kwargs']
if ('symbol' not in params or
params['symbol'] is None):
raise ValueError("Missing the required parameter `symbol` when calling `order_close_position`")
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'symbol' in params:
            form_params.append(('symbol', params['symbol']))
        if 'price' in params:
form_params.append(('price', params['price']))
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json', 'application/x-www-form-urlencoded'])
auth_settings = ['apiKey', 'apiNonce', 'apiSignature']
return self.api_client.call_api(
'/order/closePosition', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order', auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
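    # Usage sketch (hedged): the docstring marks this endpoint as deprecated in
    # favour of POST /order with execInst 'Close'. A roughly equivalent call through
    # this client might look like the line below; `api` is an assumed instance and
    # the parameters are illustrative only -- see the POST /order docstring for the
    # exact semantics of 'Close'.
    #
    #   api.order_new(symbol='XBTUSD', ord_type='Market', exec_inst='Close')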
    def order_get_orders(self, **kwargs):
        """Get your orders. # noqa: E501
To get open orders only, send {\"open\": true} in the filter param. See <a href=\"http://www.onixs.biz/fix-dictionary/5.0.SP2/msgType_D_68.html\">the FIX Spec</a> for explanations of these fields. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_get_orders(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. Send a bare series (e.g. XBU) to get data for the nearest expiring contract in that series. You can also send a timeframe, e.g. `XBU:monthly`. Timeframes are `daily`, `weekly`, `monthly`, `quarterly`, and `biquarterly`.
:param str filter: Generic table filter. Send JSON key/value pairs, such as `{\"key\": \"value\"}`. You can key on individual fields, and do more advanced querying on timestamps. See the [Timestamp Docs](https://www.bitmex.com/app/restAPI#Timestamp-Filters) for more details.
:param str columns: Array of column names to fetch. If omitted, will return all columns. Note that this method will always return item keys, even when not specified, so you may receive more columns than you expect.
:param float count: Number of results to fetch.
:param float start: Starting point for results.
:param bool reverse: If true, will sort results newest first.
:param datetime start_time: Starting date filter for results.
:param datetime end_time: Ending date filter for results.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
            return self.order_get_orders_with_http_info(**kwargs)
        else:
            (data) = self.order_get_orders_with_http_info(**kwargs)
            return data
    def order_get_orders_with_http_info(self, **kwargs):
        """Get your orders. # noqa: E501
To get open orders only, send {\"open\": true} in the filter param. See <a href=\"http://www.onixs.biz/fix-dictionary/5.0.SP2/msgType_D_68.html\">the FIX Spec</a> for explanations of these fields. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_get_orders_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. Send a bare series (e.g. XBU) to get data for the nearest expiring contract in that series. You can also send a timeframe, e.g. `XBU:monthly`. Timeframes are `daily`, `weekly`, `monthly`, `quarterly`, and `biquarterly`.
:param str filter: Generic table filter. Send JSON key/value pairs, such as `{\"key\": \"value\"}`. You can key on individual fields, and do more advanced querying on timestamps. See the [Timestamp Docs](https://www.bitmex.com/app/restAPI#Timestamp-Filters) for more details.
:param str columns: Array of column names to fetch. If omitted, will return all columns. Note that this method will always return item keys, even when not specified, so you may receive more columns than you expect.
:param float count: Number of results to fetch.
:param float start: Starting point for results.
:param bool reverse: If true, will sort results newest first.
:param datetime start_time: Starting date filter for results.
:param datetime end_time: Ending date filter for results.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
        all_params = ['symbol', 'filter', 'columns', 'count', 'start', 'reverse', 'start_time', 'end_time']
        all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_get_orders" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'symbol' in params:
            query_params.append(('symbol', params['symbol']))
        if 'filter' in params:
            query_params.append(('filter', params['filter']))
        if 'columns' in params:
            query_params.append(('columns', params['columns']))
        if 'count' in params:
            query_params.append(('count', params['count']))
        if 'start' in params:
            query_params.append(('start', params['start']))
        if 'reverse' in params:
            query_params.append(('reverse', params['reverse']))
        if 'start_time' in params:
            query_params.append(('startTime', params['start_time']))
        if 'end_time' in params:
query_params.append(('endTime', params['end_time']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json', 'application/x-www-form-urlencoded'])
auth_settings = ['apiKey', 'apiNonce', 'apiSignature']
return self.api_client.call_api(
'/order', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]', auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
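    # Usage sketch (hedged): fetching only open orders, newest first, using the JSON
    # `filter` described in the docstring. `api` is an assumed, configured instance.
    #
    #   import json
    #   open_orders = api.order_get_orders(symbol='XBTUSD',
    #                                      filter=json.dumps({'open': True}),
    #                                      reverse=True, count=100)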
    def order_new(self, symbol, **kwargs):
        """Create a new order. # noqa: E501
## Placing Orders This endpoint is used for placing orders. See individual fields below for more details on their use. #### Order Types All orders require a `symbol`. All other fields are optional except when otherwise specified. These are the valid `ordType`s: * **Limit**: The default order type. Specify an `orderQty` and `price`. * **Market**: A traditional Market order. A Market order will execute until filled or your bankruptcy price is reached, at which point it will cancel. * **MarketWithLeftOverAsLimit**: A market order that, after eating through the order book as far as permitted by available margin, will become a limit order. The difference between this type and `Market` only affects the behavior in thin books. Upon reaching the deepest possible price, if there is quantity left over, a `Market` order will cancel the remaining quantity. `MarketWithLeftOverAsLimit` will keep the remaining quantity in the books as a `Limit`. * **Stop**: A Stop Market order. Specify an `orderQty` and `stopPx`. When the `stopPx` is reached, the order will be entered into the book. * On sell orders, the order will trigger if the triggering price is lower than the `stopPx`. On buys, higher. * Note: Stop orders do not consume margin until triggered. Be sure that the required margin is available in your account so that it may trigger fully. * `Close` Stops don't require an `orderQty`. See Execution Instructions below. * **StopLimit**: Like a Stop Market, but enters a Limit order instead of a Market order. Specify an `orderQty`, `stopPx`, and `price`. * **MarketIfTouched**: Similar to a Stop, but triggers are done in the opposite direction. Useful for Take Profit orders. * **LimitIfTouched**: As above; use for Take Profit Limit orders. #### Execution Instructions The following `execInst`s are supported. If using multiple, separate with a comma (e.g. `LastPrice,Close`). * **ParticipateDoNotInitiate**: Also known as a Post-Only order. If this order would have executed on placement, it will cancel instead. * **MarkPrice, LastPrice, IndexPrice**: Used by stop and if-touched orders to determine the triggering price. Use only one. By default, `'MarkPrice'` is used. Also used for Pegged orders to define the value of `'LastPeg'`. * **ReduceOnly**: A `'ReduceOnly'` order can only reduce your position, not increase it. If you have a `'ReduceOnly'` limit order that rests in the order book while the position is reduced by other orders, then its order quantity will be amended down or canceled. If there are multiple `'ReduceOnly'` orders the least aggressive will be amended first. * **Close**: `'Close'` implies `'ReduceOnly'`. A `'Close'` order will cancel other active limit orders with the same side and symbol if the open quantity exceeds the current position. This is useful for stops: by canceling these orders, a `'Close'` Stop is ensured to have the margin required to execute, and can only execute up to the full size of your position. If `orderQty` is not specified, a `'Close'` order has an `orderQty` equal to your current position's size. * Note that a `Close` order without an `orderQty` requires a `side`, so that BitMEX knows if it should trigger above or below the `stopPx`. #### Linked Orders Linked Orders are an advanced capability. It is very powerful, but its use requires careful coding and testing. Please follow this document carefully and use the [Testnet Exchange](https://testnet.bitmex.com) while developing. BitMEX offers four advanced Linked Order types: * **OCO**: *One Cancels the Other*. 
A very flexible version of the standard Stop / Take Profit technique. Multiple orders may be linked together using a single `clOrdLinkID`. Send a `contingencyType` of `OneCancelsTheOther` on the orders. The first order that fully or partially executes (or activates for `Stop` orders) will cancel all other orders with the same `clOrdLinkID`. * **OTO**: *One Triggers the Other*. Send a `contingencyType` of `'OneTriggersTheOther'` on the primary order and then subsequent orders with the same `clOrdLinkID` will be not be triggered until the primary order fully executes. * **OUOA**: *One Updates the Other Absolute*. Send a `contingencyType` of `'OneUpdatesTheOtherAbsolute'` on the orders. Then as one order has a execution, other orders with the same `clOrdLinkID` will have their order quantity amended down by the execution quantity. * **OUOP**: *One Updates the Other Proportional*. Send a `contingencyType` of `'OneUpdatesTheOtherProportional'` on the orders. Then as one order has a execution, other orders with the same `clOrdLinkID` will have their order quantity reduced proportionally by the fill percentage. #### Trailing Stops You may use `pegPriceType` of `'TrailingStopPeg'` to create Trailing Stops. The pegged `stopPx` will move as the market moves away from the peg, and freeze as the market moves toward it. To use, combine with `pegOffsetValue` to set the `stopPx` of your order. The peg is set to the triggering price specified in the `execInst` (default `'MarkPrice'`). Use a negative offset for stop-sell and buy-if-touched orders. Requires `ordType`: `'Stop', 'StopLimit', 'MarketIfTouched', 'LimitIfTouched'`. #### Simple Quantities Send a `simpleOrderQty` instead of an `orderQty` to create an order denominated in the underlying currency. This is useful for opening up a position with 1 XBT of exposure without having to calculate how many contracts it is. #### Rate Limits See the [Bulk Order Documentation](#!/Order/Order_newBulk) if you need to place multiple orders at the same time. Bulk orders require fewer risk checks in the trading engine and thus are ratelimited at **1/10** the normal rate. You can also improve your reactivity to market movements while staying under your ratelimit by using the [Amend](#!/Order/Order_amend) and [Amend Bulk](#!/Order/Order_amendBulk) endpoints. This allows you to stay in the market and avoids the cancel/replace cycle. #### Tracking Your Orders If you want to keep track of order IDs yourself, set a unique `clOrdID` per order. This `clOrdID` will come back as a property on the order and any related executions (including on the WebSocket), and can be used to get or cancel the order. Max length is 36 characters. You can also change the `clOrdID` by amending an order, supplying an `origClOrdID`, and your desired new ID as the `clOrdID` param, like so: ``` # Amends an order's leavesQty, and updates its clOrdID to \"def-456\" PUT /api/v1/order {\"origClOrdID\": \"abc-123\", \"clOrdID\": \"def-456\", \"leavesQty\": 1000} ``` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_new(symbol, async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. e.g. 'XBTUSD'. (required)
:param str side: Order side. Valid options: Buy, Sell. Defaults to 'Buy' unless `orderQty` or `simpleOrderQty` is negative.
:param float simple_order_qty: Order quantity in units of the underlying instrument (i.e. Bitcoin).
:param float order_qty: Order quantity in units of the instrument (i.e. contracts).
:param float price: Optional limit price for 'Limit', 'StopLimit', and 'LimitIfTouched' orders.
:param float display_qty: Optional quantity to display in the book. Use 0 for a fully hidden order.
:param float stop_px: Optional trigger price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders. Use a price below the current price for stop-sell orders and buy-if-touched orders. Use `execInst` of 'MarkPrice' or 'LastPrice' to define the current price used for triggering.
:param str cl_ord_id: Optional Client Order ID. This clOrdID will come back on the order and any related executions.
:param str cl_ord_link_id: Optional Client Order Link ID for contingent orders.
:param float peg_offset_value: Optional trailing offset from the current price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders; use a negative offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price for 'Pegged' orders.
:param str peg_price_type: Optional peg price type. Valid options: LastPeg, MidPricePeg, MarketPeg, PrimaryPeg, TrailingStopPeg.
:param str ord_type: Order type. Valid options: Market, Limit, Stop, StopLimit, MarketIfTouched, LimitIfTouched, MarketWithLeftOverAsLimit, Pegged. Defaults to 'Limit' when `price` is specified. Defaults to 'Stop' when `stopPx` is specified. Defaults to 'StopLimit' when `price` and `stopPx` are specified.
:param str time_in_force: Time in force. Valid options: Day, GoodTillCancel, ImmediateOrCancel, FillOrKill. Defaults to 'GoodTillCancel' for 'Limit', 'StopLimit', 'LimitIfTouched', and 'MarketWithLeftOverAsLimit' orders.
:param str exec_inst: Optional execution instructions. Valid options: ParticipateDoNotInitiate, AllOrNone, MarkPrice, IndexPrice, LastPrice, Close, ReduceOnly, Fixed. 'AllOrNone' instruction requires `displayQty` to be 0. 'MarkPrice', 'IndexPrice' or 'LastPrice' instruction valid for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders.
:param str contingency_type: Optional contingency type for use with `clOrdLinkID`. Valid options: OneCancelsTheOther, OneTriggersTheOther, OneUpdatesTheOtherAbsolute, OneUpdatesTheOtherProportional.
:param str text: Optional order annotation. e.g. 'Take profit'.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.order_new_with_http_info(symbol, **kwargs) # noqa: E501
else:
(data) = self.order_new_with_http_info(symbol, **kwargs) # noqa: E501
return data
def order_new_with_http_info(self, symbol, **kwargs): # noqa: E501
"""Create a new order. # noqa: E501
## Placing Orders This endpoint is used for placing orders. See individual fields below for more details on their use. #### Order Types All orders require a `symbol`. All other fields are optional except when otherwise specified. These are the valid `ordType`s: * **Limit**: The default order type. Specify an `orderQty` and `price`. * **Market**: A traditional Market order. A Market order will execute until filled or your bankruptcy price is reached, at which point it will cancel. * **MarketWithLeftOverAsLimit**: A market order that, after eating through the order book as far as permitted by available margin, will become a limit order. The difference between this type and `Market` only affects the behavior in thin books. Upon reaching the deepest possible price, if there is quantity left over, a `Market` order will cancel the remaining quantity. `MarketWithLeftOverAsLimit` will keep the remaining quantity in the books as a `Limit`. * **Stop**: A Stop Market order. Specify an `orderQty` and `stopPx`. When the `stopPx` is reached, the order will be entered into the book. * On sell orders, the order will trigger if the triggering price is lower than the `stopPx`. On buys, higher. * Note: Stop orders do not consume margin until triggered. Be sure that the required margin is available in your account so that it may trigger fully. * `Close` Stops don't require an `orderQty`. See Execution Instructions below. * **StopLimit**: Like a Stop Market, but enters a Limit order instead of a Market order. Specify an `orderQty`, `stopPx`, and `price`. * **MarketIfTouched**: Similar to a Stop, but triggers are done in the opposite direction. Useful for Take Profit orders. * **LimitIfTouched**: As above; use for Take Profit Limit orders. #### Execution Instructions The following `execInst`s are supported. If using multiple, separate with a comma (e.g. `LastPrice,Close`). * **ParticipateDoNotInitiate**: Also known as a Post-Only order. If this order would have executed on placement, it will cancel instead. * **MarkPrice, LastPrice, IndexPrice**: Used by stop and if-touched orders to determine the triggering price. Use only one. By default, `'MarkPrice'` is used. Also used for Pegged orders to define the value of `'LastPeg'`. * **ReduceOnly**: A `'ReduceOnly'` order can only reduce your position, not increase it. If you have a `'ReduceOnly'` limit order that rests in the order book while the position is reduced by other orders, then its order quantity will be amended down or canceled. If there are multiple `'ReduceOnly'` orders the least aggressive will be amended first. * **Close**: `'Close'` implies `'ReduceOnly'`. A `'Close'` order will cancel other active limit orders with the same side and symbol if the open quantity exceeds the current position. This is useful for stops: by canceling these orders, a `'Close'` Stop is ensured to have the margin required to execute, and can only execute up to the full size of your position. If `orderQty` is not specified, a `'Close'` order has an `orderQty` equal to your current position's size. * Note that a `Close` order without an `orderQty` requires a `side`, so that BitMEX knows if it should trigger above or below the `stopPx`. #### Linked Orders Linked Orders are an advanced capability. It is very powerful, but its use requires careful coding and testing. Please follow this document carefully and use the [Testnet Exchange](https://testnet.bitmex.com) while developing. BitMEX offers four advanced Linked Order types: * **OCO**: *One Cancels the Other*. 
A very flexible version of the standard Stop / Take Profit technique. Multiple orders may be linked together using a single `clOrdLinkID`. Send a `contingencyType` of `OneCancelsTheOther` on the orders. The first order that fully or partially executes (or activates for `Stop` orders) will cancel all other orders with the same `clOrdLinkID`. * **OTO**: *One Triggers the Other*. Send a `contingencyType` of `'OneTriggersTheOther'` on the primary order and then subsequent orders with the same `clOrdLinkID` will be not be triggered until the primary order fully executes. * **OUOA**: *One Updates the Other Absolute*. Send a `contingencyType` of `'OneUpdatesTheOtherAbsolute'` on the orders. Then as one order has a execution, other orders with the same `clOrdLinkID` will have their order quantity amended down by the execution quantity. * **OUOP**: *One Updates the Other Proportional*. Send a `contingencyType` of `'OneUpdatesTheOtherProportional'` on the orders. Then as one order has a execution, other orders with the same `clOrdLinkID` will have their order quantity reduced proportionally by the fill percentage. #### Trailing Stops You may use `pegPriceType` of `'TrailingStopPeg'` to create Trailing Stops. The pegged `stopPx` will move as the market moves away from the peg, and freeze as the market moves toward it. To use, combine with `pegOffsetValue` to set the `stopPx` of your order. The peg is set to the triggering price specified in the `execInst` (default `'MarkPrice'`). Use a negative offset for stop-sell and buy-if-touched orders. Requires `ordType`: `'Stop', 'StopLimit', 'MarketIfTouched', 'LimitIfTouched'`. #### Simple Quantities Send a `simpleOrderQty` instead of an `orderQty` to create an order denominated in the underlying currency. This is useful for opening up a position with 1 XBT of exposure without having to calculate how many contracts it is. #### Rate Limits See the [Bulk Order Documentation](#!/Order/Order_newBulk) if you need to place multiple orders at the same time. Bulk orders require fewer risk checks in the trading engine and thus are ratelimited at **1/10** the normal rate. You can also improve your reactivity to market movements while staying under your ratelimit by using the [Amend](#!/Order/Order_amend) and [Amend Bulk](#!/Order/Order_amendBulk) endpoints. This allows you to stay in the market and avoids the cancel/replace cycle. #### Tracking Your Orders If you want to keep track of order IDs yourself, set a unique `clOrdID` per order. This `clOrdID` will come back as a property on the order and any related executions (including on the WebSocket), and can be used to get or cancel the order. Max length is 36 characters. You can also change the `clOrdID` by amending an order, supplying an `origClOrdID`, and your desired new ID as the `clOrdID` param, like so: ``` # Amends an order's leavesQty, and updates its clOrdID to \"def-456\" PUT /api/v1/order {\"origClOrdID\": \"abc-123\", \"clOrdID\": \"def-456\", \"leavesQty\": 1000} ``` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_new_with_http_info(symbol, async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. e.g. 'XBTUSD'. (required)
:param str side: Order side. Valid options: Buy, Sell. Defaults to 'Buy' unless `orderQty` or `simpleOrderQty` is negative.
:param float simple_order_qty: Order quantity in units of the underlying instrument (i.e. Bitcoin).
:param float order_qty: Order quantity in units of the instrument (i.e. contracts).
:param float price: Optional limit price for 'Limit', 'StopLimit', and 'LimitIfTouched' orders.
:param float display_qty: Optional quantity to display in the book. Use 0 for a fully hidden order.
:param float stop_px: Optional trigger price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders. Use a price below the current price for stop-sell orders and buy-if-touched orders. Use `execInst` of 'MarkPrice' or 'LastPrice' to define the current price used for triggering.
:param str cl_ord_id: Optional Client Order ID. This clOrdID will come back on the order and any related executions.
:param str cl_ord_link_id: Optional Client Order Link ID for contingent orders.
:param float peg_offset_value: Optional trailing offset from the current price for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders; use a negative offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price for 'Pegged' orders.
:param str peg_price_type: Optional peg price type. Valid options: LastPeg, MidPricePeg, MarketPeg, PrimaryPeg, TrailingStopPeg.
:param str ord_type: Order type. Valid options: Market, Limit, Stop, StopLimit, MarketIfTouched, LimitIfTouched, MarketWithLeftOverAsLimit, Pegged. Defaults to 'Limit' when `price` is specified. Defaults to 'Stop' when `stopPx` is specified. Defaults to 'StopLimit' when `price` and `stopPx` are specified.
:param str time_in_force: Time in force. Valid options: Day, GoodTillCancel, ImmediateOrCancel, FillOrKill. Defaults to 'GoodTillCancel' for 'Limit', 'StopLimit', 'LimitIfTouched', and 'MarketWithLeftOverAsLimit' orders.
:param str exec_inst: Optional execution instructions. Valid options: ParticipateDoNotInitiate, AllOrNone, MarkPrice, IndexPrice, LastPrice, Close, ReduceOnly, Fixed. 'AllOrNone' instruction requires `displayQty` to be 0. 'MarkPrice', 'IndexPrice' or 'LastPrice' instruction valid for 'Stop', 'StopLimit', 'MarketIfTouched', and 'LimitIfTouched' orders.
:param str contingency_type: Optional contingency type for use with `clOrdLinkID`. Valid options: OneCancelsTheOther, OneTriggersTheOther, OneUpdatesTheOtherAbsolute, OneUpdatesTheOtherProportional.
:param str text: Optional order annotation. e.g. 'Take profit'.
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
        all_params = ['symbol', 'side', 'simple_order_qty', 'order_qty', 'price', 'display_qty', 'stop_px', 'cl_ord_id', 'cl_ord_link_id', 'peg_offset_value', 'peg_price_type', 'ord_type', 'time_in_force', 'exec_inst', 'contingency_type', 'text']
        all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_new" % key
)
params[key] = val
del params['kwargs']
if ('symbol' not in params or
params['symbol'] is None):
raise ValueError("Missing the required parameter `symbol` when calling `order_new`")
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'symbol' in params:
            form_params.append(('symbol', params['symbol']))
        if 'side' in params:
            form_params.append(('side', params['side']))
        if 'simple_order_qty' in params:
            form_params.append(('simpleOrderQty', params['simple_order_qty']))
        if 'order_qty' in params:
            form_params.append(('orderQty', params['order_qty']))
        if 'price' in params:
            form_params.append(('price', params['price']))
        if 'display_qty' in params:
            form_params.append(('displayQty', params['display_qty']))
        if 'stop_px' in params:
            form_params.append(('stopPx', params['stop_px']))
        if 'cl_ord_id' in params:
            form_params.append(('clOrdID', params['cl_ord_id']))
        if 'cl_ord_link_id' in params:
            form_params.append(('clOrdLinkID', params['cl_ord_link_id']))
        if 'peg_offset_value' in params:
            form_params.append(('pegOffsetValue', params['peg_offset_value']))
        if 'peg_price_type' in params:
            form_params.append(('pegPriceType', params['peg_price_type']))
        if 'ord_type' in params:
            form_params.append(('ordType', params['ord_type']))
        if 'time_in_force' in params:
            form_params.append(('timeInForce', params['time_in_force']))
        if 'exec_inst' in params:
            form_params.append(('execInst', params['exec_inst']))
        if 'contingency_type' in params:
            form_params.append(('contingencyType', params['contingency_type']))
        if 'text' in params:
form_params.append(('text', params['text']))
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json', 'application/x-www-form-urlencoded'])
auth_settings = ['apiKey', 'apiNonce', 'apiSignature']
return self.api_client.call_api(
'/order', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order', auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
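    # Usage sketch (hedged): a post-only limit order tracked by a client order ID,
    # and a trailing stop as described in the docstring above. `api` is an assumed,
    # authenticated instance of this class; all prices, quantities and IDs are
    # illustrative only.
    #
    #   api.order_new(symbol='XBTUSD', order_qty=100, price=6000.0,
    #                 cl_ord_id='my-order-001',
    #                 exec_inst='ParticipateDoNotInitiate')
    #   api.order_new(symbol='XBTUSD', side='Sell', order_qty=100, ord_type='Stop',
    #                 peg_price_type='TrailingStopPeg', peg_offset_value=-50,
    #                 exec_inst='LastPrice')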
    def order_new_bulk(self, **kwargs):
        """Create multiple new orders for the same symbol. # noqa: E501
This endpoint is used for placing bulk orders. Valid order types are Market, Limit, Stop, StopLimit, MarketIfTouched, LimitIfTouched, MarketWithLeftOverAsLimit, and Pegged. Each individual order object in the array should have the same properties as an individual POST /order call. This endpoint is much faster for getting many orders into the book at once. Because it reduces load on BitMEX systems, this endpoint is ratelimited at `ceil(0.1 * orders)`. Submitting 10 orders via a bulk order call will only count as 1 request, 15 as 2, 32 as 4, and so on. For now, only `application/json` is supported on this endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_new_bulk(async=True)
>>> result = thread.get()
:param async bool
:param str orders: An array of orders.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
            return self.order_new_bulk_with_http_info(**kwargs)
        else:
            (data) = self.order_new_bulk_with_http_info(**kwargs)
            return data
    def order_new_bulk_with_http_info(self, **kwargs):
        """Create multiple new orders for the same symbol. # noqa: E501
This endpoint is used for placing bulk orders. Valid order types are Market, Limit, Stop, StopLimit, MarketIfTouched, LimitIfTouched, MarketWithLeftOverAsLimit, and Pegged. Each individual order object in the array should have the same properties as an individual POST /order call. This endpoint is much faster for getting many orders into the book at once. Because it reduces load on BitMEX systems, this endpoint is ratelimited at `ceil(0.1 * orders)`. Submitting 10 orders via a bulk order call will only count as 1 request, 15 as 2, 32 as 4, and so on. For now, only `application/json` is supported on this endpoint. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.order_new_bulk_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str orders: An array of orders.
:return: list[Order]
If the method is called asynchronously,
returns the request thread.
"""
        all_params = ['orders']
        all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method order_new_bulk" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'orders' in params:
form_params.append(('orders', params['orders']))
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json', 'application/x-www-form-urlencoded'])
auth_settings = ['apiKey', 'apiNonce', 'apiSignature']
return self.api_client.call_api(
'/order/bulk', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Order]', auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
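    # Usage sketch (hedged): per the docstring, `orders` is a JSON-encoded array in
    # which each element takes the same fields as a single POST /order call. `api`
    # is an assumed, configured instance of this class; values are illustrative.
    #
    #   import json
    #   orders = [{'symbol': 'XBTUSD', 'orderQty': 100, 'price': 6000.0},
    #             {'symbol': 'XBTUSD', 'orderQty': 100, 'price': 5990.0}]
    #   api.order_new_bulk(orders=json.dumps(orders))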
# --- kubeyard/dependencies.py | socialwifi/kubeyard | BSD-3-Clause ---
import logging
import time
import sh
logger = logging.getLogger(__name__)
def is_command_available(name):
try:
sh.bash('which', name)
except sh.ErrorReturnCode:
return False
else:
return True
class KubernetesDependency:
def ensure_running(self):
logger.debug('Checking if container "{}" is running...'.format(self.name))
if self.is_container_running():
logger.debug('"{}" is running'.format(self.name))
else:
logger.debug('Starting "{}"...'.format(self.name))
self.run_container()
logger.debug('"{}" started'.format(self.name))
def run_container(self):
self._apply_definition()
self._wait_until_ready()
self._wait_for_started_log()
def _apply_definition(self):
sh.kubectl('apply', '--record', '-f', self.definition)
try:
sh.kubectl('expose', '-f', self.definition)
except sh.ErrorReturnCode_1 as e:
if b'already exists' not in e.stderr:
raise e
else:
logger.debug('Service for "{}" exists'.format(self.name))
def _wait_until_ready(self):
logger.debug('Waiting for "{}" to start (possibly downloading image)...'.format(self.name))
ready = False
while not ready:
ready = self.is_container_running()
if not ready:
time.sleep(1)
logger.debug('"{}" started'.format(self.name))
def _wait_for_started_log(self):
logger.debug('Waiting for started log for "{}"...'.format(self.name))
for log in sh.kubectl('logs', '-f', self.pod_name, _iter='out'):
if self.started_log in log:
break
logger.debug('Started log for "{}" found'.format(self.name))
def is_container_running(self):
try:
container_ready = str(sh.kubectl(
'get', 'pods',
'--selector', self.selector,
'--output', 'jsonpath="{.items[*].status.containerStatuses[*].ready}"',
)).strip()
except sh.ErrorReturnCode as e:
logger.debug(e)
return False
else:
return container_ready == '"true"'
def run_command(self, *args):
return sh.kubectl('exec', self.pod_name, '--', *args)
@property
def pod_name(self):
return str(sh.kubectl(
'get', 'pods',
'--output', 'custom-columns=NAME:.metadata.name',
'--no-headers',
'--selector', self.selector,
)).strip()
@property
def selector(self):
return 'app={}'.format(self.name)
@property
def started_log(self):
raise NotImplementedError
@property
def name(self):
raise NotImplementedError
@property
def definition(self):
raise NotImplementedError
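

# Minimal sketch of a concrete dependency (not part of kubeyard itself), assuming a
# hypothetical `redis.yaml` manifest whose pods carry the label `app=redis`. It shows
# how the abstract properties above are meant to be filled in before calling
# `ensure_running()`; the manifest path and log line are illustrative assumptions.
class ExampleRedisDependency(KubernetesDependency):
    @property
    def name(self):
        return 'redis'

    @property
    def definition(self):
        return 'redis.yaml'

    @property
    def started_log(self):
        return 'Ready to accept connections'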
# --- btd6_memory_info/generated/Unity/Collections/LowLevel/Unsafe/UnsafeUtility/unsafe_utility.py | 56kyle/bloons_auto | MIT ---
class UnsafeUtility:
    pass
# --- server/accounts/models.py | janheise/zentral | Apache-2.0 ---
import enum
from itertools import chain
from django.contrib.auth.models import AbstractUser, UserManager as DjangoUserManager
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.urls import reverse
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
import pyotp
from zentral.utils.base64 import trimmed_urlsafe_b64decode
class UserManager(DjangoUserManager):
pass
class User(AbstractUser):
email = models.EmailField(unique=True)
is_remote = models.BooleanField(default=False)
is_service_account = models.BooleanField(default=False)
password_updated_at = models.DateTimeField(blank=True, null=True, editable=False)
objects = UserManager()
class Meta:
ordering = ("username",)
def __str__(self):
if self.is_service_account:
return self.username
else:
return self.email or self.username
def get_type_display(self):
return "user" if not self.is_service_account else "service account"
def get_absolute_url(self):
return reverse("accounts:user", args=(self.pk,))
def set_password(self, *args, **kwargs):
if not self.is_remote and not self.is_service_account:
super().set_password(*args, **kwargs)
self.password_updated_at = timezone.now()
else:
self.set_unusable_password()
def save(self, *args, **kwargs):
if self.is_service_account:
# service accounts cannot be superusers
self.is_superuser = False
if self.is_service_account or self.is_remote:
# service accounts or remote users cannot have a valid password
self.set_unusable_password()
else:
if self.pk:
old_user = self._meta.model.objects.get(pk=self.pk)
if old_user.password != self.password:
if old_user.has_usable_password():
UserPasswordHistory.objects.create(
user=self,
password=old_user.password,
created_at=old_user.password_updated_at or old_user.date_joined
)
self.password_updated_at = timezone.now()
elif self.password:
self.password_updated_at = timezone.now()
super().save(*args, **kwargs)
def username_and_email_editable(self):
return not self.is_remote
def is_superuser_editable(self):
return (not self.is_superuser or
User.objects.exclude(pk=self.pk).filter(is_superuser=True).count() > 0)
def editable(self):
return self.username_and_email_editable() or self.is_superuser_editable()
def deletable(self):
return not self.is_superuser
@cached_property
def has_verification_device(self):
return len(self._all_verification_devices) > 0
@cached_property
def _all_verification_devices(self):
return list(chain(self.usertotp_set.all(),
self.userwebauthn_set.all()))
def get_verification_devices(self):
return sorted(self._all_verification_devices,
key=lambda vd: vd.name)
def get_prioritized_verification_devices(self, user_agent):
verification_devices = sorted(self._all_verification_devices,
key=lambda vd: (-1 * vd.PRIORITY, vd.name))
ua_verification_devices = [vd for vd in verification_devices if vd.test_user_agent(user_agent)]
if not ua_verification_devices and verification_devices:
raise ValueError("No verification devices compatible with this user agent")
else:
return ua_verification_devices
@cached_property
def group_name_set(self):
"""A set with all the group names. Used for authz."""
return set(self.groups.values_list("name", flat=True))
class UserPasswordHistory(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
password = models.CharField(_('password'), max_length=128)
created_at = models.DateTimeField(editable=False)
class UserVerificationDevice(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
name = models.CharField(max_length=256)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
def get_type_for_display(self):
return self.TYPE
def __str__(self):
return "{} {}".format(self.get_type_for_display(), self.name)
def get_delete_url(self):
return reverse(self.delete_url_name, args=(self.pk,))
def serialize_for_event(self):
return {"type": self.TYPE,
"pk": self.pk}
class UserTOTP(UserVerificationDevice):
TYPE = "TOTP"
PRIORITY = 10
secret = models.CharField(max_length=256)
delete_url_name = "accounts:delete_totp"
class Meta:
unique_together = (("user", "name"),)
def get_verification_url(self):
return reverse("accounts:verify_totp")
def verify(self, code):
return pyotp.TOTP(self.secret).verify(code)
def test_user_agent(self, user_agent):
return True
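# Verification sketch (hedged): how a TOTP device defined above might be enrolled and
# checked. The user object, device name and generated code are illustrative and not
# part of this module.
#
#   device = UserTOTP.objects.create(user=user, name='phone',
#                                    secret=pyotp.random_base32())
#   device.verify(pyotp.TOTP(device.secret).now())  # -> True for a current code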
class WebAuthnTransport(enum.Enum):
USB = "usb"
NFC = "nfc"
BLE = "ble"
INTERNAL = "internal"
@classmethod
def choices(cls):
return tuple((i.value, i.value) for i in cls)
class UserWebAuthn(UserVerificationDevice):
TYPE = "WebAuthn"
PRIORITY = 100
delete_url_name = "accounts:delete_webauthn_device"
key_handle = models.TextField()
public_key = models.BinaryField()
rp_id = models.TextField()
transports = ArrayField(models.CharField(max_length=8, choices=WebAuthnTransport.choices()))
sign_count = models.PositiveIntegerField()
class Meta:
unique_together = (("user", "key_handle"), ("user", "name"))
def get_type_for_display(self):
return "Security key"
def get_verification_url(self):
return reverse("accounts:verify_webauthn")
def test_user_agent(self, user_agent):
return True
def get_key_handle_bytes(self):
return trimmed_urlsafe_b64decode(self.key_handle)
def get_appid(self):
if self.rp_id.startswith("https://"):
# legacy U2F registration
return self.rp_id
f70201eec12852521092e1ed9d3f5bc7a9cd90c2
| 976
|
py
|
Python
|
deepdab/ai/td0_policy.py
|
lantunes/deepdab
|
0e30f102b9d7c37d3691540496b1649f2704d586
|
[
"Apache-2.0"
] | 1
|
2019-04-04T02:26:51.000Z
|
2019-04-04T02:26:51.000Z
|
deepdab/ai/td0_policy.py
|
lantunes/deepdab
|
0e30f102b9d7c37d3691540496b1649f2704d586
|
[
"Apache-2.0"
] | null | null | null |
deepdab/ai/td0_policy.py
|
lantunes/deepdab
|
0e30f102b9d7c37d3691540496b1649f2704d586
|
[
"Apache-2.0"
] | null | null | null |
from deepdab.ai import *
class TDZeroPolicy(TabularPolicy):
def __init__(self, board_size, learning_rate=0.0, gamma=0.0, epsilon=0.0, initial_state_value=0.0, table_file_path=None):
super(TDZeroPolicy, self).__init__(board_size=board_size, epsilon=epsilon,
initial_state_value=initial_state_value, table_file_path=table_file_path)
self._learning_rate = learning_rate
self._gamma = gamma
def update_value(self, reward, initial_state, selected_state):
initial_state_string = self._find_state_string(initial_state)
selected_state_string = self._find_state_string(selected_state)
initial_state_value = self._value_table[initial_state_string]
selected_state_value = self._value_table[selected_state_string]
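        # TD(0) update: V(s) <- V(s) + alpha * (reward + gamma * V(s') - V(s))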
self._value_table[initial_state_string] = initial_state_value + self._learning_rate * (reward + (self._gamma * selected_state_value) - initial_state_value)
| 57.411765
| 163
| 0.743852
|
from deepdab.ai import *
class TDZeroPolicy(TabularPolicy):
def __init__(self, board_size, learning_rate=0.0, gamma=0.0, epsilon=0.0, initial_state_value=0.0, table_file_path=None):
super(TDZeroPolicy, self).__init__(board_size=board_size, epsilon=epsilon,
initial_state_value=initial_state_value, table_file_path=table_file_path)
self._learning_rate = learning_rate
self._gamma = gamma
def update_value(self, reward, initial_state, selected_state):
initial_state_string = self._find_state_string(initial_state)
selected_state_string = self._find_state_string(selected_state)
initial_state_value = self._value_table[initial_state_string]
selected_state_value = self._value_table[selected_state_string]
self._value_table[initial_state_string] = initial_state_value + self._learning_rate * (reward + (self._gamma * selected_state_value) - initial_state_value)
| true
| true
|
f7020205688de508cdcaf88f06fa23f6198f18be
| 132
|
py
|
Python
|
tests/classification/digits/ws_digits_SGDClassifier_db2_code_gen.py
|
antoinecarme/sklearn2sql_heroku
|
d680db10683daa419324461eeea851dd8b103ad5
|
[
"BSD-3-Clause"
] | 1
|
2019-07-09T14:45:18.000Z
|
2019-07-09T14:45:18.000Z
|
tests/classification/digits/ws_digits_SGDClassifier_db2_code_gen.py
|
antoinecarme/sklearn2sql_heroku
|
d680db10683daa419324461eeea851dd8b103ad5
|
[
"BSD-3-Clause"
] | 5
|
2017-11-13T13:35:37.000Z
|
2021-11-11T12:57:20.000Z
|
tests/classification/digits/ws_digits_SGDClassifier_db2_code_gen.py
|
antoinecarme/sklearn2sql_heroku
|
d680db10683daa419324461eeea851dd8b103ad5
|
[
"BSD-3-Clause"
] | 1
|
2021-09-19T15:05:33.000Z
|
2021-09-19T15:05:33.000Z
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("SGDClassifier" , "digits" , "db2")
| 26.4
| 72
| 0.80303
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("SGDClassifier" , "digits" , "db2")
| true
| true
|
f702024c3c01565b670bab7999a264ce4f0d7f8d
| 260
|
py
|
Python
|
slack_app/tasks.py
|
webscopeio/django-slack-app
|
65abb3717460c51a19c1238eb0572f25c47b2a42
|
[
"MIT"
] | 3
|
2020-06-23T10:02:48.000Z
|
2020-10-28T11:59:28.000Z
|
slack_app/tasks.py
|
webscopeio/django-slack-integration
|
65abb3717460c51a19c1238eb0572f25c47b2a42
|
[
"MIT"
] | 2
|
2020-02-17T11:42:03.000Z
|
2020-02-18T13:46:38.000Z
|
slack_app/tasks.py
|
webscopeio/django-slack-integration
|
65abb3717460c51a19c1238eb0572f25c47b2a42
|
[
"MIT"
] | 4
|
2020-10-11T11:02:58.000Z
|
2022-03-14T08:23:42.000Z
|
from celery import shared_task
from .signals import slack_event_received
@shared_task
def receive_slack_signal_task(sender, event_type, event_data, **data):
slack_event_received.send(sender=sender, event_type=event_type, event_data=event_data, **data)
| 26
| 98
| 0.819231
|
from celery import shared_task
from .signals import slack_event_received
@shared_task
def receive_slack_signal_task(sender, event_type, event_data, **data):
slack_event_received.send(sender=sender, event_type=event_type, event_data=event_data, **data)
| true
| true
|
f702027b07c86443c8a600458c57b03f9799e7d6
| 370
|
py
|
Python
|
10_days_of_statistics_5_4.py
|
sercangul/HackerRank
|
e6d7056babe03baafee8d7f1cacdca7c28b72ded
|
[
"Apache-2.0"
] | null | null | null |
10_days_of_statistics_5_4.py
|
sercangul/HackerRank
|
e6d7056babe03baafee8d7f1cacdca7c28b72ded
|
[
"Apache-2.0"
] | null | null | null |
10_days_of_statistics_5_4.py
|
sercangul/HackerRank
|
e6d7056babe03baafee8d7f1cacdca7c28b72ded
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 3 19:19:34 2019
@author: sercangul
"""
from math import erf
std = 10
h1 = 80
h2 = 60
mean = 70
def N(mean, std, x):
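    # CDF of a normal distribution with the given mean and std, evaluated at x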
return 0.5 + 0.5 * erf((x-mean)/(std* 2**0.5))
print (round(((1 - N(mean,std,h1))*100),2))
print (round(((1 - N(mean,std,h2))*100),2))
print (round(((N(mean,std,h2)*100)),2))
| 17.619048
| 50
| 0.575676
|
from math import erf
std = 10
h1 = 80
h2 = 60
mean = 70
def N(mean, std, x):
return 0.5 + 0.5 * erf((x-mean)/(std* 2**0.5))
print (round(((1 - N(mean,std,h1))*100),2))
print (round(((1 - N(mean,std,h2))*100),2))
print (round(((N(mean,std,h2)*100)),2))
| true
| true
|
f7020407270c486b259d7d6e15d11395208c8459
| 22,252
|
py
|
Python
|
finder/planets.py
|
astrohr/dagor-preprocessing
|
7b44d39f5559cab5e8cf12234193903336d77e36
|
[
"MIT"
] | null | null | null |
finder/planets.py
|
astrohr/dagor-preprocessing
|
7b44d39f5559cab5e8cf12234193903336d77e36
|
[
"MIT"
] | null | null | null |
finder/planets.py
|
astrohr/dagor-preprocessing
|
7b44d39f5559cab5e8cf12234193903336d77e36
|
[
"MIT"
] | null | null | null |
import requests
import requests_cache
from bs4 import BeautifulSoup
import json
from lxml import html
import pdb
import re
import sys
import logging
import datetime
import time
# import winsound
from jinja2 import Environment, FileSystemLoader
import math
import itertools
from playsound import playsound
class Planet:
    # Predictions of the planet's current location can be scattered across the sky: maximum (RA, Dec) spread (in arc seconds) before we discard the planet
maxScatteredness = (1500, 1000)
# Warn when object is scattered (but don't flag it as discarded)
maxScatterednessWarning = (1000, 800)
# Min score for planet to be worth observing
minScore = 25
# Min Magnitude
minMagnitude = 22
#maxNotSeenDays
maxNotSeenDays = 4
def __init__(self, info):
parts = info.split()
self.name = parts[0]
self.score = int(parts[1])
self.numObservations = int(parts[12])
self.arc = float(parts[-3])
self.notSeenDays = float(parts[-1])
# Rectacension
self.ra = float(parts[5])
# Declination
self.dec = float(parts[6])
self.magnitude = float(parts[7])
# Object not good for observing
self.discard = False
self.scatterednessUrl = False
def analyzePlanet(self):
# pdb.set_trace()
print("\n" + str(datetime.datetime.utcnow()) + " Working on: " + self.name)
self.getEphemerides()
if self.haveWeObserved():
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: we have observed it already before')
if self.score < Planet.minScore:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: score too low (' + str(self.score) + ')')
if self.scatterednessUrl:
self.scatteredness = self.getScatteredness()
if self.scatteredness[0] > Planet.maxScatteredness[0] or self.scatteredness[1] > Planet.maxScatteredness[1]:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: predicted locations too scattered (' + str(self.scatteredness[0]) + ', ' + str(self.scatteredness[1]) + ')')
elif self.scatteredness[0] > Planet.maxScatterednessWarning[0] or self.scatteredness[1] > Planet.maxScatterednessWarning[1]:
logging.warning('Location of planet ' + self.name + ' is very scattered! (' + str(self.scatteredness[0]) + ', ' + str(self.scatteredness[1]) + ')')
# pdb.set_trace()
# filter not seen > 1.2 days
if self.notSeenDays > Planet.maxNotSeenDays:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: too long not seen (' + str(self.notSeenDays) + ' days)')
# Get Max Altitude
# TODO - do something with maximum altitude
if len(self.ephemerides) > 0:
self.maxAltitudeEphemeride = self.maxAlt()
if self.maxAltitudeEphemeride:
pass
# print("Max Altitude Date: " + self.maxAltitudeEphemeride.date)
if self.maxAltitudeEphemeride.effMagnitude > Planet.minMagnitude:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: effective magnitude too low (' + str(self.maxAltitudeEphemeride.effMagnitude) + ')' + ' Magnitude (' + str(self.maxAltitudeEphemeride.magnitude) + ')')
else:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: no maximum altitude obtained')
self.nearestToNow()
self.currentEphemerideInterpolation()
else:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: no ephemerides available')
if not self.discard:
logging.warning('PLANET OK: ' + self.name)
def getEphemerides(self):
url = "https://cgi.minorplanetcenter.net/cgi-bin/confirmeph2.cgi"
# print(self.name)
resp = requests.post(url, data={"mb": -30, "mf": 30, "dl": -90, "du": +90, "nl": 0, "nu": 100, "sort": "d", "W": "j", "obj": self.name, "Parallax": 1, "obscode": "L01", "long": None, "lat": None, "alt": None, "int": 1, "start": 0, "raty": "a", "mot": "m", "dmot": "p", "out": "f", "sun": "x", "oalt": 20})
resp1 = resp.text
page = BeautifulSoup(resp1, "html5lib")
links = page.find("pre")
lines = (links.text).split("\n")
lines = lines[2:-1]
lines = [l for l in lines if "<suppressed>" not in l]
# if self.name == 'ZTF00Wh':
# pdb.set_trace()
# if html.find("pre").find_all('a')[2]['href']
if len(page.find("pre").find_all('a')) > 1 and page.find("pre").find_all('a')[1]['href']:
self.scatterednessUrl = page.find("pre").find_all('a')[1]['href']
tree = html.fromstring(resp.content)
mapLinks = tree.xpath("//pre/a[text()='Map']/@href")
if len(mapLinks) > 0:
self.mapLink = mapLinks[0]
if len(tree.xpath("//a[text()='observations']/@href")) > 0:
self.observationsUrl = tree.xpath("//a[text()='observations']/@href")[0]
self.ephemerides = []
ephReport = {}
for l in lines:
eph = Ephemeride(l)
if eph.isValid():
self.ephemerides.append(eph)
ephReport["OK"] = ephReport["OK"] + 1 if "OK" in ephReport else 1
else:
ephReport[eph.discardReason] = ephReport[eph.discardReason] + 1 if eph.discardReason in ephReport else 1
if len(ephReport):
print("Ephemerides report: " + json.dumps(ephReport))
# print(ephDropReasons)
def maxAlt(self):
maxAlt = float("-inf")
index = None
# logging.warning('Obtaining efemeride for: ' + self.name)
for i, eph in enumerate(self.ephemerides):
# logging.warning('Eph.alt: ' + str(eph.alt))
if eph.alt > maxAlt:
maxAlt = eph.alt
index = i
if index is None:
self.discard = True
return None
return self.ephemerides[index]
def nearestToNow(self):
secondsFromNow = float("inf")
index = None
for i, eph in enumerate(self.ephemerides):
if eph.secondsFromNowPlus600() < secondsFromNow:
secondsFromNow = eph.secondsFromNowPlus600()
index = i
if isinstance(index, int):
self.nearestToNowEphemeride = self.ephemerides[index]
return index
return None
def currentEphemerideInterpolation(self):
index = self.nearestToNow()
interpolatedEph = Ephemeride(self.ephemerides[index].line)
if index:
eph = Ephemeride(self.ephemerides[index].line)
if eph.secondsFromNowPlus600() > 0:
if len(self.ephemerides) > index + 1:
currentEph = eph
nextEph = self.ephemerides[index + 1]
else:
self.currentInterpolatedEphemeride = eph
return eph
elif eph.secondsFromNowPlus600() == 0:
self.currentInterpolatedEphemeride = eph
return eph
else:
if index > 0:
currentEph = self.ephemerides[index - 1]
nextEph = eph
else:
self.currentInterpolatedEphemeride = eph
return eph
timeInterval = nextEph.dateUnix - currentEph.dateUnix
dt = time.mktime(datetime.datetime.utcnow().timetuple()) + 600 - currentEph.dateUnix
dtPerc = dt / timeInterval
interpolatedEph.azimuth = currentEph.azimuth + ((nextEph.azimuth - currentEph.azimuth) * dtPerc)
interpolatedEph.alt = currentEph.alt + ((nextEph.alt - currentEph.alt) * dtPerc)
interpolatedEph.dateUnix = currentEph.dateUnix + dt
interpolatedEph.updateLineFromData()
# print('Interpolated Ephemeride: ')
# print(interpolatedEph.line)
self.currentInterpolatedEphemeride = interpolatedEph
return interpolatedEph
self.currentInterpolatedEphemeride = None
return None
# Have we observed the planet before
def haveWeObserved(self):
resp = requests.get(self.observationsUrl)
tree = html.fromstring(resp.content)
text = tree.xpath('//pre/text()')
# pdb.set_trace()
if re.search("L01\n", text[0]):
return True
return False
# scatteredness of results
def getScatteredness(self):
resp = requests.get(self.scatterednessUrl).text
html = BeautifulSoup(resp, "html5lib")
links = html.find("pre")
observationPoints = re.findall(r'([+-][0-9]+) +([+-][0-9]+).*Ephemeris # [0-9]+$', links.text, re.M)
minRa, maxRa, minDec, maxDec = 0, 0, 0, 0
for point in observationPoints:
if int(point[0]) < minRa:
minRa = int(point[0])
elif int(point[0]) > maxRa:
maxRa = int(point[0])
if int(point[1]) < minDec:
minDec = int(point[1])
elif int(point[1]) > maxDec:
maxDec = int(point[1])
return (maxRa - minRa, maxDec - minDec)
# planet1 = Planet()
class Ephemeride:
    # Maximum sun altitude (otherwise we can't observe the planet)
maxSunAlt = -15
# Minimum altitude of object (below can't be seen due to horizon or clouds)
minAlt = 15
# Minimum distance of object from the Moon
minMoonDistance = 20
# Minimum motion (speed = "/min)
minMotion = 2.5
# Why did the ephemeride get discarded (if at all)
discardReason = ''
def __init__(self, info):
# Date UT * R.A. (J2000) Decl. Elong. V Motion Object Sun Moon
# h m "/min P.A. Azi. Alt. Alt. Phase Dist. Alt.
# 2018 10 12 1900 23 26 39.1 +30 55 48 146.2 22.0 0.22 129.4 289 +62 -28 0.15 114 -03
self.line = info
parts = self.line.split()
self.date = parts[0] + ' ' + parts[1] + ' ' + parts[2] + ' ' + parts[3]
self.dateUnix = time.mktime(datetime.datetime.strptime(self.date, "%Y %m %d %H%M").timetuple())
# Azimuth of object at that time
self.azimuth = float(parts[14])
# Altitude of object (above horizon) at that time
self.alt = float(parts[15])
# Altitude of sun at the time
self.sunAlt = float(parts[16])
# Distance from the moon
self.moonDistance = float(parts[18])
self.magnitude = float(parts[11])
        # Effective magnitude - magnitude that takes into account atmospheric extinction due to (low) altitude of the planet
self.effMagnitude = self.getEffectiveMagnitude()
self.motion = float(parts[12])
# Observation time needed (in minutes) - approximates the imaging time needed to get a good picture
self.observationTime = self.getObservationTime()
# pdb.set_trace()
# logging.warning('Magnitude vs Effective Magnitude: ' + str(self.magnitude) + " : " + str(self.effMagnitude))
def isValid(self):
if self.sunAlt > Ephemeride.maxSunAlt:
self.discardReason = 'nearSun'
return False
if self.alt < Ephemeride.minAlt:
self.discardReason = 'altLow'
return False
if self.moonDistance < Ephemeride.minMoonDistance:
self.discardReason = 'nearMoon'
return False
if self.dateUnix > Main.endObservationTimestamp:
self.discardReason = 'tooLate'
return False
if self.motion < Ephemeride.minMotion:
self.discardReason = 'tooSlow'
return False
return True
def getEffectiveMagnitude(self):
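        # Heuristic extinction correction: shift the magnitude by 0.1 per degree below 40 degrees of altitude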
if self.alt < 40:
return self.magnitude + ((self.alt - 40) * 0.1)
else:
return self.magnitude
def getObservationTime(self):
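        # Roughly 10 minutes of exposure at effective magnitude 18, plus 5 minutes per additional magnitude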
return round(10 + (self.effMagnitude - 18) * 5, 2)
def secondsFromNowPlus600(self):
""" Number of seconds from (Now + 600 seconds) """
currentTimePlus600 = time.mktime(datetime.datetime.utcnow().timetuple()) + 600
return math.fabs(self.dateUnix - currentTimePlus600)
def updateLineFromData(self):
line = self.line.split(' ')
line[0] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%Y")
line[1] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%m")
line[2] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%d")
line[3] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%H%M")
        # Azimuth & Altitude
line[22] = str(round(self.azimuth)).zfill(3)
line[24] = str(round(self.alt)) if self.alt < 0 else ('+' + str(round(self.alt)))
self.line = ' '.join(line)
class Map:
def __init__(self, planets):
renderPlanets = []
for planet in planets:
if not planet.discard and planet.currentInterpolatedEphemeride:
# pdb.set_trace()
renderDict = {}
renderDict["name"] = planet.name
renderDict["magnitude"] = planet.currentInterpolatedEphemeride.magnitude
# Displacement from center of map
radius = (90 - planet.currentInterpolatedEphemeride.alt)
# Angle of displacement
angle = math.radians(planet.currentInterpolatedEphemeride.azimuth + 180)
# Convert the radius and angle to X and Y
renderDict["coordinates"] = []
renderDict["coordinates"].append(-(1000 * radius * math.sin(angle) / 90) + 1250)
renderDict["coordinates"].append(-(1000 * radius * math.cos(angle) / 90) + 1250)
renderPlanets.append(renderDict)
env = Environment(loader=FileSystemLoader('.'))
template = env.get_template('skymap.template')
output = template.render({"params": renderPlanets})
with open('skymap.html', 'w') as f:
f.write(output)
class Main:
# Observation date starts at 11:00 and ends next day at 10:59:59 (for file timestamps, etc)
observationDate = datetime.datetime.utcnow() - datetime.timedelta(hours=11)
# observation ends next day at 11:00:00 (so we'll discard later observations, etc)
endObservationTimestamp = time.mktime((observationDate.replace(hour=0, minute=0, second=0, microsecond=0) + datetime.timedelta(hours=35)).timetuple())
def __init__(self):
global debugging
if debugging:
# Cache all server responses to make faster development
            requests_cache.install_cache('requests-cache', allowable_methods=('GET', 'POST'))
self.planets = []
self.repeatMode = True
self.beeperOn = False
self.firstRun = True
self.setInitParams()
self.getData()
self.writeToFile()
Map(self.planets)
print('\nFirst run completed successfully! Now go, play! Make something big!')
# pdb.set_trace()
while self.repeatMode:
if self.firstRun:
print("\n=============================================================")
self.firstRun = False
self.beeperOn = True
self.getData()
self.writeToFile()
Map(self.planets)
time.sleep(300)
def setInitParams(self):
repeat = input('Continuous observation True/False (' + str(self.repeatMode) + ')? ')
if re.fullmatch(r'(True)|(False)', repeat):
if repeat == 'True':
self.repeatMode = True
else:
self.repeatMode = False
print('Continuous observation: ' + str(self.repeatMode))
minScore = input('Minimum score (' + str(Planet.minScore) + ')? ')
if minScore.isdigit():
Planet.minScore = int(minScore)
print('Minimum score: ' + str(Planet.minScore))
        minMagnitude = input('Minimum effective magnitude (' + str(Planet.minMagnitude) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', minMagnitude):
Planet.minMagnitude = float(minMagnitude)
        print('Minimum effective magnitude: ' + str(Planet.minMagnitude))
minAlt = input('Minimum altitude (' + str(Ephemeride.minAlt) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', minAlt):
Ephemeride.minAlt = float(minAlt)
print('Minimum altitude: ' + str(Ephemeride.minAlt))
        maxScatteredness1 = input('Maximum scatteredness in x coordinate (' + str(Planet.maxScatteredness[0]) + ')? ')
if maxScatteredness1.isdigit():
Planet.maxScatteredness = (int(maxScatteredness1), Planet.maxScatteredness[1])
        maxScatteredness2 = input('Maximum scatteredness in y coordinate (' + str(Planet.maxScatteredness[1]) + ')? ')
if maxScatteredness2.isdigit():
Planet.maxScatteredness = (Planet.maxScatteredness[0], int(maxScatteredness2))
        print('Maximum scatteredness: (' + str(Planet.maxScatteredness[0]) + ', ' + str(Planet.maxScatteredness[1]) + ')')
maxNotSeenDays = input('Maximum not seen days (' + str(Planet.maxNotSeenDays) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', maxNotSeenDays):
Planet.maxNotSeenDays = float(maxNotSeenDays)
print('Maximum not seen days: ' + str(Planet.maxNotSeenDays))
maxSunAlt = input('Maximum sun altitude (' + str(Ephemeride.maxSunAlt) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', maxSunAlt):
Ephemeride.maxSunAlt = float(maxSunAlt)
print('Maximum sun altitude: ' + str(Ephemeride.maxSunAlt))
minMoonDistance = input('Minimum distance from the moon (' + str(Ephemeride.minMoonDistance) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', minMoonDistance):
Ephemeride.minMoonDistance = float(minMoonDistance)
print('Minimum distance from the moon: ' + str(Ephemeride.minMoonDistance))
minMotion = input('Minimum motion (speed) (' + str(Ephemeride.minMotion) + ')? ')
if re.fullmatch(r'[0-9]+\.?[0-9]*', minMotion):
Ephemeride.minMotion = float(minMotion)
print('Minimum motion (speed): ' + str(Ephemeride.minMotion))
def getData(self):
url = "https://www.minorplanetcenter.net/iau/NEO/neocp.txt"
resp = requests.get(url).text[:-1].split("\n")
currentPlanets = []
for planetString in resp:
# for planetString in itertools.islice(resp, 0, 4):
p = Planet(planetString)
currentPlanets.append(p.name)
if p.name not in (i.name for i in self.planets):
p.analyzePlanet()
self.planets.append(p)
if self.beeperOn:
playsound('up.wav')
else:
                # print('Planet already known (' + p.name + ')')
pass
# Did any of planets get removed?
if not self.firstRun:
for i in range(len(self.planets) -1, -1, -1):
                if self.planets[i].name not in currentPlanets:
                    # keep a reference before deleting so the removed planet can still be reported
                    removed = self.planets[i]
                    del self.planets[i]
                    if not removed.discard:
                        print('\n' + str(datetime.datetime.utcnow()) + ' Planet ' + removed.name + ' was removed!')
                        playsound('down.wav')
elif not self.planets[i].discard:
# Update the nearest to now ephemeride (so it can be put into file)
self.planets[i].nearestToNow()
self.planets[i].currentEphemerideInterpolation()
def sortByMaxAlt(self):
return sorted([p for p in self.planets if not p.discard], key=lambda planet: planet.maxAltitudeEphemeride.dateUnix)
def writeToFile(self):
# logging.warning('Writing output to file')
# pdb.set_trace()
with open(Main.observationDate.strftime("%Y-%m-%d") + ".txt", "w") as f:
header = """Date UT * R.A. (J2000) Decl. Elong. V Motion Object Sun Moon
h m "/min P.A. Azi. Alt. Alt. Phase Dist. Alt."""+"\n\n\n"
f.write(header + "\n")
sortedPlanets = self.sortByMaxAlt()
for p in sortedPlanets:
if not p.discard:
# pdb.set_trace()
fileLine = "* " + p.name + " score=" + str(p.score) + ', obs=' + str(p.numObservations) + ', arc=' + str(p.arc) + ', notSeen=' + str(p.notSeenDays) + "days, obsExposure=" + str(p.maxAltitudeEphemeride.observationTime) + 'min'
if hasattr(p, 'scatteredness'):
fileLine += ', scatteredness=(' + str(p.scatteredness[0]) + ',' + str(p.scatteredness[1]) + ')'
if hasattr(p, 'mapLink'):
fileLine += ', mapLink=' + p.mapLink
f.write(fileLine + "\n")
# Comment out highest ephemeride
f.write("// " + p.maxAltitudeEphemeride.line + "\n")
# And print current ephemeride
f.write("// " + p.nearestToNowEphemeride.line + "\n")
# And print current interpolated ephemeride
if p.currentInterpolatedEphemeride:
f.write(p.currentInterpolatedEphemeride.line + "\n\n")
else:
f.write(p.nearestToNowEphemeride.line + "\n\n")
f.close()
if __name__ == "__main__":
debugging = False
if '--debug' in sys.argv:
debugging = True
# logger = logging.getLogger()
logging.basicConfig(level=logging.INFO, format="%(message)s")
# Start the program
main = Main()
# pdb.set_trace()
| 42.223909
| 313
| 0.574915
|
import requests
import requests_cache
from bs4 import BeautifulSoup
import json
from lxml import html
import pdb
import re
import sys
import logging
import datetime
import time
from jinja2 import Environment, FileSystemLoader
import math
import itertools
from playsound import playsound
class Planet:
maxScatteredness = (1500, 1000)
# Warn when object is scattered (but don't flag it as discarded)
maxScatterednessWarning = (1000, 800)
minScore = 25
minMagnitude = 22
maxNotSeenDays = 4
def __init__(self, info):
parts = info.split()
self.name = parts[0]
self.score = int(parts[1])
self.numObservations = int(parts[12])
self.arc = float(parts[-3])
self.notSeenDays = float(parts[-1])
self.ra = float(parts[5])
self.dec = float(parts[6])
self.magnitude = float(parts[7])
self.discard = False
self.scatterednessUrl = False
def analyzePlanet(self):
print("\n" + str(datetime.datetime.utcnow()) + " Working on: " + self.name)
self.getEphemerides()
if self.haveWeObserved():
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: we have observed it already before')
if self.score < Planet.minScore:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: score too low (' + str(self.score) + ')')
if self.scatterednessUrl:
self.scatteredness = self.getScatteredness()
if self.scatteredness[0] > Planet.maxScatteredness[0] or self.scatteredness[1] > Planet.maxScatteredness[1]:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: predicted locations too scattered (' + str(self.scatteredness[0]) + ', ' + str(self.scatteredness[1]) + ')')
elif self.scatteredness[0] > Planet.maxScatterednessWarning[0] or self.scatteredness[1] > Planet.maxScatterednessWarning[1]:
logging.warning('Location of planet ' + self.name + ' is very scattered! (' + str(self.scatteredness[0]) + ', ' + str(self.scatteredness[1]) + ')')
if self.notSeenDays > Planet.maxNotSeenDays:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: too long not seen (' + str(self.notSeenDays) + ' days)')
if len(self.ephemerides) > 0:
self.maxAltitudeEphemeride = self.maxAlt()
if self.maxAltitudeEphemeride:
pass
if self.maxAltitudeEphemeride.effMagnitude > Planet.minMagnitude:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: effective magnitude too low (' + str(self.maxAltitudeEphemeride.effMagnitude) + ')' + ' Magnitude (' + str(self.maxAltitudeEphemeride.magnitude) + ')')
else:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: no maximum altitude obtained')
self.nearestToNow()
self.currentEphemerideInterpolation()
else:
self.discard = True
logging.warning('Planet ' + self.name + ' discarded. Reason: no ephemerides available')
if not self.discard:
logging.warning('PLANET OK: ' + self.name)
def getEphemerides(self):
url = "https://cgi.minorplanetcenter.net/cgi-bin/confirmeph2.cgi"
resp = requests.post(url, data={"mb": -30, "mf": 30, "dl": -90, "du": +90, "nl": 0, "nu": 100, "sort": "d", "W": "j", "obj": self.name, "Parallax": 1, "obscode": "L01", "long": None, "lat": None, "alt": None, "int": 1, "start": 0, "raty": "a", "mot": "m", "dmot": "p", "out": "f", "sun": "x", "oalt": 20})
resp1 = resp.text
page = BeautifulSoup(resp1, "html5lib")
links = page.find("pre")
lines = (links.text).split("\n")
lines = lines[2:-1]
lines = [l for l in lines if "<suppressed>" not in l]
if len(page.find("pre").find_all('a')) > 1 and page.find("pre").find_all('a')[1]['href']:
self.scatterednessUrl = page.find("pre").find_all('a')[1]['href']
tree = html.fromstring(resp.content)
mapLinks = tree.xpath("//pre/a[text()='Map']/@href")
if len(mapLinks) > 0:
self.mapLink = mapLinks[0]
if len(tree.xpath("//a[text()='observations']/@href")) > 0:
self.observationsUrl = tree.xpath("//a[text()='observations']/@href")[0]
self.ephemerides = []
ephReport = {}
for l in lines:
eph = Ephemeride(l)
if eph.isValid():
self.ephemerides.append(eph)
ephReport["OK"] = ephReport["OK"] + 1 if "OK" in ephReport else 1
else:
ephReport[eph.discardReason] = ephReport[eph.discardReason] + 1 if eph.discardReason in ephReport else 1
if len(ephReport):
print("Ephemerides report: " + json.dumps(ephReport))
def maxAlt(self):
maxAlt = float("-inf")
index = None
for i, eph in enumerate(self.ephemerides):
if eph.alt > maxAlt:
maxAlt = eph.alt
index = i
if index is None:
self.discard = True
return None
return self.ephemerides[index]
def nearestToNow(self):
secondsFromNow = float("inf")
index = None
for i, eph in enumerate(self.ephemerides):
if eph.secondsFromNowPlus600() < secondsFromNow:
secondsFromNow = eph.secondsFromNowPlus600()
index = i
if isinstance(index, int):
self.nearestToNowEphemeride = self.ephemerides[index]
return index
return None
def currentEphemerideInterpolation(self):
index = self.nearestToNow()
interpolatedEph = Ephemeride(self.ephemerides[index].line)
if index:
eph = Ephemeride(self.ephemerides[index].line)
if eph.secondsFromNowPlus600() > 0:
if len(self.ephemerides) > index + 1:
currentEph = eph
nextEph = self.ephemerides[index + 1]
else:
self.currentInterpolatedEphemeride = eph
return eph
elif eph.secondsFromNowPlus600() == 0:
self.currentInterpolatedEphemeride = eph
return eph
else:
if index > 0:
currentEph = self.ephemerides[index - 1]
nextEph = eph
else:
self.currentInterpolatedEphemeride = eph
return eph
timeInterval = nextEph.dateUnix - currentEph.dateUnix
dt = time.mktime(datetime.datetime.utcnow().timetuple()) + 600 - currentEph.dateUnix
dtPerc = dt / timeInterval
interpolatedEph.azimuth = currentEph.azimuth + ((nextEph.azimuth - currentEph.azimuth) * dtPerc)
interpolatedEph.alt = currentEph.alt + ((nextEph.alt - currentEph.alt) * dtPerc)
interpolatedEph.dateUnix = currentEph.dateUnix + dt
interpolatedEph.updateLineFromData()
self.currentInterpolatedEphemeride = interpolatedEph
return interpolatedEph
self.currentInterpolatedEphemeride = None
return None
def haveWeObserved(self):
resp = requests.get(self.observationsUrl)
tree = html.fromstring(resp.content)
text = tree.xpath('//pre/text()')
if re.search("L01\n", text[0]):
return True
return False
def getScatteredness(self):
resp = requests.get(self.scatterednessUrl).text
html = BeautifulSoup(resp, "html5lib")
links = html.find("pre")
observationPoints = re.findall(r'([+-][0-9]+) +([+-][0-9]+).*Ephemeris # [0-9]+$', links.text, re.M)
minRa, maxRa, minDec, maxDec = 0, 0, 0, 0
for point in observationPoints:
if int(point[0]) < minRa:
minRa = int(point[0])
elif int(point[0]) > maxRa:
maxRa = int(point[0])
if int(point[1]) < minDec:
minDec = int(point[1])
elif int(point[1]) > maxDec:
maxDec = int(point[1])
return (maxRa - minRa, maxDec - minDec)
class Ephemeride:
maxSunAlt = -15
# Minimum altitude of object (below can't be seen due to horizon or clouds)
minAlt = 15
minMoonDistance = 20
minMotion = 2.5
# Why did the ephemeride get discarded (if at all)
discardReason = ''
def __init__(self, info):
# Date UT * R.A. (J2000) Decl. Elong. V Motion Object Sun Moon
# h m "/min P.A. Azi. Alt. Alt. Phase Dist. Alt.
self.line = info
parts = self.line.split()
self.date = parts[0] + ' ' + parts[1] + ' ' + parts[2] + ' ' + parts[3]
self.dateUnix = time.mktime(datetime.datetime.strptime(self.date, "%Y %m %d %H%M").timetuple())
self.azimuth = float(parts[14])
self.alt = float(parts[15])
self.sunAlt = float(parts[16])
self.moonDistance = float(parts[18])
self.magnitude = float(parts[11])
self.effMagnitude = self.getEffectiveMagnitude()
self.motion = float(parts[12])
self.observationTime = self.getObservationTime()
def isValid(self):
if self.sunAlt > Ephemeride.maxSunAlt:
self.discardReason = 'nearSun'
return False
if self.alt < Ephemeride.minAlt:
self.discardReason = 'altLow'
return False
if self.moonDistance < Ephemeride.minMoonDistance:
self.discardReason = 'nearMoon'
return False
if self.dateUnix > Main.endObservationTimestamp:
self.discardReason = 'tooLate'
return False
if self.motion < Ephemeride.minMotion:
self.discardReason = 'tooSlow'
return False
return True
def getEffectiveMagnitude(self):
if self.alt < 40:
return self.magnitude + ((self.alt - 40) * 0.1)
else:
return self.magnitude
def getObservationTime(self):
return round(10 + (self.effMagnitude - 18) * 5, 2)
def secondsFromNowPlus600(self):
currentTimePlus600 = time.mktime(datetime.datetime.utcnow().timetuple()) + 600
return math.fabs(self.dateUnix - currentTimePlus600)
def updateLineFromData(self):
line = self.line.split(' ')
line[0] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%Y")
line[1] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%m")
line[2] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%d")
line[3] = datetime.datetime.fromtimestamp(self.dateUnix).strftime("%H%M")
line[22] = str(round(self.azimuth)).zfill(3)
line[24] = str(round(self.alt)) if self.alt < 0 else ('+' + str(round(self.alt)))
self.line = ' '.join(line)
class Map:
def __init__(self, planets):
renderPlanets = []
for planet in planets:
if not planet.discard and planet.currentInterpolatedEphemeride:
renderDict = {}
renderDict["name"] = planet.name
renderDict["magnitude"] = planet.currentInterpolatedEphemeride.magnitude
radius = (90 - planet.currentInterpolatedEphemeride.alt)
angle = math.radians(planet.currentInterpolatedEphemeride.azimuth + 180)
renderDict["coordinates"] = []
renderDict["coordinates"].append(-(1000 * radius * math.sin(angle) / 90) + 1250)
renderDict["coordinates"].append(-(1000 * radius * math.cos(angle) / 90) + 1250)
renderPlanets.append(renderDict)
env = Environment(loader=FileSystemLoader('.'))
template = env.get_template('skymap.template')
output = template.render({"params": renderPlanets})
with open('skymap.html', 'w') as f:
f.write(output)
class Main:
observationDate = datetime.datetime.utcnow() - datetime.timedelta(hours=11)
endObservationTimestamp = time.mktime((observationDate.replace(hour=0, minute=0, second=0, microsecond=0) + datetime.timedelta(hours=35)).timetuple())
def __init__(self):
global debugging
if debugging:
# Cache all server responses to make faster development
            requests_cache.install_cache('requests-cache', allowable_methods=('GET', 'POST'))
self.planets = []
self.repeatMode = True
self.beeperOn = False
self.firstRun = True
self.setInitParams()
self.getData()
self.writeToFile()
Map(self.planets)
print('\nFirst run completed successfully! Now go, play! Make something big!')
# pdb.set_trace()
while self.repeatMode:
if self.firstRun:
print("\n=============================================================")
self.firstRun = False
self.beeperOn = True
self.getData()
self.writeToFile()
Map(self.planets)
time.sleep(300)
def setInitParams(self):
repeat = input('Continuous observation True/False (' + str(self.repeatMode) + ')? ')
if re.fullmatch(r'(True)|(False)', repeat):
if repeat == 'True':
self.repeatMode = True
else:
self.repeatMode = False
print('Continuous observation: ' + str(self.repeatMode))
minScore = input('Minimum score (' + str(Planet.minScore) + ')? ')
if minScore.isdigit():
Planet.minScore = int(minScore)
print('Minimum score: ' + str(Planet.minScore))
        minMagnitude = input('Minimum effective magnitude (' + str(Planet.minMagnitude) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', minMagnitude):
Planet.minMagnitude = float(minMagnitude)
        print('Minimum effective magnitude: ' + str(Planet.minMagnitude))
minAlt = input('Minimum altitude (' + str(Ephemeride.minAlt) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', minAlt):
Ephemeride.minAlt = float(minAlt)
print('Minimum altitude: ' + str(Ephemeride.minAlt))
        maxScatteredness1 = input('Maximum scatteredness in x coordinate (' + str(Planet.maxScatteredness[0]) + ')? ')
if maxScatteredness1.isdigit():
Planet.maxScatteredness = (int(maxScatteredness1), Planet.maxScatteredness[1])
        maxScatteredness2 = input('Maximum scatteredness in y coordinate (' + str(Planet.maxScatteredness[1]) + ')? ')
if maxScatteredness2.isdigit():
Planet.maxScatteredness = (Planet.maxScatteredness[0], int(maxScatteredness2))
        print('Maximum scatteredness: (' + str(Planet.maxScatteredness[0]) + ', ' + str(Planet.maxScatteredness[1]) + ')')
maxNotSeenDays = input('Maximum not seen days (' + str(Planet.maxNotSeenDays) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', maxNotSeenDays):
Planet.maxNotSeenDays = float(maxNotSeenDays)
print('Maximum not seen days: ' + str(Planet.maxNotSeenDays))
maxSunAlt = input('Maximum sun altitude (' + str(Ephemeride.maxSunAlt) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', maxSunAlt):
Ephemeride.maxSunAlt = float(maxSunAlt)
print('Maximum sun altitude: ' + str(Ephemeride.maxSunAlt))
minMoonDistance = input('Minimum distance from the moon (' + str(Ephemeride.minMoonDistance) + ')? ')
if re.fullmatch(r'[+-]?[0-9]+\.?[0-9]*', minMoonDistance):
Ephemeride.minMoonDistance = float(minMoonDistance)
print('Minimum distance from the moon: ' + str(Ephemeride.minMoonDistance))
minMotion = input('Minimum motion (speed) (' + str(Ephemeride.minMotion) + ')? ')
if re.fullmatch(r'[0-9]+\.?[0-9]*', minMotion):
Ephemeride.minMotion = float(minMotion)
print('Minimum motion (speed): ' + str(Ephemeride.minMotion))
def getData(self):
url = "https://www.minorplanetcenter.net/iau/NEO/neocp.txt"
resp = requests.get(url).text[:-1].split("\n")
currentPlanets = []
for planetString in resp:
# for planetString in itertools.islice(resp, 0, 4):
p = Planet(planetString)
currentPlanets.append(p.name)
if p.name not in (i.name for i in self.planets):
p.analyzePlanet()
self.planets.append(p)
if self.beeperOn:
playsound('up.wav')
else:
                # print('Planet already known (' + p.name + ')')
pass
# Did any of planets get removed?
if not self.firstRun:
for i in range(len(self.planets) -1, -1, -1):
                if self.planets[i].name not in currentPlanets:
                    removed = self.planets[i]
                    del self.planets[i]
                    if not removed.discard:
                        print('\n' + str(datetime.datetime.utcnow()) + ' Planet ' + removed.name + ' was removed!')
                        playsound('down.wav')
elif not self.planets[i].discard:
# Update the nearest to now ephemeride (so it can be put into file)
self.planets[i].nearestToNow()
self.planets[i].currentEphemerideInterpolation()
def sortByMaxAlt(self):
return sorted([p for p in self.planets if not p.discard], key=lambda planet: planet.maxAltitudeEphemeride.dateUnix)
def writeToFile(self):
# logging.warning('Writing output to file')
# pdb.set_trace()
with open(Main.observationDate.strftime("%Y-%m-%d") + ".txt", "w") as f:
header = """Date UT * R.A. (J2000) Decl. Elong. V Motion Object Sun Moon
h m "/min P.A. Azi. Alt. Alt. Phase Dist. Alt."""+"\n\n\n"
f.write(header + "\n")
sortedPlanets = self.sortByMaxAlt()
for p in sortedPlanets:
if not p.discard:
# pdb.set_trace()
fileLine = "* " + p.name + " score=" + str(p.score) + ', obs=' + str(p.numObservations) + ', arc=' + str(p.arc) + ', notSeen=' + str(p.notSeenDays) + "days, obsExposure=" + str(p.maxAltitudeEphemeride.observationTime) + 'min'
if hasattr(p, 'scatteredness'):
fileLine += ', scatteredness=(' + str(p.scatteredness[0]) + ',' + str(p.scatteredness[1]) + ')'
if hasattr(p, 'mapLink'):
fileLine += ', mapLink=' + p.mapLink
f.write(fileLine + "\n")
# Comment out highest ephemeride
f.write("// " + p.maxAltitudeEphemeride.line + "\n")
# And print current ephemeride
f.write("// " + p.nearestToNowEphemeride.line + "\n")
# And print current interpolated ephemeride
if p.currentInterpolatedEphemeride:
f.write(p.currentInterpolatedEphemeride.line + "\n\n")
else:
f.write(p.nearestToNowEphemeride.line + "\n\n")
f.close()
if __name__ == "__main__":
debugging = False
if '--debug' in sys.argv:
debugging = True
# logger = logging.getLogger()
logging.basicConfig(level=logging.INFO, format="%(message)s")
# Start the program
main = Main()
# pdb.set_trace()
| true
| true
|
f70204479a221f5e4f1dc384ae7a40ef6e0943b2
| 2,124
|
py
|
Python
|
cml/install_dependencies.py
|
meechos/CML_AMP_Summarize
|
6ded75ef2823cd3a70f8ab1a773c09068300d069
|
[
"Apache-2.0"
] | 1
|
2021-09-24T18:53:46.000Z
|
2021-09-24T18:53:46.000Z
|
cml/install_dependencies.py
|
meechos/CML_AMP_Summarize
|
6ded75ef2823cd3a70f8ab1a773c09068300d069
|
[
"Apache-2.0"
] | 2
|
2021-09-03T16:42:58.000Z
|
2021-09-04T04:54:23.000Z
|
cml/install_dependencies.py
|
meechos/CML_AMP_Summarize
|
6ded75ef2823cd3a70f8ab1a773c09068300d069
|
[
"Apache-2.0"
] | 3
|
2021-11-17T14:08:48.000Z
|
2022-03-31T22:02:09.000Z
|
# ###########################################################################
#
# CLOUDERA APPLIED MACHINE LEARNING PROTOTYPE (AMP)
# (C) Cloudera, Inc. 2021
# All rights reserved.
#
# Applicable Open Source License: Apache 2.0
#
# NOTE: Cloudera open source products are modular software products
# made up of hundreds of individual components, each of which was
# individually copyrighted. Each Cloudera open source product is a
# collective work under U.S. Copyright Law. Your license to use the
# collective work is as provided in your written agreement with
# Cloudera. Used apart from the collective work, this file is
# licensed for your use pursuant to the open source license
# identified above.
#
# This code is provided to you pursuant a written agreement with
# (i) Cloudera, Inc. or (ii) a third-party authorized to distribute
# this code. If you do not have a written agreement with Cloudera nor
# with an authorized and properly licensed third party, you do not
# have any rights to access nor to use this code.
#
# Absent a written agreement with Cloudera, Inc. (“Cloudera”) to the
# contrary, A) CLOUDERA PROVIDES THIS CODE TO YOU WITHOUT WARRANTIES OF ANY
# KIND; (B) CLOUDERA DISCLAIMS ANY AND ALL EXPRESS AND IMPLIED
# WARRANTIES WITH RESPECT TO THIS CODE, INCLUDING BUT NOT LIMITED TO
# IMPLIED WARRANTIES OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE; (C) CLOUDERA IS NOT LIABLE TO YOU,
# AND WILL NOT DEFEND, INDEMNIFY, NOR HOLD YOU HARMLESS FOR ANY CLAIMS
# ARISING FROM OR RELATED TO THE CODE; AND (D)WITH RESPECT TO YOUR EXERCISE
# OF ANY RIGHTS GRANTED TO YOU FOR THE CODE, CLOUDERA IS NOT LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, PUNITIVE OR
# CONSEQUENTIAL DAMAGES INCLUDING, BUT NOT LIMITED TO, DAMAGES
# RELATED TO LOST REVENUE, LOST PROFITS, LOSS OF INCOME, LOSS OF
# BUSINESS ADVANTAGE OR UNAVAILABILITY, OR LOSS OR CORRUPTION OF
# DATA.
#
# ###########################################################################
!pip3 install -r requirements.txt
!python3 -m spacy download en_core_web_sm
| 49.395349
| 77
| 0.704802
|
!pip3 install -r requirements.txt
!python3 -m spacy download en_core_web_sm
| false
| true
|
f70206340a2324d17bb06e8517c8d216679dd42c
| 1,006
|
py
|
Python
|
bizfriendly/settings.py
|
codeforamerica/bizfriendly-api
|
b3f3b9f83652ec67752d629baaf0bc1d4ec67695
|
[
"BSD-Source-Code"
] | 13
|
2015-04-27T14:26:19.000Z
|
2021-11-21T16:11:17.000Z
|
bizfriendly/settings.py
|
codeforamerica/bizfriendly-api
|
b3f3b9f83652ec67752d629baaf0bc1d4ec67695
|
[
"BSD-Source-Code"
] | 15
|
2015-04-25T22:29:50.000Z
|
2016-09-01T16:59:21.000Z
|
bizfriendly/settings.py
|
codeforamerica/bizfriendly-api
|
b3f3b9f83652ec67752d629baaf0bc1d4ec67695
|
[
"BSD-Source-Code"
] | 9
|
2015-06-19T19:48:40.000Z
|
2021-04-16T10:27:29.000Z
|
from bizfriendly import app
from flask.ext.heroku import Heroku
import os
heroku = Heroku(app) # Sets CONFIG automagically
app.config.update(
# DEBUG = True,
# SQLALCHEMY_DATABASE_URI = 'postgres://hackyourcity@localhost/howtocity',
# SQLALCHEMY_DATABASE_URI = 'postgres://postgres:root@localhost/howtocity',
# SECRET_KEY = '123456'
)
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY')
app.config['MAIL_GUN_KEY'] = os.environ.get('MAIL_GUN_KEY')
app.config['AWS_ACCESS_KEY_ID'] = os.environ.get('AWS_ACCESS_KEY_ID')
app.config['AWS_SECRET_ACCESS_KEY'] = os.environ.get('AWS_SECRET_ACCESS_KEY')
app.config['S3_BUCKET_NAME'] = os.environ.get('S3_BUCKET_NAME')
def add_cors_header(response):
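    # Attach permissive CORS headers so the API can be called from any origin.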
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'Authorization, Content-Type'
response.headers['Access-Control-Allow-Methods'] = 'POST, GET, PUT, PATCH, DELETE, OPTIONS'
return response
app.after_request(add_cors_header)
| 41.916667
| 95
| 0.752485
|
from bizfriendly import app
from flask.ext.heroku import Heroku
import os
heroku = Heroku(app)
app.config.update(
)
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY')
app.config['MAIL_GUN_KEY'] = os.environ.get('MAIL_GUN_KEY')
app.config['AWS_ACCESS_KEY_ID'] = os.environ.get('AWS_ACCESS_KEY_ID')
app.config['AWS_SECRET_ACCESS_KEY'] = os.environ.get('AWS_SECRET_ACCESS_KEY')
app.config['S3_BUCKET_NAME'] = os.environ.get('S3_BUCKET_NAME')
def add_cors_header(response):
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'Authorization, Content-Type'
response.headers['Access-Control-Allow-Methods'] = 'POST, GET, PUT, PATCH, DELETE, OPTIONS'
return response
app.after_request(add_cors_header)
| true
| true
|
f70206cc800c5e0d94292d32c57ac589cda0f286
| 172
|
py
|
Python
|
learn1/46.py
|
raghavi101/HackerRank
|
48bf812f1b3c60b5201a95458be76ae9b7323a88
|
[
"MIT"
] | null | null | null |
learn1/46.py
|
raghavi101/HackerRank
|
48bf812f1b3c60b5201a95458be76ae9b7323a88
|
[
"MIT"
] | null | null | null |
learn1/46.py
|
raghavi101/HackerRank
|
48bf812f1b3c60b5201a95458be76ae9b7323a88
|
[
"MIT"
] | null | null | null |
from itertools import product
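# Print the Cartesian product of the two input lists as space-separated tuples.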
aarr = list(map(int, input().split()))
barr = list(map(int, input().split()))
print(' '.join(str(i) for i in list(product(*[aarr, barr]))))
| 24.571429
| 61
| 0.651163
|
from itertools import product
aarr = list(map(int, input().split()))
barr = list(map(int, input().split()))
print(' '.join(str(i) for i in list(product(*[aarr, barr]))))
| true
| true
|
f702081e249e1d95321d52a2424e1f7a8a4b4c3a
| 5,409
|
py
|
Python
|
previous/week12/object-oriented/FactorySystemOO.py
|
code-lab-org/sys611
|
3b8c46788dee629a9f2d6b7f84373e041b918ff0
|
[
"MIT"
] | 3
|
2021-04-07T03:52:07.000Z
|
2022-03-04T18:16:16.000Z
|
previous/week12/object-oriented/FactorySystemOO.py
|
code-lab-org/sys611
|
3b8c46788dee629a9f2d6b7f84373e041b918ff0
|
[
"MIT"
] | null | null | null |
previous/week12/object-oriented/FactorySystemOO.py
|
code-lab-org/sys611
|
3b8c46788dee629a9f2d6b7f84373e041b918ff0
|
[
"MIT"
] | 6
|
2021-02-12T01:57:23.000Z
|
2022-03-04T18:05:27.000Z
|
"""
SYS-611: Example factory model in SimPy (object-oriented).
@author: Paul T. Grogan, [email protected]
"""
# import the python3 behavior for importing, division, and printing in python2
from __future__ import absolute_import, division, print_function
# import the simpy package
# see https://simpy.readthedocs.io/en/latest/api_reference for documentation
import simpy
# import the numpy package and refer to it as `np`
# see http://docs.scipy.org/doc/numpy/reference/ for documentation
import numpy as np
# import the matplotlib pyplot package and refer to it as `plt`
# see http://matplotlib.org/api/pyplot_api.html for documentation
import matplotlib.pyplot as plt
#%% SECTION TO CONFIGURE SIMULATION
# number of simulation runs to perform
NUM_RUNS = 1
# simulation duration (hours)
SIM_DURATION = 5*8*52
# number of spares to purchase (S)
NUM_SPARES = 20
# number of repairers to hire (R)
NUM_REPAIRERS = 5
#%% SECTION TO DEFINE SIMULATION
class Factory(object):
""" Defines a factory simulation. """
def __init__(self, env, num_repairers, num_spares):
""" Initializes this factory.
Args:
env (simpy.Environment): the simulation environment
num_repairers (int): the number of repairers to hire
num_spares (int): the number of spares to purchase
"""
self.repairers = simpy.Resource(env, capacity=num_repairers)
self.spares = simpy.Container(env, init=num_spares, capacity=num_spares)
self.env = env
self.cost = 0
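        # Daily operating cost: repairer labor (3.75 per hour over an 8-hour day) plus a holding cost of 30 per spare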
self.daily_cost = 3.75*8*num_repairers + 30*num_spares
def run(self):
""" Process to run this simulation. """
# launch the 50 machine processes
for i in range(50):
self.env.process(factory.operate_machine(i+1))
# update the daily costs each day
while True:
self.cost += self.daily_cost
yield self.env.timeout(8.0)
def operate_machine(self, machine):
""" Process to operate a machine.
Args:
machine (int): the machine number
"""
while True:
# wait until the machine breaks
yield self.env.timeout(np.random.uniform(132,182))
time_broken = self.env.now
if NUM_RUNS <= 1:
print('machine {} broke at {:.2f} ({} spares available)'.format(
machine, time_broken, self.spares.level))
# launch the repair process
self.env.process(self.repair_machine())
# wait for a spare to become available
yield self.spares.get(1)
time_replaced = self.env.now
if NUM_RUNS <= 1:
print('machine {} replaced at {:.2f}'.format(machine, time_replaced))
# update the cost for being out of service
self.cost += 20*(time_replaced-time_broken)
def repair_machine(self):
""" Process to repair a machine. """
with self.repairers.request() as request:
# wait for a repairer to become available
yield request
# perform the repair
yield self.env.timeout(np.random.uniform(4,10))
# put the machine back in the spares pool
yield self.spares.put(1)
if NUM_RUNS <= 1:
print('repair complete at {:.2f} ({} spares available)'.format(
self.env.now, self.spares.level))
# arrays to record data
obs_time = []
obs_cost = []
obs_spares = []
def observe(env, factory):
""" Process to observe the factory during a simulation.
Args:
env (simpy.Environment): the simulation environment
factory (Factory): the factory
"""
while True:
obs_time.append(env.now)
obs_cost.append(factory.cost)
obs_spares.append(factory.spares.level)
yield env.timeout(1.0)
#%% SECTION TO RUN ANALYSIS
# array to store outputs
COST = []
for i in range(NUM_RUNS):
# set the random number seed
np.random.seed(i)
# create the simpy environment
env = simpy.Environment()
# create the factory
factory = Factory(env, NUM_REPAIRERS, NUM_SPARES)
# add the factory run process
env.process(factory.run())
# add the observation process
env.process(observe(env, factory))
# run simulation
env.run(until=SIM_DURATION)
# record the final observed cost
COST.append(obs_cost[-1])
if NUM_RUNS <= 1:
# output the total cost
print('Total cost: {:.2f}'.format(factory.cost))
# plot the number of spares available
plt.figure()
plt.step(obs_time, obs_spares, where='post')
plt.xlabel('Time (hour)')
plt.ylabel('Number Spares Available')
# plot the total cost accumulation
plt.figure()
plt.step(obs_time, obs_cost, where='post')
plt.xlabel('Time (hour)')
plt.ylabel('Total Cost')
# print final results to console
print('Factory costs for N={:} runs with R={:} repairers and S={:} spares:'.format(
NUM_RUNS, NUM_REPAIRERS, NUM_SPARES))
print('\n'.join('{:.2f}'.format(i) for i in COST))
#%% SECTION TO WRITE RESULTS TO CSV FILE
import csv
with open('factory.csv', 'w') as output:
writer = csv.writer(output)
for sample in COST:
writer.writerow([sample])
| 32.781818
| 87
| 0.621557
|
from __future__ import absolute_import, division, print_function
import simpy
import numpy as np
import matplotlib.pyplot as plt
NUM_RUNS = 1
SIM_DURATION = 5*8*52
NUM_SPARES = 20
NUM_REPAIRERS = 5
class Factory(object):
def __init__(self, env, num_repairers, num_spares):
self.repairers = simpy.Resource(env, capacity=num_repairers)
self.spares = simpy.Container(env, init=num_spares, capacity=num_spares)
self.env = env
self.cost = 0
self.daily_cost = 3.75*8*num_repairers + 30*num_spares
def run(self):
for i in range(50):
self.env.process(factory.operate_machine(i+1))
while True:
self.cost += self.daily_cost
yield self.env.timeout(8.0)
def operate_machine(self, machine):
while True:
yield self.env.timeout(np.random.uniform(132,182))
time_broken = self.env.now
if NUM_RUNS <= 1:
print('machine {} broke at {:.2f} ({} spares available)'.format(
machine, time_broken, self.spares.level))
self.env.process(self.repair_machine())
yield self.spares.get(1)
time_replaced = self.env.now
if NUM_RUNS <= 1:
print('machine {} replaced at {:.2f}'.format(machine, time_replaced))
self.cost += 20*(time_replaced-time_broken)
def repair_machine(self):
with self.repairers.request() as request:
yield request
yield self.env.timeout(np.random.uniform(4,10))
yield self.spares.put(1)
if NUM_RUNS <= 1:
print('repair complete at {:.2f} ({} spares available)'.format(
self.env.now, self.spares.level))
obs_time = []
obs_cost = []
obs_spares = []
def observe(env, factory):
while True:
obs_time.append(env.now)
obs_cost.append(factory.cost)
obs_spares.append(factory.spares.level)
yield env.timeout(1.0)
COST = []
for i in range(NUM_RUNS):
np.random.seed(i)
env = simpy.Environment()
factory = Factory(env, NUM_REPAIRERS, NUM_SPARES)
env.process(factory.run())
env.process(observe(env, factory))
env.run(until=SIM_DURATION)
COST.append(obs_cost[-1])
if NUM_RUNS <= 1:
print('Total cost: {:.2f}'.format(factory.cost))
plt.figure()
plt.step(obs_time, obs_spares, where='post')
plt.xlabel('Time (hour)')
plt.ylabel('Number Spares Available')
plt.figure()
plt.step(obs_time, obs_cost, where='post')
plt.xlabel('Time (hour)')
plt.ylabel('Total Cost')
print('Factory costs for N={:} runs with R={:} repairers and S={:} spares:'.format(
NUM_RUNS, NUM_REPAIRERS, NUM_SPARES))
print('\n'.join('{:.2f}'.format(i) for i in COST))
import csv
with open('factory.csv', 'w') as output:
writer = csv.writer(output)
for sample in COST:
writer.writerow([sample])
| true
| true
|
f702082ae713bdb7800692e65c05918519ebe0f6
| 809
|
py
|
Python
|
flower/views/workers.py
|
KonstantinKlepikov/flower
|
89e71c8c00dcb51bc584e908fc6b2ba97706e89a
|
[
"BSD-3-Clause"
] | 1
|
2020-07-04T23:10:39.000Z
|
2020-07-04T23:10:39.000Z
|
flower/views/workers.py
|
KonstantinKlepikov/flower
|
89e71c8c00dcb51bc584e908fc6b2ba97706e89a
|
[
"BSD-3-Clause"
] | null | null | null |
flower/views/workers.py
|
KonstantinKlepikov/flower
|
89e71c8c00dcb51bc584e908fc6b2ba97706e89a
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
from tornado import web
from tornado import gen
from ..views import BaseHandler
from ..api.workers import ListWorkers
logger = logging.getLogger(__name__)
class WorkerView(BaseHandler):
@web.authenticated
@gen.coroutine
def get(self, name):
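        # Refresh this worker's cached info; if the update fails, log it and fall back to the existing cache.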
try:
yield ListWorkers.update_workers(app=self.application, workername=name)
except Exception as e:
logger.error(e)
worker = ListWorkers.worker_cache.get(name)
if worker is None:
raise web.HTTPError(404, "Unknown worker '%s'" % name)
if 'stats' not in worker:
raise web.HTTPError(
404,
"Unable to get stats for '%s' worker" % name
)
self.render("worker.html", worker=dict(worker, name=name))
| 24.515152
| 83
| 0.621755
|
import logging
from tornado import web
from tornado import gen
from ..views import BaseHandler
from ..api.workers import ListWorkers
logger = logging.getLogger(__name__)
class WorkerView(BaseHandler):
@web.authenticated
@gen.coroutine
def get(self, name):
try:
yield ListWorkers.update_workers(app=self.application, workername=name)
except Exception as e:
logger.error(e)
worker = ListWorkers.worker_cache.get(name)
if worker is None:
raise web.HTTPError(404, "Unknown worker '%s'" % name)
if 'stats' not in worker:
raise web.HTTPError(
404,
"Unable to get stats for '%s' worker" % name
)
self.render("worker.html", worker=dict(worker, name=name))
| true
| true
|